diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..7da2093
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,45 @@
+# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
+# Use a Python image with uv pre-installed
+FROM ghcr.io/astral-sh/uv:python3.10-bullseye-slim AS uv
+
+# Install the project into /app
+WORKDIR /app
+
+# Enable bytecode compilation
+ENV UV_COMPILE_BYTECODE=1
+
+# Copy from the cache instead of linking since it's a mounted volume
+ENV UV_LINK_MODE=copy
+
+# Install the project's dependencies using the lockfile and settings
+RUN --mount=type=cache,target=/root/.cache/uv \
+    --mount=type=bind,source=uv.lock,target=uv.lock \
+    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+    uv sync --frozen --no-install-project --no-dev --no-editable
+
+# Then, add the rest of the project source code and install it
+# Installing separately from its dependencies allows optimal layer caching
+COPY . /app
+RUN --mount=type=cache,target=/root/.cache/uv \
+    uv sync --frozen --no-dev --no-editable
+
+FROM python:3.10-slim-bullseye
+
+WORKDIR /app
+
+# Create a non-root user before copying so COPY --chown can resolve it
+RUN groupadd --system app && useradd --system --gid app --home /app app
+
+# Only the virtual environment is needed at runtime; uv itself is not
+COPY --from=uv --chown=app:app /app/.venv /app/.venv
+
+# Place executables in the environment at the front of the path
+ENV PATH="/app/.venv/bin:$PATH"
+
+USER app
+
+# Run the console script installed into the venv directly: this runtime
+# image does not contain uv, so `uv run` would fail at container start
+ENTRYPOINT ["mcp-server-qdrant"]
+
+# To use the server, pass required arguments like --qdrant-url, --collection-name, etc.
diff --git a/README.md b/README.md index e05f8b7..6eef79d 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # mcp-server-qdrant: A Qdrant MCP server -[![smithery badge](https://smithery.ai/badge/mcp-server-qdrant)](https://smithery.ai/protocol/mcp-server-qdrant) +[![smithery badge](https://smithery.ai/badge/mcp-server-qdrant)](https://smithery.ai/server/mcp-server-qdrant) > The [Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) is an open protocol that enables seamless integration between LLM applications and external data sources and tools. Whether you’re building an AI-powered IDE, enhancing a chat interface, or creating custom AI workflows, MCP provides a standardized way to connect LLMs with the context they need. @@ -41,10 +41,10 @@ uv run mcp-server-qdrant \ ### Installing via Smithery -To install Qdrant MCP Server for Claude Desktop automatically via [Smithery](https://smithery.ai/protocol/mcp-server-qdrant): +To install Qdrant MCP Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/mcp-server-qdrant): ```bash -npx @smithery/cli install mcp-server-qdrant --client claude +npx -y @smithery/cli install mcp-server-qdrant --client claude ``` ## Usage with Claude Desktop diff --git a/smithery.yaml b/smithery.yaml new file mode 100644 index 0000000..e7c0cfe --- /dev/null +++ b/smithery.yaml @@ -0,0 +1,32 @@ +# Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml + +startCommand: + type: stdio + configSchema: + # JSON Schema defining the configuration options for the MCP. + type: object + required: + - collectionName + properties: + qdrantUrl: + type: string + description: The URL of the Qdrant server. Cannot be used together with + qdrantLocalPath. + qdrantLocalPath: + type: string + description: The local path to the Qdrant database. Cannot be used together with + qdrantUrl. + qdrantApiKey: + type: string + description: The API key for the Qdrant server. 
+      collectionName:
+        type: string
+        description: The name of the collection to use or create.
+      fastembedModelName:
+        type: string
+        default: sentence-transformers/all-MiniLM-L6-v2
+        description: The embedding model to use.
+  commandFunction:
+    # A function that produces the CLI command to start the MCP on stdio.
+    # Invokes the venv console script directly (it is on PATH in the image);
+    (config) => { const args = []; if (config.qdrantUrl) { args.push('--qdrant-url', config.qdrantUrl); } if (config.qdrantLocalPath) { args.push('--qdrant-local-path', config.qdrantLocalPath); } if (config.qdrantApiKey) { args.push('--qdrant-api-key', config.qdrantApiKey); } args.push('--collection-name', config.collectionName); args.push('--fastembed-model-name', config.fastembedModelName || 'sentence-transformers/all-MiniLM-L6-v2'); return { command: 'mcp-server-qdrant', args: args }; }