Compare commits
41 Commits
6c22e1bb02
...
v1.0.0-alp
| Author | SHA1 | Date | |
|---|---|---|---|
| ca03d22b97 | |||
| 107db82c52 | |||
| 4b89837b43 | |||
| c8cea249bc | |||
| ae894ff52e | |||
| 7b7eea2f67 | |||
| 0f2544c960 | |||
| d7ce2ad962 | |||
| 5892eb5cda | |||
| 9b55dedec5 | |||
| 75bae256f2 | |||
| a490eab625 | |||
| 1bf18b9ce2 | |||
| c30ca25503 | |||
| 880f85a2d8 | |||
| 49c5043661 | |||
| 9e96d18315 | |||
| 88a6740b42 | |||
| 6e0afa0bfb | |||
| 80d4347378 | |||
| 3eee0bf296 | |||
| 8809095549 | |||
| a2e8d76237 | |||
| 8c25bec5a4 | |||
| 7890d79bce | |||
| 9d73ac73b1 | |||
| c9e51c71d8 | |||
| 3d68de4c54 | |||
| f921412f01 | |||
| 757afb3c41 | |||
| e235e998e4 | |||
| c57e71b92a | |||
| 987b6d087a | |||
| e6f737e2a3 | |||
| 5607946441 | |||
| 3ecd3303ce | |||
| 6060e19b31 | |||
| ee385d82ad | |||
| 7acd602ffd | |||
| 69ec6ef0e2 | |||
| f63115c8b3 |
45
.gitea/workflows/release.yml
Normal file
45
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,45 @@
|
||||
name: Build and Push Docker Image
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
|
||||
env:
|
||||
REGISTRY: git.prettyhefty.com
|
||||
IMAGE_NAME: bill/grist-mcp
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-docker
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
run: |
|
||||
git clone --depth 1 --branch ${GITHUB_REF_NAME} ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git .
|
||||
|
||||
- name: Extract version from tag
|
||||
id: version
|
||||
run: |
|
||||
VERSION=${GITHUB_REF#refs/tags/}
|
||||
echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
|
||||
if [[ "$VERSION" == *-alpha* ]] || [[ "$VERSION" == *-beta* ]] || [[ "$VERSION" == *-rc* ]]; then
|
||||
echo "IS_PRERELEASE=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "IS_PRERELEASE=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Log in to Container Registry
|
||||
run: echo "${{ secrets.REGISTRY_TOKEN }}" | docker login ${{ env.REGISTRY }} -u ${{ gitea.actor }} --password-stdin
|
||||
|
||||
- name: Build and push Docker image
|
||||
run: |
|
||||
docker build -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.version.outputs.VERSION }} .
|
||||
docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.version.outputs.VERSION }}
|
||||
|
||||
if [ "${{ steps.version.outputs.IS_PRERELEASE }}" = "false" ]; then
|
||||
docker tag ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.version.outputs.VERSION }} ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
|
||||
docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
|
||||
fi
|
||||
|
||||
- name: List images
|
||||
run: docker images | grep grist-mcp
|
||||
47
.github/workflows/build.yaml
vendored
47
.github/workflows/build.yaml
vendored
@@ -6,7 +6,8 @@ on:
|
||||
- 'v*.*.*'
|
||||
|
||||
env:
|
||||
IMAGE_NAME: grist-mcp
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -19,53 +20,27 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Determine registry
|
||||
id: registry
|
||||
run: |
|
||||
if [ "${{ vars.GITEA_ACTIONS }}" = "true" ]; then
|
||||
# Gitea: use server URL as registry
|
||||
REGISTRY="${{ github.server_url }}"
|
||||
REGISTRY="${REGISTRY#https://}"
|
||||
REGISTRY="${REGISTRY#http://}"
|
||||
echo "registry=${REGISTRY}" >> $GITHUB_OUTPUT
|
||||
echo "is_gitea=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# GitHub: use GHCR
|
||||
echo "registry=ghcr.io" >> $GITHUB_OUTPUT
|
||||
echo "is_gitea=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
if: steps.registry.outputs.is_gitea == 'false'
|
||||
- name: Log in to Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Log in to Gitea Container Registry
|
||||
if: steps.registry.outputs.is_gitea == 'true'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ steps.registry.outputs.registry }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.REGISTRY_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels)
|
||||
- name: Extract metadata for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ steps.registry.outputs.registry }}/${{ github.repository }}
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=raw,value=latest
|
||||
type=raw,value=latest,enable=${{ !contains(github.ref, '-alpha') && !contains(github.ref, '-beta') && !contains(github.ref, '-rc') }}
|
||||
|
||||
- name: Build and push
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -2,7 +2,7 @@ __pycache__/
|
||||
*.py[cod]
|
||||
.venv/
|
||||
.env
|
||||
config.yaml
|
||||
/config.yaml
|
||||
*.egg-info/
|
||||
dist/
|
||||
.pytest_cache/
|
||||
|
||||
62
CHANGELOG.md
Normal file
62
CHANGELOG.md
Normal file
@@ -0,0 +1,62 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [1.0.0] - 2026-01-01
|
||||
|
||||
Initial release of grist-mcp, an MCP server for AI agents to interact with Grist spreadsheets.
|
||||
|
||||
### Added
|
||||
|
||||
#### Core Features
|
||||
- **MCP Server**: Full Model Context Protocol implementation with SSE transport
|
||||
- **Token-based Authentication**: Secure agent authentication via `GRIST_MCP_TOKEN`
|
||||
- **Granular Permissions**: Per-document access control with `read`, `write`, and `schema` scopes
|
||||
- **Multi-tenant Support**: Configure multiple Grist instances and documents
|
||||
|
||||
#### Discovery Tools
|
||||
- `list_documents`: List accessible documents with their permissions
|
||||
|
||||
#### Read Tools
|
||||
- `list_tables`: List all tables in a document
|
||||
- `describe_table`: Get column metadata (id, type, formula)
|
||||
- `get_records`: Fetch records with optional filter, sort, and limit
|
||||
- `sql_query`: Execute read-only SELECT queries
|
||||
|
||||
#### Write Tools
|
||||
- `add_records`: Insert new records into a table
|
||||
- `update_records`: Modify existing records by ID
|
||||
- `delete_records`: Remove records by ID
|
||||
|
||||
#### Schema Tools
|
||||
- `create_table`: Create new tables with column definitions
|
||||
- `add_column`: Add columns to existing tables
|
||||
- `modify_column`: Change column type or formula
|
||||
- `delete_column`: Remove columns from tables
|
||||
|
||||
#### Infrastructure
|
||||
- **Docker Support**: Multi-stage Dockerfile with non-root user
|
||||
- **Docker Compose**: Ready-to-deploy configuration with environment variables
|
||||
- **Health Endpoint**: `/health` for container orchestration readiness checks
|
||||
- **SSE Transport**: Server-Sent Events for MCP client communication
|
||||
- **Environment Variable Substitution**: `${VAR}` syntax in config files
|
||||
|
||||
#### Testing
|
||||
- **Unit Tests**: Comprehensive coverage with pytest-httpx mocking
|
||||
- **Integration Tests**: Docker-based tests with ephemeral containers
|
||||
- **Rich Test Runner**: Progress display for test execution
|
||||
- **Test Isolation**: Dynamic port discovery for parallel test runs
|
||||
|
||||
#### Developer Experience
|
||||
- **Makefile**: Commands for testing, building, and deployment
|
||||
- **Dev Environment**: Docker Compose setup for local development
|
||||
- **MCP Config Display**: Startup message with client configuration snippet
|
||||
|
||||
### Security
|
||||
- SQL injection prevention with SELECT-only query validation
|
||||
- API key isolation per document
|
||||
- Token validation at startup (no runtime exposure)
|
||||
- Non-root container execution
|
||||
39
CLAUDE.md
39
CLAUDE.md
@@ -17,11 +17,23 @@ grist-mcp is an MCP (Model Context Protocol) server that enables AI agents to in
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Run tests
|
||||
uv run pytest -v
|
||||
# Run unit tests
|
||||
make test-unit
|
||||
# or: uv run pytest tests/unit/ -v
|
||||
|
||||
# Run a specific test file
|
||||
uv run pytest tests/test_auth.py -v
|
||||
# Run integration tests (manages containers automatically)
|
||||
make test-integration
|
||||
# or: ./scripts/run-integration-tests.sh
|
||||
|
||||
# Full pre-deploy pipeline
|
||||
make pre-deploy
|
||||
|
||||
# Development environment
|
||||
make dev-up # Start
|
||||
make dev-down # Stop
|
||||
|
||||
# Build Docker image
|
||||
make build
|
||||
|
||||
# Run the server (requires config and token)
|
||||
CONFIG_PATH=./config.yaml GRIST_MCP_TOKEN=your-token uv run python -m grist_mcp.main
|
||||
@@ -30,7 +42,7 @@ CONFIG_PATH=./config.yaml GRIST_MCP_TOKEN=your-token uv run python -m grist_mcp.
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
src/grist_mcp/
|
||||
src/grist_mcp/ # Source code
|
||||
├── main.py # Entry point, runs stdio server
|
||||
├── server.py # MCP server setup, tool registration, call_tool dispatch
|
||||
├── config.py # YAML config loading with env var substitution
|
||||
@@ -41,6 +53,14 @@ src/grist_mcp/
|
||||
├── read.py # list_tables, describe_table, get_records, sql_query
|
||||
├── write.py # add_records, update_records, delete_records
|
||||
└── schema.py # create_table, add_column, modify_column, delete_column
|
||||
tests/
|
||||
├── unit/ # Unit tests (no containers)
|
||||
└── integration/ # Integration tests (with Docker)
|
||||
deploy/
|
||||
├── dev/ # Development docker-compose
|
||||
├── test/ # Test docker-compose (ephemeral)
|
||||
└── prod/ # Production docker-compose
|
||||
scripts/ # Test automation scripts
|
||||
```
|
||||
|
||||
## Key Patterns
|
||||
@@ -71,11 +91,18 @@ The optional `client` parameter enables dependency injection for testing.
|
||||
|
||||
## Testing
|
||||
|
||||
Tests use pytest-httpx to mock Grist API responses. Each test file has fixtures for common setup:
|
||||
### Unit Tests (`tests/unit/`)
|
||||
Fast tests using pytest-httpx to mock Grist API responses. Run with `make test-unit`.
|
||||
- `test_auth.py`: Uses in-memory Config objects
|
||||
- `test_grist_client.py`: Uses HTTPXMock for API mocking
|
||||
- `test_tools_*.py`: Combine auth fixtures with mocked clients
|
||||
|
||||
### Integration Tests (`tests/integration/`)
|
||||
Tests against real Grist containers. Run with `make test-integration`.
|
||||
- Automatically manages Docker containers via `scripts/run-integration-tests.sh`
|
||||
- Uses environment variables for configuration (no hardcoded URLs)
|
||||
- Containers are ephemeral and cleaned up after tests
|
||||
|
||||
## Configuration
|
||||
|
||||
See `config.yaml.example` for the configuration format. Key points:
|
||||
|
||||
40
Makefile
Normal file
40
Makefile
Normal file
@@ -0,0 +1,40 @@
|
||||
.PHONY: help test test-unit test-integration build dev dev-up dev-down pre-deploy clean
|
||||
|
||||
VERBOSE ?= 0
|
||||
|
||||
# Default target
|
||||
help: ## Show this help
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
|
||||
|
||||
# Testing
|
||||
test: ## Run all tests (unit + integration) with rich progress display
|
||||
@uv run python scripts/test-runner.py $(if $(filter 1,$(VERBOSE)),-v)
|
||||
|
||||
test-unit: ## Run unit tests only
|
||||
@uv run python scripts/test-runner.py --unit-only $(if $(filter 1,$(VERBOSE)),-v)
|
||||
|
||||
test-integration: ## Run integration tests only (starts/stops containers)
|
||||
@uv run python scripts/test-runner.py --integration-only $(if $(filter 1,$(VERBOSE)),-v)
|
||||
|
||||
# Docker
|
||||
build: ## Build Docker image
|
||||
docker build -t grist-mcp:latest .
|
||||
|
||||
dev: ## Start development environment (attached, streams logs)
|
||||
cd deploy/dev && docker compose up --build
|
||||
|
||||
dev-up: ## Start development environment (detached)
|
||||
cd deploy/dev && docker compose up -d --build
|
||||
|
||||
dev-down: ## Stop development environment
|
||||
cd deploy/dev && docker compose down
|
||||
|
||||
# Pre-deployment
|
||||
pre-deploy: test ## Full pre-deployment pipeline
|
||||
@echo "Pre-deployment checks passed!"
|
||||
|
||||
# Cleanup
|
||||
clean: ## Remove test artifacts and containers
|
||||
cd deploy/test && docker compose down -v --rmi local 2>/dev/null || true
|
||||
find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||
find . -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
|
||||
268
README.md
268
README.md
@@ -15,50 +15,33 @@ grist-mcp is a [Model Context Protocol (MCP)](https://modelcontextprotocol.io/)
|
||||
- **Security**: Token-based authentication with per-document permission scopes (read, write, schema)
|
||||
- **Multi-tenant**: Support multiple Grist instances and documents
|
||||
|
||||
## Requirements
|
||||
## Quick Start (Docker)
|
||||
|
||||
- Python 3.14+
|
||||
### Prerequisites
|
||||
|
||||
- Docker and Docker Compose
|
||||
- Access to one or more Grist documents with API keys
|
||||
|
||||
## Installation
|
||||
### 1. Create configuration directory
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/your-org/grist-mcp.git
|
||||
cd grist-mcp
|
||||
|
||||
# Install with uv
|
||||
uv sync --dev
|
||||
mkdir grist-mcp && cd grist-mcp
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Create a `config.yaml` file based on the example:
|
||||
### 2. Download configuration files
|
||||
|
||||
```bash
|
||||
# Download docker-compose.yml
|
||||
curl -O https://raw.githubusercontent.com/Xe138/grist-mcp-server/master/deploy/prod/docker-compose.yml
|
||||
|
||||
# Download example config
|
||||
curl -O https://raw.githubusercontent.com/Xe138/grist-mcp-server/master/config.yaml.example
|
||||
cp config.yaml.example config.yaml
|
||||
```
|
||||
|
||||
### Configuration Structure
|
||||
### 3. Generate tokens
|
||||
|
||||
```yaml
|
||||
# Document definitions
|
||||
documents:
|
||||
my-document:
|
||||
url: https://docs.getgrist.com # Grist instance URL
|
||||
doc_id: abcd1234 # Document ID from URL
|
||||
api_key: ${GRIST_API_KEY} # API key (supports env vars)
|
||||
|
||||
# Agent tokens with access scopes
|
||||
tokens:
|
||||
- token: your-secret-token # Unique token for this agent
|
||||
name: my-agent # Human-readable name
|
||||
scope:
|
||||
- document: my-document
|
||||
permissions: [read, write] # Allowed: read, write, schema
|
||||
```
|
||||
|
||||
### Generating Tokens
|
||||
Generate a secure token for your agent:
|
||||
|
||||
```bash
|
||||
python -c "import secrets; print(secrets.token_urlsafe(32))"
|
||||
@@ -66,34 +49,53 @@ python -c "import secrets; print(secrets.token_urlsafe(32))"
|
||||
openssl rand -base64 32
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
### 4. Configure config.yaml
|
||||
|
||||
- `CONFIG_PATH`: Path to config file (default: `/app/config.yaml`)
|
||||
- `GRIST_MCP_TOKEN`: Agent token for authentication
|
||||
- Config file supports `${VAR}` syntax for API keys
|
||||
Edit `config.yaml` to define your Grist documents and agent tokens:
|
||||
|
||||
## Usage
|
||||
```yaml
|
||||
# Document definitions
|
||||
documents:
|
||||
my-document: # Friendly name (used in token scopes)
|
||||
url: https://docs.getgrist.com # Your Grist instance URL
|
||||
doc_id: abcd1234efgh5678 # Document ID from the URL
|
||||
api_key: your-grist-api-key # Grist API key (or use ${ENV_VAR} syntax)
|
||||
|
||||
### Running the Server
|
||||
|
||||
The server uses SSE (Server-Sent Events) transport over HTTP:
|
||||
|
||||
```bash
|
||||
# Set your agent token
|
||||
export GRIST_MCP_TOKEN="your-agent-token"
|
||||
|
||||
# Run with custom config path (defaults to port 3000)
|
||||
CONFIG_PATH=./config.yaml uv run python -m grist_mcp.main
|
||||
|
||||
# Or specify a custom port
|
||||
PORT=8080 CONFIG_PATH=./config.yaml uv run python -m grist_mcp.main
|
||||
# Agent tokens with access scopes
|
||||
tokens:
|
||||
- token: your-generated-token-here # The token you generated in step 3
|
||||
name: my-agent # Human-readable name
|
||||
scope:
|
||||
- document: my-document # Must match a document name above
|
||||
permissions: [read, write] # Allowed: read, write, schema
|
||||
```
|
||||
|
||||
The server exposes two endpoints:
|
||||
- `http://localhost:3000/sse` - SSE connection endpoint
|
||||
- `http://localhost:3000/messages` - Message posting endpoint
|
||||
**Finding your Grist document ID**: Open your Grist document in a browser. The URL will look like:
|
||||
`https://docs.getgrist.com/abcd1234efgh5678/My-Document` - the document ID is `abcd1234efgh5678`.
|
||||
|
||||
### MCP Client Configuration
|
||||
**Getting a Grist API key**: In Grist, go to Profile Settings → API → Create API Key.
|
||||
|
||||
### 5. Create .env file
|
||||
|
||||
Create a `.env` file with your agent token:
|
||||
|
||||
```bash
|
||||
# .env
|
||||
GRIST_MCP_TOKEN=your-generated-token-here
|
||||
PORT=3000
|
||||
```
|
||||
|
||||
The `GRIST_MCP_TOKEN` must match one of the tokens defined in `config.yaml`.
|
||||
|
||||
### 6. Start the server
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
The server will be available at `http://localhost:3000`.
|
||||
|
||||
### 7. Configure your MCP client
|
||||
|
||||
Add to your MCP client configuration (e.g., Claude Desktop):
|
||||
|
||||
@@ -101,24 +103,13 @@ Add to your MCP client configuration (e.g., Claude Desktop):
|
||||
{
|
||||
"mcpServers": {
|
||||
"grist": {
|
||||
"type": "sse",
|
||||
"url": "http://localhost:3000/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For remote deployments, use the server's public URL:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"grist": {
|
||||
"url": "https://your-server.example.com/sse"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Available Tools
|
||||
|
||||
### Discovery
|
||||
@@ -149,6 +140,54 @@ For remote deployments, use the server's public URL:
|
||||
| `modify_column` | Change a column's type or formula |
|
||||
| `delete_column` | Remove a column from a table |
|
||||
|
||||
## Configuration Reference
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `PORT` | Server port | `3000` |
|
||||
| `GRIST_MCP_TOKEN` | Agent authentication token (required) | - |
|
||||
| `CONFIG_PATH` | Path to config file inside container | `/app/config.yaml` |
|
||||
|
||||
### config.yaml Structure
|
||||
|
||||
```yaml
|
||||
# Document definitions (each is self-contained)
|
||||
documents:
|
||||
budget-2024:
|
||||
url: https://work.getgrist.com
|
||||
doc_id: mK7xB2pQ9mN4v
|
||||
api_key: ${GRIST_WORK_API_KEY} # Supports environment variable substitution
|
||||
|
||||
personal-tracker:
|
||||
url: https://docs.getgrist.com
|
||||
doc_id: pN0zE5sT2qP7x
|
||||
api_key: ${GRIST_PERSONAL_API_KEY}
|
||||
|
||||
# Agent tokens with access scopes
|
||||
tokens:
|
||||
- token: your-secure-token-here
|
||||
name: finance-agent
|
||||
scope:
|
||||
- document: budget-2024
|
||||
permissions: [read, write] # Can read and write
|
||||
|
||||
- token: another-token-here
|
||||
name: readonly-agent
|
||||
scope:
|
||||
- document: budget-2024
|
||||
permissions: [read] # Read only
|
||||
- document: personal-tracker
|
||||
permissions: [read, write, schema] # Full access
|
||||
```
|
||||
|
||||
### Permission Levels
|
||||
|
||||
- `read`: Query tables and records, run SQL queries
|
||||
- `write`: Add, update, delete records
|
||||
- `schema`: Create tables, add/modify/delete columns
|
||||
|
||||
## Security
|
||||
|
||||
- **Token-based auth**: Each agent has a unique token with specific document access
|
||||
@@ -159,10 +198,30 @@ For remote deployments, use the server's public URL:
|
||||
|
||||
## Development
|
||||
|
||||
### Running Tests
|
||||
### Requirements
|
||||
|
||||
- Python 3.14+
|
||||
- uv package manager
|
||||
|
||||
### Local Setup
|
||||
|
||||
```bash
|
||||
uv run pytest -v
|
||||
# Clone the repository
|
||||
git clone https://github.com/Xe138/grist-mcp-server.git
|
||||
cd grist-mcp-server
|
||||
|
||||
# Install dependencies
|
||||
uv sync --dev
|
||||
|
||||
# Run tests
|
||||
make test-unit
|
||||
```
|
||||
|
||||
### Running Locally
|
||||
|
||||
```bash
|
||||
export GRIST_MCP_TOKEN="your-agent-token"
|
||||
CONFIG_PATH=./config.yaml uv run python -m grist_mcp.main
|
||||
```
|
||||
|
||||
### Project Structure
|
||||
@@ -170,7 +229,6 @@ uv run pytest -v
|
||||
```
|
||||
grist-mcp/
|
||||
├── src/grist_mcp/
|
||||
│ ├── __init__.py
|
||||
│ ├── main.py # Entry point
|
||||
│ ├── server.py # MCP server setup and tool registration
|
||||
│ ├── config.py # Configuration loading
|
||||
@@ -182,73 +240,13 @@ grist-mcp/
|
||||
│ ├── write.py # Write operations
|
||||
│ └── schema.py # Schema operations
|
||||
├── tests/
|
||||
├── config.yaml.example
|
||||
└── pyproject.toml
|
||||
```
|
||||
|
||||
## Docker Deployment
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Docker and Docker Compose
|
||||
|
||||
### Quick Start
|
||||
|
||||
```bash
|
||||
# 1. Copy example files
|
||||
cp .env.example .env
|
||||
cp config.yaml.example config.yaml
|
||||
|
||||
# 2. Edit .env with your tokens and API keys
|
||||
# - Set GRIST_MCP_TOKEN to a secure agent token
|
||||
# - Set your Grist API keys
|
||||
|
||||
# 3. Edit config.yaml with your document settings
|
||||
# - Configure your Grist documents
|
||||
# - Set up token scopes and permissions
|
||||
|
||||
# 4. Start the server
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `PORT` | Server port | `3000` |
|
||||
| `GRIST_MCP_TOKEN` | Agent authentication token (required) | - |
|
||||
| `CONFIG_PATH` | Path to config file inside container | `/app/config.yaml` |
|
||||
| `GRIST_*_API_KEY` | Grist API keys referenced in config.yaml | - |
|
||||
|
||||
### Using Prebuilt Images
|
||||
|
||||
To use a prebuilt image from a container registry:
|
||||
|
||||
```yaml
|
||||
# docker-compose.yaml
|
||||
services:
|
||||
grist-mcp:
|
||||
image: your-registry/grist-mcp:latest
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
env_file:
|
||||
- .env
|
||||
restart: unless-stopped
|
||||
```
|
||||
|
||||
### Building Locally
|
||||
|
||||
```bash
|
||||
# Build the image
|
||||
docker build -t grist-mcp .
|
||||
|
||||
# Run directly
|
||||
docker run -p 3000:3000 \
|
||||
-v $(pwd)/config.yaml:/app/config.yaml:ro \
|
||||
--env-file .env \
|
||||
grist-mcp
|
||||
│ ├── unit/ # Unit tests
|
||||
│ └── integration/ # Integration tests
|
||||
├── deploy/
|
||||
│ ├── dev/ # Development docker-compose
|
||||
│ ├── test/ # Test docker-compose
|
||||
│ └── prod/ # Production docker-compose
|
||||
└── config.yaml.example
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
1
deploy/dev/.env.example
Normal file
1
deploy/dev/.env.example
Normal file
@@ -0,0 +1 @@
|
||||
PORT=3010
|
||||
30
deploy/dev/config.yaml
Normal file
30
deploy/dev/config.yaml
Normal file
@@ -0,0 +1,30 @@
|
||||
# Development configuration for grist-mcp
|
||||
#
|
||||
# Token Generation:
|
||||
# python -c "import secrets; print(secrets.token_urlsafe(32))"
|
||||
# openssl rand -base64 32
|
||||
|
||||
# Document definitions
|
||||
documents:
|
||||
mcp-test-document:
|
||||
url: https://grist.bballou.com/
|
||||
doc_id: mVQvKTAyZC1FWZQgfuVeHC
|
||||
api_key: 83a03433a61ee9d2f2bf055d7f4518bedef0421a
|
||||
|
||||
# Agent tokens with access scopes
|
||||
tokens:
|
||||
- token: test-token-all-permissions
|
||||
name: dev-agent
|
||||
scope:
|
||||
- document: mcp-test-document
|
||||
permissions: [read, write, schema]
|
||||
- token: test-token-read-permissions
|
||||
name: dev-agent-read
|
||||
scope:
|
||||
- document: mcp-test-document
|
||||
permissions: [read]
|
||||
- token: test-token-no-schema-permissions
|
||||
name: dev-agent-no-schema
|
||||
scope:
|
||||
- document: mcp-test-document
|
||||
permissions: [read, write]
|
||||
20
deploy/dev/docker-compose.yml
Normal file
20
deploy/dev/docker-compose.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Development environment - hot reload, persistent data
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ../../src:/app/src:ro
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- EXTERNAL_PORT=${PORT:-3000}
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
1
deploy/prod/.env.example
Normal file
1
deploy/prod/.env.example
Normal file
@@ -0,0 +1 @@
|
||||
PORT=3000
|
||||
18
deploy/prod/docker-compose.yml
Normal file
18
deploy/prod/docker-compose.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
# Production environment
|
||||
services:
|
||||
grist-mcp:
|
||||
image: ghcr.io/xe138/grist-mcp-server:latest
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- EXTERNAL_PORT=${PORT:-3000}
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
46
deploy/test/docker-compose.yml
Normal file
46
deploy/test/docker-compose.yml
Normal file
@@ -0,0 +1,46 @@
|
||||
# Test environment - ephemeral, branch-isolated
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
container_name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "3000" # Dynamic port
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
volumes:
|
||||
- ../../tests/integration/config.test.yaml:/app/config.yaml:ro
|
||||
depends_on:
|
||||
mock-grist:
|
||||
condition: service_healthy
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
mock-grist:
|
||||
build:
|
||||
context: ../../tests/integration/mock_grist
|
||||
container_name: mock-grist-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "8484" # Dynamic port
|
||||
environment:
|
||||
- PORT=8484
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8484/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
networks:
|
||||
test-net:
|
||||
name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
driver: bridge
|
||||
@@ -1,10 +0,0 @@
|
||||
services:
|
||||
grist-mcp:
|
||||
build: .
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
env_file:
|
||||
- .env
|
||||
restart: unless-stopped
|
||||
587
docs/plans/2025-12-30-docker-service-skill-adaptation.md
Normal file
587
docs/plans/2025-12-30-docker-service-skill-adaptation.md
Normal file
@@ -0,0 +1,587 @@
|
||||
# Docker Service Architecture Adaptation Plan
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** Adapt grist-mcp to follow the docker-service-architecture skill guidelines for better test isolation, environment separation, and CI/CD readiness.
|
||||
|
||||
**Architecture:** Single-service project pattern with 2-stage testing (unit → integration), environment-specific deploy configs (dev/test/prod), and branch-isolated test infrastructure.
|
||||
|
||||
**Tech Stack:** Docker Compose, Make, Python/pytest, bash scripts
|
||||
|
||||
---
|
||||
|
||||
## Current State Analysis
|
||||
|
||||
**What we have:**
|
||||
- Single service (grist-mcp) with mock server for testing
|
||||
- 2-stage testing: unit tests (41) + integration tests (2)
|
||||
- docker-compose.test.yaml at project root
|
||||
- docker-compose.yaml for production at root
|
||||
- Basic Makefile with pre-deploy target
|
||||
|
||||
**Gaps vs. Skill Guidelines:**
|
||||
|
||||
| Area | Current | Skill Guideline |
|
||||
|------|---------|-----------------|
|
||||
| Directory structure | Flat docker-compose files at root | `deploy/{dev,test,prod}/` directories |
|
||||
| Test organization | `tests/*.py` + `tests/integration/` | `tests/unit/` + `tests/integration/` |
|
||||
| Port allocation | Fixed (3000, 8484) | Dynamic with discovery |
|
||||
| Branch isolation | None | TEST_INSTANCE_ID from git branch |
|
||||
| Container naming | Default | Instance-based (`-${TEST_INSTANCE_ID}`) |
|
||||
| Test storage | Default volumes | tmpfs for ephemeral |
|
||||
| depends_on | `service_started` | `service_healthy` |
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Restructure Tests Directory
|
||||
|
||||
**Files:**
|
||||
- Move: `tests/test_*.py` → `tests/unit/test_*.py`
|
||||
- Keep: `tests/integration/` as-is
|
||||
- Create: `tests/unit/__init__.py`
|
||||
|
||||
**Step 1: Create unit test directory and move files**
|
||||
|
||||
```bash
|
||||
mkdir -p tests/unit
|
||||
mv tests/test_*.py tests/unit/
|
||||
touch tests/unit/__init__.py
|
||||
```
|
||||
|
||||
**Step 2: Update pyproject.toml testpaths**
|
||||
|
||||
```toml
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests/unit", "tests/integration"]
|
||||
```
|
||||
|
||||
**Step 3: Update Makefile test target**
|
||||
|
||||
```makefile
|
||||
test: ## Run unit tests
|
||||
uv run pytest tests/unit/ -v
|
||||
```
|
||||
|
||||
**Step 4: Verify tests still pass**
|
||||
|
||||
```bash
|
||||
uv run pytest tests/unit/ -v
|
||||
uv run pytest tests/integration/ -v
|
||||
```
|
||||
|
||||
**Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/ pyproject.toml Makefile
|
||||
git commit -m "refactor: organize tests into unit/ and integration/ directories"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: Create Deploy Directory Structure
|
||||
|
||||
**Files:**
|
||||
- Create: `deploy/dev/docker-compose.yml`
|
||||
- Create: `deploy/dev/.env.example`
|
||||
- Create: `deploy/test/docker-compose.yml`
|
||||
- Create: `deploy/prod/docker-compose.yml`
|
||||
- Create: `deploy/prod/.env.example`
|
||||
- Delete: `docker-compose.yaml`, `docker-compose.test.yaml` (after migration)
|
||||
|
||||
**Step 1: Create deploy directory structure**
|
||||
|
||||
```bash
|
||||
mkdir -p deploy/{dev,test,prod}
|
||||
```
|
||||
|
||||
**Step 2: Create deploy/dev/docker-compose.yml**
|
||||
|
||||
```yaml
|
||||
# Development environment - hot reload, persistent data
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ../../src:/app/src:ro
|
||||
- ../../config.yaml:/app/config.yaml:ro
|
||||
env_file:
|
||||
- .env
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
```
|
||||
|
||||
**Step 3: Create deploy/dev/.env.example**
|
||||
|
||||
```bash
|
||||
PORT=3000
|
||||
GRIST_MCP_TOKEN=your-token-here
|
||||
CONFIG_PATH=/app/config.yaml
|
||||
```
|
||||
|
||||
**Step 4: Create deploy/test/docker-compose.yml**
|
||||
|
||||
```yaml
|
||||
# Test environment - ephemeral, branch-isolated
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
container_name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "3000" # Dynamic port
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- GRIST_MCP_TOKEN=test-token
|
||||
- PORT=3000
|
||||
volumes:
|
||||
- ../../tests/integration/config.test.yaml:/app/config.yaml:ro
|
||||
depends_on:
|
||||
mock-grist:
|
||||
condition: service_healthy
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
mock-grist:
|
||||
build:
|
||||
context: ../../tests/integration/mock_grist
|
||||
container_name: mock-grist-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "8484" # Dynamic port
|
||||
environment:
|
||||
- PORT=8484
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8484/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
networks:
|
||||
test-net:
|
||||
name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
driver: bridge
|
||||
```
|
||||
|
||||
**Step 5: Create deploy/prod/docker-compose.yml**
|
||||
|
||||
```yaml
|
||||
# Production environment - resource limits, logging, restart policy
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
env_file:
|
||||
- .env
|
||||
restart: unless-stopped
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 512M
|
||||
cpus: "1"
|
||||
reservations:
|
||||
memory: 128M
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "50m"
|
||||
max-file: "5"
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
```
|
||||
|
||||
**Step 6: Create deploy/prod/.env.example**
|
||||
|
||||
```bash
|
||||
PORT=3000
|
||||
GRIST_MCP_TOKEN=your-production-token
|
||||
CONFIG_PATH=/app/config.yaml
|
||||
```
|
||||
|
||||
**Step 7: Verify test compose works**
|
||||
|
||||
```bash
|
||||
cd deploy/test
|
||||
TEST_INSTANCE_ID=manual docker compose up -d --build
|
||||
docker compose ps
|
||||
docker compose down -v
|
||||
```
|
||||
|
||||
**Step 8: Remove old compose files and commit**
|
||||
|
||||
```bash
|
||||
rm docker-compose.yaml docker-compose.test.yaml
|
||||
git add deploy/
|
||||
git rm docker-compose.yaml docker-compose.test.yaml
|
||||
git commit -m "refactor: move docker-compose files to deploy/ directory structure"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 3: Add Test Isolation Scripts
|
||||
|
||||
**Files:**
|
||||
- Create: `scripts/get-test-instance-id.sh`
|
||||
- Create: `scripts/run-integration-tests.sh`
|
||||
|
||||
**Step 1: Create scripts directory**
|
||||
|
||||
```bash
|
||||
mkdir -p scripts
|
||||
```
|
||||
|
||||
**Step 2: Create get-test-instance-id.sh**
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# scripts/get-test-instance-id.sh
|
||||
# Generate a unique instance ID from git branch for parallel test isolation
|
||||
|
||||
BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "unknown")
|
||||
# Sanitize: replace non-alphanumeric with dash, limit length
|
||||
echo "$BRANCH" | sed 's/[^a-zA-Z0-9]/-/g' | cut -c1-20
|
||||
```
|
||||
|
||||
**Step 3: Create run-integration-tests.sh**
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# scripts/run-integration-tests.sh
|
||||
# Run integration tests with branch isolation and dynamic port discovery
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
|
||||
# Get branch-based instance ID
|
||||
TEST_INSTANCE_ID=$("$SCRIPT_DIR/get-test-instance-id.sh")
|
||||
export TEST_INSTANCE_ID
|
||||
|
||||
echo "Test instance ID: $TEST_INSTANCE_ID"
|
||||
|
||||
# Start containers
|
||||
cd "$PROJECT_ROOT/deploy/test"
|
||||
docker compose up -d --build --wait
|
||||
|
||||
# Discover dynamic ports
|
||||
GRIST_MCP_PORT=$(docker compose port grist-mcp 3000 | cut -d: -f2)
|
||||
MOCK_GRIST_PORT=$(docker compose port mock-grist 8484 | cut -d: -f2)
|
||||
|
||||
echo "grist-mcp available at: http://localhost:$GRIST_MCP_PORT"
|
||||
echo "mock-grist available at: http://localhost:$MOCK_GRIST_PORT"
|
||||
|
||||
# Export for tests
|
||||
export GRIST_MCP_URL="http://localhost:$GRIST_MCP_PORT"
|
||||
export MOCK_GRIST_URL="http://localhost:$MOCK_GRIST_PORT"
|
||||
|
||||
# Run tests
|
||||
cd "$PROJECT_ROOT"
|
||||
TEST_EXIT=0
|
||||
uv run pytest tests/integration/ -v || TEST_EXIT=$?
|
||||
|
||||
# Cleanup
|
||||
cd "$PROJECT_ROOT/deploy/test"
|
||||
docker compose down -v
|
||||
|
||||
exit $TEST_EXIT
|
||||
```
|
||||
|
||||
**Step 4: Make scripts executable**
|
||||
|
||||
```bash
|
||||
chmod +x scripts/get-test-instance-id.sh
|
||||
chmod +x scripts/run-integration-tests.sh
|
||||
```
|
||||
|
||||
**Step 5: Verify scripts work**
|
||||
|
||||
```bash
|
||||
./scripts/get-test-instance-id.sh
|
||||
./scripts/run-integration-tests.sh
|
||||
```
|
||||
|
||||
**Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add scripts/
|
||||
git commit -m "feat: add test isolation scripts with dynamic port discovery"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: Update Integration Tests for Dynamic Ports
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/integration/conftest.py`
|
||||
|
||||
**Step 1: Update conftest.py to use environment variables**
|
||||
|
||||
```python
|
||||
"""Fixtures for integration tests."""
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
|
||||
# Use environment variables for dynamic port discovery
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
MAX_WAIT_SECONDS = 30
|
||||
|
||||
|
||||
def wait_for_service(url: str, timeout: int = MAX_WAIT_SECONDS) -> bool:
|
||||
"""Wait for a service to become healthy."""
|
||||
start = time.time()
|
||||
while time.time() - start < timeout:
|
||||
try:
|
||||
response = httpx.get(f"{url}/health", timeout=2.0)
|
||||
if response.status_code == 200:
|
||||
return True
|
||||
except httpx.RequestError:
|
||||
pass
|
||||
time.sleep(0.5)
|
||||
return False
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def services_ready():
|
||||
"""Ensure both services are healthy before running tests."""
|
||||
if not wait_for_service(MOCK_GRIST_URL):
|
||||
pytest.fail(f"Mock Grist server not ready at {MOCK_GRIST_URL}")
|
||||
if not wait_for_service(GRIST_MCP_URL):
|
||||
pytest.fail(f"grist-mcp server not ready at {GRIST_MCP_URL}")
|
||||
return True
|
||||
```
|
||||
|
||||
**Step 2: Update test files to use environment URLs**
|
||||
|
||||
In `tests/integration/test_mcp_protocol.py` and `tests/integration/test_tools_integration.py`:
|
||||
|
||||
```python
|
||||
import os
|
||||
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
```
|
||||
|
||||
**Step 3: Run tests to verify**
|
||||
|
||||
```bash
|
||||
./scripts/run-integration-tests.sh
|
||||
```
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/integration/
|
||||
git commit -m "feat: support dynamic ports via environment variables in tests"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 5: Update Makefile
|
||||
|
||||
**Files:**
|
||||
- Modify: `Makefile`
|
||||
|
||||
**Step 1: Rewrite Makefile with new structure**
|
||||
|
||||
```makefile
|
||||
.PHONY: help test test-unit test-integration build dev-up dev-down integration pre-deploy clean
|
||||
|
||||
VERBOSE ?= 0
|
||||
PYTEST_ARGS := $(if $(filter 1,$(VERBOSE)),-v,-q)
|
||||
|
||||
# Default target
|
||||
help: ## Show this help
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
|
||||
|
||||
# Testing
|
||||
test: test-unit ## Run all tests (unit only by default)
|
||||
|
||||
test-unit: ## Run unit tests
|
||||
uv run pytest tests/unit/ $(PYTEST_ARGS)
|
||||
|
||||
test-integration: ## Run integration tests (starts/stops containers)
|
||||
./scripts/run-integration-tests.sh
|
||||
|
||||
# Docker
|
||||
build: ## Build Docker image
|
||||
docker build -t grist-mcp:latest .
|
||||
|
||||
dev-up: ## Start development environment
|
||||
cd deploy/dev && docker compose up -d --build
|
||||
|
||||
dev-down: ## Stop development environment
|
||||
cd deploy/dev && docker compose down
|
||||
|
||||
# Pre-deployment
|
||||
pre-deploy: test-unit test-integration ## Full pre-deployment pipeline
|
||||
@echo "Pre-deployment checks passed!"
|
||||
|
||||
# Cleanup
|
||||
clean: ## Remove test artifacts and containers
|
||||
cd deploy/test && docker compose down -v --rmi local 2>/dev/null || true
|
||||
find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||
find . -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
|
||||
```
|
||||
|
||||
**Step 2: Verify Makefile targets**
|
||||
|
||||
```bash
|
||||
make help
|
||||
make test-unit
|
||||
make test-integration
|
||||
make pre-deploy
|
||||
```
|
||||
|
||||
**Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add Makefile
|
||||
git commit -m "refactor: update Makefile for new deploy/ structure"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 6: Update CLAUDE.md
|
||||
|
||||
**Files:**
|
||||
- Modify: `CLAUDE.md`
|
||||
|
||||
**Step 1: Update commands section**
|
||||
|
||||
Add to CLAUDE.md:
|
||||
|
||||
```markdown
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Run unit tests
|
||||
make test-unit
|
||||
# or: uv run pytest tests/unit/ -v
|
||||
|
||||
# Run integration tests (manages containers automatically)
|
||||
make test-integration
|
||||
# or: ./scripts/run-integration-tests.sh
|
||||
|
||||
# Full pre-deploy pipeline
|
||||
make pre-deploy
|
||||
|
||||
# Development environment
|
||||
make dev-up # Start
|
||||
make dev-down # Stop
|
||||
|
||||
# Build Docker image
|
||||
make build
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
src/grist_mcp/ # Source code
|
||||
tests/
|
||||
├── unit/ # Unit tests (no containers)
|
||||
└── integration/ # Integration tests (with Docker)
|
||||
deploy/
|
||||
├── dev/ # Development docker-compose
|
||||
├── test/ # Test docker-compose (ephemeral)
|
||||
└── prod/ # Production docker-compose
|
||||
scripts/ # Test automation scripts
|
||||
```
|
||||
```
|
||||
|
||||
**Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add CLAUDE.md
|
||||
git commit -m "docs: update CLAUDE.md with new project structure"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 7: Final Verification
|
||||
|
||||
**Step 1: Run full pre-deploy pipeline**
|
||||
|
||||
```bash
|
||||
make pre-deploy
|
||||
```
|
||||
|
||||
Expected output:
|
||||
- Unit tests pass (41 tests)
|
||||
- Integration tests pass with branch isolation
|
||||
- Containers cleaned up
|
||||
|
||||
**Step 2: Test parallel execution (optional)**
|
||||
|
||||
```bash
|
||||
# In terminal 1
|
||||
git checkout -b test-branch-1
|
||||
make test-integration &
|
||||
|
||||
# In terminal 2
|
||||
git checkout -b test-branch-2
|
||||
make test-integration &
|
||||
```
|
||||
|
||||
Both should run without port conflicts.
|
||||
|
||||
**Step 3: Commit final verification**
|
||||
|
||||
```bash
|
||||
git add .
|
||||
git commit -m "chore: complete docker-service-architecture adaptation"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Summary of Changes
|
||||
|
||||
| Before | After |
|
||||
|--------|-------|
|
||||
| `tests/test_*.py` | `tests/unit/test_*.py` |
|
||||
| `docker-compose.yaml` | `deploy/dev/docker-compose.yml` |
|
||||
| `docker-compose.test.yaml` | `deploy/test/docker-compose.yml` |
|
||||
| (none) | `deploy/prod/docker-compose.yml` |
|
||||
| Fixed ports (3000, 8484) | Dynamic ports with discovery |
|
||||
| No branch isolation | TEST_INSTANCE_ID from git branch |
|
||||
| `service_started` | `service_healthy` |
|
||||
| Basic Makefile | Environment-aware with VERBOSE support |
|
||||
|
||||
## Benefits
|
||||
|
||||
1. **Parallel testing** - Multiple branches can run tests simultaneously
|
||||
2. **Environment parity** - Clear dev/test/prod separation
|
||||
3. **CI/CD ready** - Scripts work in automated pipelines
|
||||
4. **Faster feedback** - Dynamic ports eliminate conflicts
|
||||
5. **Cleaner structure** - Tests and deploys clearly organized
|
||||
980
docs/plans/2025-12-30-pre-deployment-testing-impl.md
Normal file
980
docs/plans/2025-12-30-pre-deployment-testing-impl.md
Normal file
@@ -0,0 +1,980 @@
|
||||
# Pre-Deployment Testing Implementation Plan
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** Create a pre-deployment test pipeline with Makefile orchestration, mock Grist server, and MCP protocol integration tests.
|
||||
|
||||
**Architecture:** Makefile orchestrates unit tests, Docker builds, and integration tests. Integration tests use the MCP Python SDK to connect to the containerized grist-mcp server, which talks to a mock Grist API server. Both run in docker-compose on an isolated network.
|
||||
|
||||
**Tech Stack:** Python 3.14, pytest, MCP SDK, Starlette (mock server), Docker Compose, Make
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Add Health Endpoint to grist-mcp
|
||||
|
||||
The integration tests need to poll for service readiness. Add a `/health` endpoint.
|
||||
|
||||
**Files:**
|
||||
- Modify: `src/grist_mcp/main.py:42-47`
|
||||
|
||||
**Step 1: Add health endpoint to main.py**
|
||||
|
||||
In `src/grist_mcp/main.py`, add a health route to the Starlette app:
|
||||
|
||||
```python
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
async def handle_health(request):
|
||||
return JSONResponse({"status": "ok"})
|
||||
```
|
||||
|
||||
And add the route:
|
||||
|
||||
```python
|
||||
return Starlette(
|
||||
routes=[
|
||||
Route("/health", endpoint=handle_health),
|
||||
Route("/sse", endpoint=handle_sse),
|
||||
Route("/messages", endpoint=handle_messages, methods=["POST"]),
|
||||
]
|
||||
)
|
||||
```
|
||||
|
||||
**Step 2: Run existing tests**
|
||||
|
||||
Run: `uv run pytest tests/test_server.py -v`
|
||||
Expected: PASS (health endpoint doesn't break existing tests)
|
||||
|
||||
**Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add src/grist_mcp/main.py
|
||||
git commit -m "feat: add /health endpoint for service readiness checks"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: Create Mock Grist Server
|
||||
|
||||
**Files:**
|
||||
- Create: `tests/integration/mock_grist/__init__.py`
|
||||
- Create: `tests/integration/mock_grist/server.py`
|
||||
- Create: `tests/integration/mock_grist/Dockerfile`
|
||||
- Create: `tests/integration/mock_grist/requirements.txt`
|
||||
|
||||
**Step 1: Create directory structure**
|
||||
|
||||
```bash
|
||||
mkdir -p tests/integration/mock_grist
|
||||
```
|
||||
|
||||
**Step 2: Create requirements.txt**
|
||||
|
||||
Create `tests/integration/mock_grist/requirements.txt`:
|
||||
|
||||
```
|
||||
starlette>=0.41.0
|
||||
uvicorn>=0.32.0
|
||||
```
|
||||
|
||||
**Step 3: Create __init__.py**
|
||||
|
||||
Create empty `tests/integration/mock_grist/__init__.py`:
|
||||
|
||||
```python
|
||||
```
|
||||
|
||||
**Step 4: Create server.py**
|
||||
|
||||
Create `tests/integration/mock_grist/server.py`:
|
||||
|
||||
```python
|
||||
"""Mock Grist API server for integration testing."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from starlette.applications import Starlette
|
||||
from starlette.responses import JSONResponse
|
||||
from starlette.routing import Route
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s [MOCK-GRIST] %(message)s")
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Mock data
|
||||
MOCK_TABLES = {
|
||||
"People": {
|
||||
"columns": [
|
||||
{"id": "Name", "fields": {"type": "Text"}},
|
||||
{"id": "Age", "fields": {"type": "Int"}},
|
||||
{"id": "Email", "fields": {"type": "Text"}},
|
||||
],
|
||||
"records": [
|
||||
{"id": 1, "fields": {"Name": "Alice", "Age": 30, "Email": "alice@example.com"}},
|
||||
{"id": 2, "fields": {"Name": "Bob", "Age": 25, "Email": "bob@example.com"}},
|
||||
],
|
||||
},
|
||||
"Tasks": {
|
||||
"columns": [
|
||||
{"id": "Title", "fields": {"type": "Text"}},
|
||||
{"id": "Done", "fields": {"type": "Bool"}},
|
||||
],
|
||||
"records": [
|
||||
{"id": 1, "fields": {"Title": "Write tests", "Done": False}},
|
||||
{"id": 2, "fields": {"Title": "Deploy", "Done": False}},
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
# Track requests for test assertions
|
||||
request_log: list[dict] = []
|
||||
|
||||
|
||||
def log_request(method: str, path: str, body: dict | None = None):
|
||||
"""Log a request for later inspection."""
|
||||
entry = {
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"method": method,
|
||||
"path": path,
|
||||
"body": body,
|
||||
}
|
||||
request_log.append(entry)
|
||||
logger.info(f"{method} {path}" + (f" body={json.dumps(body)}" if body else ""))
|
||||
|
||||
|
||||
async def health(request):
|
||||
"""Health check endpoint."""
|
||||
return JSONResponse({"status": "ok"})
|
||||
|
||||
|
||||
async def get_request_log(request):
|
||||
"""Return the request log for test assertions."""
|
||||
return JSONResponse(request_log)
|
||||
|
||||
|
||||
async def clear_request_log(request):
|
||||
"""Clear the request log."""
|
||||
request_log.clear()
|
||||
return JSONResponse({"status": "cleared"})
|
||||
|
||||
|
||||
async def list_tables(request):
|
||||
"""GET /api/docs/{doc_id}/tables"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
log_request("GET", f"/api/docs/{doc_id}/tables")
|
||||
tables = [{"id": name} for name in MOCK_TABLES.keys()]
|
||||
return JSONResponse({"tables": tables})
|
||||
|
||||
|
||||
async def get_table_columns(request):
|
||||
"""GET /api/docs/{doc_id}/tables/{table_id}/columns"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
log_request("GET", f"/api/docs/{doc_id}/tables/{table_id}/columns")
|
||||
|
||||
if table_id not in MOCK_TABLES:
|
||||
return JSONResponse({"error": "Table not found"}, status_code=404)
|
||||
|
||||
return JSONResponse({"columns": MOCK_TABLES[table_id]["columns"]})
|
||||
|
||||
|
||||
async def get_records(request):
|
||||
"""GET /api/docs/{doc_id}/tables/{table_id}/records"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
log_request("GET", f"/api/docs/{doc_id}/tables/{table_id}/records")
|
||||
|
||||
if table_id not in MOCK_TABLES:
|
||||
return JSONResponse({"error": "Table not found"}, status_code=404)
|
||||
|
||||
return JSONResponse({"records": MOCK_TABLES[table_id]["records"]})
|
||||
|
||||
|
||||
async def add_records(request):
|
||||
"""POST /api/docs/{doc_id}/tables/{table_id}/records"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables/{table_id}/records", body)
|
||||
|
||||
# Return mock IDs for new records
|
||||
new_ids = [{"id": 100 + i} for i in range(len(body.get("records", [])))]
|
||||
return JSONResponse({"records": new_ids})
|
||||
|
||||
|
||||
async def update_records(request):
|
||||
"""PATCH /api/docs/{doc_id}/tables/{table_id}/records"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("PATCH", f"/api/docs/{doc_id}/tables/{table_id}/records", body)
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
async def delete_records(request):
|
||||
"""POST /api/docs/{doc_id}/tables/{table_id}/data/delete"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables/{table_id}/data/delete", body)
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
async def sql_query(request):
|
||||
"""GET /api/docs/{doc_id}/sql"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
query = request.query_params.get("q", "")
|
||||
log_request("GET", f"/api/docs/{doc_id}/sql?q={query}")
|
||||
|
||||
# Return mock SQL results
|
||||
return JSONResponse({
|
||||
"records": [
|
||||
{"fields": {"Name": "Alice", "Age": 30}},
|
||||
{"fields": {"Name": "Bob", "Age": 25}},
|
||||
]
|
||||
})
|
||||
|
||||
|
||||
async def create_tables(request):
|
||||
"""POST /api/docs/{doc_id}/tables"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables", body)
|
||||
|
||||
# Return the created tables with their IDs
|
||||
tables = [{"id": t["id"]} for t in body.get("tables", [])]
|
||||
return JSONResponse({"tables": tables})
|
||||
|
||||
|
||||
async def add_column(request):
|
||||
"""POST /api/docs/{doc_id}/tables/{table_id}/columns"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables/{table_id}/columns", body)
|
||||
|
||||
columns = [{"id": c["id"]} for c in body.get("columns", [])]
|
||||
return JSONResponse({"columns": columns})
|
||||
|
||||
|
||||
async def modify_column(request):
|
||||
"""PATCH /api/docs/{doc_id}/tables/{table_id}/columns/{col_id}"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
col_id = request.path_params["col_id"]
|
||||
body = await request.json()
|
||||
log_request("PATCH", f"/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}", body)
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
async def delete_column(request):
|
||||
"""DELETE /api/docs/{doc_id}/tables/{table_id}/columns/{col_id}"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
col_id = request.path_params["col_id"]
|
||||
log_request("DELETE", f"/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}")
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
app = Starlette(
|
||||
routes=[
|
||||
# Test control endpoints
|
||||
Route("/health", endpoint=health),
|
||||
Route("/_test/requests", endpoint=get_request_log),
|
||||
Route("/_test/requests/clear", endpoint=clear_request_log, methods=["POST"]),
|
||||
|
||||
# Grist API endpoints
|
||||
Route("/api/docs/{doc_id}/tables", endpoint=list_tables),
|
||||
Route("/api/docs/{doc_id}/tables", endpoint=create_tables, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns", endpoint=get_table_columns),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns", endpoint=add_column, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}", endpoint=modify_column, methods=["PATCH"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}", endpoint=delete_column, methods=["DELETE"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/records", endpoint=get_records),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/records", endpoint=add_records, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/records", endpoint=update_records, methods=["PATCH"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/data/delete", endpoint=delete_records, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/sql", endpoint=sql_query),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
port = int(os.environ.get("PORT", "8484"))
|
||||
logger.info(f"Starting mock Grist server on port {port}")
|
||||
uvicorn.run(app, host="0.0.0.0", port=port)
|
||||
```
|
||||
|
||||
**Step 5: Create Dockerfile**
|
||||
|
||||
Create `tests/integration/mock_grist/Dockerfile`:
|
||||
|
||||
```dockerfile
|
||||
FROM python:3.14-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY server.py .
|
||||
|
||||
ENV PORT=8484
|
||||
EXPOSE 8484
|
||||
|
||||
CMD ["python", "server.py"]
|
||||
```
|
||||
|
||||
**Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/integration/mock_grist/
|
||||
git commit -m "feat: add mock Grist server for integration testing"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 3: Create Integration Test Configuration
|
||||
|
||||
**Files:**
|
||||
- Create: `tests/integration/__init__.py`
|
||||
- Create: `tests/integration/config.test.yaml`
|
||||
|
||||
**Step 1: Create __init__.py**
|
||||
|
||||
Create empty `tests/integration/__init__.py`:
|
||||
|
||||
```python
|
||||
```
|
||||
|
||||
**Step 2: Create config.test.yaml**
|
||||
|
||||
Create `tests/integration/config.test.yaml`:
|
||||
|
||||
```yaml
|
||||
documents:
|
||||
test-doc:
|
||||
url: http://mock-grist:8484
|
||||
doc_id: test-doc-id
|
||||
api_key: test-api-key
|
||||
|
||||
tokens:
|
||||
- token: test-token
|
||||
name: test-agent
|
||||
scope:
|
||||
- document: test-doc
|
||||
permissions: [read, write, schema]
|
||||
```
|
||||
|
||||
**Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/integration/__init__.py tests/integration/config.test.yaml
|
||||
git commit -m "feat: add integration test configuration"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: Create Docker Compose Test Configuration
|
||||
|
||||
**Files:**
|
||||
- Create: `docker-compose.test.yaml`
|
||||
|
||||
**Step 1: Create docker-compose.test.yaml**
|
||||
|
||||
Create `docker-compose.test.yaml`:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
grist-mcp:
|
||||
build: .
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- GRIST_MCP_TOKEN=test-token
|
||||
- PORT=3000
|
||||
volumes:
|
||||
- ./tests/integration/config.test.yaml:/app/config.yaml:ro
|
||||
depends_on:
|
||||
mock-grist:
|
||||
condition: service_started
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
mock-grist:
|
||||
build: tests/integration/mock_grist
|
||||
ports:
|
||||
- "8484:8484"
|
||||
environment:
|
||||
- PORT=8484
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8484/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
networks:
|
||||
test-net:
|
||||
driver: bridge
|
||||
```
|
||||
|
||||
**Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add docker-compose.test.yaml
|
||||
git commit -m "feat: add docker-compose for integration testing"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 5: Create Integration Test Fixtures
|
||||
|
||||
**Files:**
|
||||
- Create: `tests/integration/conftest.py`
|
||||
|
||||
**Step 1: Create conftest.py**
|
||||
|
||||
Create `tests/integration/conftest.py`:
|
||||
|
||||
```python
|
||||
"""Fixtures for integration tests."""
|
||||
|
||||
import asyncio
|
||||
import time
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
from mcp import ClientSession
|
||||
from mcp.client.sse import sse_client
|
||||
|
||||
|
||||
GRIST_MCP_URL = "http://localhost:3000"
|
||||
MOCK_GRIST_URL = "http://localhost:8484"
|
||||
MAX_WAIT_SECONDS = 30
|
||||
|
||||
|
||||
def wait_for_service(url: str, timeout: int = MAX_WAIT_SECONDS) -> bool:
|
||||
"""Wait for a service to become healthy."""
|
||||
start = time.time()
|
||||
while time.time() - start < timeout:
|
||||
try:
|
||||
response = httpx.get(f"{url}/health", timeout=2.0)
|
||||
if response.status_code == 200:
|
||||
return True
|
||||
except httpx.RequestError:
|
||||
pass
|
||||
time.sleep(0.5)
|
||||
return False
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def services_ready():
|
||||
"""Ensure both services are healthy before running tests."""
|
||||
if not wait_for_service(MOCK_GRIST_URL):
|
||||
pytest.fail(f"Mock Grist server not ready at {MOCK_GRIST_URL}")
|
||||
if not wait_for_service(GRIST_MCP_URL):
|
||||
pytest.fail(f"grist-mcp server not ready at {GRIST_MCP_URL}")
|
||||
return True
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def mcp_client(services_ready):
|
||||
"""Create an MCP client connected to grist-mcp via SSE."""
|
||||
async with sse_client(f"{GRIST_MCP_URL}/sse") as (read_stream, write_stream):
|
||||
async with ClientSession(read_stream, write_stream) as session:
|
||||
await session.initialize()
|
||||
yield session
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_grist_client(services_ready):
|
||||
"""HTTP client for interacting with mock Grist test endpoints."""
|
||||
with httpx.Client(base_url=MOCK_GRIST_URL, timeout=10.0) as client:
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def clear_mock_grist_log(mock_grist_client):
|
||||
"""Clear the mock Grist request log before each test."""
|
||||
mock_grist_client.post("/_test/requests/clear")
|
||||
yield
|
||||
```
|
||||
|
||||
**Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/integration/conftest.py
|
||||
git commit -m "feat: add integration test fixtures with MCP client"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 6: Create MCP Protocol Tests
|
||||
|
||||
**Files:**
|
||||
- Create: `tests/integration/test_mcp_protocol.py`
|
||||
|
||||
**Step 1: Create test_mcp_protocol.py**
|
||||
|
||||
Create `tests/integration/test_mcp_protocol.py`:
|
||||
|
||||
```python
|
||||
"""Test MCP protocol compliance over SSE transport."""
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mcp_connection_initializes(mcp_client):
|
||||
"""Test that MCP client can connect and initialize."""
|
||||
# If we get here, connection and initialization succeeded
|
||||
assert mcp_client is not None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_tools_returns_all_tools(mcp_client):
|
||||
"""Test that list_tools returns all expected tools."""
|
||||
result = await mcp_client.list_tools()
|
||||
tool_names = [tool.name for tool in result.tools]
|
||||
|
||||
expected_tools = [
|
||||
"list_documents",
|
||||
"list_tables",
|
||||
"describe_table",
|
||||
"get_records",
|
||||
"sql_query",
|
||||
"add_records",
|
||||
"update_records",
|
||||
"delete_records",
|
||||
"create_table",
|
||||
"add_column",
|
||||
"modify_column",
|
||||
"delete_column",
|
||||
]
|
||||
|
||||
for expected in expected_tools:
|
||||
assert expected in tool_names, f"Missing tool: {expected}"
|
||||
|
||||
assert len(result.tools) == 12
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_tools_has_descriptions(mcp_client):
|
||||
"""Test that all tools have descriptions."""
|
||||
result = await mcp_client.list_tools()
|
||||
|
||||
for tool in result.tools:
|
||||
assert tool.description, f"Tool {tool.name} has no description"
|
||||
assert len(tool.description) > 10, f"Tool {tool.name} description too short"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_tools_has_input_schemas(mcp_client):
|
||||
"""Test that all tools have input schemas."""
|
||||
result = await mcp_client.list_tools()
|
||||
|
||||
for tool in result.tools:
|
||||
assert tool.inputSchema is not None, f"Tool {tool.name} has no inputSchema"
|
||||
assert "type" in tool.inputSchema, f"Tool {tool.name} schema missing type"
|
||||
```
|
||||
|
||||
**Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/integration/test_mcp_protocol.py
|
||||
git commit -m "feat: add MCP protocol compliance tests"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 7: Create Tool Integration Tests
|
||||
|
||||
**Files:**
|
||||
- Create: `tests/integration/test_tools_integration.py`
|
||||
|
||||
**Step 1: Create test_tools_integration.py**
|
||||
|
||||
Create `tests/integration/test_tools_integration.py`:
|
||||
|
||||
```python
|
||||
"""Test tool calls through MCP client to verify Grist API interactions."""
|
||||
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_documents(mcp_client):
|
||||
"""Test list_documents returns accessible documents."""
|
||||
result = await mcp_client.call_tool("list_documents", {})
|
||||
|
||||
assert len(result.content) == 1
|
||||
data = json.loads(result.content[0].text)
|
||||
|
||||
assert "documents" in data
|
||||
assert len(data["documents"]) == 1
|
||||
assert data["documents"][0]["name"] == "test-doc"
|
||||
assert "read" in data["documents"][0]["permissions"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_tables(mcp_client, mock_grist_client):
|
||||
"""Test list_tables calls correct Grist API endpoint."""
|
||||
result = await mcp_client.call_tool("list_tables", {"document": "test-doc"})
|
||||
|
||||
# Check response
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "tables" in data
|
||||
assert "People" in data["tables"]
|
||||
assert "Tasks" in data["tables"]
|
||||
|
||||
# Verify mock received correct request
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
assert len(log) >= 1
|
||||
assert log[-1]["method"] == "GET"
|
||||
assert "/tables" in log[-1]["path"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_describe_table(mcp_client, mock_grist_client):
|
||||
"""Test describe_table returns column information."""
|
||||
result = await mcp_client.call_tool(
|
||||
"describe_table",
|
||||
{"document": "test-doc", "table": "People"}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "columns" in data
|
||||
|
||||
column_ids = [c["id"] for c in data["columns"]]
|
||||
assert "Name" in column_ids
|
||||
assert "Age" in column_ids
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
assert any("/columns" in entry["path"] for entry in log)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_records(mcp_client, mock_grist_client):
|
||||
"""Test get_records fetches records from table."""
|
||||
result = await mcp_client.call_tool(
|
||||
"get_records",
|
||||
{"document": "test-doc", "table": "People"}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "records" in data
|
||||
assert len(data["records"]) == 2
|
||||
assert data["records"][0]["Name"] == "Alice"
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
assert any("/records" in entry["path"] and entry["method"] == "GET" for entry in log)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sql_query(mcp_client, mock_grist_client):
|
||||
"""Test sql_query executes SQL and returns results."""
|
||||
result = await mcp_client.call_tool(
|
||||
"sql_query",
|
||||
{"document": "test-doc", "query": "SELECT Name, Age FROM People"}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "records" in data
|
||||
assert len(data["records"]) >= 1
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
assert any("/sql" in entry["path"] for entry in log)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_records(mcp_client, mock_grist_client):
|
||||
"""Test add_records sends correct payload to Grist."""
|
||||
new_records = [
|
||||
{"Name": "Charlie", "Age": 35, "Email": "charlie@example.com"}
|
||||
]
|
||||
|
||||
result = await mcp_client.call_tool(
|
||||
"add_records",
|
||||
{"document": "test-doc", "table": "People", "records": new_records}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "record_ids" in data
|
||||
assert len(data["record_ids"]) == 1
|
||||
|
||||
# Verify API call body
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
post_requests = [e for e in log if e["method"] == "POST" and "/records" in e["path"]]
|
||||
assert len(post_requests) >= 1
|
||||
assert post_requests[-1]["body"]["records"][0]["fields"]["Name"] == "Charlie"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_records(mcp_client, mock_grist_client):
|
||||
"""Test update_records sends correct payload to Grist."""
|
||||
updates = [
|
||||
{"id": 1, "fields": {"Age": 31}}
|
||||
]
|
||||
|
||||
result = await mcp_client.call_tool(
|
||||
"update_records",
|
||||
{"document": "test-doc", "table": "People", "records": updates}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "updated" in data
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
patch_requests = [e for e in log if e["method"] == "PATCH" and "/records" in e["path"]]
|
||||
assert len(patch_requests) >= 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_records(mcp_client, mock_grist_client):
|
||||
"""Test delete_records sends correct IDs to Grist."""
|
||||
result = await mcp_client.call_tool(
|
||||
"delete_records",
|
||||
{"document": "test-doc", "table": "People", "record_ids": [1, 2]}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "deleted" in data
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
delete_requests = [e for e in log if "/data/delete" in e["path"]]
|
||||
assert len(delete_requests) >= 1
|
||||
assert delete_requests[-1]["body"] == [1, 2]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_table(mcp_client, mock_grist_client):
|
||||
"""Test create_table sends correct schema to Grist."""
|
||||
columns = [
|
||||
{"id": "Title", "type": "Text"},
|
||||
{"id": "Count", "type": "Int"},
|
||||
]
|
||||
|
||||
result = await mcp_client.call_tool(
|
||||
"create_table",
|
||||
{"document": "test-doc", "table_id": "NewTable", "columns": columns}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "table_id" in data
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
post_tables = [e for e in log if e["method"] == "POST" and e["path"].endswith("/tables")]
|
||||
assert len(post_tables) >= 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_column(mcp_client, mock_grist_client):
|
||||
"""Test add_column sends correct column definition."""
|
||||
result = await mcp_client.call_tool(
|
||||
"add_column",
|
||||
{
|
||||
"document": "test-doc",
|
||||
"table": "People",
|
||||
"column_id": "Phone",
|
||||
"column_type": "Text",
|
||||
}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "column_id" in data
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
post_cols = [e for e in log if e["method"] == "POST" and "/columns" in e["path"]]
|
||||
assert len(post_cols) >= 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_modify_column(mcp_client, mock_grist_client):
|
||||
"""Test modify_column sends correct update."""
|
||||
result = await mcp_client.call_tool(
|
||||
"modify_column",
|
||||
{
|
||||
"document": "test-doc",
|
||||
"table": "People",
|
||||
"column_id": "Age",
|
||||
"type": "Numeric",
|
||||
}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "modified" in data
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
patch_cols = [e for e in log if e["method"] == "PATCH" and "/columns/" in e["path"]]
|
||||
assert len(patch_cols) >= 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_column(mcp_client, mock_grist_client):
|
||||
"""Test delete_column calls correct endpoint."""
|
||||
result = await mcp_client.call_tool(
|
||||
"delete_column",
|
||||
{
|
||||
"document": "test-doc",
|
||||
"table": "People",
|
||||
"column_id": "Email",
|
||||
}
|
||||
)
|
||||
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "deleted" in data
|
||||
|
||||
# Verify API call
|
||||
log = mock_grist_client.get("/_test/requests").json()
|
||||
delete_cols = [e for e in log if e["method"] == "DELETE" and "/columns/" in e["path"]]
|
||||
assert len(delete_cols) >= 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_unauthorized_document_fails(mcp_client):
|
||||
"""Test that accessing unauthorized document returns error."""
|
||||
result = await mcp_client.call_tool(
|
||||
"list_tables",
|
||||
{"document": "unauthorized-doc"}
|
||||
)
|
||||
|
||||
assert "error" in result.content[0].text.lower() or "authorization" in result.content[0].text.lower()
|
||||
```
|
||||
|
||||
**Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/integration/test_tools_integration.py
|
||||
git commit -m "feat: add tool integration tests with Grist API validation"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 8: Create Makefile
|
||||
|
||||
**Files:**
|
||||
- Create: `Makefile`
|
||||
|
||||
**Step 1: Create Makefile**
|
||||
|
||||
Create `Makefile`:
|
||||
|
||||
```makefile
|
||||
.PHONY: help test build integration-up integration-test integration-down integration pre-deploy clean

# Default target
help: ## Show this help
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'

test: ## Run unit tests
	uv run pytest tests/ -v --ignore=tests/integration

build: ## Build Docker images for testing
	docker compose -f docker-compose.test.yaml build

integration-up: ## Start integration test containers and wait for health
	# --wait blocks until the compose healthchecks pass, replacing the
	# unreliable fixed sleep (consistent with scripts/run-integration-tests.sh).
	docker compose -f docker-compose.test.yaml up -d --wait

integration-test: ## Run integration tests (containers must be up)
	uv run pytest tests/integration/ -v

integration-down: ## Stop and remove test containers
	docker compose -f docker-compose.test.yaml down -v

integration: build integration-up ## Full integration cycle (build, up, test, down)
	@$(MAKE) integration-test || ($(MAKE) integration-down && exit 1)
	@$(MAKE) integration-down

pre-deploy: test integration ## Full pre-deployment pipeline (unit tests + integration)
	@echo "Pre-deployment checks passed!"

clean: ## Remove all test artifacts and containers
	docker compose -f docker-compose.test.yaml down -v --rmi local 2>/dev/null || true
	find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
	find . -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
|
||||
```
|
||||
|
||||
**Step 2: Verify Makefile syntax**
|
||||
|
||||
Run: `make help`
|
||||
Expected: List of available targets with descriptions
|
||||
|
||||
**Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add Makefile
|
||||
git commit -m "feat: add Makefile for test orchestration"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 9: Run Full Pre-Deploy Pipeline
|
||||
|
||||
**Step 1: Run unit tests**
|
||||
|
||||
Run: `make test`
|
||||
Expected: All unit tests pass
|
||||
|
||||
**Step 2: Run full pre-deploy**
|
||||
|
||||
Run: `make pre-deploy`
|
||||
Expected: Unit tests pass, Docker builds succeed, integration tests pass, containers cleaned up
|
||||
|
||||
**Step 3: Commit any fixes needed**
|
||||
|
||||
If any tests fail, fix them and commit:
|
||||
|
||||
```bash
|
||||
git add -A
|
||||
git commit -m "fix: resolve integration test issues"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
Files created:
|
||||
- `src/grist_mcp/main.py` - Modified with /health endpoint
|
||||
- `tests/integration/mock_grist/__init__.py`
|
||||
- `tests/integration/mock_grist/server.py`
|
||||
- `tests/integration/mock_grist/Dockerfile`
|
||||
- `tests/integration/mock_grist/requirements.txt`
|
||||
- `tests/integration/__init__.py`
|
||||
- `tests/integration/config.test.yaml`
|
||||
- `tests/integration/conftest.py`
|
||||
- `tests/integration/test_mcp_protocol.py`
|
||||
- `tests/integration/test_tools_integration.py`
|
||||
- `docker-compose.test.yaml`
|
||||
- `Makefile`
|
||||
|
||||
Usage:
|
||||
```bash
|
||||
make help # Show all targets
|
||||
make test # Unit tests only
|
||||
make integration # Integration tests only
|
||||
make pre-deploy # Full pipeline
|
||||
make clean # Cleanup
|
||||
```
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "grist-mcp"
|
||||
version = "0.1.0"
|
||||
version = "1.0.0"
|
||||
description = "MCP server for AI agents to interact with Grist documents"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
@@ -17,6 +17,8 @@ dev = [
|
||||
"pytest>=8.0.0",
|
||||
"pytest-asyncio>=0.24.0",
|
||||
"pytest-httpx>=0.32.0",
|
||||
"pytest-timeout>=2.0.0",
|
||||
"rich>=13.0.0",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
@@ -25,4 +27,4 @@ build-backend = "hatchling.build"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests"]
|
||||
testpaths = ["tests/unit", "tests/integration"]
|
||||
|
||||
7
scripts/get-test-instance-id.sh
Executable file
7
scripts/get-test-instance-id.sh
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/bin/bash
# scripts/get-test-instance-id.sh
# Derive a per-branch instance ID so parallel test runs don't collide.

# Fall back to "unknown" when not inside a git work tree.
branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null) || branch="unknown"

# Map every non-alphanumeric character to '-' and cap at 20 characters.
printf '%s\n' "$branch" | tr -c 'a-zA-Z0-9\n' '-' | cut -c1-20
|
||||
39
scripts/run-integration-tests.sh
Executable file
39
scripts/run-integration-tests.sh
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/bin/bash
# scripts/run-integration-tests.sh
# Run integration tests with branch isolation and dynamic port discovery
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"

# Get branch-based instance ID
TEST_INSTANCE_ID=$("$SCRIPT_DIR/get-test-instance-id.sh")
export TEST_INSTANCE_ID

echo "Test instance ID: $TEST_INSTANCE_ID"

# Tear containers down on ANY exit path. Without this, a failure between
# `compose up` and the pytest invocation (e.g. port discovery) aborts the
# script under `set -e` and leaks running containers.
cleanup() {
    cd "$PROJECT_ROOT/deploy/test"
    docker compose down -v
}
trap cleanup EXIT

# Start containers (--wait blocks until healthchecks pass)
cd "$PROJECT_ROOT/deploy/test"
docker compose up -d --build --wait

# Discover dynamic ports
GRIST_MCP_PORT=$(docker compose port grist-mcp 3000 | cut -d: -f2)
MOCK_GRIST_PORT=$(docker compose port mock-grist 8484 | cut -d: -f2)

echo "grist-mcp available at: http://localhost:$GRIST_MCP_PORT"
echo "mock-grist available at: http://localhost:$MOCK_GRIST_PORT"

# Export for tests
export GRIST_MCP_URL="http://localhost:$GRIST_MCP_PORT"
export MOCK_GRIST_URL="http://localhost:$MOCK_GRIST_PORT"

# Run tests (cleanup runs via the EXIT trap)
cd "$PROJECT_ROOT"
TEST_EXIT=0
uv run pytest tests/integration/ -v || TEST_EXIT=$?

exit $TEST_EXIT
|
||||
248
scripts/test-runner.py
Executable file
248
scripts/test-runner.py
Executable file
@@ -0,0 +1,248 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Rich test runner with progress display and fail-fast behavior.
|
||||
|
||||
Runs unit tests, then integration tests with real-time progress indication.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
|
||||
from rich.console import Console
|
||||
from rich.live import Live
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
|
||||
class Status(Enum):
    """Lifecycle state of a test stage as it moves through the runner."""

    PENDING = "pending"  # not yet started
    RUNNING = "running"  # subprocess currently executing
    PASSED = "passed"    # subprocess exited with code 0
    FAILED = "failed"    # nonzero exit, or an exception while running
|
||||
|
||||
|
||||
@dataclass
class TestStage:
    """Mutable record of one test command and its parsed pytest progress."""

    name: str               # display name, e.g. "Unit Tests"
    command: list[str]      # argv run via subprocess.Popen
    status: Status = Status.PENDING
    progress: int = 0       # percent complete, parsed from pytest "[ NN%]" markers
    total: int = 0          # collected test count, from "collected N items"
    passed: int = 0         # estimated while running, exact from final summary
    failed: int = 0         # from the "N failed" summary line
    current_test: str = ""  # most recent "tests/...::test_..." node id seen
    duration: float = 0.0   # seconds, from pytest's "in X.XXs" summary
    output: list[str] = field(default_factory=list)  # raw captured lines
|
||||
|
||||
|
||||
# Regex patterns for parsing pytest output
PYTEST_PROGRESS = re.compile(r"\[\s*(\d+)%\]")            # per-line marker, e.g. "[ 42%]"
PYTEST_COLLECTING = re.compile(r"collected (\d+) items?")  # "collected 12 items"
PYTEST_RESULT = re.compile(r"(\d+) passed")                # final summary pass count
PYTEST_FAILED = re.compile(r"(\d+) failed")                # final summary failure count
PYTEST_DURATION = re.compile(r"in ([\d.]+)s")              # total runtime, "in 3.21s"
PYTEST_TEST_LINE = re.compile(r"(tests/\S+::\S+)")         # test node id on a line
|
||||
|
||||
|
||||
class TestRunner:
    """Run a sequence of TestStages as subprocesses with a live Rich display.

    Each stage's stdout is parsed line-by-line to update a progress table;
    the first failing stage stops the run (fail-fast).
    """

    def __init__(self, verbose: bool = False):
        # verbose: also echo every raw output line beneath the live table.
        self.console = Console()
        self.verbose = verbose
        # Repo root, assuming this script lives in <root>/scripts/.
        self.project_root = Path(__file__).parent.parent
        self.stages: list[TestStage] = []
        self.all_passed = True

    def add_stage(self, name: str, command: list[str]) -> None:
        """Append a stage to run; stages execute in insertion order."""
        self.stages.append(TestStage(name=name, command=command))

    def render_table(self) -> Table:
        """Build a snapshot table of all stages for the Live display."""
        table = Table(show_header=False, box=None, padding=(0, 1))
        table.add_column("Status", width=3)
        table.add_column("Name", width=20)
        table.add_column("Progress", width=30)
        table.add_column("Time", width=8)

        for stage in self.stages:
            # Status icon
            if stage.status == Status.PENDING:
                icon = Text("○", style="dim")
            elif stage.status == Status.RUNNING:
                icon = Text("●", style="yellow")
            elif stage.status == Status.PASSED:
                icon = Text("✓", style="green")
            else:
                icon = Text("✗", style="red")

            # Progress display
            if stage.status == Status.PENDING:
                progress = Text("pending", style="dim")
            elif stage.status == Status.RUNNING:
                if stage.total > 0:
                    # 20-char bar filled proportionally to percent complete.
                    bar_width = 20
                    filled = int(bar_width * stage.progress / 100)
                    bar = "━" * filled + "░" * (bar_width - filled)
                    progress = Text(f"{bar} {stage.progress:3d}% {stage.passed}/{stage.total}")
                    if stage.current_test:
                        progress.append(f"\n → {stage.current_test[:40]}", style="dim")
                else:
                    # Running but no "collected N items" line seen yet.
                    progress = Text("collecting...", style="yellow")
            elif stage.status == Status.PASSED:
                progress = Text(f"{stage.passed}/{stage.total}", style="green")
            else:
                progress = Text(f"{stage.passed}/{stage.total} ({stage.failed} failed)", style="red")

            # Duration
            if stage.duration > 0:
                duration = Text(f"{stage.duration:.1f}s", style="dim")
            else:
                duration = Text("")

            table.add_row(icon, stage.name, progress, duration)

        return table

    def parse_output(self, stage: TestStage, line: str) -> None:
        """Parse pytest output line and update stage state."""
        stage.output.append(line)

        # Check for collected count
        match = PYTEST_COLLECTING.search(line)
        if match:
            stage.total = int(match.group(1))

        # Check for progress percentage
        match = PYTEST_PROGRESS.search(line)
        if match:
            stage.progress = int(match.group(1))
            # Estimate passed based on progress
            if stage.total > 0:
                stage.passed = int(stage.total * stage.progress / 100)

        # Check for current test
        match = PYTEST_TEST_LINE.search(line)
        if match:
            stage.current_test = match.group(1)

        # Check for final results (overrides the estimate above)
        match = PYTEST_RESULT.search(line)
        if match:
            stage.passed = int(match.group(1))

        match = PYTEST_FAILED.search(line)
        if match:
            stage.failed = int(match.group(1))

        match = PYTEST_DURATION.search(line)
        if match:
            stage.duration = float(match.group(1))

    def run_stage(self, stage: TestStage, live: Live) -> bool:
        """Run a single test stage and return True if passed."""
        stage.status = Status.RUNNING
        live.update(self.render_table())

        env = os.environ.copy()
        # Unbuffered child output so progress lines stream in real time.
        env["PYTHONUNBUFFERED"] = "1"

        try:
            # stderr is merged into stdout so all lines flow through the
            # same parse/display loop below.
            process = subprocess.Popen(
                stage.command,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                text=True,
                cwd=self.project_root,
                env=env,
            )

            for line in process.stdout:
                line = line.rstrip()
                self.parse_output(stage, line)
                live.update(self.render_table())

                if self.verbose:
                    self.console.print(line)

            process.wait()

            if process.returncode == 0:
                stage.status = Status.PASSED
                stage.progress = 100
                return True
            else:
                stage.status = Status.FAILED
                self.all_passed = False
                return False

        except Exception as e:
            # Spawn/read failure: record the error in the stage's output
            # so the failure summary can show it.
            stage.status = Status.FAILED
            stage.output.append(str(e))
            self.all_passed = False
            return False
        finally:
            # Render the final state for this stage regardless of outcome.
            live.update(self.render_table())

    def run_all(self) -> bool:
        """Run all test stages with fail-fast behavior."""
        self.console.print()

        with Live(self.render_table(), console=self.console, refresh_per_second=4) as live:
            for stage in self.stages:
                if not self.run_stage(stage, live):
                    # Fail fast - don't run remaining stages
                    break

        self.console.print()

        # Print summary
        if self.all_passed:
            self.console.print("[green]All tests passed![/green]")
        else:
            self.console.print("[red]Tests failed![/red]")
            # Print failure details
            for stage in self.stages:
                if stage.status == Status.FAILED:
                    self.console.print(f"\n[red]Failures in {stage.name}:[/red]")
                    # Print last 20 lines of output for context
                    for line in stage.output[-20:]:
                        self.console.print(f"  {line}")

        return self.all_passed
|
||||
|
||||
|
||||
def main():
    """Parse CLI flags, assemble the stage list, run it, and exit 0/1."""
    parser = argparse.ArgumentParser(description="Run tests with rich progress display")
    parser.add_argument("-v", "--verbose", action="store_true", help="Show full test output")
    # Mutually exclusive: previously passing BOTH --unit-only and
    # --integration-only selected zero stages and exited 0 — a silent
    # success that would defeat a pre-deploy gate. Now argparse rejects
    # the combination with a usage error.
    scope = parser.add_mutually_exclusive_group()
    scope.add_argument("--unit-only", action="store_true", help="Run only unit tests")
    scope.add_argument("--integration-only", action="store_true", help="Run only integration tests")
    args = parser.parse_args()

    runner = TestRunner(verbose=args.verbose)

    # Determine which stages to run
    run_unit = not args.integration_only
    run_integration = not args.unit_only

    if run_unit:
        runner.add_stage(
            "Unit Tests",
            ["uv", "run", "pytest", "tests/unit/", "-v", "--tb=short"],
        )

    if run_integration:
        # Use the integration test script which handles containers
        runner.add_stage(
            "Integration Tests",
            ["bash", "./scripts/run-integration-tests.sh"],
        )

    success = runner.run_all()
    sys.exit(0 if success else 1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -160,7 +160,8 @@ class GristClient:
|
||||
if formula is not None:
|
||||
fields["formula"] = formula
|
||||
|
||||
await self._request("PATCH", f"/tables/{table}/columns/{column_id}", json={"fields": fields})
|
||||
payload = {"columns": [{"id": column_id, "fields": fields}]}
|
||||
await self._request("PATCH", f"/tables/{table}/columns", json=payload)
|
||||
|
||||
async def delete_column(self, table: str, column_id: str) -> None:
|
||||
"""Delete a column from a table."""
|
||||
|
||||
@@ -1,59 +1,212 @@
|
||||
"""Main entry point for the MCP server with SSE transport."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import Any
|
||||
|
||||
import uvicorn
|
||||
from mcp.server.sse import SseServerTransport
|
||||
from starlette.applications import Starlette
|
||||
from starlette.routing import Route
|
||||
|
||||
from grist_mcp.server import create_server
|
||||
from grist_mcp.auth import AuthError
|
||||
from grist_mcp.config import Config, load_config
|
||||
from grist_mcp.auth import Authenticator, AuthError
|
||||
|
||||
|
||||
def create_app() -> Starlette:
|
||||
"""Create the Starlette ASGI application."""
|
||||
config_path = os.environ.get("CONFIG_PATH", "/app/config.yaml")
|
||||
Scope = dict[str, Any]
|
||||
Receive = Any
|
||||
Send = Any
|
||||
|
||||
if not os.path.exists(config_path):
|
||||
print(f"Error: Config file not found at {config_path}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
def _get_bearer_token(scope: Scope) -> str | None:
|
||||
"""Extract Bearer token from Authorization header."""
|
||||
headers = dict(scope.get("headers", []))
|
||||
auth_header = headers.get(b"authorization", b"").decode()
|
||||
if auth_header.startswith("Bearer "):
|
||||
return auth_header[7:]
|
||||
return None
|
||||
|
||||
|
||||
async def send_error(send: Send, status: int, message: str) -> None:
|
||||
"""Send an HTTP error response."""
|
||||
body = json.dumps({"error": message}).encode()
|
||||
await send({
|
||||
"type": "http.response.start",
|
||||
"status": status,
|
||||
"headers": [[b"content-type", b"application/json"]],
|
||||
})
|
||||
await send({
|
||||
"type": "http.response.body",
|
||||
"body": body,
|
||||
})
|
||||
|
||||
|
||||
CONFIG_TEMPLATE = """\
|
||||
# grist-mcp configuration
|
||||
#
|
||||
# Token Generation:
|
||||
# python -c "import secrets; print(secrets.token_urlsafe(32))"
|
||||
# openssl rand -base64 32
|
||||
|
||||
# Document definitions
|
||||
documents:
|
||||
my-document:
|
||||
url: https://docs.getgrist.com
|
||||
doc_id: YOUR_DOC_ID
|
||||
api_key: ${GRIST_API_KEY}
|
||||
|
||||
# Agent tokens with access scopes
|
||||
tokens:
|
||||
- token: REPLACE_WITH_GENERATED_TOKEN
|
||||
name: my-agent
|
||||
scope:
|
||||
- document: my-document
|
||||
permissions: [read, write]
|
||||
"""
|
||||
|
||||
|
||||
def _ensure_config(config_path: str) -> bool:
|
||||
"""Ensure config file exists. Creates template if missing.
|
||||
|
||||
Returns True if config is ready, False if template was created.
|
||||
"""
|
||||
path = os.path.abspath(config_path)
|
||||
|
||||
# Check if path is a directory (Docker creates this when mounting missing file)
|
||||
if os.path.isdir(path):
|
||||
print(f"ERROR: Config path is a directory: {path}")
|
||||
print()
|
||||
print("This usually means the config file doesn't exist on the host.")
|
||||
print("Please create the config file before starting the container:")
|
||||
print()
|
||||
print(f" mkdir -p $(dirname {config_path})")
|
||||
print(f" cat > {config_path} << 'EOF'")
|
||||
print(CONFIG_TEMPLATE)
|
||||
print("EOF")
|
||||
print()
|
||||
return False
|
||||
|
||||
if os.path.exists(path):
|
||||
return True
|
||||
|
||||
# Create template config
|
||||
try:
|
||||
server = create_server(config_path)
|
||||
except AuthError as e:
|
||||
print(f"Authentication error: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
with open(path, "w") as f:
|
||||
f.write(CONFIG_TEMPLATE)
|
||||
print(f"Created template configuration at: {path}")
|
||||
print()
|
||||
print("Please edit this file to configure your Grist documents and agent tokens,")
|
||||
print("then restart the server.")
|
||||
except PermissionError:
|
||||
print(f"ERROR: Cannot create config file at: {path}")
|
||||
print()
|
||||
print("Please create the config file manually before starting the container.")
|
||||
print()
|
||||
return False
|
||||
|
||||
|
||||
def create_app(config: Config):
|
||||
"""Create the ASGI application."""
|
||||
auth = Authenticator(config)
|
||||
|
||||
sse = SseServerTransport("/messages")
|
||||
|
||||
async def handle_sse(request):
|
||||
async with sse.connect_sse(
|
||||
request.scope, request.receive, request._send
|
||||
) as streams:
|
||||
async def handle_sse(scope: Scope, receive: Receive, send: Send) -> None:
|
||||
# Extract and validate token from Authorization header
|
||||
token = _get_bearer_token(scope)
|
||||
if not token:
|
||||
await send_error(send, 401, "Missing Authorization header")
|
||||
return
|
||||
|
||||
try:
|
||||
agent = auth.authenticate(token)
|
||||
except AuthError as e:
|
||||
await send_error(send, 401, str(e))
|
||||
return
|
||||
|
||||
# Create a server instance for this authenticated connection
|
||||
server = create_server(auth, agent)
|
||||
|
||||
async with sse.connect_sse(scope, receive, send) as streams:
|
||||
await server.run(
|
||||
streams[0], streams[1], server.create_initialization_options()
|
||||
)
|
||||
|
||||
async def handle_messages(request):
|
||||
await sse.handle_post_message(request.scope, request.receive, request._send)
|
||||
async def handle_messages(scope: Scope, receive: Receive, send: Send) -> None:
|
||||
await sse.handle_post_message(scope, receive, send)
|
||||
|
||||
return Starlette(
|
||||
routes=[
|
||||
Route("/sse", endpoint=handle_sse),
|
||||
Route("/messages", endpoint=handle_messages, methods=["POST"]),
|
||||
]
|
||||
)
|
||||
async def handle_health(scope: Scope, receive: Receive, send: Send) -> None:
|
||||
await send({
|
||||
"type": "http.response.start",
|
||||
"status": 200,
|
||||
"headers": [[b"content-type", b"application/json"]],
|
||||
})
|
||||
await send({
|
||||
"type": "http.response.body",
|
||||
"body": b'{"status":"ok"}',
|
||||
})
|
||||
|
||||
async def handle_not_found(scope: Scope, receive: Receive, send: Send) -> None:
|
||||
await send({
|
||||
"type": "http.response.start",
|
||||
"status": 404,
|
||||
"headers": [[b"content-type", b"application/json"]],
|
||||
})
|
||||
await send({
|
||||
"type": "http.response.body",
|
||||
"body": b'{"error":"Not found"}',
|
||||
})
|
||||
|
||||
async def app(scope: Scope, receive: Receive, send: Send) -> None:
|
||||
if scope["type"] != "http":
|
||||
return
|
||||
|
||||
path = scope["path"]
|
||||
method = scope["method"]
|
||||
|
||||
if path == "/health" and method == "GET":
|
||||
await handle_health(scope, receive, send)
|
||||
elif path == "/sse" and method == "GET":
|
||||
await handle_sse(scope, receive, send)
|
||||
elif path == "/messages" and method == "POST":
|
||||
await handle_messages(scope, receive, send)
|
||||
else:
|
||||
await handle_not_found(scope, receive, send)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
def _print_mcp_config(external_port: int, tokens: list) -> None:
|
||||
"""Print Claude Code MCP configuration."""
|
||||
print()
|
||||
print("Claude Code MCP configuration (copy-paste to add):")
|
||||
for t in tokens:
|
||||
config = (
|
||||
f'{{"type": "sse", "url": "http://localhost:{external_port}/sse", '
|
||||
f'"headers": {{"Authorization": "Bearer {t.token}"}}}}'
|
||||
)
|
||||
print(f" claude mcp add-json grist-{t.name} '{config}'")
|
||||
print()
|
||||
|
||||
|
||||
def main():
|
||||
"""Run the SSE server."""
|
||||
port = int(os.environ.get("PORT", "3000"))
|
||||
app = create_app()
|
||||
external_port = int(os.environ.get("EXTERNAL_PORT", str(port)))
|
||||
config_path = os.environ.get("CONFIG_PATH", "/app/config.yaml")
|
||||
|
||||
if not _ensure_config(config_path):
|
||||
return
|
||||
|
||||
config = load_config(config_path)
|
||||
|
||||
print(f"Starting grist-mcp SSE server on port {port}")
|
||||
print(f" SSE endpoint: http://0.0.0.0:{port}/sse")
|
||||
print(f" Messages endpoint: http://0.0.0.0:{port}/messages")
|
||||
|
||||
_print_mcp_config(external_port, config.tokens)
|
||||
|
||||
app = create_app(config)
|
||||
uvicorn.run(app, host="0.0.0.0", port=port)
|
||||
|
||||
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
"""MCP server setup and tool registration."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from mcp.server import Server
|
||||
from mcp.types import Tool, TextContent
|
||||
|
||||
from grist_mcp.config import load_config
|
||||
from grist_mcp.auth import Authenticator, AuthError, Agent
|
||||
from grist_mcp.auth import Authenticator, Agent, AuthError
|
||||
|
||||
from grist_mcp.tools.discovery import list_documents as _list_documents
|
||||
from grist_mcp.tools.read import list_tables as _list_tables
|
||||
@@ -23,27 +21,18 @@ from grist_mcp.tools.schema import modify_column as _modify_column
|
||||
from grist_mcp.tools.schema import delete_column as _delete_column
|
||||
|
||||
|
||||
def create_server(config_path: str, token: str | None = None) -> Server:
|
||||
"""Create and configure the MCP server.
|
||||
def create_server(auth: Authenticator, agent: Agent) -> Server:
|
||||
"""Create and configure the MCP server for an authenticated agent.
|
||||
|
||||
Args:
|
||||
config_path: Path to the configuration YAML file.
|
||||
token: Agent token for authentication. If not provided, reads from
|
||||
GRIST_MCP_TOKEN environment variable.
|
||||
auth: Authenticator instance for permission checks.
|
||||
agent: The authenticated agent for this server instance.
|
||||
|
||||
Raises:
|
||||
AuthError: If token is invalid or not provided.
|
||||
Returns:
|
||||
Configured MCP Server instance.
|
||||
"""
|
||||
config = load_config(config_path)
|
||||
auth = Authenticator(config)
|
||||
server = Server("grist-mcp")
|
||||
|
||||
# Authenticate agent from token (required for all tool calls)
|
||||
auth_token = token or os.environ.get("GRIST_MCP_TOKEN")
|
||||
if not auth_token:
|
||||
raise AuthError("No token provided. Set GRIST_MCP_TOKEN environment variable.")
|
||||
|
||||
_current_agent: Agent = auth.authenticate(auth_token)
|
||||
_current_agent = agent
|
||||
|
||||
@server.list_tools()
|
||||
async def list_tools() -> list[Tool]:
|
||||
|
||||
0
tests/integration/__init__.py
Normal file
0
tests/integration/__init__.py
Normal file
12
tests/integration/config.test.yaml
Normal file
12
tests/integration/config.test.yaml
Normal file
@@ -0,0 +1,12 @@
|
||||
documents:
|
||||
test-doc:
|
||||
url: http://mock-grist:8484
|
||||
doc_id: test-doc-id
|
||||
api_key: test-api-key
|
||||
|
||||
tokens:
|
||||
- token: test-token
|
||||
name: test-agent
|
||||
scope:
|
||||
- document: test-doc
|
||||
permissions: [read, write, schema]
|
||||
36
tests/integration/conftest.py
Normal file
36
tests/integration/conftest.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Fixtures for integration tests."""
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
MAX_WAIT_SECONDS = 30
|
||||
|
||||
|
||||
def wait_for_service(url: str, timeout: int = MAX_WAIT_SECONDS) -> bool:
|
||||
"""Wait for a service to become healthy."""
|
||||
start = time.time()
|
||||
while time.time() - start < timeout:
|
||||
try:
|
||||
response = httpx.get(f"{url}/health", timeout=2.0)
|
||||
if response.status_code == 200:
|
||||
return True
|
||||
except httpx.RequestError:
|
||||
pass
|
||||
time.sleep(0.5)
|
||||
return False
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def services_ready():
|
||||
"""Ensure both services are healthy before running tests."""
|
||||
if not wait_for_service(MOCK_GRIST_URL):
|
||||
pytest.fail(f"Mock Grist server not ready at {MOCK_GRIST_URL}")
|
||||
if not wait_for_service(GRIST_MCP_URL):
|
||||
pytest.fail(f"grist-mcp server not ready at {GRIST_MCP_URL}")
|
||||
return True
|
||||
13
tests/integration/mock_grist/Dockerfile
Normal file
13
tests/integration/mock_grist/Dockerfile
Normal file
@@ -0,0 +1,13 @@
|
||||
FROM python:3.14-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY server.py .
|
||||
|
||||
ENV PORT=8484
|
||||
EXPOSE 8484
|
||||
|
||||
CMD ["python", "server.py"]
|
||||
0
tests/integration/mock_grist/__init__.py
Normal file
0
tests/integration/mock_grist/__init__.py
Normal file
2
tests/integration/mock_grist/requirements.txt
Normal file
2
tests/integration/mock_grist/requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
starlette>=0.41.0
|
||||
uvicorn>=0.32.0
|
||||
217
tests/integration/mock_grist/server.py
Normal file
217
tests/integration/mock_grist/server.py
Normal file
@@ -0,0 +1,217 @@
|
||||
"""Mock Grist API server for integration testing."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from starlette.applications import Starlette
|
||||
from starlette.responses import JSONResponse
|
||||
from starlette.routing import Route
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s [MOCK-GRIST] %(message)s")
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Mock data
|
||||
MOCK_TABLES = {
|
||||
"People": {
|
||||
"columns": [
|
||||
{"id": "Name", "fields": {"type": "Text"}},
|
||||
{"id": "Age", "fields": {"type": "Int"}},
|
||||
{"id": "Email", "fields": {"type": "Text"}},
|
||||
],
|
||||
"records": [
|
||||
{"id": 1, "fields": {"Name": "Alice", "Age": 30, "Email": "alice@example.com"}},
|
||||
{"id": 2, "fields": {"Name": "Bob", "Age": 25, "Email": "bob@example.com"}},
|
||||
],
|
||||
},
|
||||
"Tasks": {
|
||||
"columns": [
|
||||
{"id": "Title", "fields": {"type": "Text"}},
|
||||
{"id": "Done", "fields": {"type": "Bool"}},
|
||||
],
|
||||
"records": [
|
||||
{"id": 1, "fields": {"Title": "Write tests", "Done": False}},
|
||||
{"id": 2, "fields": {"Title": "Deploy", "Done": False}},
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
# Track requests for test assertions
|
||||
request_log: list[dict] = []
|
||||
|
||||
|
||||
def log_request(method: str, path: str, body: dict | None = None):
|
||||
"""Log a request for later inspection."""
|
||||
entry = {
|
||||
"timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"method": method,
|
||||
"path": path,
|
||||
"body": body,
|
||||
}
|
||||
request_log.append(entry)
|
||||
logger.info(f"{method} {path}" + (f" body={json.dumps(body)}" if body else ""))
|
||||
|
||||
|
||||
async def health(request):
|
||||
"""Health check endpoint."""
|
||||
return JSONResponse({"status": "ok"})
|
||||
|
||||
|
||||
async def get_request_log(request):
|
||||
"""Return the request log for test assertions."""
|
||||
return JSONResponse(request_log)
|
||||
|
||||
|
||||
async def clear_request_log(request):
|
||||
"""Clear the request log."""
|
||||
request_log.clear()
|
||||
return JSONResponse({"status": "cleared"})
|
||||
|
||||
|
||||
async def list_tables(request):
|
||||
"""GET /api/docs/{doc_id}/tables"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
log_request("GET", f"/api/docs/{doc_id}/tables")
|
||||
tables = [{"id": name} for name in MOCK_TABLES.keys()]
|
||||
return JSONResponse({"tables": tables})
|
||||
|
||||
|
||||
async def get_table_columns(request):
|
||||
"""GET /api/docs/{doc_id}/tables/{table_id}/columns"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
log_request("GET", f"/api/docs/{doc_id}/tables/{table_id}/columns")
|
||||
|
||||
if table_id not in MOCK_TABLES:
|
||||
return JSONResponse({"error": "Table not found"}, status_code=404)
|
||||
|
||||
return JSONResponse({"columns": MOCK_TABLES[table_id]["columns"]})
|
||||
|
||||
|
||||
async def get_records(request):
|
||||
"""GET /api/docs/{doc_id}/tables/{table_id}/records"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
log_request("GET", f"/api/docs/{doc_id}/tables/{table_id}/records")
|
||||
|
||||
if table_id not in MOCK_TABLES:
|
||||
return JSONResponse({"error": "Table not found"}, status_code=404)
|
||||
|
||||
return JSONResponse({"records": MOCK_TABLES[table_id]["records"]})
|
||||
|
||||
|
||||
async def add_records(request):
|
||||
"""POST /api/docs/{doc_id}/tables/{table_id}/records"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables/{table_id}/records", body)
|
||||
|
||||
# Return mock IDs for new records
|
||||
new_ids = [{"id": 100 + i} for i in range(len(body.get("records", [])))]
|
||||
return JSONResponse({"records": new_ids})
|
||||
|
||||
|
||||
async def update_records(request):
|
||||
"""PATCH /api/docs/{doc_id}/tables/{table_id}/records"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("PATCH", f"/api/docs/{doc_id}/tables/{table_id}/records", body)
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
async def delete_records(request):
|
||||
"""POST /api/docs/{doc_id}/tables/{table_id}/data/delete"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables/{table_id}/data/delete", body)
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
async def sql_query(request):
|
||||
"""GET /api/docs/{doc_id}/sql"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
query = request.query_params.get("q", "")
|
||||
log_request("GET", f"/api/docs/{doc_id}/sql?q={query}")
|
||||
|
||||
# Return mock SQL results
|
||||
return JSONResponse({
|
||||
"records": [
|
||||
{"fields": {"Name": "Alice", "Age": 30}},
|
||||
{"fields": {"Name": "Bob", "Age": 25}},
|
||||
]
|
||||
})
|
||||
|
||||
|
||||
async def create_tables(request):
|
||||
"""POST /api/docs/{doc_id}/tables"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables", body)
|
||||
|
||||
# Return the created tables with their IDs
|
||||
tables = [{"id": t["id"]} for t in body.get("tables", [])]
|
||||
return JSONResponse({"tables": tables})
|
||||
|
||||
|
||||
async def add_column(request):
|
||||
"""POST /api/docs/{doc_id}/tables/{table_id}/columns"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
body = await request.json()
|
||||
log_request("POST", f"/api/docs/{doc_id}/tables/{table_id}/columns", body)
|
||||
|
||||
columns = [{"id": c["id"]} for c in body.get("columns", [])]
|
||||
return JSONResponse({"columns": columns})
|
||||
|
||||
|
||||
async def modify_column(request):
|
||||
"""PATCH /api/docs/{doc_id}/tables/{table_id}/columns/{col_id}"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
col_id = request.path_params["col_id"]
|
||||
body = await request.json()
|
||||
log_request("PATCH", f"/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}", body)
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
async def delete_column(request):
|
||||
"""DELETE /api/docs/{doc_id}/tables/{table_id}/columns/{col_id}"""
|
||||
doc_id = request.path_params["doc_id"]
|
||||
table_id = request.path_params["table_id"]
|
||||
col_id = request.path_params["col_id"]
|
||||
log_request("DELETE", f"/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}")
|
||||
return JSONResponse({})
|
||||
|
||||
|
||||
app = Starlette(
|
||||
routes=[
|
||||
# Test control endpoints
|
||||
Route("/health", endpoint=health),
|
||||
Route("/_test/requests", endpoint=get_request_log),
|
||||
Route("/_test/requests/clear", endpoint=clear_request_log, methods=["POST"]),
|
||||
|
||||
# Grist API endpoints
|
||||
Route("/api/docs/{doc_id}/tables", endpoint=list_tables),
|
||||
Route("/api/docs/{doc_id}/tables", endpoint=create_tables, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns", endpoint=get_table_columns),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns", endpoint=add_column, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}", endpoint=modify_column, methods=["PATCH"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/columns/{col_id}", endpoint=delete_column, methods=["DELETE"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/records", endpoint=get_records),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/records", endpoint=add_records, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/records", endpoint=update_records, methods=["PATCH"]),
|
||||
Route("/api/docs/{doc_id}/tables/{table_id}/data/delete", endpoint=delete_records, methods=["POST"]),
|
||||
Route("/api/docs/{doc_id}/sql", endpoint=sql_query),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
port = int(os.environ.get("PORT", "8484"))
|
||||
logger.info(f"Starting mock Grist server on port {port}")
|
||||
uvicorn.run(app, host="0.0.0.0", port=port)
|
||||
62
tests/integration/test_mcp_protocol.py
Normal file
62
tests/integration/test_mcp_protocol.py
Normal file
@@ -0,0 +1,62 @@
|
||||
"""Test MCP protocol compliance over SSE transport."""
|
||||
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import pytest
|
||||
from mcp import ClientSession
|
||||
from mcp.client.sse import sse_client
|
||||
|
||||
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def create_mcp_session():
|
||||
"""Create and yield an MCP session."""
|
||||
async with sse_client(f"{GRIST_MCP_URL}/sse") as (read_stream, write_stream):
|
||||
async with ClientSession(read_stream, write_stream) as session:
|
||||
await session.initialize()
|
||||
yield session
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mcp_protocol_compliance(services_ready):
|
||||
"""Test MCP protocol compliance - connection, tools, descriptions, schemas."""
|
||||
async with create_mcp_session() as client:
|
||||
# Test 1: Connection initializes
|
||||
assert client is not None
|
||||
|
||||
# Test 2: list_tools returns all expected tools
|
||||
result = await client.list_tools()
|
||||
tool_names = [tool.name for tool in result.tools]
|
||||
|
||||
expected_tools = [
|
||||
"list_documents",
|
||||
"list_tables",
|
||||
"describe_table",
|
||||
"get_records",
|
||||
"sql_query",
|
||||
"add_records",
|
||||
"update_records",
|
||||
"delete_records",
|
||||
"create_table",
|
||||
"add_column",
|
||||
"modify_column",
|
||||
"delete_column",
|
||||
]
|
||||
|
||||
for expected in expected_tools:
|
||||
assert expected in tool_names, f"Missing tool: {expected}"
|
||||
|
||||
assert len(result.tools) == 12, f"Expected 12 tools, got {len(result.tools)}"
|
||||
|
||||
# Test 3: All tools have descriptions
|
||||
for tool in result.tools:
|
||||
assert tool.description, f"Tool {tool.name} has no description"
|
||||
assert len(tool.description) > 10, f"Tool {tool.name} description too short"
|
||||
|
||||
# Test 4: All tools have input schemas
|
||||
for tool in result.tools:
|
||||
assert tool.inputSchema is not None, f"Tool {tool.name} has no inputSchema"
|
||||
assert "type" in tool.inputSchema, f"Tool {tool.name} schema missing type"
|
||||
223
tests/integration/test_tools_integration.py
Normal file
223
tests/integration/test_tools_integration.py
Normal file
@@ -0,0 +1,223 @@
|
||||
"""Test tool calls through MCP client to verify Grist API interactions."""
|
||||
|
||||
import json
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
from mcp import ClientSession
|
||||
from mcp.client.sse import sse_client
|
||||
|
||||
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def create_mcp_session():
|
||||
"""Create and yield an MCP session."""
|
||||
async with sse_client(f"{GRIST_MCP_URL}/sse") as (read_stream, write_stream):
|
||||
async with ClientSession(read_stream, write_stream) as session:
|
||||
await session.initialize()
|
||||
yield session
|
||||
|
||||
|
||||
def get_mock_request_log():
|
||||
"""Get the request log from mock Grist server."""
|
||||
with httpx.Client(base_url=MOCK_GRIST_URL, timeout=10.0) as client:
|
||||
return client.get("/_test/requests").json()
|
||||
|
||||
|
||||
def clear_mock_request_log():
|
||||
"""Clear the mock Grist request log."""
|
||||
with httpx.Client(base_url=MOCK_GRIST_URL, timeout=10.0) as client:
|
||||
client.post("/_test/requests/clear")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_all_tools(services_ready):
|
||||
"""Test all MCP tools - reads, writes, schema ops, and auth errors."""
|
||||
async with create_mcp_session() as client:
|
||||
# ===== READ TOOLS =====
|
||||
|
||||
# Test list_documents
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool("list_documents", {})
|
||||
assert len(result.content) == 1
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "documents" in data
|
||||
assert len(data["documents"]) == 1
|
||||
assert data["documents"][0]["name"] == "test-doc"
|
||||
assert "read" in data["documents"][0]["permissions"]
|
||||
|
||||
# Test list_tables
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool("list_tables", {"document": "test-doc"})
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "tables" in data
|
||||
assert "People" in data["tables"]
|
||||
assert "Tasks" in data["tables"]
|
||||
log = get_mock_request_log()
|
||||
assert any("/tables" in entry["path"] for entry in log)
|
||||
|
||||
# Test describe_table
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool(
|
||||
"describe_table",
|
||||
{"document": "test-doc", "table": "People"}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "columns" in data
|
||||
column_ids = [c["id"] for c in data["columns"]]
|
||||
assert "Name" in column_ids
|
||||
assert "Age" in column_ids
|
||||
log = get_mock_request_log()
|
||||
assert any("/columns" in entry["path"] for entry in log)
|
||||
|
||||
# Test get_records
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool(
|
||||
"get_records",
|
||||
{"document": "test-doc", "table": "People"}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "records" in data
|
||||
assert len(data["records"]) == 2
|
||||
assert data["records"][0]["Name"] == "Alice"
|
||||
log = get_mock_request_log()
|
||||
assert any("/records" in entry["path"] and entry["method"] == "GET" for entry in log)
|
||||
|
||||
# Test sql_query
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool(
|
||||
"sql_query",
|
||||
{"document": "test-doc", "query": "SELECT Name, Age FROM People"}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "records" in data
|
||||
assert len(data["records"]) >= 1
|
||||
log = get_mock_request_log()
|
||||
assert any("/sql" in entry["path"] for entry in log)
|
||||
|
||||
# ===== WRITE TOOLS =====
|
||||
|
||||
# Test add_records
|
||||
clear_mock_request_log()
|
||||
new_records = [
|
||||
{"Name": "Charlie", "Age": 35, "Email": "charlie@example.com"}
|
||||
]
|
||||
result = await client.call_tool(
|
||||
"add_records",
|
||||
{"document": "test-doc", "table": "People", "records": new_records}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "inserted_ids" in data
|
||||
assert len(data["inserted_ids"]) == 1
|
||||
log = get_mock_request_log()
|
||||
post_requests = [e for e in log if e["method"] == "POST" and "/records" in e["path"]]
|
||||
assert len(post_requests) >= 1
|
||||
assert post_requests[-1]["body"]["records"][0]["fields"]["Name"] == "Charlie"
|
||||
|
||||
# Test update_records
|
||||
clear_mock_request_log()
|
||||
updates = [{"id": 1, "fields": {"Age": 31}}]
|
||||
result = await client.call_tool(
|
||||
"update_records",
|
||||
{"document": "test-doc", "table": "People", "records": updates}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "updated" in data
|
||||
log = get_mock_request_log()
|
||||
patch_requests = [e for e in log if e["method"] == "PATCH" and "/records" in e["path"]]
|
||||
assert len(patch_requests) >= 1
|
||||
|
||||
# Test delete_records
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool(
|
||||
"delete_records",
|
||||
{"document": "test-doc", "table": "People", "record_ids": [1, 2]}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "deleted" in data
|
||||
log = get_mock_request_log()
|
||||
delete_requests = [e for e in log if "/data/delete" in e["path"]]
|
||||
assert len(delete_requests) >= 1
|
||||
assert delete_requests[-1]["body"] == [1, 2]
|
||||
|
||||
# ===== SCHEMA TOOLS =====
|
||||
|
||||
# Test create_table
|
||||
clear_mock_request_log()
|
||||
columns = [
|
||||
{"id": "Title", "type": "Text"},
|
||||
{"id": "Count", "type": "Int"},
|
||||
]
|
||||
result = await client.call_tool(
|
||||
"create_table",
|
||||
{"document": "test-doc", "table_id": "NewTable", "columns": columns}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "table_id" in data
|
||||
log = get_mock_request_log()
|
||||
post_tables = [e for e in log if e["method"] == "POST" and e["path"].endswith("/tables")]
|
||||
assert len(post_tables) >= 1
|
||||
|
||||
# Test add_column
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool(
|
||||
"add_column",
|
||||
{
|
||||
"document": "test-doc",
|
||||
"table": "People",
|
||||
"column_id": "Phone",
|
||||
"column_type": "Text",
|
||||
}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "column_id" in data
|
||||
log = get_mock_request_log()
|
||||
post_cols = [e for e in log if e["method"] == "POST" and "/columns" in e["path"]]
|
||||
assert len(post_cols) >= 1
|
||||
|
||||
# Test modify_column
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool(
|
||||
"modify_column",
|
||||
{
|
||||
"document": "test-doc",
|
||||
"table": "People",
|
||||
"column_id": "Age",
|
||||
"type": "Numeric",
|
||||
}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "modified" in data
|
||||
log = get_mock_request_log()
|
||||
patch_cols = [e for e in log if e["method"] == "PATCH" and "/columns/" in e["path"]]
|
||||
assert len(patch_cols) >= 1
|
||||
|
||||
# Test delete_column
|
||||
clear_mock_request_log()
|
||||
result = await client.call_tool(
|
||||
"delete_column",
|
||||
{
|
||||
"document": "test-doc",
|
||||
"table": "People",
|
||||
"column_id": "Email",
|
||||
}
|
||||
)
|
||||
data = json.loads(result.content[0].text)
|
||||
assert "deleted" in data
|
||||
log = get_mock_request_log()
|
||||
delete_cols = [e for e in log if e["method"] == "DELETE" and "/columns/" in e["path"]]
|
||||
assert len(delete_cols) >= 1
|
||||
|
||||
# ===== AUTHORIZATION =====
|
||||
|
||||
# Test unauthorized document fails
|
||||
result = await client.call_tool(
|
||||
"list_tables",
|
||||
{"document": "unauthorized-doc"}
|
||||
)
|
||||
assert "error" in result.content[0].text.lower() or "authorization" in result.content[0].text.lower()
|
||||
0
tests/unit/__init__.py
Normal file
0
tests/unit/__init__.py
Normal file
@@ -160,7 +160,7 @@ async def test_add_column(client, httpx_mock: HTTPXMock):
|
||||
@pytest.mark.asyncio
|
||||
async def test_modify_column(client, httpx_mock: HTTPXMock):
|
||||
httpx_mock.add_response(
|
||||
url="https://grist.example.com/api/docs/abc123/tables/Table1/columns/Amount",
|
||||
url="https://grist.example.com/api/docs/abc123/tables/Table1/columns",
|
||||
method="PATCH",
|
||||
json={},
|
||||
)
|
||||
@@ -1,6 +1,8 @@
|
||||
import pytest
|
||||
from mcp.types import ListToolsRequest
|
||||
from grist_mcp.server import create_server
|
||||
from grist_mcp.config import load_config
|
||||
from grist_mcp.auth import Authenticator
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -21,7 +23,10 @@ tokens:
|
||||
permissions: [read, write, schema]
|
||||
""")
|
||||
|
||||
server = create_server(str(config_file), token="test-token")
|
||||
config = load_config(str(config_file))
|
||||
auth = Authenticator(config)
|
||||
agent = auth.authenticate("test-token")
|
||||
server = create_server(auth, agent)
|
||||
|
||||
# Server should have tools registered
|
||||
assert server is not None
|
||||
81
uv.lock
generated
81
uv.lock
generated
@@ -169,6 +169,8 @@ dev = [
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-httpx" },
|
||||
{ name = "pytest-timeout" },
|
||||
{ name = "rich" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
@@ -178,7 +180,9 @@ requires-dist = [
|
||||
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" },
|
||||
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.24.0" },
|
||||
{ name = "pytest-httpx", marker = "extra == 'dev'", specifier = ">=0.32.0" },
|
||||
{ name = "pytest-timeout", marker = "extra == 'dev'", specifier = ">=2.0.0" },
|
||||
{ name = "pyyaml", specifier = ">=6.0" },
|
||||
{ name = "rich", marker = "extra == 'dev'", specifier = ">=13.0.0" },
|
||||
{ name = "sse-starlette", specifier = ">=2.1.0" },
|
||||
{ name = "starlette", specifier = ">=0.41.0" },
|
||||
{ name = "uvicorn", specifier = ">=0.32.0" },
|
||||
@@ -276,9 +280,21 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
version = "4.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "mdurl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mcp"
|
||||
version = "1.23.1"
|
||||
version = "1.25.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
@@ -296,9 +312,18 @@ dependencies = [
|
||||
{ name = "typing-inspection" },
|
||||
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/12/42/10c0c09ca27aceacd8c428956cfabdd67e3d328fe55c4abc16589285d294/mcp-1.23.1.tar.gz", hash = "sha256:7403e053e8e2283b1e6ae631423cb54736933fea70b32422152e6064556cd298", size = 596519, upload-time = "2025-12-02T18:41:12.807Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/9e/26e1d2d2c6afe15dfba5ca6799eeeea7656dce625c22766e4c57305e9cc2/mcp-1.23.1-py3-none-any.whl", hash = "sha256:3ce897fcc20a41bd50b4c58d3aa88085f11f505dcc0eaed48930012d34c731d8", size = 231433, upload-time = "2025-12-02T18:41:11.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mdurl"
|
||||
version = "0.1.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -421,7 +446,7 @@ crypto = [
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "9.0.1"
|
||||
version = "9.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
@@ -430,9 +455,9 @@ dependencies = [
|
||||
{ name = "pluggy" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -460,6 +485,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-timeout"
|
||||
version = "2.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.2.1"
|
||||
@@ -471,11 +508,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "python-multipart"
|
||||
version = "0.0.20"
|
||||
version = "0.0.21"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -527,6 +564,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rich"
|
||||
version = "14.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "markdown-it-py" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rpds-py"
|
||||
version = "0.30.0"
|
||||
@@ -566,14 +616,15 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "sse-starlette"
|
||||
version = "3.0.3"
|
||||
version = "3.1.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "starlette" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/db/3c/fa6517610dc641262b77cc7bf994ecd17465812c1b0585fe33e11be758ab/sse_starlette-3.0.3.tar.gz", hash = "sha256:88cfb08747e16200ea990c8ca876b03910a23b547ab3bd764c0d8eb81019b971", size = 21943, upload-time = "2025-10-30T18:44:20.117Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/62/08/8f554b0e5bad3e4e880521a1686d96c05198471eed860b0eb89b57ea3636/sse_starlette-3.1.1.tar.gz", hash = "sha256:bffa531420c1793ab224f63648c059bcadc412bf9fdb1301ac8de1cf9a67b7fb", size = 24306, upload-time = "2025-12-26T15:22:53.836Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/23/a0/984525d19ca5c8a6c33911a0c164b11490dd0f90ff7fd689f704f84e9a11/sse_starlette-3.0.3-py3-none-any.whl", hash = "sha256:af5bf5a6f3933df1d9c7f8539633dc8444ca6a97ab2e2a7cd3b6e431ac03a431", size = 11765, upload-time = "2025-10-30T18:44:18.834Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/31/4c281581a0f8de137b710a07f65518b34bcf333b201cfa06cfda9af05f8a/sse_starlette-3.1.1-py3-none-any.whl", hash = "sha256:bb38f71ae74cfd86b529907a9fda5632195dfa6ae120f214ea4c890c7ee9d436", size = 12442, upload-time = "2025-12-26T15:22:52.911Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -611,13 +662,13 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.38.0"
|
||||
version = "0.40.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605, upload-time = "2025-10-18T13:46:44.63Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" },
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user