Compare commits
16 Commits
c57e71b92a
...
49c5043661
| Author | SHA1 | Date | |
|---|---|---|---|
| 49c5043661 | |||
| 9e96d18315 | |||
| 88a6740b42 | |||
| 6e0afa0bfb | |||
| 80d4347378 | |||
| 3eee0bf296 | |||
| 8809095549 | |||
| a2e8d76237 | |||
| 8c25bec5a4 | |||
| 7890d79bce | |||
| 9d73ac73b1 | |||
| c9e51c71d8 | |||
| 3d68de4c54 | |||
| f921412f01 | |||
| 757afb3c41 | |||
| e235e998e4 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -2,7 +2,7 @@ __pycache__/
|
||||
*.py[cod]
|
||||
.venv/
|
||||
.env
|
||||
config.yaml
|
||||
/config.yaml
|
||||
*.egg-info/
|
||||
dist/
|
||||
.pytest_cache/
|
||||
|
||||
39
CLAUDE.md
39
CLAUDE.md
@@ -17,11 +17,23 @@ grist-mcp is an MCP (Model Context Protocol) server that enables AI agents to in
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Run tests
|
||||
uv run pytest -v
|
||||
# Run unit tests
|
||||
make test-unit
|
||||
# or: uv run pytest tests/unit/ -v
|
||||
|
||||
# Run a specific test file
|
||||
uv run pytest tests/test_auth.py -v
|
||||
# Run integration tests (manages containers automatically)
|
||||
make test-integration
|
||||
# or: ./scripts/run-integration-tests.sh
|
||||
|
||||
# Full pre-deploy pipeline
|
||||
make pre-deploy
|
||||
|
||||
# Development environment
|
||||
make dev-up # Start
|
||||
make dev-down # Stop
|
||||
|
||||
# Build Docker image
|
||||
make build
|
||||
|
||||
# Run the server (requires config and token)
|
||||
CONFIG_PATH=./config.yaml GRIST_MCP_TOKEN=your-token uv run python -m grist_mcp.main
|
||||
@@ -30,7 +42,7 @@ CONFIG_PATH=./config.yaml GRIST_MCP_TOKEN=your-token uv run python -m grist_mcp.
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
src/grist_mcp/
|
||||
src/grist_mcp/ # Source code
|
||||
├── main.py # Entry point, runs stdio server
|
||||
├── server.py # MCP server setup, tool registration, call_tool dispatch
|
||||
├── config.py # YAML config loading with env var substitution
|
||||
@@ -41,6 +53,14 @@ src/grist_mcp/
|
||||
├── read.py # list_tables, describe_table, get_records, sql_query
|
||||
├── write.py # add_records, update_records, delete_records
|
||||
└── schema.py # create_table, add_column, modify_column, delete_column
|
||||
tests/
|
||||
├── unit/ # Unit tests (no containers)
|
||||
└── integration/ # Integration tests (with Docker)
|
||||
deploy/
|
||||
├── dev/ # Development docker-compose
|
||||
├── test/ # Test docker-compose (ephemeral)
|
||||
└── prod/ # Production docker-compose
|
||||
scripts/ # Test automation scripts
|
||||
```
|
||||
|
||||
## Key Patterns
|
||||
@@ -71,11 +91,18 @@ The optional `client` parameter enables dependency injection for testing.
|
||||
|
||||
## Testing
|
||||
|
||||
Tests use pytest-httpx to mock Grist API responses. Each test file has fixtures for common setup:
|
||||
### Unit Tests (`tests/unit/`)
|
||||
Fast tests using pytest-httpx to mock Grist API responses. Run with `make test-unit`.
|
||||
- `test_auth.py`: Uses in-memory Config objects
|
||||
- `test_grist_client.py`: Uses HTTPXMock for API mocking
|
||||
- `test_tools_*.py`: Combine auth fixtures with mocked clients
|
||||
|
||||
### Integration Tests (`tests/integration/`)
|
||||
Tests against real Grist containers. Run with `make test-integration`.
|
||||
- Automatically manages Docker containers via `scripts/run-integration-tests.sh`
|
||||
- Uses environment variables for configuration (no hardcoded URLs)
|
||||
- Containers are ephemeral and cleaned up after tests
|
||||
|
||||
## Configuration
|
||||
|
||||
See `config.yaml.example` for the configuration format. Key points:
|
||||
|
||||
44
Makefile
44
Makefile
@@ -1,34 +1,40 @@
|
||||
.PHONY: help test build integration-up integration-test integration-down integration pre-deploy clean
|
||||
.PHONY: help test test-unit test-integration build dev dev-up dev-down pre-deploy clean
|
||||
|
||||
VERBOSE ?= 0
|
||||
|
||||
# Default target
|
||||
help: ## Show this help
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
|
||||
|
||||
test: ## Run unit tests
|
||||
uv run pytest tests/ -v --ignore=tests/integration
|
||||
# Testing
|
||||
test: ## Run all tests (unit + integration) with rich progress display
|
||||
@uv run python scripts/test-runner.py $(if $(filter 1,$(VERBOSE)),-v)
|
||||
|
||||
build: ## Build Docker images for testing
|
||||
docker compose -f docker-compose.test.yaml build
|
||||
test-unit: ## Run unit tests only
|
||||
@uv run python scripts/test-runner.py --unit-only $(if $(filter 1,$(VERBOSE)),-v)
|
||||
|
||||
integration-up: ## Start integration test containers
|
||||
docker compose -f docker-compose.test.yaml up -d
|
||||
@echo "Waiting for services to be ready..."
|
||||
@sleep 5
|
||||
test-integration: ## Run integration tests only (starts/stops containers)
|
||||
@uv run python scripts/test-runner.py --integration-only $(if $(filter 1,$(VERBOSE)),-v)
|
||||
|
||||
integration-test: ## Run integration tests (containers must be up)
|
||||
uv run pytest tests/integration/ -v
|
||||
# Docker
|
||||
build: ## Build Docker image
|
||||
docker build -t grist-mcp:latest .
|
||||
|
||||
integration-down: ## Stop and remove test containers
|
||||
docker compose -f docker-compose.test.yaml down -v
|
||||
dev: ## Start development environment (attached, streams logs)
|
||||
cd deploy/dev && docker compose up --build
|
||||
|
||||
integration: build integration-up ## Full integration cycle (build, up, test, down)
|
||||
@$(MAKE) integration-test || ($(MAKE) integration-down && exit 1)
|
||||
@$(MAKE) integration-down
|
||||
dev-up: ## Start development environment (detached)
|
||||
cd deploy/dev && docker compose up -d --build
|
||||
|
||||
pre-deploy: test integration ## Full pre-deployment pipeline (unit tests + integration)
|
||||
dev-down: ## Stop development environment
|
||||
cd deploy/dev && docker compose down
|
||||
|
||||
# Pre-deployment
|
||||
pre-deploy: test ## Full pre-deployment pipeline
|
||||
@echo "Pre-deployment checks passed!"
|
||||
|
||||
clean: ## Remove all test artifacts and containers
|
||||
docker compose -f docker-compose.test.yaml down -v --rmi local 2>/dev/null || true
|
||||
# Cleanup
|
||||
clean: ## Remove test artifacts and containers
|
||||
cd deploy/test && docker compose down -v --rmi local 2>/dev/null || true
|
||||
find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||
find . -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
|
||||
|
||||
1
deploy/dev/.env.example
Normal file
1
deploy/dev/.env.example
Normal file
@@ -0,0 +1 @@
|
||||
PORT=3010
|
||||
30
deploy/dev/config.yaml
Normal file
30
deploy/dev/config.yaml
Normal file
@@ -0,0 +1,30 @@
|
||||
# Development configuration for grist-mcp
|
||||
#
|
||||
# Token Generation:
|
||||
# python -c "import secrets; print(secrets.token_urlsafe(32))"
|
||||
# openssl rand -base64 32
|
||||
|
||||
# Document definitions
|
||||
documents:
|
||||
mcp-test-document:
|
||||
url: https://grist.bballou.com/
|
||||
doc_id: mVQvKTAyZC1FWZQgfuVeHC
|
||||
api_key: 83a03433a61ee9d2f2bf055d7f4518bedef0421a
|
||||
|
||||
# Agent tokens with access scopes
|
||||
tokens:
|
||||
- token: test-token-all-permissions
|
||||
name: dev-agent
|
||||
scope:
|
||||
- document: mcp-test-document
|
||||
permissions: [read, write, schema]
|
||||
- token: test-token-read-permissions
|
||||
name: dev-agent-read
|
||||
scope:
|
||||
- document: mcp-test-document
|
||||
permissions: [read]
|
||||
- token: test-token-no-schema-permissions
|
||||
name: dev-agent-no-schema
|
||||
scope:
|
||||
- document: mcp-test-document
|
||||
permissions: [read, write]
|
||||
20
deploy/dev/docker-compose.yml
Normal file
20
deploy/dev/docker-compose.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Development environment - hot reload, persistent data
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ../../src:/app/src:ro
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- EXTERNAL_PORT=${PORT:-3000}
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
1
deploy/prod/.env.example
Normal file
1
deploy/prod/.env.example
Normal file
@@ -0,0 +1 @@
|
||||
PORT=3000
|
||||
32
deploy/prod/docker-compose.yml
Normal file
32
deploy/prod/docker-compose.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
# Production environment - resource limits, logging, restart policy
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- EXTERNAL_PORT=${PORT:-3000}
|
||||
restart: unless-stopped
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 512M
|
||||
cpus: "1"
|
||||
reservations:
|
||||
memory: 128M
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "50m"
|
||||
max-file: "5"
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
@@ -1,29 +1,34 @@
|
||||
# Test environment - ephemeral, branch-isolated
|
||||
services:
|
||||
grist-mcp:
|
||||
build: .
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
container_name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "3000:3000"
|
||||
- "3000" # Dynamic port
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- GRIST_MCP_TOKEN=test-token
|
||||
- PORT=3000
|
||||
volumes:
|
||||
- ./tests/integration/config.test.yaml:/app/config.yaml:ro
|
||||
- ../../tests/integration/config.test.yaml:/app/config.yaml:ro
|
||||
depends_on:
|
||||
mock-grist:
|
||||
condition: service_started
|
||||
condition: service_healthy
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
mock-grist:
|
||||
build: tests/integration/mock_grist
|
||||
build:
|
||||
context: ../../tests/integration/mock_grist
|
||||
container_name: mock-grist-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "8484:8484"
|
||||
- "8484" # Dynamic port
|
||||
environment:
|
||||
- PORT=8484
|
||||
networks:
|
||||
@@ -32,8 +37,10 @@ services:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8484/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
networks:
|
||||
test-net:
|
||||
name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
driver: bridge
|
||||
@@ -1,10 +0,0 @@
|
||||
services:
|
||||
grist-mcp:
|
||||
build: .
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
env_file:
|
||||
- .env
|
||||
restart: unless-stopped
|
||||
587
docs/plans/2025-12-30-docker-service-skill-adaptation.md
Normal file
587
docs/plans/2025-12-30-docker-service-skill-adaptation.md
Normal file
@@ -0,0 +1,587 @@
|
||||
# Docker Service Architecture Adaptation Plan
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** Adapt grist-mcp to follow the docker-service-architecture skill guidelines for better test isolation, environment separation, and CI/CD readiness.
|
||||
|
||||
**Architecture:** Single-service project pattern with 2-stage testing (unit → integration), environment-specific deploy configs (dev/test/prod), and branch-isolated test infrastructure.
|
||||
|
||||
**Tech Stack:** Docker Compose, Make, Python/pytest, bash scripts
|
||||
|
||||
---
|
||||
|
||||
## Current State Analysis
|
||||
|
||||
**What we have:**
|
||||
- Single service (grist-mcp) with mock server for testing
|
||||
- 2-stage testing: unit tests (41) + integration tests (2)
|
||||
- docker-compose.test.yaml at project root
|
||||
- docker-compose.yaml for production at root
|
||||
- Basic Makefile with pre-deploy target
|
||||
|
||||
**Gaps vs. Skill Guidelines:**
|
||||
|
||||
| Area | Current | Skill Guideline |
|
||||
|------|---------|-----------------|
|
||||
| Directory structure | Flat docker-compose files at root | `deploy/{dev,test,prod}/` directories |
|
||||
| Test organization | `tests/*.py` + `tests/integration/` | `tests/unit/` + `tests/integration/` |
|
||||
| Port allocation | Fixed (3000, 8484) | Dynamic with discovery |
|
||||
| Branch isolation | None | TEST_INSTANCE_ID from git branch |
|
||||
| Container naming | Default | Instance-based (`-${TEST_INSTANCE_ID}`) |
|
||||
| Test storage | Default volumes | tmpfs for ephemeral |
|
||||
| depends_on | `service_started` | `service_healthy` |
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Restructure Tests Directory
|
||||
|
||||
**Files:**
|
||||
- Move: `tests/test_*.py` → `tests/unit/test_*.py`
|
||||
- Keep: `tests/integration/` as-is
|
||||
- Create: `tests/unit/__init__.py`
|
||||
|
||||
**Step 1: Create unit test directory and move files**
|
||||
|
||||
```bash
|
||||
mkdir -p tests/unit
|
||||
mv tests/test_*.py tests/unit/
|
||||
touch tests/unit/__init__.py
|
||||
```
|
||||
|
||||
**Step 2: Update pyproject.toml testpaths**
|
||||
|
||||
```toml
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests/unit", "tests/integration"]
|
||||
```
|
||||
|
||||
**Step 3: Update Makefile test target**
|
||||
|
||||
```makefile
|
||||
test: ## Run unit tests
|
||||
uv run pytest tests/unit/ -v
|
||||
```
|
||||
|
||||
**Step 4: Verify tests still pass**
|
||||
|
||||
```bash
|
||||
uv run pytest tests/unit/ -v
|
||||
uv run pytest tests/integration/ -v --ignore=tests/integration
|
||||
```
|
||||
|
||||
**Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/ pyproject.toml Makefile
|
||||
git commit -m "refactor: organize tests into unit/ and integration/ directories"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 2: Create Deploy Directory Structure
|
||||
|
||||
**Files:**
|
||||
- Create: `deploy/dev/docker-compose.yml`
|
||||
- Create: `deploy/dev/.env.example`
|
||||
- Create: `deploy/test/docker-compose.yml`
|
||||
- Create: `deploy/prod/docker-compose.yml`
|
||||
- Create: `deploy/prod/.env.example`
|
||||
- Delete: `docker-compose.yaml`, `docker-compose.test.yaml` (after migration)
|
||||
|
||||
**Step 1: Create deploy directory structure**
|
||||
|
||||
```bash
|
||||
mkdir -p deploy/{dev,test,prod}
|
||||
```
|
||||
|
||||
**Step 2: Create deploy/dev/docker-compose.yml**
|
||||
|
||||
```yaml
|
||||
# Development environment - hot reload, persistent data
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ../../src:/app/src:ro
|
||||
- ../../config.yaml:/app/config.yaml:ro
|
||||
env_file:
|
||||
- .env
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
```
|
||||
|
||||
**Step 3: Create deploy/dev/.env.example**
|
||||
|
||||
```bash
|
||||
PORT=3000
|
||||
GRIST_MCP_TOKEN=your-token-here
|
||||
CONFIG_PATH=/app/config.yaml
|
||||
```
|
||||
|
||||
**Step 4: Create deploy/test/docker-compose.yml**
|
||||
|
||||
```yaml
|
||||
# Test environment - ephemeral, branch-isolated
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
container_name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "3000" # Dynamic port
|
||||
environment:
|
||||
- CONFIG_PATH=/app/config.yaml
|
||||
- GRIST_MCP_TOKEN=test-token
|
||||
- PORT=3000
|
||||
volumes:
|
||||
- ../../tests/integration/config.test.yaml:/app/config.yaml:ro
|
||||
depends_on:
|
||||
mock-grist:
|
||||
condition: service_healthy
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
mock-grist:
|
||||
build:
|
||||
context: ../../tests/integration/mock_grist
|
||||
container_name: mock-grist-test-${TEST_INSTANCE_ID:-default}
|
||||
ports:
|
||||
- "8484" # Dynamic port
|
||||
environment:
|
||||
- PORT=8484
|
||||
networks:
|
||||
- test-net
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8484/health')"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
networks:
|
||||
test-net:
|
||||
name: grist-mcp-test-${TEST_INSTANCE_ID:-default}
|
||||
driver: bridge
|
||||
```
|
||||
|
||||
**Step 5: Create deploy/prod/docker-compose.yml**
|
||||
|
||||
```yaml
|
||||
# Production environment - resource limits, logging, restart policy
|
||||
services:
|
||||
grist-mcp:
|
||||
build:
|
||||
context: ../..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PORT:-3000}:3000"
|
||||
volumes:
|
||||
- ./config.yaml:/app/config.yaml:ro
|
||||
env_file:
|
||||
- .env
|
||||
restart: unless-stopped
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 512M
|
||||
cpus: "1"
|
||||
reservations:
|
||||
memory: 128M
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "50m"
|
||||
max-file: "5"
|
||||
healthcheck:
|
||||
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:3000/health')"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
```
|
||||
|
||||
**Step 6: Create deploy/prod/.env.example**
|
||||
|
||||
```bash
|
||||
PORT=3000
|
||||
GRIST_MCP_TOKEN=your-production-token
|
||||
CONFIG_PATH=/app/config.yaml
|
||||
```
|
||||
|
||||
**Step 7: Verify test compose works**
|
||||
|
||||
```bash
|
||||
cd deploy/test
|
||||
TEST_INSTANCE_ID=manual docker compose up -d --build
|
||||
docker compose ps
|
||||
docker compose down -v
|
||||
```
|
||||
|
||||
**Step 8: Remove old compose files and commit**
|
||||
|
||||
```bash
|
||||
rm docker-compose.yaml docker-compose.test.yaml
|
||||
git add deploy/
|
||||
git rm docker-compose.yaml docker-compose.test.yaml
|
||||
git commit -m "refactor: move docker-compose files to deploy/ directory structure"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 3: Add Test Isolation Scripts
|
||||
|
||||
**Files:**
|
||||
- Create: `scripts/get-test-instance-id.sh`
|
||||
- Create: `scripts/run-integration-tests.sh`
|
||||
|
||||
**Step 1: Create scripts directory**
|
||||
|
||||
```bash
|
||||
mkdir -p scripts
|
||||
```
|
||||
|
||||
**Step 2: Create get-test-instance-id.sh**
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# scripts/get-test-instance-id.sh
|
||||
# Generate a unique instance ID from git branch for parallel test isolation
|
||||
|
||||
BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "unknown")
|
||||
# Sanitize: replace non-alphanumeric with dash, limit length
|
||||
echo "$BRANCH" | sed 's/[^a-zA-Z0-9]/-/g' | cut -c1-20
|
||||
```
|
||||
|
||||
**Step 3: Create run-integration-tests.sh**
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# scripts/run-integration-tests.sh
|
||||
# Run integration tests with branch isolation and dynamic port discovery
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
|
||||
# Get branch-based instance ID
|
||||
TEST_INSTANCE_ID=$("$SCRIPT_DIR/get-test-instance-id.sh")
|
||||
export TEST_INSTANCE_ID
|
||||
|
||||
echo "Test instance ID: $TEST_INSTANCE_ID"
|
||||
|
||||
# Start containers
|
||||
cd "$PROJECT_ROOT/deploy/test"
|
||||
docker compose up -d --build --wait
|
||||
|
||||
# Discover dynamic ports
|
||||
GRIST_MCP_PORT=$(docker compose port grist-mcp 3000 | cut -d: -f2)
|
||||
MOCK_GRIST_PORT=$(docker compose port mock-grist 8484 | cut -d: -f2)
|
||||
|
||||
echo "grist-mcp available at: http://localhost:$GRIST_MCP_PORT"
|
||||
echo "mock-grist available at: http://localhost:$MOCK_GRIST_PORT"
|
||||
|
||||
# Export for tests
|
||||
export GRIST_MCP_URL="http://localhost:$GRIST_MCP_PORT"
|
||||
export MOCK_GRIST_URL="http://localhost:$MOCK_GRIST_PORT"
|
||||
|
||||
# Run tests
|
||||
cd "$PROJECT_ROOT"
|
||||
TEST_EXIT=0
|
||||
uv run pytest tests/integration/ -v || TEST_EXIT=$?
|
||||
|
||||
# Cleanup
|
||||
cd "$PROJECT_ROOT/deploy/test"
|
||||
docker compose down -v
|
||||
|
||||
exit $TEST_EXIT
|
||||
```
|
||||
|
||||
**Step 4: Make scripts executable**
|
||||
|
||||
```bash
|
||||
chmod +x scripts/get-test-instance-id.sh
|
||||
chmod +x scripts/run-integration-tests.sh
|
||||
```
|
||||
|
||||
**Step 5: Verify scripts work**
|
||||
|
||||
```bash
|
||||
./scripts/get-test-instance-id.sh
|
||||
./scripts/run-integration-tests.sh
|
||||
```
|
||||
|
||||
**Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add scripts/
|
||||
git commit -m "feat: add test isolation scripts with dynamic port discovery"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 4: Update Integration Tests for Dynamic Ports
|
||||
|
||||
**Files:**
|
||||
- Modify: `tests/integration/conftest.py`
|
||||
|
||||
**Step 1: Update conftest.py to use environment variables**
|
||||
|
||||
```python
|
||||
"""Fixtures for integration tests."""
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
|
||||
# Use environment variables for dynamic port discovery
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
MAX_WAIT_SECONDS = 30
|
||||
|
||||
|
||||
def wait_for_service(url: str, timeout: int = MAX_WAIT_SECONDS) -> bool:
|
||||
"""Wait for a service to become healthy."""
|
||||
start = time.time()
|
||||
while time.time() - start < timeout:
|
||||
try:
|
||||
response = httpx.get(f"{url}/health", timeout=2.0)
|
||||
if response.status_code == 200:
|
||||
return True
|
||||
except httpx.RequestError:
|
||||
pass
|
||||
time.sleep(0.5)
|
||||
return False
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def services_ready():
|
||||
"""Ensure both services are healthy before running tests."""
|
||||
if not wait_for_service(MOCK_GRIST_URL):
|
||||
pytest.fail(f"Mock Grist server not ready at {MOCK_GRIST_URL}")
|
||||
if not wait_for_service(GRIST_MCP_URL):
|
||||
pytest.fail(f"grist-mcp server not ready at {GRIST_MCP_URL}")
|
||||
return True
|
||||
```
|
||||
|
||||
**Step 2: Update test files to use environment URLs**
|
||||
|
||||
In `tests/integration/test_mcp_protocol.py` and `tests/integration/test_tools_integration.py`:
|
||||
|
||||
```python
|
||||
import os
|
||||
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
```
|
||||
|
||||
**Step 3: Run tests to verify**
|
||||
|
||||
```bash
|
||||
./scripts/run-integration-tests.sh
|
||||
```
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/integration/
|
||||
git commit -m "feat: support dynamic ports via environment variables in tests"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 5: Update Makefile
|
||||
|
||||
**Files:**
|
||||
- Modify: `Makefile`
|
||||
|
||||
**Step 1: Rewrite Makefile with new structure**
|
||||
|
||||
```makefile
|
||||
.PHONY: help test test-unit test-integration build dev-up dev-down integration pre-deploy clean
|
||||
|
||||
VERBOSE ?= 0
|
||||
PYTEST_ARGS := $(if $(filter 1,$(VERBOSE)),-v,-q)
|
||||
|
||||
# Default target
|
||||
help: ## Show this help
|
||||
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
|
||||
|
||||
# Testing
|
||||
test: test-unit ## Run all tests (unit only by default)
|
||||
|
||||
test-unit: ## Run unit tests
|
||||
uv run pytest tests/unit/ $(PYTEST_ARGS)
|
||||
|
||||
test-integration: ## Run integration tests (starts/stops containers)
|
||||
./scripts/run-integration-tests.sh
|
||||
|
||||
# Docker
|
||||
build: ## Build Docker image
|
||||
docker build -t grist-mcp:latest .
|
||||
|
||||
dev-up: ## Start development environment
|
||||
cd deploy/dev && docker compose up -d --build
|
||||
|
||||
dev-down: ## Stop development environment
|
||||
cd deploy/dev && docker compose down
|
||||
|
||||
# Pre-deployment
|
||||
pre-deploy: test-unit test-integration ## Full pre-deployment pipeline
|
||||
@echo "Pre-deployment checks passed!"
|
||||
|
||||
# Cleanup
|
||||
clean: ## Remove test artifacts and containers
|
||||
cd deploy/test && docker compose down -v --rmi local 2>/dev/null || true
|
||||
find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||
find . -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
|
||||
```
|
||||
|
||||
**Step 2: Verify Makefile targets**
|
||||
|
||||
```bash
|
||||
make help
|
||||
make test-unit
|
||||
make test-integration
|
||||
make pre-deploy
|
||||
```
|
||||
|
||||
**Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add Makefile
|
||||
git commit -m "refactor: update Makefile for new deploy/ structure"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 6: Update CLAUDE.md
|
||||
|
||||
**Files:**
|
||||
- Modify: `CLAUDE.md`
|
||||
|
||||
**Step 1: Update commands section**
|
||||
|
||||
Add to CLAUDE.md:
|
||||
|
||||
```markdown
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Run unit tests
|
||||
make test-unit
|
||||
# or: uv run pytest tests/unit/ -v
|
||||
|
||||
# Run integration tests (manages containers automatically)
|
||||
make test-integration
|
||||
# or: ./scripts/run-integration-tests.sh
|
||||
|
||||
# Full pre-deploy pipeline
|
||||
make pre-deploy
|
||||
|
||||
# Development environment
|
||||
make dev-up # Start
|
||||
make dev-down # Stop
|
||||
|
||||
# Build Docker image
|
||||
make build
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
src/grist_mcp/ # Source code
|
||||
tests/
|
||||
├── unit/ # Unit tests (no containers)
|
||||
└── integration/ # Integration tests (with Docker)
|
||||
deploy/
|
||||
├── dev/ # Development docker-compose
|
||||
├── test/ # Test docker-compose (ephemeral)
|
||||
└── prod/ # Production docker-compose
|
||||
scripts/ # Test automation scripts
|
||||
```
|
||||
```
|
||||
|
||||
**Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add CLAUDE.md
|
||||
git commit -m "docs: update CLAUDE.md with new project structure"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task 7: Final Verification
|
||||
|
||||
**Step 1: Run full pre-deploy pipeline**
|
||||
|
||||
```bash
|
||||
make pre-deploy
|
||||
```
|
||||
|
||||
Expected output:
|
||||
- Unit tests pass (41 tests)
|
||||
- Integration tests pass with branch isolation
|
||||
- Containers cleaned up
|
||||
|
||||
**Step 2: Test parallel execution (optional)**
|
||||
|
||||
```bash
|
||||
# In terminal 1
|
||||
git checkout -b test-branch-1
|
||||
make test-integration &
|
||||
|
||||
# In terminal 2
|
||||
git checkout -b test-branch-2
|
||||
make test-integration &
|
||||
```
|
||||
|
||||
Both should run without port conflicts.
|
||||
|
||||
**Step 3: Commit final verification**
|
||||
|
||||
```bash
|
||||
git add .
|
||||
git commit -m "chore: complete docker-service-architecture adaptation"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Summary of Changes
|
||||
|
||||
| Before | After |
|
||||
|--------|-------|
|
||||
| `tests/test_*.py` | `tests/unit/test_*.py` |
|
||||
| `docker-compose.yaml` | `deploy/dev/docker-compose.yml` |
|
||||
| `docker-compose.test.yaml` | `deploy/test/docker-compose.yml` |
|
||||
| (none) | `deploy/prod/docker-compose.yml` |
|
||||
| Fixed ports (3000, 8484) | Dynamic ports with discovery |
|
||||
| No branch isolation | TEST_INSTANCE_ID from git branch |
|
||||
| `service_started` | `service_healthy` |
|
||||
| Basic Makefile | Environment-aware with VERBOSE support |
|
||||
|
||||
## Benefits
|
||||
|
||||
1. **Parallel testing** - Multiple branches can run tests simultaneously
|
||||
2. **Environment parity** - Clear dev/test/prod separation
|
||||
3. **CI/CD ready** - Scripts work in automated pipelines
|
||||
4. **Faster feedback** - Dynamic ports eliminate conflicts
|
||||
5. **Cleaner structure** - Tests and deploys clearly organized
|
||||
@@ -17,6 +17,8 @@ dev = [
|
||||
"pytest>=8.0.0",
|
||||
"pytest-asyncio>=0.24.0",
|
||||
"pytest-httpx>=0.32.0",
|
||||
"pytest-timeout>=2.0.0",
|
||||
"rich>=13.0.0",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
@@ -25,4 +27,4 @@ build-backend = "hatchling.build"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests"]
|
||||
testpaths = ["tests/unit", "tests/integration"]
|
||||
|
||||
7
scripts/get-test-instance-id.sh
Executable file
7
scripts/get-test-instance-id.sh
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/bin/bash
|
||||
# scripts/get-test-instance-id.sh
|
||||
# Generate a unique instance ID from git branch for parallel test isolation
|
||||
|
||||
BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "unknown")
|
||||
# Sanitize: replace non-alphanumeric with dash, limit length
|
||||
echo "$BRANCH" | sed 's/[^a-zA-Z0-9]/-/g' | cut -c1-20
|
||||
39
scripts/run-integration-tests.sh
Executable file
39
scripts/run-integration-tests.sh
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/bin/bash
|
||||
# scripts/run-integration-tests.sh
|
||||
# Run integration tests with branch isolation and dynamic port discovery
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
|
||||
# Get branch-based instance ID
|
||||
TEST_INSTANCE_ID=$("$SCRIPT_DIR/get-test-instance-id.sh")
|
||||
export TEST_INSTANCE_ID
|
||||
|
||||
echo "Test instance ID: $TEST_INSTANCE_ID"
|
||||
|
||||
# Start containers
|
||||
cd "$PROJECT_ROOT/deploy/test"
|
||||
docker compose up -d --build --wait
|
||||
|
||||
# Discover dynamic ports
|
||||
GRIST_MCP_PORT=$(docker compose port grist-mcp 3000 | cut -d: -f2)
|
||||
MOCK_GRIST_PORT=$(docker compose port mock-grist 8484 | cut -d: -f2)
|
||||
|
||||
echo "grist-mcp available at: http://localhost:$GRIST_MCP_PORT"
|
||||
echo "mock-grist available at: http://localhost:$MOCK_GRIST_PORT"
|
||||
|
||||
# Export for tests
|
||||
export GRIST_MCP_URL="http://localhost:$GRIST_MCP_PORT"
|
||||
export MOCK_GRIST_URL="http://localhost:$MOCK_GRIST_PORT"
|
||||
|
||||
# Run tests
|
||||
cd "$PROJECT_ROOT"
|
||||
TEST_EXIT=0
|
||||
uv run pytest tests/integration/ -v || TEST_EXIT=$?
|
||||
|
||||
# Cleanup
|
||||
cd "$PROJECT_ROOT/deploy/test"
|
||||
docker compose down -v
|
||||
|
||||
exit $TEST_EXIT
|
||||
248
scripts/test-runner.py
Executable file
248
scripts/test-runner.py
Executable file
@@ -0,0 +1,248 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Rich test runner with progress display and fail-fast behavior.
|
||||
|
||||
Runs unit tests, then integration tests with real-time progress indication.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
|
||||
from rich.console import Console
|
||||
from rich.live import Live
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
|
||||
class Status(Enum):
    """Lifecycle state of a test stage as the runner advances it."""

    PENDING = "pending"  # not started yet
    RUNNING = "running"  # pytest subprocess currently executing
    PASSED = "passed"    # subprocess exited with status 0
    FAILED = "failed"    # nonzero exit, or an exception while launching/streaming
|
||||
|
||||
|
||||
@dataclass
class TestStage:
    """One pytest invocation tracked and rendered by the runner."""

    name: str                   # display name, e.g. "Unit Tests"
    command: list[str]          # argv handed to subprocess.Popen
    status: Status = Status.PENDING
    progress: int = 0           # percent complete, parsed from pytest "[ NN%]" markers
    total: int = 0              # collected test count ("collected N items")
    passed: int = 0             # estimated while running, exact from the summary line
    failed: int = 0             # parsed from the "N failed" summary
    current_test: str = ""      # most recent "tests/...::test_name" node id seen
    duration: float = 0.0       # seconds, parsed from the "in X.XXs" summary
    output: list[str] = field(default_factory=list)  # raw captured lines (tail shown on failure)
|
||||
|
||||
|
||||
# Regex patterns for parsing pytest output
PYTEST_PROGRESS = re.compile(r"\[\s*(\d+)%\]")             # progress marker, e.g. "[ 42%]"
PYTEST_COLLECTING = re.compile(r"collected (\d+) items?")  # collection summary
PYTEST_RESULT = re.compile(r"(\d+) passed")                # final "N passed" count
PYTEST_FAILED = re.compile(r"(\d+) failed")                # final "N failed" count
PYTEST_DURATION = re.compile(r"in ([\d.]+)s")              # wall time from the summary line
PYTEST_TEST_LINE = re.compile(r"(tests/\S+::\S+)")         # node id of the test being reported
|
||||
|
||||
|
||||
class TestRunner:
    """Runs a sequence of pytest stages with a live rich progress table.

    Stages execute sequentially and fail fast: the first failing stage
    stops the run. Output is streamed line-by-line from the subprocess
    and parsed to update the table in real time.
    """

    def __init__(self, verbose: bool = False):
        # verbose=True echoes every raw pytest line below the live table.
        self.console = Console()
        self.verbose = verbose
        # Repo root, assuming this file lives in <root>/scripts/.
        self.project_root = Path(__file__).parent.parent
        self.stages: list[TestStage] = []
        # Sticky flag: flipped to False by any failed stage.
        self.all_passed = True

    def add_stage(self, name: str, command: list[str]) -> None:
        """Queue a stage; stages run in the order they were added."""
        self.stages.append(TestStage(name=name, command=command))

    def render_table(self) -> Table:
        """Build the current snapshot of the progress table (one row per stage)."""
        table = Table(show_header=False, box=None, padding=(0, 1))
        table.add_column("Status", width=3)
        table.add_column("Name", width=20)
        table.add_column("Progress", width=30)
        table.add_column("Time", width=8)

        for stage in self.stages:
            # Status icon
            if stage.status == Status.PENDING:
                icon = Text("○", style="dim")
            elif stage.status == Status.RUNNING:
                icon = Text("●", style="yellow")
            elif stage.status == Status.PASSED:
                icon = Text("✓", style="green")
            else:
                icon = Text("✗", style="red")

            # Progress display: text before collection, a bar while running,
            # and a pass/fail tally once finished.
            if stage.status == Status.PENDING:
                progress = Text("pending", style="dim")
            elif stage.status == Status.RUNNING:
                if stage.total > 0:
                    bar_width = 20
                    filled = int(bar_width * stage.progress / 100)
                    bar = "━" * filled + "░" * (bar_width - filled)
                    progress = Text(f"{bar} {stage.progress:3d}% {stage.passed}/{stage.total}")
                    if stage.current_test:
                        # Show (truncated) node id of the test in flight.
                        progress.append(f"\n → {stage.current_test[:40]}", style="dim")
                else:
                    progress = Text("collecting...", style="yellow")
            elif stage.status == Status.PASSED:
                progress = Text(f"{stage.passed}/{stage.total}", style="green")
            else:
                progress = Text(f"{stage.passed}/{stage.total} ({stage.failed} failed)", style="red")

            # Duration (blank until the pytest summary line has been parsed)
            if stage.duration > 0:
                duration = Text(f"{stage.duration:.1f}s", style="dim")
            else:
                duration = Text("")

            table.add_row(icon, stage.name, progress, duration)

        return table

    def parse_output(self, stage: TestStage, line: str) -> None:
        """Parse pytest output line and update stage state."""
        # Keep every raw line so failures can show a tail of context.
        stage.output.append(line)

        # Check for collected count
        match = PYTEST_COLLECTING.search(line)
        if match:
            stage.total = int(match.group(1))

        # Check for progress percentage
        match = PYTEST_PROGRESS.search(line)
        if match:
            stage.progress = int(match.group(1))
            # Estimate passed based on progress (exact count only arrives
            # with the final "N passed" summary line).
            if stage.total > 0:
                stage.passed = int(stage.total * stage.progress / 100)

        # Check for current test
        match = PYTEST_TEST_LINE.search(line)
        if match:
            stage.current_test = match.group(1)

        # Check for final results (these overwrite the estimates above)
        match = PYTEST_RESULT.search(line)
        if match:
            stage.passed = int(match.group(1))

        match = PYTEST_FAILED.search(line)
        if match:
            stage.failed = int(match.group(1))

        match = PYTEST_DURATION.search(line)
        if match:
            stage.duration = float(match.group(1))

    def run_stage(self, stage: TestStage, live: Live) -> bool:
        """Run a single test stage and return True if passed."""
        stage.status = Status.RUNNING
        live.update(self.render_table())

        env = os.environ.copy()
        # Force unbuffered output so progress lines stream as they happen.
        env["PYTHONUNBUFFERED"] = "1"

        try:
            # stderr is merged into stdout so one stream carries everything.
            process = subprocess.Popen(
                stage.command,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                text=True,
                cwd=self.project_root,
                env=env,
            )

            # Stream line-by-line, updating the table after each one.
            for line in process.stdout:
                line = line.rstrip()
                self.parse_output(stage, line)
                live.update(self.render_table())

                if self.verbose:
                    self.console.print(line)

            process.wait()

            # Exit code is the source of truth, not the parsed counts.
            if process.returncode == 0:
                stage.status = Status.PASSED
                stage.progress = 100
                return True
            else:
                stage.status = Status.FAILED
                self.all_passed = False
                return False

        except Exception as e:
            # Launch/stream errors (e.g. command not found) are recorded
            # in the stage output rather than crashing the runner.
            stage.status = Status.FAILED
            stage.output.append(str(e))
            self.all_passed = False
            return False
        finally:
            # Ensure the table reflects the final state of this stage.
            live.update(self.render_table())

    def run_all(self) -> bool:
        """Run all test stages with fail-fast behavior."""
        self.console.print()

        with Live(self.render_table(), console=self.console, refresh_per_second=4) as live:
            for stage in self.stages:
                if not self.run_stage(stage, live):
                    # Fail fast - don't run remaining stages
                    break

        self.console.print()

        # Print summary
        if self.all_passed:
            self.console.print("[green]All tests passed![/green]")
        else:
            self.console.print("[red]Tests failed![/red]")
            # Print failure details
            for stage in self.stages:
                if stage.status == Status.FAILED:
                    self.console.print(f"\n[red]Failures in {stage.name}:[/red]")
                    # Print last 20 lines of output for context
                    for line in stage.output[-20:]:
                        self.console.print(f" {line}")

        return self.all_passed
|
||||
|
||||
|
||||
def main():
    """CLI entry point: select stages, run them, and exit 0/1.

    Exit status is 0 when every selected stage passes, 1 otherwise.
    """
    parser = argparse.ArgumentParser(description="Run tests with rich progress display")
    parser.add_argument("-v", "--verbose", action="store_true", help="Show full test output")
    # --unit-only and --integration-only contradict each other. Previously
    # passing both selected zero stages and the run reported success;
    # argparse now rejects the combination with a usage error.
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--unit-only", action="store_true", help="Run only unit tests")
    group.add_argument("--integration-only", action="store_true", help="Run only integration tests")
    args = parser.parse_args()

    runner = TestRunner(verbose=args.verbose)

    # Determine which stages to run
    run_unit = not args.integration_only
    run_integration = not args.unit_only

    if run_unit:
        runner.add_stage(
            "Unit Tests",
            ["uv", "run", "pytest", "tests/unit/", "-v", "--tb=short"],
        )

    if run_integration:
        # Use the integration test script which handles containers
        runner.add_stage(
            "Integration Tests",
            ["bash", "./scripts/run-integration-tests.sh"],
        )

    success = runner.run_all()
    sys.exit(0 if success else 1)
|
||||
|
||||
|
||||
# Allow direct execution: `python scripts/test-runner.py [flags]`.
if __name__ == "__main__":
    main()
|
||||
@@ -160,7 +160,8 @@ class GristClient:
|
||||
if formula is not None:
|
||||
fields["formula"] = formula
|
||||
|
||||
await self._request("PATCH", f"/tables/{table}/columns/{column_id}", json={"fields": fields})
|
||||
payload = {"columns": [{"id": column_id, "fields": fields}]}
|
||||
await self._request("PATCH", f"/tables/{table}/columns", json=payload)
|
||||
|
||||
async def delete_column(self, table: str, column_id: str) -> None:
|
||||
"""Delete a column from a table."""
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Main entry point for the MCP server with SSE transport."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import Any
|
||||
@@ -8,7 +9,8 @@ import uvicorn
|
||||
from mcp.server.sse import SseServerTransport
|
||||
|
||||
from grist_mcp.server import create_server
|
||||
from grist_mcp.auth import AuthError
|
||||
from grist_mcp.config import Config, load_config
|
||||
from grist_mcp.auth import Authenticator, AuthError
|
||||
|
||||
|
||||
Scope = dict[str, Any]
|
||||
@@ -16,23 +18,100 @@ Receive = Any
|
||||
Send = Any
|
||||
|
||||
|
||||
def create_app():
|
||||
def _get_bearer_token(scope: Scope) -> str | None:
    """Return the token from an ``Authorization: Bearer <token>`` header.

    ASGI headers arrive as a list of (name, value) byte pairs; returns
    None when the header is absent or uses a different scheme.
    """
    raw_headers = dict(scope.get("headers", []))
    value = raw_headers.get(b"authorization", b"").decode()
    prefix = "Bearer "
    return value[len(prefix):] if value.startswith(prefix) else None
|
||||
|
||||
|
||||
async def send_error(send: Send, status: int, message: str) -> None:
    """Emit a one-shot JSON error response over an ASGI ``send`` callable."""
    start_event = {
        "type": "http.response.start",
        "status": status,
        "headers": [[b"content-type", b"application/json"]],
    }
    body_event = {
        "type": "http.response.body",
        "body": json.dumps({"error": message}).encode(),
    }
    await send(start_event)
    await send(body_event)
|
||||
|
||||
|
||||
CONFIG_TEMPLATE = """\
|
||||
# grist-mcp configuration
|
||||
#
|
||||
# Token Generation:
|
||||
# python -c "import secrets; print(secrets.token_urlsafe(32))"
|
||||
# openssl rand -base64 32
|
||||
|
||||
# Document definitions
|
||||
documents:
|
||||
my-document:
|
||||
url: https://docs.getgrist.com
|
||||
doc_id: YOUR_DOC_ID
|
||||
api_key: ${GRIST_API_KEY}
|
||||
|
||||
# Agent tokens with access scopes
|
||||
tokens:
|
||||
- token: REPLACE_WITH_GENERATED_TOKEN
|
||||
name: my-agent
|
||||
scope:
|
||||
- document: my-document
|
||||
permissions: [read, write]
|
||||
"""
|
||||
|
||||
|
||||
def _ensure_config(config_path: str) -> bool:
|
||||
"""Ensure config file exists. Creates template if missing.
|
||||
|
||||
Returns True if config is ready, False if template was created.
|
||||
"""
|
||||
path = os.path.abspath(config_path)
|
||||
|
||||
# Check if path is a directory (Docker creates this when mounting missing file)
|
||||
if os.path.isdir(path):
|
||||
os.rmdir(path)
|
||||
|
||||
if os.path.exists(path):
|
||||
return True
|
||||
|
||||
# Create template config
|
||||
with open(path, "w") as f:
|
||||
f.write(CONFIG_TEMPLATE)
|
||||
|
||||
print(f"Created template configuration at: {path}")
|
||||
print()
|
||||
print("Please edit this file to configure your Grist documents and agent tokens,")
|
||||
print("then restart the server.")
|
||||
return False
|
||||
|
||||
|
||||
def create_app(config: Config):
|
||||
"""Create the ASGI application."""
|
||||
config_path = os.environ.get("CONFIG_PATH", "/app/config.yaml")
|
||||
|
||||
if not os.path.exists(config_path):
|
||||
print(f"Error: Config file not found at {config_path}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
server = create_server(config_path)
|
||||
except AuthError as e:
|
||||
print(f"Authentication error: {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
auth = Authenticator(config)
|
||||
|
||||
sse = SseServerTransport("/messages")
|
||||
|
||||
async def handle_sse(scope: Scope, receive: Receive, send: Send) -> None:
|
||||
# Extract and validate token from Authorization header
|
||||
token = _get_bearer_token(scope)
|
||||
if not token:
|
||||
await send_error(send, 401, "Missing Authorization header")
|
||||
return
|
||||
|
||||
try:
|
||||
agent = auth.authenticate(token)
|
||||
except AuthError as e:
|
||||
await send_error(send, 401, str(e))
|
||||
return
|
||||
|
||||
# Create a server instance for this authenticated connection
|
||||
server = create_server(auth, agent)
|
||||
|
||||
async with sse.connect_sse(scope, receive, send) as streams:
|
||||
await server.run(
|
||||
streams[0], streams[1], server.create_initialization_options()
|
||||
@@ -82,13 +161,37 @@ def create_app():
|
||||
return app
|
||||
|
||||
|
||||
def _print_mcp_config(external_port: int, tokens: list) -> None:
|
||||
"""Print Claude Code MCP configuration."""
|
||||
print()
|
||||
print("Claude Code MCP configuration (copy-paste to add):")
|
||||
for t in tokens:
|
||||
config = (
|
||||
f'{{"type": "sse", "url": "http://localhost:{external_port}/sse", '
|
||||
f'"headers": {{"Authorization": "Bearer {t.token}"}}}}'
|
||||
)
|
||||
print(f" claude mcp add-json grist-{t.name} '{config}'")
|
||||
print()
|
||||
|
||||
|
||||
def main():
|
||||
"""Run the SSE server."""
|
||||
port = int(os.environ.get("PORT", "3000"))
|
||||
app = create_app()
|
||||
external_port = int(os.environ.get("EXTERNAL_PORT", str(port)))
|
||||
config_path = os.environ.get("CONFIG_PATH", "/app/config.yaml")
|
||||
|
||||
if not _ensure_config(config_path):
|
||||
return
|
||||
|
||||
config = load_config(config_path)
|
||||
|
||||
print(f"Starting grist-mcp SSE server on port {port}")
|
||||
print(f" SSE endpoint: http://0.0.0.0:{port}/sse")
|
||||
print(f" Messages endpoint: http://0.0.0.0:{port}/messages")
|
||||
|
||||
_print_mcp_config(external_port, config.tokens)
|
||||
|
||||
app = create_app(config)
|
||||
uvicorn.run(app, host="0.0.0.0", port=port)
|
||||
|
||||
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
"""MCP server setup and tool registration."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from mcp.server import Server
|
||||
from mcp.types import Tool, TextContent
|
||||
|
||||
from grist_mcp.config import load_config
|
||||
from grist_mcp.auth import Authenticator, AuthError, Agent
|
||||
from grist_mcp.auth import Authenticator, Agent, AuthError
|
||||
|
||||
from grist_mcp.tools.discovery import list_documents as _list_documents
|
||||
from grist_mcp.tools.read import list_tables as _list_tables
|
||||
@@ -23,27 +21,18 @@ from grist_mcp.tools.schema import modify_column as _modify_column
|
||||
from grist_mcp.tools.schema import delete_column as _delete_column
|
||||
|
||||
|
||||
def create_server(config_path: str, token: str | None = None) -> Server:
|
||||
"""Create and configure the MCP server.
|
||||
def create_server(auth: Authenticator, agent: Agent) -> Server:
|
||||
"""Create and configure the MCP server for an authenticated agent.
|
||||
|
||||
Args:
|
||||
config_path: Path to the configuration YAML file.
|
||||
token: Agent token for authentication. If not provided, reads from
|
||||
GRIST_MCP_TOKEN environment variable.
|
||||
auth: Authenticator instance for permission checks.
|
||||
agent: The authenticated agent for this server instance.
|
||||
|
||||
Raises:
|
||||
AuthError: If token is invalid or not provided.
|
||||
Returns:
|
||||
Configured MCP Server instance.
|
||||
"""
|
||||
config = load_config(config_path)
|
||||
auth = Authenticator(config)
|
||||
server = Server("grist-mcp")
|
||||
|
||||
# Authenticate agent from token (required for all tool calls)
|
||||
auth_token = token or os.environ.get("GRIST_MCP_TOKEN")
|
||||
if not auth_token:
|
||||
raise AuthError("No token provided. Set GRIST_MCP_TOKEN environment variable.")
|
||||
|
||||
_current_agent: Agent = auth.authenticate(auth_token)
|
||||
_current_agent = agent
|
||||
|
||||
@server.list_tools()
|
||||
async def list_tools() -> list[Tool]:
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
"""Fixtures for integration tests."""
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
|
||||
GRIST_MCP_URL = "http://localhost:3000"
|
||||
MOCK_GRIST_URL = "http://localhost:8484"
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
MAX_WAIT_SECONDS = 30
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Test MCP protocol compliance over SSE transport."""
|
||||
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import pytest
|
||||
@@ -7,7 +8,7 @@ from mcp import ClientSession
|
||||
from mcp.client.sse import sse_client
|
||||
|
||||
|
||||
GRIST_MCP_URL = "http://localhost:3000"
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Test tool calls through MCP client to verify Grist API interactions."""
|
||||
|
||||
import json
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import httpx
|
||||
@@ -9,8 +10,8 @@ from mcp import ClientSession
|
||||
from mcp.client.sse import sse_client
|
||||
|
||||
|
||||
GRIST_MCP_URL = "http://localhost:3000"
|
||||
MOCK_GRIST_URL = "http://localhost:8484"
|
||||
GRIST_MCP_URL = os.environ.get("GRIST_MCP_URL", "http://localhost:3000")
|
||||
MOCK_GRIST_URL = os.environ.get("MOCK_GRIST_URL", "http://localhost:8484")
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
|
||||
0
tests/unit/__init__.py
Normal file
0
tests/unit/__init__.py
Normal file
@@ -160,7 +160,7 @@ async def test_add_column(client, httpx_mock: HTTPXMock):
|
||||
@pytest.mark.asyncio
|
||||
async def test_modify_column(client, httpx_mock: HTTPXMock):
|
||||
httpx_mock.add_response(
|
||||
url="https://grist.example.com/api/docs/abc123/tables/Table1/columns/Amount",
|
||||
url="https://grist.example.com/api/docs/abc123/tables/Table1/columns",
|
||||
method="PATCH",
|
||||
json={},
|
||||
)
|
||||
@@ -1,6 +1,8 @@
|
||||
import pytest
|
||||
from mcp.types import ListToolsRequest
|
||||
from grist_mcp.server import create_server
|
||||
from grist_mcp.config import load_config
|
||||
from grist_mcp.auth import Authenticator
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -21,7 +23,10 @@ tokens:
|
||||
permissions: [read, write, schema]
|
||||
""")
|
||||
|
||||
server = create_server(str(config_file), token="test-token")
|
||||
config = load_config(str(config_file))
|
||||
auth = Authenticator(config)
|
||||
agent = auth.authenticate("test-token")
|
||||
server = create_server(auth, agent)
|
||||
|
||||
# Server should have tools registered
|
||||
assert server is not None
|
||||
81
uv.lock
generated
81
uv.lock
generated
@@ -169,6 +169,8 @@ dev = [
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-httpx" },
|
||||
{ name = "pytest-timeout" },
|
||||
{ name = "rich" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
@@ -178,7 +180,9 @@ requires-dist = [
|
||||
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" },
|
||||
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.24.0" },
|
||||
{ name = "pytest-httpx", marker = "extra == 'dev'", specifier = ">=0.32.0" },
|
||||
{ name = "pytest-timeout", marker = "extra == 'dev'", specifier = ">=2.0.0" },
|
||||
{ name = "pyyaml", specifier = ">=6.0" },
|
||||
{ name = "rich", marker = "extra == 'dev'", specifier = ">=13.0.0" },
|
||||
{ name = "sse-starlette", specifier = ">=2.1.0" },
|
||||
{ name = "starlette", specifier = ">=0.41.0" },
|
||||
{ name = "uvicorn", specifier = ">=0.32.0" },
|
||||
@@ -276,9 +280,21 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
version = "4.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "mdurl" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mcp"
|
||||
version = "1.23.1"
|
||||
version = "1.25.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
@@ -296,9 +312,18 @@ dependencies = [
|
||||
{ name = "typing-inspection" },
|
||||
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/12/42/10c0c09ca27aceacd8c428956cfabdd67e3d328fe55c4abc16589285d294/mcp-1.23.1.tar.gz", hash = "sha256:7403e053e8e2283b1e6ae631423cb54736933fea70b32422152e6064556cd298", size = 596519, upload-time = "2025-12-02T18:41:12.807Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/9e/26e1d2d2c6afe15dfba5ca6799eeeea7656dce625c22766e4c57305e9cc2/mcp-1.23.1-py3-none-any.whl", hash = "sha256:3ce897fcc20a41bd50b4c58d3aa88085f11f505dcc0eaed48930012d34c731d8", size = 231433, upload-time = "2025-12-02T18:41:11.195Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mdurl"
|
||||
version = "0.1.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -421,7 +446,7 @@ crypto = [
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "9.0.1"
|
||||
version = "9.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
@@ -430,9 +455,9 @@ dependencies = [
|
||||
{ name = "pluggy" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -460,6 +485,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-timeout"
|
||||
version = "2.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.2.1"
|
||||
@@ -471,11 +508,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "python-multipart"
|
||||
version = "0.0.20"
|
||||
version = "0.0.21"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -527,6 +564,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rich"
|
||||
version = "14.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "markdown-it-py" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rpds-py"
|
||||
version = "0.30.0"
|
||||
@@ -566,14 +616,15 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "sse-starlette"
|
||||
version = "3.0.3"
|
||||
version = "3.1.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "starlette" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/db/3c/fa6517610dc641262b77cc7bf994ecd17465812c1b0585fe33e11be758ab/sse_starlette-3.0.3.tar.gz", hash = "sha256:88cfb08747e16200ea990c8ca876b03910a23b547ab3bd764c0d8eb81019b971", size = 21943, upload-time = "2025-10-30T18:44:20.117Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/62/08/8f554b0e5bad3e4e880521a1686d96c05198471eed860b0eb89b57ea3636/sse_starlette-3.1.1.tar.gz", hash = "sha256:bffa531420c1793ab224f63648c059bcadc412bf9fdb1301ac8de1cf9a67b7fb", size = 24306, upload-time = "2025-12-26T15:22:53.836Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/23/a0/984525d19ca5c8a6c33911a0c164b11490dd0f90ff7fd689f704f84e9a11/sse_starlette-3.0.3-py3-none-any.whl", hash = "sha256:af5bf5a6f3933df1d9c7f8539633dc8444ca6a97ab2e2a7cd3b6e431ac03a431", size = 11765, upload-time = "2025-10-30T18:44:18.834Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/31/4c281581a0f8de137b710a07f65518b34bcf333b201cfa06cfda9af05f8a/sse_starlette-3.1.1-py3-none-any.whl", hash = "sha256:bb38f71ae74cfd86b529907a9fda5632195dfa6ae120f214ea4c890c7ee9d436", size = 12442, upload-time = "2025-12-26T15:22:52.911Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -611,13 +662,13 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.38.0"
|
||||
version = "0.40.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605, upload-time = "2025-10-18T13:46:44.63Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" },
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user