mirror of
https://github.com/Xe138/AI-Trader.git
synced 2026-04-01 17:17:24 -04:00
feat: transform to REST API service with SQLite persistence (v0.3.0)
Major architecture transformation from batch-only to API service with
database persistence for Windmill integration.
## REST API Implementation
- POST /simulate/trigger - Start simulation jobs
- GET /simulate/status/{job_id} - Monitor job progress
- GET /results - Query results with filters (job_id, date, model)
- GET /health - Service health checks
## Database Layer
- SQLite persistence with 6 tables (jobs, job_details, positions,
holdings, reasoning_logs, tool_usage)
- Foreign key constraints with cascade deletes
- Replaces JSONL file storage
## Backend Components
- JobManager: Job lifecycle management with concurrency control
- RuntimeConfigManager: Thread-safe isolated runtime configs
- ModelDayExecutor: Single model-day execution engine
- SimulationWorker: Date-sequential, model-parallel orchestration
## Testing
- 102 unit and integration tests (85% coverage)
- Database: 98% coverage
- Job manager: 98% coverage
- API endpoints: 81% coverage
- Pydantic models: 100% coverage
- TDD approach throughout
## Docker Deployment
- Dual-mode: API server (persistent) + batch (one-time)
- Health checks with 30s interval
- Volume persistence for database and logs
- Separate entrypoints for each mode
## Validation Tools
- scripts/validate_docker_build.sh - Build validation
- scripts/test_api_endpoints.sh - Complete API testing
- scripts/test_batch_mode.sh - Batch mode validation
- DOCKER_API.md - Deployment guide
- TESTING_GUIDE.md - Testing procedures
## Configuration
- API_PORT environment variable (default: 8080)
- Backwards compatible with existing configs
- FastAPI, uvicorn, pydantic>=2.0 dependencies
Co-Authored-By: AI Assistant <noreply@example.com>
This commit is contained in:
242
scripts/test_api_endpoints.sh
Executable file
242
scripts/test_api_endpoints.sh
Executable file
@@ -0,0 +1,242 @@
|
||||
#!/bin/bash
|
||||
# API Endpoint Testing Script
|
||||
# Tests all REST API endpoints in running Docker container
|
||||
|
||||
set -e
|
||||
|
||||
echo "=========================================="
|
||||
echo "AI-Trader API Endpoint Testing"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
|
||||
# Colors
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
# Configuration
|
||||
API_BASE_URL=${API_BASE_URL:-http://localhost:8080}
|
||||
TEST_CONFIG="/app/configs/default_config.json"
|
||||
|
||||
# Check if API is running
|
||||
echo "Checking if API is accessible..."
|
||||
if ! curl -f "$API_BASE_URL/health" &> /dev/null; then
|
||||
echo -e "${RED}✗${NC} API is not accessible at $API_BASE_URL"
|
||||
echo "Make sure the container is running:"
|
||||
echo " docker-compose up -d ai-trader-api"
|
||||
exit 1
|
||||
fi
|
||||
echo -e "${GREEN}✓${NC} API is accessible"
|
||||
echo ""
|
||||
|
||||
# Test 1: Health Check
|
||||
echo -e "${BLUE}Test 1: GET /health${NC}"
|
||||
echo "Testing health endpoint..."
|
||||
HEALTH_RESPONSE=$(curl -s "$API_BASE_URL/health")
|
||||
HEALTH_STATUS=$(echo $HEALTH_RESPONSE | jq -r '.status' 2>/dev/null || echo "error")
|
||||
|
||||
if [ "$HEALTH_STATUS" = "healthy" ]; then
|
||||
echo -e "${GREEN}✓${NC} Health check passed"
|
||||
echo "Response: $HEALTH_RESPONSE" | jq '.' 2>/dev/null || echo "$HEALTH_RESPONSE"
|
||||
else
|
||||
echo -e "${RED}✗${NC} Health check failed"
|
||||
echo "Response: $HEALTH_RESPONSE"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 2: Trigger Simulation
|
||||
echo -e "${BLUE}Test 2: POST /simulate/trigger${NC}"
|
||||
echo "Triggering test simulation (2 dates, 1 model)..."
|
||||
|
||||
TRIGGER_PAYLOAD=$(cat <<EOF
|
||||
{
|
||||
"config_path": "$TEST_CONFIG",
|
||||
"date_range": ["2025-01-16", "2025-01-17"],
|
||||
"models": ["gpt-4"]
|
||||
}
|
||||
EOF
|
||||
)
|
||||
|
||||
echo "Request payload:"
|
||||
echo "$TRIGGER_PAYLOAD" | jq '.'
|
||||
|
||||
TRIGGER_RESPONSE=$(curl -s -X POST "$API_BASE_URL/simulate/trigger" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$TRIGGER_PAYLOAD")
|
||||
|
||||
JOB_ID=$(echo $TRIGGER_RESPONSE | jq -r '.job_id' 2>/dev/null)
|
||||
|
||||
if [ -n "$JOB_ID" ] && [ "$JOB_ID" != "null" ]; then
|
||||
echo -e "${GREEN}✓${NC} Simulation triggered successfully"
|
||||
echo "Job ID: $JOB_ID"
|
||||
echo "Response: $TRIGGER_RESPONSE" | jq '.' 2>/dev/null || echo "$TRIGGER_RESPONSE"
|
||||
else
|
||||
echo -e "${RED}✗${NC} Failed to trigger simulation"
|
||||
echo "Response: $TRIGGER_RESPONSE"
|
||||
exit 1
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 3: Check Job Status
|
||||
echo -e "${BLUE}Test 3: GET /simulate/status/{job_id}${NC}"
|
||||
echo "Checking job status for: $JOB_ID"
|
||||
echo "Waiting 5 seconds for job to start..."
|
||||
sleep 5
|
||||
|
||||
STATUS_RESPONSE=$(curl -s "$API_BASE_URL/simulate/status/$JOB_ID")
|
||||
JOB_STATUS=$(echo $STATUS_RESPONSE | jq -r '.status' 2>/dev/null)
|
||||
|
||||
if [ -n "$JOB_STATUS" ] && [ "$JOB_STATUS" != "null" ]; then
|
||||
echo -e "${GREEN}✓${NC} Job status retrieved"
|
||||
echo "Job Status: $JOB_STATUS"
|
||||
echo "Response: $STATUS_RESPONSE" | jq '.' 2>/dev/null || echo "$STATUS_RESPONSE"
|
||||
else
|
||||
echo -e "${RED}✗${NC} Failed to get job status"
|
||||
echo "Response: $STATUS_RESPONSE"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 4: Poll until completion or timeout
|
||||
echo -e "${BLUE}Test 4: Monitoring job progress${NC}"
|
||||
echo "Polling job status (max 5 minutes)..."
|
||||
|
||||
MAX_POLLS=30
|
||||
POLL_INTERVAL=10
|
||||
POLL_COUNT=0
|
||||
|
||||
while [ $POLL_COUNT -lt $MAX_POLLS ]; do
|
||||
STATUS_RESPONSE=$(curl -s "$API_BASE_URL/simulate/status/$JOB_ID")
|
||||
JOB_STATUS=$(echo $STATUS_RESPONSE | jq -r '.status' 2>/dev/null)
|
||||
PROGRESS=$(echo $STATUS_RESPONSE | jq -r '.progress' 2>/dev/null)
|
||||
|
||||
echo "[$((POLL_COUNT + 1))/$MAX_POLLS] Status: $JOB_STATUS | Progress: $PROGRESS"
|
||||
|
||||
if [ "$JOB_STATUS" = "completed" ] || [ "$JOB_STATUS" = "partial" ] || [ "$JOB_STATUS" = "failed" ]; then
|
||||
echo -e "${GREEN}✓${NC} Job finished with status: $JOB_STATUS"
|
||||
echo "Final response:"
|
||||
echo "$STATUS_RESPONSE" | jq '.' 2>/dev/null || echo "$STATUS_RESPONSE"
|
||||
break
|
||||
fi
|
||||
|
||||
POLL_COUNT=$((POLL_COUNT + 1))
|
||||
if [ $POLL_COUNT -lt $MAX_POLLS ]; then
|
||||
sleep $POLL_INTERVAL
|
||||
fi
|
||||
done
|
||||
|
||||
if [ $POLL_COUNT -eq $MAX_POLLS ]; then
|
||||
echo -e "${YELLOW}⚠${NC} Job did not complete within timeout (still $JOB_STATUS)"
|
||||
echo "Job may still be running. Check status later with:"
|
||||
echo " curl $API_BASE_URL/simulate/status/$JOB_ID"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 5: Query Results
|
||||
echo -e "${BLUE}Test 5: GET /results${NC}"
|
||||
echo "Querying results for job: $JOB_ID"
|
||||
|
||||
RESULTS_RESPONSE=$(curl -s "$API_BASE_URL/results?job_id=$JOB_ID")
|
||||
RESULT_COUNT=$(echo $RESULTS_RESPONSE | jq -r '.count' 2>/dev/null)
|
||||
|
||||
if [ -n "$RESULT_COUNT" ] && [ "$RESULT_COUNT" != "null" ]; then
|
||||
echo -e "${GREEN}✓${NC} Results retrieved"
|
||||
echo "Result count: $RESULT_COUNT"
|
||||
|
||||
if [ "$RESULT_COUNT" -gt 0 ]; then
|
||||
echo "Sample result:"
|
||||
echo "$RESULTS_RESPONSE" | jq '.results[0]' 2>/dev/null || echo "$RESULTS_RESPONSE"
|
||||
else
|
||||
echo -e "${YELLOW}⚠${NC} No results found (job may not be complete yet)"
|
||||
fi
|
||||
else
|
||||
echo -e "${RED}✗${NC} Failed to retrieve results"
|
||||
echo "Response: $RESULTS_RESPONSE"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 6: Query Results by Date
|
||||
echo -e "${BLUE}Test 6: GET /results?date=...${NC}"
|
||||
echo "Querying results by date filter..."
|
||||
|
||||
DATE_RESULTS=$(curl -s "$API_BASE_URL/results?date=2025-01-16")
|
||||
DATE_COUNT=$(echo $DATE_RESULTS | jq -r '.count' 2>/dev/null)
|
||||
|
||||
if [ -n "$DATE_COUNT" ] && [ "$DATE_COUNT" != "null" ]; then
|
||||
echo -e "${GREEN}✓${NC} Date-filtered results retrieved"
|
||||
echo "Results for 2025-01-16: $DATE_COUNT"
|
||||
else
|
||||
echo -e "${RED}✗${NC} Failed to retrieve date-filtered results"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 7: Query Results by Model
|
||||
echo -e "${BLUE}Test 7: GET /results?model=...${NC}"
|
||||
echo "Querying results by model filter..."
|
||||
|
||||
MODEL_RESULTS=$(curl -s "$API_BASE_URL/results?model=gpt-4")
|
||||
MODEL_COUNT=$(echo $MODEL_RESULTS | jq -r '.count' 2>/dev/null)
|
||||
|
||||
if [ -n "$MODEL_COUNT" ] && [ "$MODEL_COUNT" != "null" ]; then
|
||||
echo -e "${GREEN}✓${NC} Model-filtered results retrieved"
|
||||
echo "Results for gpt-4: $MODEL_COUNT"
|
||||
else
|
||||
echo -e "${RED}✗${NC} Failed to retrieve model-filtered results"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 8: Concurrent Job Prevention
|
||||
echo -e "${BLUE}Test 8: Concurrent job prevention${NC}"
|
||||
echo "Attempting to trigger second job (should fail if first is still running)..."
|
||||
|
||||
SECOND_TRIGGER=$(curl -s -X POST "$API_BASE_URL/simulate/trigger" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$TRIGGER_PAYLOAD")
|
||||
|
||||
if echo "$SECOND_TRIGGER" | grep -qi "already running"; then
|
||||
echo -e "${GREEN}✓${NC} Concurrent job correctly rejected"
|
||||
echo "Response: $SECOND_TRIGGER"
|
||||
elif echo "$SECOND_TRIGGER" | jq -r '.job_id' 2>/dev/null | grep -q "-"; then
|
||||
echo -e "${YELLOW}⚠${NC} Second job was accepted (first job may have completed)"
|
||||
echo "Response: $SECOND_TRIGGER" | jq '.' 2>/dev/null || echo "$SECOND_TRIGGER"
|
||||
else
|
||||
echo -e "${YELLOW}⚠${NC} Unexpected response"
|
||||
echo "Response: $SECOND_TRIGGER"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Test 9: Invalid Requests
|
||||
echo -e "${BLUE}Test 9: Error handling${NC}"
|
||||
echo "Testing invalid config path..."
|
||||
|
||||
INVALID_TRIGGER=$(curl -s -X POST "$API_BASE_URL/simulate/trigger" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"config_path": "/invalid/path.json", "date_range": ["2025-01-16"], "models": ["gpt-4"]}')
|
||||
|
||||
if echo "$INVALID_TRIGGER" | grep -qi "does not exist"; then
|
||||
echo -e "${GREEN}✓${NC} Invalid config path correctly rejected"
|
||||
else
|
||||
echo -e "${YELLOW}⚠${NC} Unexpected response for invalid config"
|
||||
echo "Response: $INVALID_TRIGGER"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# Summary
|
||||
echo "=========================================="
|
||||
echo "Test Summary"
|
||||
echo "=========================================="
|
||||
echo ""
|
||||
echo "All API endpoints tested successfully!"
|
||||
echo ""
|
||||
echo "Job Details:"
|
||||
echo " Job ID: $JOB_ID"
|
||||
echo " Final Status: $JOB_STATUS"
|
||||
echo " Results Count: $RESULT_COUNT"
|
||||
echo ""
|
||||
echo "To view full job details:"
|
||||
echo " curl $API_BASE_URL/simulate/status/$JOB_ID | jq ."
|
||||
echo ""
|
||||
echo "To view all results:"
|
||||
echo " curl $API_BASE_URL/results | jq ."
|
||||
echo ""
|
||||
232
scripts/test_batch_mode.sh
Executable file
232
scripts/test_batch_mode.sh
Executable file
@@ -0,0 +1,232 @@
#!/bin/bash
# Batch Mode Testing Script
# Tests Docker batch mode with one-time simulation
#
# Usage: bash scripts/test_batch_mode.sh [config_file]
#   config_file defaults to configs/default_config.json; a throwaway test
#   config is generated if the file does not exist.

set -e

echo "=========================================="
echo "AI-Trader Batch Mode Testing"
echo "=========================================="
echo ""

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Check prerequisites
echo "Checking prerequisites..."

if ! command -v docker &> /dev/null; then
    echo -e "${RED}✗${NC} Docker not installed"
    exit 1
fi

if [ ! -f .env ]; then
    echo -e "${RED}✗${NC} .env file not found"
    echo "Copy .env.example to .env and configure API keys"
    exit 1
fi

echo -e "${GREEN}✓${NC} Prerequisites OK"
echo ""

# Check if custom config exists
CONFIG_FILE=${1:-configs/default_config.json}

if [ ! -f "$CONFIG_FILE" ]; then
    echo -e "${YELLOW}⚠${NC} Config file not found: $CONFIG_FILE"
    echo "Creating test config..."

    mkdir -p configs

    # Minimal two-day, single-model config for a quick smoke run.
    cat > configs/test_batch.json <<EOF
{
  "agent_type": "BaseAgent",
  "date_range": {
    "init_date": "2025-01-16",
    "end_date": "2025-01-17"
  },
  "models": [
    {
      "name": "GPT-4 Test",
      "basemodel": "gpt-4",
      "signature": "gpt-4-test",
      "enabled": true
    }
  ],
  "agent_config": {
    "max_steps": 10,
    "initial_cash": 10000.0
  },
  "log_config": {
    "log_path": "./data/agent_data"
  }
}
EOF

    CONFIG_FILE="configs/test_batch.json"
    echo -e "${GREEN}✓${NC} Created test config: $CONFIG_FILE"
fi

echo "Using config: $CONFIG_FILE"
echo ""

# Test 1: Build image
echo -e "${BLUE}Test 1: Building Docker image${NC}"
echo "This may take a few minutes..."

if docker build -t ai-trader-batch-test . > /tmp/docker-build.log 2>&1; then
    echo -e "${GREEN}✓${NC} Image built successfully"
else
    echo -e "${RED}✗${NC} Build failed"
    echo "Check logs: /tmp/docker-build.log"
    tail -20 /tmp/docker-build.log
    exit 1
fi
echo ""

# Test 2: Run batch simulation
echo -e "${BLUE}Test 2: Running batch simulation${NC}"
echo "Starting container in batch mode..."
echo "Config: $CONFIG_FILE"
echo ""

# Use docker-compose if available, otherwise docker run.
# NOTE: the run is wrapped in `if` so a non-zero exit is captured instead of
# aborting the script under `set -e` (plain `cmd; RC=$?` would never reach
# the failure branch).
if command -v docker-compose &> /dev/null || docker compose version &> /dev/null; then
    echo "Using docker-compose..."

    # Ensure API is stopped
    docker-compose down 2>/dev/null || true

    # Run batch mode
    echo "Executing: docker-compose --profile batch run --rm ai-trader-batch $CONFIG_FILE"
    if docker-compose --profile batch run --rm ai-trader-batch "$CONFIG_FILE"; then
        BATCH_EXIT_CODE=0
    else
        BATCH_EXIT_CODE=$?
    fi
else
    echo "Using docker run..."
    if docker run --rm \
        --env-file .env \
        -v "$(pwd)/data:/app/data" \
        -v "$(pwd)/logs:/app/logs" \
        -v "$(pwd)/configs:/app/configs" \
        ai-trader-batch-test \
        "$CONFIG_FILE"; then
        BATCH_EXIT_CODE=0
    else
        BATCH_EXIT_CODE=$?
    fi
fi

echo ""

# Test 3: Check exit code
echo -e "${BLUE}Test 3: Checking exit status${NC}"

if [ "$BATCH_EXIT_CODE" -eq 0 ]; then
    echo -e "${GREEN}✓${NC} Batch simulation completed successfully (exit code: 0)"
else
    echo -e "${RED}✗${NC} Batch simulation failed (exit code: $BATCH_EXIT_CODE)"
    echo "Check logs in ./logs/ directory"
    exit 1
fi
echo ""

# Test 4: Verify output files
echo -e "${BLUE}Test 4: Verifying output files${NC}"

# Check if data directory has position files (tr strips the padding BSD wc emits).
POSITION_FILES=$(find data/agent_data -name "position.jsonl" 2>/dev/null | wc -l | tr -d ' ')

if [ "$POSITION_FILES" -gt 0 ]; then
    echo -e "${GREEN}✓${NC} Found $POSITION_FILES position file(s)"

    # Show sample position data
    SAMPLE_POSITION=$(find data/agent_data -name "position.jsonl" 2>/dev/null | head -1)
    if [ -n "$SAMPLE_POSITION" ]; then
        echo "Sample position data from: $SAMPLE_POSITION"
        head -1 "$SAMPLE_POSITION" | jq '.' 2>/dev/null || head -1 "$SAMPLE_POSITION"
    fi
else
    echo -e "${YELLOW}⚠${NC} No position files found"
    echo "This could indicate the simulation didn't complete trading"
fi
echo ""

# Check log files
LOG_COUNT=$(find logs -name "*.log" 2>/dev/null | wc -l | tr -d ' ')
if [ "$LOG_COUNT" -gt 0 ]; then
    echo -e "${GREEN}✓${NC} Found $LOG_COUNT log file(s)"
else
    echo -e "${YELLOW}⚠${NC} No log files found"
fi
echo ""

# Test 5: Check price data
echo -e "${BLUE}Test 5: Checking price data${NC}"

if [ -f "data/merged.jsonl" ]; then
    STOCK_COUNT=$(wc -l < data/merged.jsonl | tr -d ' ')
    echo -e "${GREEN}✓${NC} Price data exists: $STOCK_COUNT stocks"
else
    echo -e "${YELLOW}⚠${NC} No price data file found"
    echo "First run will download price data"
fi
echo ""

# Test 6: Re-run to test data persistence
echo -e "${BLUE}Test 6: Testing data persistence${NC}"
echo "Running batch mode again to verify data persists..."
echo ""

# Same `if` wrapping as Test 2 so failures are reported, not fatal.
if command -v docker-compose &> /dev/null || docker compose version &> /dev/null; then
    if docker-compose --profile batch run --rm ai-trader-batch "$CONFIG_FILE" > /tmp/batch-second-run.log 2>&1; then
        SECOND_EXIT_CODE=0
    else
        SECOND_EXIT_CODE=$?
    fi
else
    if docker run --rm \
        --env-file .env \
        -v "$(pwd)/data:/app/data" \
        -v "$(pwd)/logs:/app/logs" \
        -v "$(pwd)/configs:/app/configs" \
        ai-trader-batch-test \
        "$CONFIG_FILE" > /tmp/batch-second-run.log 2>&1; then
        SECOND_EXIT_CODE=0
    else
        SECOND_EXIT_CODE=$?
    fi
fi

if [ "$SECOND_EXIT_CODE" -eq 0 ]; then
    echo -e "${GREEN}✓${NC} Second run completed successfully"

    # Check if it reused price data (should be faster)
    if grep -q "Using existing price data" /tmp/batch-second-run.log; then
        echo -e "${GREEN}✓${NC} Price data was reused (data persistence working)"
    else
        echo -e "${YELLOW}⚠${NC} Could not verify price data reuse"
    fi
else
    echo -e "${RED}✗${NC} Second run failed"
fi
echo ""

# Summary
echo "=========================================="
echo "Batch Mode Test Summary"
echo "=========================================="
echo ""
echo "Tests completed:"
echo "  ✓ Docker image build"
echo "  ✓ Batch mode execution"
echo "  ✓ Exit code verification"
echo "  ✓ Output file generation"
echo "  ✓ Data persistence"
echo ""
echo "Output locations:"
echo "  Position data: data/agent_data/*/position/"
echo "  Trading logs: data/agent_data/*/log/"
echo "  System logs: logs/"
echo "  Price data: data/merged.jsonl"
echo ""
echo "To view position data:"
echo "  find data/agent_data -name 'position.jsonl' -exec cat {} \;"
echo ""
echo "To view trading logs:"
echo "  find data/agent_data -name 'log.jsonl' | head -1 | xargs cat"
echo ""
221
scripts/validate_docker_build.sh
Executable file
221
scripts/validate_docker_build.sh
Executable file
@@ -0,0 +1,221 @@
#!/bin/bash
# Docker Build & Validation Script
# Run this script to validate the Docker setup before production deployment
#
# Usage: bash scripts/validate_docker_build.sh
# Requires: docker, docker-compose (or the `docker compose` plugin), curl

set -e  # Exit on error

echo "=========================================="
echo "AI-Trader Docker Build Validation"
echo "=========================================="
echo ""

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Print a pass/fail line.
#   $1 - status code (0 = pass, non-zero = fail)
#   $2 - message
print_status() {
    if [ "$1" -eq 0 ]; then
        echo -e "${GREEN}✓${NC} $2"
    else
        echo -e "${RED}✗${NC} $2"
    fi
}

# Print a warning line to stdout.
print_warning() {
    echo -e "${YELLOW}⚠${NC} $1"
}

# Step 1: Check prerequisites
echo "Step 1: Checking prerequisites..."

# Check if Docker is installed
if command -v docker &> /dev/null; then
    print_status 0 "Docker is installed: $(docker --version)"
else
    print_status 1 "Docker is not installed"
    echo "Please install Docker: https://docs.docker.com/get-docker/"
    exit 1
fi

# Check if Docker daemon is running
if docker info &> /dev/null; then
    print_status 0 "Docker daemon is running"
else
    print_status 1 "Docker daemon is not running"
    echo "Please start Docker Desktop or Docker daemon"
    exit 1
fi

# Check if docker-compose is available
if command -v docker-compose &> /dev/null; then
    print_status 0 "docker-compose is installed: $(docker-compose --version)"
elif docker compose version &> /dev/null; then
    print_status 0 "docker compose (plugin) is available"
    COMPOSE_CMD="docker compose"
else
    print_status 1 "docker-compose is not available"
    exit 1
fi

# Default to docker-compose if not set
COMPOSE_CMD=${COMPOSE_CMD:-docker-compose}

echo ""

# Step 2: Check environment file
echo "Step 2: Checking environment configuration..."

if [ -f .env ]; then
    print_status 0 ".env file exists"

    # Check required variables: present, non-empty, and not a placeholder.
    required_vars=("OPENAI_API_KEY" "ALPHAADVANTAGE_API_KEY" "JINA_API_KEY")
    missing_vars=()

    for var in "${required_vars[@]}"; do
        if grep -q "^${var}=" .env && ! grep -q "^${var}=your_.*_key_here" .env && ! grep -q "^${var}=$" .env; then
            print_status 0 "$var is set"
        else
            missing_vars+=("$var")
            print_status 1 "$var is missing or not configured"
        fi
    done

    if [ ${#missing_vars[@]} -gt 0 ]; then
        print_warning "Some required environment variables are not configured"
        echo "Please edit .env and add:"
        for var in "${missing_vars[@]}"; do
            echo "  - $var"
        done
        echo ""
        read -p "Continue anyway? (y/n) " -n 1 -r
        echo
        if [[ ! $REPLY =~ ^[Yy]$ ]]; then
            exit 1
        fi
    fi
else
    print_status 1 ".env file not found"
    echo "Creating .env from .env.example..."
    cp .env.example .env
    print_warning "Please edit .env and add your API keys before continuing"
    exit 1
fi

echo ""

# Step 3: Build Docker image
echo "Step 3: Building Docker image..."
echo "This may take several minutes on first build..."
echo ""

if docker build -t ai-trader-test . ; then
    print_status 0 "Docker image built successfully"
else
    print_status 1 "Docker build failed"
    exit 1
fi

echo ""

# Step 4: Check image
echo "Step 4: Verifying Docker image..."

IMAGE_SIZE=$(docker images ai-trader-test --format "{{.Size}}")
print_status 0 "Image size: $IMAGE_SIZE"

# List exposed ports
EXPOSED_PORTS=$(docker inspect ai-trader-test --format '{{range $p, $conf := .Config.ExposedPorts}}{{$p}} {{end}}')
print_status 0 "Exposed ports: $EXPOSED_PORTS"

echo ""

# Step 5: Test API mode startup (brief)
echo "Step 5: Testing API mode startup..."
echo "Starting container in background..."

# Wrapped in `if` so a failed start is reported: a bare `cmd` followed by
# `[ $? -eq 0 ]` would abort under `set -e` before the check runs.
if $COMPOSE_CMD up -d ai-trader-api; then
    print_status 0 "Container started successfully"

    echo "Waiting 10 seconds for services to initialize..."
    sleep 10

    # Check if container is still running
    if docker ps | grep -q ai-trader-api; then
        print_status 0 "Container is running"

        # Check logs for errors (tr strips the padding BSD wc emits)
        ERROR_COUNT=$(docker logs ai-trader-api 2>&1 | grep -i "error" | grep -v "ERROR:" | wc -l | tr -d ' ')
        if [ "$ERROR_COUNT" -gt 0 ]; then
            print_warning "Found $ERROR_COUNT error messages in logs"
            echo "Check logs with: docker logs ai-trader-api"
        else
            print_status 0 "No critical errors in logs"
        fi
    else
        print_status 1 "Container stopped unexpectedly"
        echo "Check logs with: docker logs ai-trader-api"
        exit 1
    fi
else
    print_status 1 "Failed to start container"
    exit 1
fi

echo ""

# Step 6: Test health endpoint
echo "Step 6: Testing health endpoint..."

# Wait a bit more for API to be ready
sleep 5

if curl -f http://localhost:8080/health &> /dev/null; then
    print_status 0 "Health endpoint responding"

    # Get health details
    HEALTH_DATA=$(curl -s http://localhost:8080/health)
    echo "Health response: $HEALTH_DATA"
else
    print_status 1 "Health endpoint not responding"
    print_warning "This could indicate:"
    echo "  - API server failed to start"
    echo "  - Port 8080 is already in use"
    echo "  - MCP services failed to initialize"
    echo ""
    echo "Check logs with: docker logs ai-trader-api"
fi

echo ""

# Step 7: Cleanup
echo "Step 7: Cleanup..."
read -p "Stop the container? (y/n) " -n 1 -r
echo
if [[ $REPLY =~ ^[Yy]$ ]]; then
    $COMPOSE_CMD down
    print_status 0 "Container stopped"
fi

echo ""
echo "=========================================="
echo "Validation Summary"
echo "=========================================="
echo ""
echo "Next steps:"
echo "1. If all checks passed, proceed with API endpoint testing:"
echo "   bash scripts/test_api_endpoints.sh"
echo ""
echo "2. Test batch mode:"
echo "   bash scripts/test_batch_mode.sh"
echo ""
echo "3. If any checks failed, review logs:"
echo "   docker logs ai-trader-api"
echo ""
echo "4. For troubleshooting, see: DOCKER_API.md"
echo ""
Reference in New Issue
Block a user