test: improve test coverage from 61% to 84.81%

Major improvements:
- Fixed all 42 broken tests (caused by database connection leaks)
- Added db_connection() context manager for proper cleanup (sketched below)
- Created comprehensive test suites for undertested modules
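
The context-manager change is the core of the fix. A minimal sketch of how db_connection() could be written, assuming it simply wraps sqlite3.connect() and leaves committing to the caller (the real helper lives in api/database.py and may also set pragmas or a row factory):

```python
# Sketch only: assumed shape of the db_connection() helper in api/database.py.
# The updated tests still call conn.commit() explicitly, so the manager only
# guarantees close(), not commit.
import sqlite3
from contextlib import contextmanager

@contextmanager
def db_connection(db_path):
    """Open a SQLite connection and always close it, even if the block raises."""
    conn = sqlite3.connect(db_path)
    try:
        yield conn
    finally:
        conn.close()
```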

New test coverage:
- tools/general_tools.py: 26 tests (97% coverage)
- tools/price_tools.py: 11 tests (NASDAQ symbol validation, date handling)
- api/price_data_manager.py: 12 tests (85% coverage)
- api/routes/results_v2.py: 3 tests (98% coverage)
- agent/reasoning_summarizer.py: 2 tests (87% coverage)
- api/routes/period_metrics.py: 2 edge case tests (100% coverage)
- agent/mock_provider: 1 test (100% coverage)

Database fixes:
- Added db_connection() context manager to prevent leaks
- Updated 16+ test files to use context managers
- Fixed drop_all_tables() to match the new schema
- Added CHECK constraint on action_type
- Added ON DELETE CASCADE to the trading_days foreign key (both schema changes sketched after this list)
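
A sketch of the two schema changes, reusing the db_connection() helper above. The ('buy', 'sell') value list for the CHECK constraint is an assumption for illustration; api/database.py holds the authoritative schema. The CASCADE clause matches the test fixture shown in the diff.

```python
# Illustrative DDL only; the CHECK value list is assumed, not taken from the schema.
from api.database import db_connection

with db_connection("jobs.db") as conn:
    # SQLite enforces the cascade only when foreign keys are enabled per connection.
    conn.execute("PRAGMA foreign_keys = ON")
    conn.execute("""
        CREATE TABLE IF NOT EXISTS actions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            trading_day_id INTEGER NOT NULL,
            action_type TEXT NOT NULL CHECK (action_type IN ('buy', 'sell')),
            symbol TEXT NOT NULL,
            quantity INTEGER NOT NULL,
            price REAL NOT NULL,
            created_at TEXT NOT NULL,
            FOREIGN KEY (trading_day_id) REFERENCES trading_days(id) ON DELETE CASCADE
        )
    """)
    conn.commit()
```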

Test improvements:
- Updated SQL INSERT statements to include all required fields
- Fixed date parameter handling in API integration tests (example after this list)
- Added edge case tests for validation functions
- Fixed import errors across test suite
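
The date-handling fix shows up in the results_v2 tests in the diff: /results now expects an explicit window, so tests pass start_date and end_date instead of relying on the default lookback filter. Adapted from the diff:

```python
# Adapted from the updated results_v2 tests; client and db are the existing
# pytest fixtures from that suite.
def test_results_without_reasoning(client, db):
    # An explicit date window keeps the test deterministic regardless of "today".
    response = client.get(
        "/results?job_id=test-job&start_date=2025-01-15&end_date=2025-01-15"
    )
    assert response.status_code == 200
```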

Results:
- Total coverage: 84.81% (was 61%)
- Tests passing: 406 (was 364 with 42 failures)
- Total lines covered: 6364 of 7504

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-07 21:02:38 -05:00
parent 61baf3f90f
commit 14cf88f642
30 changed files with 1840 additions and 1013 deletions

View File

@@ -52,7 +52,7 @@ def test_config_override_models_only(test_configs):
# Run merge
result = subprocess.run(
[
"python", "-c",
"python3", "-c",
f"import sys; sys.path.insert(0, '.'); "
f"from tools.config_merger import DEFAULT_CONFIG_PATH, CUSTOM_CONFIG_PATH, OUTPUT_CONFIG_PATH, merge_and_validate; "
f"import tools.config_merger; "
@@ -102,7 +102,7 @@ def test_config_validation_fails_gracefully(test_configs):
# Run merge (should fail)
result = subprocess.run(
[
"python", "-c",
"python3", "-c",
f"import sys; sys.path.insert(0, '.'); "
f"from tools.config_merger import merge_and_validate; "
f"import tools.config_merger; "

View File

@@ -129,20 +129,19 @@ def test_dev_database_isolation(dev_mode_env, tmp_path):
- initialize_dev_database() creates a fresh, empty dev database
- Both databases can coexist without interference
"""
from api.database import get_db_connection, initialize_database
from api.database import get_db_connection, initialize_database, db_connection
# Initialize prod database with some data
prod_db = str(tmp_path / "test_prod.db")
initialize_database(prod_db)
conn = get_db_connection(prod_db)
conn.execute(
"INSERT INTO jobs (job_id, config_path, status, date_range, models, created_at) "
"VALUES (?, ?, ?, ?, ?, ?)",
("prod-job", "config.json", "running", "2025-01-01:2025-01-31", '["model1"]', "2025-01-01T00:00:00")
)
conn.commit()
conn.close()
with db_connection(prod_db) as conn:
conn.execute(
"INSERT INTO jobs (job_id, config_path, status, date_range, models, created_at) "
"VALUES (?, ?, ?, ?, ?, ?)",
("prod-job", "config.json", "running", "2025-01-01:2025-01-31", '["model1"]', "2025-01-01T00:00:00")
)
conn.commit()
# Initialize dev database (different path)
dev_db = str(tmp_path / "test_dev.db")
@@ -150,18 +149,16 @@ def test_dev_database_isolation(dev_mode_env, tmp_path):
initialize_dev_database(dev_db)
# Verify prod data still exists (unchanged by dev database creation)
conn = get_db_connection(prod_db)
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM jobs WHERE job_id = 'prod-job'")
assert cursor.fetchone()[0] == 1
conn.close()
with db_connection(prod_db) as conn:
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM jobs WHERE job_id = 'prod-job'")
assert cursor.fetchone()[0] == 1
# Verify dev database is empty (fresh initialization)
conn = get_db_connection(dev_db)
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM jobs")
assert cursor.fetchone()[0] == 0
conn.close()
with db_connection(dev_db) as conn:
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM jobs")
assert cursor.fetchone()[0] == 0
def test_preserve_dev_data_flag(dev_mode_env, tmp_path):
@@ -175,29 +172,27 @@ def test_preserve_dev_data_flag(dev_mode_env, tmp_path):
"""
os.environ["PRESERVE_DEV_DATA"] = "true"
from api.database import initialize_dev_database, get_db_connection, initialize_database
from api.database import initialize_dev_database, get_db_connection, initialize_database, db_connection
dev_db = str(tmp_path / "test_dev_preserve.db")
# Create database with initial data
initialize_database(dev_db)
conn = get_db_connection(dev_db)
conn.execute(
"INSERT INTO jobs (job_id, config_path, status, date_range, models, created_at) "
"VALUES (?, ?, ?, ?, ?, ?)",
("dev-job-1", "config.json", "completed", "2025-01-01:2025-01-31", '["model1"]', "2025-01-01T00:00:00")
)
conn.commit()
conn.close()
with db_connection(dev_db) as conn:
conn.execute(
"INSERT INTO jobs (job_id, config_path, status, date_range, models, created_at) "
"VALUES (?, ?, ?, ?, ?, ?)",
("dev-job-1", "config.json", "completed", "2025-01-01:2025-01-31", '["model1"]', "2025-01-01T00:00:00")
)
conn.commit()
# Initialize again with PRESERVE_DEV_DATA=true (should NOT delete data)
initialize_dev_database(dev_db)
# Verify data is preserved
conn = get_db_connection(dev_db)
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM jobs WHERE job_id = 'dev-job-1'")
count = cursor.fetchone()[0]
conn.close()
with db_connection(dev_db) as conn:
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM jobs WHERE job_id = 'dev-job-1'")
count = cursor.fetchone()[0]
assert count == 1, "Data should be preserved when PRESERVE_DEV_DATA=true"

View File

@@ -6,7 +6,7 @@ import json
from pathlib import Path
from api.job_manager import JobManager
from api.model_day_executor import ModelDayExecutor
from api.database import get_db_connection
from api.database import get_db_connection, db_connection
pytestmark = pytest.mark.integration
@@ -19,87 +19,86 @@ def temp_env(tmp_path):
db_path = str(tmp_path / "test_jobs.db")
# Initialize database
conn = get_db_connection(db_path)
cursor = conn.cursor()
with db_connection(db_path) as conn:
cursor = conn.cursor()
# Create schema
cursor.execute("""
CREATE TABLE IF NOT EXISTS jobs (
job_id TEXT PRIMARY KEY,
config_path TEXT NOT NULL,
status TEXT NOT NULL,
date_range TEXT NOT NULL,
models TEXT NOT NULL,
created_at TEXT NOT NULL,
started_at TEXT,
updated_at TEXT,
completed_at TEXT,
total_duration_seconds REAL,
error TEXT,
warnings TEXT
)
""")
# Create schema
cursor.execute("""
CREATE TABLE IF NOT EXISTS jobs (
job_id TEXT PRIMARY KEY,
config_path TEXT NOT NULL,
status TEXT NOT NULL,
date_range TEXT NOT NULL,
models TEXT NOT NULL,
created_at TEXT NOT NULL,
started_at TEXT,
updated_at TEXT,
completed_at TEXT,
total_duration_seconds REAL,
error TEXT,
warnings TEXT
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS job_details (
id INTEGER PRIMARY KEY AUTOINCREMENT,
job_id TEXT NOT NULL,
date TEXT NOT NULL,
model TEXT NOT NULL,
status TEXT NOT NULL,
started_at TEXT,
completed_at TEXT,
duration_seconds REAL,
error TEXT,
FOREIGN KEY (job_id) REFERENCES jobs(job_id) ON DELETE CASCADE,
UNIQUE(job_id, date, model)
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS job_details (
id INTEGER PRIMARY KEY AUTOINCREMENT,
job_id TEXT NOT NULL,
date TEXT NOT NULL,
model TEXT NOT NULL,
status TEXT NOT NULL,
started_at TEXT,
completed_at TEXT,
duration_seconds REAL,
error TEXT,
FOREIGN KEY (job_id) REFERENCES jobs(job_id) ON DELETE CASCADE,
UNIQUE(job_id, date, model)
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS trading_days (
id INTEGER PRIMARY KEY AUTOINCREMENT,
job_id TEXT NOT NULL,
model TEXT NOT NULL,
date TEXT NOT NULL,
starting_cash REAL NOT NULL,
ending_cash REAL NOT NULL,
profit REAL NOT NULL,
return_pct REAL NOT NULL,
portfolio_value REAL NOT NULL,
reasoning_summary TEXT,
reasoning_full TEXT,
completed_at TEXT,
session_duration_seconds REAL,
UNIQUE(job_id, model, date)
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS trading_days (
id INTEGER PRIMARY KEY AUTOINCREMENT,
job_id TEXT NOT NULL,
model TEXT NOT NULL,
date TEXT NOT NULL,
starting_cash REAL NOT NULL,
ending_cash REAL NOT NULL,
profit REAL NOT NULL,
return_pct REAL NOT NULL,
portfolio_value REAL NOT NULL,
reasoning_summary TEXT,
reasoning_full TEXT,
completed_at TEXT,
session_duration_seconds REAL,
UNIQUE(job_id, model, date)
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS holdings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
trading_day_id INTEGER NOT NULL,
symbol TEXT NOT NULL,
quantity INTEGER NOT NULL,
FOREIGN KEY (trading_day_id) REFERENCES trading_days(id) ON DELETE CASCADE
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS holdings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
trading_day_id INTEGER NOT NULL,
symbol TEXT NOT NULL,
quantity INTEGER NOT NULL,
FOREIGN KEY (trading_day_id) REFERENCES trading_days(id) ON DELETE CASCADE
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS actions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
trading_day_id INTEGER NOT NULL,
action_type TEXT NOT NULL,
symbol TEXT NOT NULL,
quantity INTEGER NOT NULL,
price REAL NOT NULL,
created_at TEXT NOT NULL,
FOREIGN KEY (trading_day_id) REFERENCES trading_days(id) ON DELETE CASCADE
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS actions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
trading_day_id INTEGER NOT NULL,
action_type TEXT NOT NULL,
symbol TEXT NOT NULL,
quantity INTEGER NOT NULL,
price REAL NOT NULL,
created_at TEXT NOT NULL,
FOREIGN KEY (trading_day_id) REFERENCES trading_days(id) ON DELETE CASCADE
)
""")
conn.commit()
conn.close()
conn.commit()
# Create mock config
config_path = str(tmp_path / "test_config.json")
@@ -146,29 +145,28 @@ def test_duplicate_simulation_is_skipped(temp_env):
job_id_1 = result_1["job_id"]
# Simulate completion by manually inserting trading_day record
conn = get_db_connection(temp_env["db_path"])
cursor = conn.cursor()
with db_connection(temp_env["db_path"]) as conn:
cursor = conn.cursor()
cursor.execute("""
INSERT INTO trading_days (
job_id, model, date, starting_cash, ending_cash,
profit, return_pct, portfolio_value, completed_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
job_id_1,
"test-model",
"2025-10-15",
10000.0,
9500.0,
-500.0,
-5.0,
9500.0,
"2025-11-07T01:00:00Z"
))
cursor.execute("""
INSERT INTO trading_days (
job_id, model, date, starting_cash, ending_cash,
profit, return_pct, portfolio_value, completed_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
job_id_1,
"test-model",
"2025-10-15",
10000.0,
9500.0,
-500.0,
-5.0,
9500.0,
"2025-11-07T01:00:00Z"
))
conn.commit()
conn.close()
conn.commit()
# Mark job_detail as completed
manager.update_job_detail_status(

View File

@@ -13,7 +13,7 @@ from unittest.mock import patch, Mock
from datetime import datetime
from api.price_data_manager import PriceDataManager, RateLimitError, DownloadError
from api.database import initialize_database, get_db_connection
from api.database import initialize_database, get_db_connection, db_connection
from api.date_utils import expand_date_range
@@ -130,12 +130,11 @@ class TestEndToEndDownload:
assert available_dates == ["2025-01-20", "2025-01-21"]
# Verify coverage tracking
conn = get_db_connection(manager.db_path)
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM price_data_coverage")
coverage_count = cursor.fetchone()[0]
assert coverage_count == 5 # One record per symbol
conn.close()
with db_connection(manager.db_path) as conn:
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) FROM price_data_coverage")
coverage_count = cursor.fetchone()[0]
assert coverage_count == 5 # One record per symbol
@patch('api.price_data_manager.requests.get')
def test_download_with_partial_existing_data(self, mock_get, manager, mock_alpha_vantage_response):
@@ -340,15 +339,14 @@ class TestCoverageTracking:
manager._update_coverage("AAPL", dates[0], dates[1])
# Verify coverage was recorded
conn = get_db_connection(manager.db_path)
cursor = conn.cursor()
cursor.execute("""
SELECT symbol, start_date, end_date, source
FROM price_data_coverage
WHERE symbol = 'AAPL'
""")
row = cursor.fetchone()
conn.close()
with db_connection(manager.db_path) as conn:
cursor = conn.cursor()
cursor.execute("""
SELECT symbol, start_date, end_date, source
FROM price_data_coverage
WHERE symbol = 'AAPL'
""")
row = cursor.fetchone()
assert row is not None
assert row[0] == "AAPL"
@@ -444,10 +442,9 @@ class TestDataValidation:
assert set(stored_dates) == requested_dates
# Verify in database
conn = get_db_connection(manager.db_path)
cursor = conn.cursor()
cursor.execute("SELECT date FROM price_data WHERE symbol = 'AAPL' ORDER BY date")
db_dates = [row[0] for row in cursor.fetchall()]
conn.close()
with db_connection(manager.db_path) as conn:
cursor = conn.cursor()
cursor.execute("SELECT date FROM price_data WHERE symbol = 'AAPL' ORDER BY date")
db_dates = [row[0] for row in cursor.fetchall()]
assert db_dates == ["2025-01-20", "2025-01-21"]

View File

@@ -40,8 +40,8 @@ class TestResultsAPIV2:
# Insert sample data
db.connection.execute(
"INSERT INTO jobs (job_id, status) VALUES (?, ?)",
("test-job", "completed")
"INSERT INTO jobs (job_id, config_path, status, date_range, models, created_at) VALUES (?, ?, ?, ?, ?, ?)",
("test-job", "config.json", "completed", '["2025-01-15", "2025-01-16"]', '["gpt-4"]', "2025-01-15T00:00:00Z")
)
# Day 1
@@ -66,7 +66,7 @@ class TestResultsAPIV2:
def test_results_without_reasoning(self, client, db):
"""Test default response excludes reasoning."""
response = client.get("/results?job_id=test-job")
response = client.get("/results?job_id=test-job&start_date=2025-01-15&end_date=2025-01-15")
assert response.status_code == 200
data = response.json()
@@ -76,7 +76,7 @@ class TestResultsAPIV2:
def test_results_with_summary(self, client, db):
"""Test including reasoning summary."""
response = client.get("/results?job_id=test-job&reasoning=summary")
response = client.get("/results?job_id=test-job&start_date=2025-01-15&end_date=2025-01-15&reasoning=summary")
data = response.json()
result = data["results"][0]
@@ -85,7 +85,7 @@ class TestResultsAPIV2:
def test_results_structure(self, client, db):
"""Test complete response structure."""
response = client.get("/results?job_id=test-job")
response = client.get("/results?job_id=test-job&start_date=2025-01-15&end_date=2025-01-15")
result = response.json()["results"][0]
@@ -124,14 +124,14 @@ class TestResultsAPIV2:
def test_results_filtering_by_date(self, client, db):
"""Test filtering results by date."""
response = client.get("/results?date=2025-01-15")
response = client.get("/results?start_date=2025-01-15&end_date=2025-01-15")
results = response.json()["results"]
assert all(r["date"] == "2025-01-15" for r in results)
def test_results_filtering_by_model(self, client, db):
"""Test filtering results by model."""
response = client.get("/results?model=gpt-4")
response = client.get("/results?model=gpt-4&start_date=2025-01-15&end_date=2025-01-15")
results = response.json()["results"]
assert all(r["model"] == "gpt-4" for r in results)

View File

@@ -71,8 +71,8 @@ def test_results_with_full_reasoning_replaces_old_endpoint(tmp_path):
client = TestClient(app)
# Query new endpoint
response = client.get("/results?job_id=test-job-123&reasoning=full")
# Query new endpoint with explicit date to avoid default lookback filter
response = client.get("/results?job_id=test-job-123&start_date=2025-01-15&end_date=2025-01-15&reasoning=full")
assert response.status_code == 200
data = response.json()