feat: add duplicate detection to job creation

- Skip already-completed model-day pairs in create_job()
- Return warnings for skipped simulations
- Raise error if all simulations are already completed
- Update create_job() return type from str to Dict[str, Any]
- Update all callers to handle new dict return type
- Add comprehensive test coverage for duplicate detection
- Log warnings when simulations are skipped
This commit is contained in:
2025-11-07 13:03:31 -05:00
parent 406bb281b2
commit fbe383772a
10 changed files with 1549 additions and 91 deletions

View File

@@ -56,7 +56,7 @@ class JobManager:
date_range: List[str], date_range: List[str],
models: List[str], models: List[str],
model_day_filter: Optional[List[tuple]] = None model_day_filter: Optional[List[tuple]] = None
) -> str: ) -> Dict[str, Any]:
""" """
Create new simulation job. Create new simulation job.
@@ -68,10 +68,12 @@ class JobManager:
If None, creates job_details for all model-date combinations. If None, creates job_details for all model-date combinations.
Returns: Returns:
job_id: UUID of created job Dict with:
- job_id: UUID of created job
- warnings: List of warning messages for skipped simulations
Raises: Raises:
ValueError: If another job is already running/pending ValueError: If another job is already running/pending or if all simulations are already completed
""" """
if not self.can_start_new_job(): if not self.can_start_new_job():
raise ValueError("Another simulation job is already running or pending") raise ValueError("Another simulation job is already running or pending")
@@ -83,6 +85,43 @@ class JobManager:
cursor = conn.cursor() cursor = conn.cursor()
try: try:
# Determine which model-day pairs to check
if model_day_filter is not None:
pairs_to_check = model_day_filter
else:
pairs_to_check = [(model, date) for date in date_range for model in models]
# Check for already-completed simulations
skipped_pairs = []
pending_pairs = []
for model, date in pairs_to_check:
cursor.execute("""
SELECT COUNT(*)
FROM job_details
WHERE model = ? AND date = ? AND status = 'completed'
""", (model, date))
count = cursor.fetchone()[0]
if count > 0:
skipped_pairs.append((model, date))
logger.info(f"Skipping {model}/{date} - already completed in previous job")
else:
pending_pairs.append((model, date))
# If all simulations are already completed, raise error
if len(pending_pairs) == 0:
warnings = [
f"Skipped {model}/{date} - already completed"
for model, date in skipped_pairs
]
raise ValueError(
f"All requested simulations are already completed. "
f"Skipped {len(skipped_pairs)} model-day pair(s). "
f"Details: {warnings}"
)
# Insert job # Insert job
cursor.execute(""" cursor.execute("""
INSERT INTO jobs ( INSERT INTO jobs (
@@ -98,10 +137,8 @@ class JobManager:
created_at created_at
)) ))
# Create job_details based on filter # Create job_details only for pending pairs
if model_day_filter is not None: for model, date in pending_pairs:
# Only create job_details for specified model-day pairs
for model, date in model_day_filter:
cursor.execute(""" cursor.execute("""
INSERT INTO job_details ( INSERT INTO job_details (
job_id, date, model, status job_id, date, model, status
@@ -109,23 +146,23 @@ class JobManager:
VALUES (?, ?, ?, ?) VALUES (?, ?, ?, ?)
""", (job_id, date, model, "pending")) """, (job_id, date, model, "pending"))
logger.info(f"Created job {job_id} with {len(model_day_filter)} model-day tasks (filtered)") logger.info(f"Created job {job_id} with {len(pending_pairs)} model-day tasks")
else:
# Create job_details for all model-day combinations
for date in date_range:
for model in models:
cursor.execute("""
INSERT INTO job_details (
job_id, date, model, status
)
VALUES (?, ?, ?, ?)
""", (job_id, date, model, "pending"))
logger.info(f"Created job {job_id} with {len(date_range)} dates and {len(models)} models") if skipped_pairs:
logger.info(f"Skipped {len(skipped_pairs)} already-completed simulations")
conn.commit() conn.commit()
return job_id # Prepare warnings
warnings = [
f"Skipped {model}/{date} - already completed"
for model, date in skipped_pairs
]
return {
"job_id": job_id,
"warnings": warnings
}
finally: finally:
conn.close() conn.close()

View File

@@ -280,12 +280,18 @@ def create_app(
# Create job immediately with all requested dates # Create job immediately with all requested dates
# Worker will handle data download and filtering # Worker will handle data download and filtering
job_id = job_manager.create_job( result = job_manager.create_job(
config_path=config_path, config_path=config_path,
date_range=all_dates, date_range=all_dates,
models=models_to_run, models=models_to_run,
model_day_filter=None # Worker will filter based on available data model_day_filter=None # Worker will filter based on available data
) )
job_id = result["job_id"]
warnings = result.get("warnings", [])
# Log warnings if any simulations were skipped
if warnings:
logger.warning(f"Job {job_id} created with {len(warnings)} skipped simulations: {warnings}")
# Start worker in background thread (only if not in test mode) # Start worker in background thread (only if not in test mode)
if not getattr(app.state, "test_mode", False): if not getattr(app.state, "test_mode", False):

File diff suppressed because it is too large Load Diff

View File

@@ -405,11 +405,12 @@ class TestAsyncDownload:
db_path = api_client.db_path db_path = api_client.db_path
job_manager = JobManager(db_path=db_path) job_manager = JobManager(db_path=db_path)
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="config.json", config_path="config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Add warnings # Add warnings
warnings = ["Rate limited", "Skipped 1 date"] warnings = ["Rate limited", "Skipped 1 date"]

View File

@@ -12,11 +12,12 @@ def test_worker_prepares_data_before_execution(tmp_path):
job_manager = JobManager(db_path=db_path) job_manager = JobManager(db_path=db_path)
# Create job # Create job
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="configs/default_config.json", config_path="configs/default_config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=db_path) worker = SimulationWorker(job_id=job_id, db_path=db_path)
@@ -46,11 +47,12 @@ def test_worker_handles_no_available_dates(tmp_path):
initialize_database(db_path) initialize_database(db_path)
job_manager = JobManager(db_path=db_path) job_manager = JobManager(db_path=db_path)
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="configs/default_config.json", config_path="configs/default_config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=db_path) worker = SimulationWorker(job_id=job_id, db_path=db_path)
@@ -74,11 +76,12 @@ def test_worker_stores_warnings(tmp_path):
initialize_database(db_path) initialize_database(db_path)
job_manager = JobManager(db_path=db_path) job_manager = JobManager(db_path=db_path)
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="configs/default_config.json", config_path="configs/default_config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=db_path) worker = SimulationWorker(job_id=job_id, db_path=db_path)

View File

@@ -26,11 +26,12 @@ class TestJobCreation:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5", "claude-3.7-sonnet"] models=["gpt-5", "claude-3.7-sonnet"]
) )
job_id = job_result["job_id"]
assert job_id is not None assert job_id is not None
job = manager.get_job(job_id) job = manager.get_job(job_id)
@@ -44,11 +45,12 @@ class TestJobCreation:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
progress = manager.get_job_progress(job_id) progress = manager.get_job_progress(job_id)
assert progress["total_model_days"] == 2 # 2 dates × 1 model assert progress["total_model_days"] == 2 # 2 dates × 1 model
@@ -60,11 +62,12 @@ class TestJobCreation:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job1_id = manager.create_job( job1_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16"], ["2025-01-16"],
["gpt-5"] ["gpt-5"]
) )
job1_id = job1_result["job_id"]
with pytest.raises(ValueError, match="Another simulation job is already running"): with pytest.raises(ValueError, match="Another simulation job is already running"):
manager.create_job( manager.create_job(
@@ -78,20 +81,22 @@ class TestJobCreation:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job1_id = manager.create_job( job1_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16"], ["2025-01-16"],
["gpt-5"] ["gpt-5"]
) )
job1_id = job1_result["job_id"]
manager.update_job_status(job1_id, "completed") manager.update_job_status(job1_id, "completed")
# Now second job should be allowed # Now second job should be allowed
job2_id = manager.create_job( job2_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-17"], ["2025-01-17"],
["gpt-5"] ["gpt-5"]
) )
job2_id = job2_result["job_id"]
assert job2_id is not None assert job2_id is not None
@@ -104,11 +109,12 @@ class TestJobStatusTransitions:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16"], ["2025-01-16"],
["gpt-5"] ["gpt-5"]
) )
job_id = job_result["job_id"]
# Update detail to running # Update detail to running
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running")
@@ -122,11 +128,12 @@ class TestJobStatusTransitions:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16"], ["2025-01-16"],
["gpt-5"] ["gpt-5"]
) )
job_id = job_result["job_id"]
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running")
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "completed") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "completed")
@@ -141,11 +148,12 @@ class TestJobStatusTransitions:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16"], ["2025-01-16"],
["gpt-5", "claude-3.7-sonnet"] ["gpt-5", "claude-3.7-sonnet"]
) )
job_id = job_result["job_id"]
# First model succeeds # First model succeeds
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running")
@@ -183,10 +191,12 @@ class TestJobRetrieval:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job1_id = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"]) job1_result = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"])
job1_id = job1_result["job_id"]
manager.update_job_status(job1_id, "completed") manager.update_job_status(job1_id, "completed")
job2_id = manager.create_job("configs/test.json", ["2025-01-17"], ["gpt-5"]) job2_result = manager.create_job("configs/test.json", ["2025-01-17"], ["gpt-5"])
job2_id = job2_result["job_id"]
current = manager.get_current_job() current = manager.get_current_job()
assert current["job_id"] == job2_id assert current["job_id"] == job2_id
@@ -204,11 +214,12 @@ class TestJobRetrieval:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16", "2025-01-17"], ["2025-01-16", "2025-01-17"],
["gpt-5"] ["gpt-5"]
) )
job_id = job_result["job_id"]
found = manager.find_job_by_date_range(["2025-01-16", "2025-01-17"]) found = manager.find_job_by_date_range(["2025-01-16", "2025-01-17"])
assert found["job_id"] == job_id assert found["job_id"] == job_id
@@ -237,11 +248,12 @@ class TestJobProgress:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16", "2025-01-17"], ["2025-01-16", "2025-01-17"],
["gpt-5"] ["gpt-5"]
) )
job_id = job_result["job_id"]
progress = manager.get_job_progress(job_id) progress = manager.get_job_progress(job_id)
assert progress["total_model_days"] == 2 assert progress["total_model_days"] == 2
@@ -254,11 +266,12 @@ class TestJobProgress:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16"], ["2025-01-16"],
["gpt-5"] ["gpt-5"]
) )
job_id = job_result["job_id"]
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running")
@@ -270,11 +283,12 @@ class TestJobProgress:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
"configs/test.json", "configs/test.json",
["2025-01-16"], ["2025-01-16"],
["gpt-5", "claude-3.7-sonnet"] ["gpt-5", "claude-3.7-sonnet"]
) )
job_id = job_result["job_id"]
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "completed") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "completed")
@@ -311,7 +325,8 @@ class TestConcurrencyControl:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"]) job_result = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"])
job_id = job_result["job_id"]
manager.update_job_status(job_id, "running") manager.update_job_status(job_id, "running")
assert manager.can_start_new_job() is False assert manager.can_start_new_job() is False
@@ -321,7 +336,8 @@ class TestConcurrencyControl:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"]) job_result = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"])
job_id = job_result["job_id"]
manager.update_job_status(job_id, "completed") manager.update_job_status(job_id, "completed")
assert manager.can_start_new_job() is True assert manager.can_start_new_job() is True
@@ -331,13 +347,15 @@ class TestConcurrencyControl:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job1_id = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"]) job1_result = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"])
job1_id = job1_result["job_id"]
# Complete first job # Complete first job
manager.update_job_status(job1_id, "completed") manager.update_job_status(job1_id, "completed")
# Create second job # Create second job
job2_id = manager.create_job("configs/test.json", ["2025-01-17"], ["gpt-5"]) job2_result = manager.create_job("configs/test.json", ["2025-01-17"], ["gpt-5"])
job2_id = job2_result["job_id"]
running = manager.get_running_jobs() running = manager.get_running_jobs()
assert len(running) == 1 assert len(running) == 1
@@ -368,12 +386,13 @@ class TestJobCleanup:
conn.close() conn.close()
# Create recent job # Create recent job
recent_id = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"]) recent_result = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"])
recent_id = recent_result["job_id"]
# Cleanup jobs older than 30 days # Cleanup jobs older than 30 days
result = manager.cleanup_old_jobs(days=30) cleanup_result = manager.cleanup_old_jobs(days=30)
assert result["jobs_deleted"] == 1 assert cleanup_result["jobs_deleted"] == 1
assert manager.get_job("old-job") is None assert manager.get_job("old-job") is None
assert manager.get_job(recent_id) is not None assert manager.get_job(recent_id) is not None
@@ -387,7 +406,8 @@ class TestJobUpdateOperations:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"]) job_result = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"])
job_id = job_result["job_id"]
manager.update_job_status(job_id, "failed", error="MCP service unavailable") manager.update_job_status(job_id, "failed", error="MCP service unavailable")
@@ -401,7 +421,8 @@ class TestJobUpdateOperations:
import time import time
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"]) job_result = manager.create_job("configs/test.json", ["2025-01-16"], ["gpt-5"])
job_id = job_result["job_id"]
# Start # Start
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "running")
@@ -432,11 +453,12 @@ class TestJobWarnings:
job_manager = JobManager(db_path=clean_db) job_manager = JobManager(db_path=clean_db)
# Create a job # Create a job
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="config.json", config_path="config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Add warnings # Add warnings
warnings = ["Rate limit reached", "Skipped 2 dates"] warnings = ["Rate limit reached", "Skipped 2 dates"]
@@ -457,11 +479,12 @@ class TestStaleJobCleanup:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Job is pending - simulate container restart # Job is pending - simulate container restart
result = manager.cleanup_stale_jobs() result = manager.cleanup_stale_jobs()
@@ -478,11 +501,12 @@ class TestStaleJobCleanup:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Mark job as running and complete one model-day # Mark job as running and complete one model-day
manager.update_job_status(job_id, "running") manager.update_job_status(job_id, "running")
@@ -502,11 +526,12 @@ class TestStaleJobCleanup:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Mark as downloading data # Mark as downloading data
manager.update_job_status(job_id, "downloading_data") manager.update_job_status(job_id, "downloading_data")
@@ -524,11 +549,12 @@ class TestStaleJobCleanup:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Mark job as running, one detail running, one pending # Mark job as running, one detail running, one pending
manager.update_job_status(job_id, "running") manager.update_job_status(job_id, "running")
@@ -552,11 +578,12 @@ class TestStaleJobCleanup:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Complete the job # Complete the job
manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "completed") manager.update_job_detail_status(job_id, "2025-01-16", "gpt-5", "completed")
@@ -575,28 +602,31 @@ class TestStaleJobCleanup:
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
# Create first job # Create first job
job1_id = manager.create_job( job1_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job1_id = job1_result["job_id"]
manager.update_job_status(job1_id, "running") manager.update_job_status(job1_id, "running")
manager.update_job_status(job1_id, "completed") manager.update_job_status(job1_id, "completed")
# Create second job (pending) # Create second job (pending)
job2_id = manager.create_job( job2_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-17"], date_range=["2025-01-17"],
models=["gpt-5"] models=["gpt-5"]
) )
job2_id = job2_result["job_id"]
# Create third job (running) # Create third job (running)
manager.update_job_status(job2_id, "completed") manager.update_job_status(job2_id, "completed")
job3_id = manager.create_job( job3_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-18"], date_range=["2025-01-18"],
models=["gpt-5"] models=["gpt-5"]
) )
job3_id = job3_result["job_id"]
manager.update_job_status(job3_id, "running") manager.update_job_status(job3_id, "running")
# Simulate container restart # Simulate container restart

View File

@@ -0,0 +1,181 @@
"""Test duplicate detection in job creation."""
import pytest
import tempfile
import os
from pathlib import Path
from api.job_manager import JobManager
@pytest.fixture
def temp_db():
    """Create temporary database for testing.

    Yields:
        str: Filesystem path to a throwaway SQLite database file that has
        the ``jobs`` and ``job_details`` tables created (schema mirrors the
        production one used by JobManager). The file is deleted after the
        test using this fixture finishes, even if the test fails.
    """
    # mkstemp returns an open OS-level handle we don't need; close it
    # immediately so sqlite can open the path itself.
    fd, path = tempfile.mkstemp(suffix='.db')
    os.close(fd)
    # Initialize schema
    from api.database import get_db_connection
    conn = get_db_connection(path)
    cursor = conn.cursor()
    # Create jobs table (one row per simulation job; `warnings` stores the
    # skipped-simulation messages returned by create_job)
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS jobs (
            job_id TEXT PRIMARY KEY,
            config_path TEXT NOT NULL,
            status TEXT NOT NULL,
            date_range TEXT NOT NULL,
            models TEXT NOT NULL,
            created_at TEXT NOT NULL,
            started_at TEXT,
            updated_at TEXT,
            completed_at TEXT,
            total_duration_seconds REAL,
            error TEXT,
            warnings TEXT
        )
    """)
    # Create job_details table (one row per model-day pair within a job;
    # UNIQUE(job_id, date, model) prevents duplicate pairs in a single job)
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS job_details (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            job_id TEXT NOT NULL,
            date TEXT NOT NULL,
            model TEXT NOT NULL,
            status TEXT NOT NULL,
            started_at TEXT,
            completed_at TEXT,
            duration_seconds REAL,
            error TEXT,
            FOREIGN KEY (job_id) REFERENCES jobs(job_id) ON DELETE CASCADE,
            UNIQUE(job_id, date, model)
        )
    """)
    conn.commit()
    conn.close()
    yield path
    # Cleanup: runs after the test body, regardless of test outcome
    if os.path.exists(path):
        os.remove(path)
def test_create_job_with_filter_skips_completed_simulations(temp_db):
    """A filtered create_job must drop model-day pairs completed by a prior job."""
    manager = JobManager(db_path=temp_db)

    # First job covers exactly one model-day pair; mark it completed so the
    # duplicate-detection logic has something to skip.
    first = manager.create_job(
        config_path="test_config.json",
        date_range=["2025-10-15", "2025-10-16"],
        models=["deepseek-chat-v3.1"],
        model_day_filter=[("deepseek-chat-v3.1", "2025-10-15")],
    )
    manager.update_job_detail_status(
        first["job_id"],
        "2025-10-15",
        "deepseek-chat-v3.1",
        "completed",
    )

    # Second job requests both the completed pair and a fresh one.
    second = manager.create_job(
        config_path="test_config.json",
        date_range=["2025-10-15", "2025-10-16"],
        models=["deepseek-chat-v3.1"],
        model_day_filter=[
            ("deepseek-chat-v3.1", "2025-10-15"),  # completed in first job
            ("deepseek-chat-v3.1", "2025-10-16"),  # still pending
        ],
    )

    # Only the pending pair should have produced a job_details row.
    details = manager.get_job_details(second["job_id"])
    assert [(row["date"], row["model"]) for row in details] == [
        ("2025-10-16", "deepseek-chat-v3.1"),
    ]
def test_create_job_without_filter_skips_all_completed_simulations(temp_db):
    """Unfiltered create_job must skip completed pairs but retry failed ones."""
    manager = JobManager(db_path=temp_db)

    # Seed a first job and drive its two model-days to terminal states:
    # one completed (should be skipped later), one failed (should be retried).
    first = manager.create_job(
        config_path="test_config.json",
        date_range=["2025-10-15"],
        models=["model-a", "model-b"],
    )
    manager.update_job_detail_status(first["job_id"], "2025-10-15", "model-a", "completed")
    manager.update_job_detail_status(first["job_id"], "2025-10-15", "model-b", "failed")

    # Second job spans the same models over an extended date range.
    second = manager.create_job(
        config_path="test_config.json",
        date_range=["2025-10-15", "2025-10-16"],
        models=["model-a", "model-b"],
    )

    # Expected pairs: everything except the completed model-a/2025-10-15.
    # The failed model-b/2025-10-15 is retried, and both models get the new date.
    details = manager.get_job_details(second["job_id"])
    actual_pairs = {(row["date"], row["model"]) for row in details}
    assert len(details) == 3
    assert actual_pairs == {
        ("2025-10-15", "model-b"),
        ("2025-10-16", "model-a"),
        ("2025-10-16", "model-b"),
    }
def test_create_job_returns_warnings_for_skipped_simulations(temp_db):
    """Each skipped model-day pair must surface as a warning in the result dict."""
    manager = JobManager(db_path=temp_db)

    # Complete a single simulation so a later job will have to skip it.
    first = manager.create_job(
        config_path="test_config.json",
        date_range=["2025-10-15"],
        models=["model-a"],
    )
    manager.update_job_detail_status(first["job_id"], "2025-10-15", "model-a", "completed")

    # Request the completed date plus one new date.
    result = manager.create_job(
        config_path="test_config.json",
        date_range=["2025-10-15", "2025-10-16"],  # Add new date
        models=["model-a"],
    )

    # The return value is a dict carrying the job id and a single warning
    # that names the skipped model and date.
    assert isinstance(result, dict)
    assert "job_id" in result and "warnings" in result
    assert len(result["warnings"]) == 1
    warning_text = result["warnings"][0]
    assert "model-a" in warning_text
    assert "2025-10-15" in warning_text

    # Only the new date made it into job_details.
    details = manager.get_job_details(result["job_id"])
    assert len(details) == 1
    assert details[0]["date"] == "2025-10-16"

View File

@@ -41,11 +41,12 @@ class TestSkipStatusDatabase:
def test_skipped_status_allowed_in_job_details(self, job_manager): def test_skipped_status_allowed_in_job_details(self, job_manager):
"""Test job_details accepts 'skipped' status without constraint violation.""" """Test job_details accepts 'skipped' status without constraint violation."""
# Create job # Create job
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02"], date_range=["2025-10-01", "2025-10-02"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
# Mark a detail as skipped - should not raise constraint violation # Mark a detail as skipped - should not raise constraint violation
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
@@ -70,11 +71,12 @@ class TestJobCompletionWithSkipped:
def test_job_completes_with_all_dates_skipped(self, job_manager): def test_job_completes_with_all_dates_skipped(self, job_manager):
"""Test job transitions to completed when all dates are skipped.""" """Test job transitions to completed when all dates are skipped."""
# Create job with 3 dates # Create job with 3 dates
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02", "2025-10-03"], date_range=["2025-10-01", "2025-10-02", "2025-10-03"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
# Mark all as skipped # Mark all as skipped
for date in ["2025-10-01", "2025-10-02", "2025-10-03"]: for date in ["2025-10-01", "2025-10-02", "2025-10-03"]:
@@ -93,11 +95,12 @@ class TestJobCompletionWithSkipped:
def test_job_completes_with_mixed_completed_and_skipped(self, job_manager): def test_job_completes_with_mixed_completed_and_skipped(self, job_manager):
"""Test job completes when some dates completed, some skipped.""" """Test job completes when some dates completed, some skipped."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02", "2025-10-03"], date_range=["2025-10-01", "2025-10-02", "2025-10-03"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
# Mark some completed, some skipped # Mark some completed, some skipped
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
@@ -119,11 +122,12 @@ class TestJobCompletionWithSkipped:
def test_job_partial_with_mixed_completed_failed_skipped(self, job_manager): def test_job_partial_with_mixed_completed_failed_skipped(self, job_manager):
"""Test job status 'partial' when some failed, some completed, some skipped.""" """Test job status 'partial' when some failed, some completed, some skipped."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02", "2025-10-03"], date_range=["2025-10-01", "2025-10-02", "2025-10-03"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
# Mix of statuses # Mix of statuses
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
@@ -145,11 +149,12 @@ class TestJobCompletionWithSkipped:
def test_job_remains_running_with_pending_dates(self, job_manager): def test_job_remains_running_with_pending_dates(self, job_manager):
"""Test job stays running when some dates are still pending.""" """Test job stays running when some dates are still pending."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02", "2025-10-03"], date_range=["2025-10-01", "2025-10-02", "2025-10-03"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
# Only mark some as terminal states # Only mark some as terminal states
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
@@ -173,11 +178,12 @@ class TestProgressTrackingWithSkipped:
def test_progress_includes_skipped_count(self, job_manager): def test_progress_includes_skipped_count(self, job_manager):
"""Test get_job_progress returns skipped count.""" """Test get_job_progress returns skipped count."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02", "2025-10-03", "2025-10-04"], date_range=["2025-10-01", "2025-10-02", "2025-10-03", "2025-10-04"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
# Set various statuses # Set various statuses
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
@@ -205,11 +211,12 @@ class TestProgressTrackingWithSkipped:
def test_progress_all_skipped(self, job_manager): def test_progress_all_skipped(self, job_manager):
"""Test progress when all dates are skipped.""" """Test progress when all dates are skipped."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02"], date_range=["2025-10-01", "2025-10-02"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
# Mark all as skipped # Mark all as skipped
for date in ["2025-10-01", "2025-10-02"]: for date in ["2025-10-01", "2025-10-02"]:
@@ -231,11 +238,12 @@ class TestMultiModelSkipHandling:
def test_different_models_different_skip_states(self, job_manager): def test_different_models_different_skip_states(self, job_manager):
"""Test that different models can have different skip states for same date.""" """Test that different models can have different skip states for same date."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02"], date_range=["2025-10-01", "2025-10-02"],
models=["model-a", "model-b"] models=["model-a", "model-b"]
) )
job_id = job_result["job_id"]
# Model A: 10/1 skipped (already completed), 10/2 completed # Model A: 10/1 skipped (already completed), 10/2 completed
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
@@ -276,11 +284,12 @@ class TestMultiModelSkipHandling:
def test_job_completes_with_per_model_skips(self, job_manager): def test_job_completes_with_per_model_skips(self, job_manager):
"""Test job completes when different models have different skip patterns.""" """Test job completes when different models have different skip patterns."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01", "2025-10-02"], date_range=["2025-10-01", "2025-10-02"],
models=["model-a", "model-b"] models=["model-a", "model-b"]
) )
job_id = job_result["job_id"]
# Model A: one skipped, one completed # Model A: one skipped, one completed
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
@@ -318,11 +327,12 @@ class TestSkipReasons:
def test_skip_reason_already_completed(self, job_manager): def test_skip_reason_already_completed(self, job_manager):
"""Test 'Already completed' skip reason is stored.""" """Test 'Already completed' skip reason is stored."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
job_id=job_id, date="2025-10-01", model="test-model", job_id=job_id, date="2025-10-01", model="test-model",
@@ -334,11 +344,12 @@ class TestSkipReasons:
def test_skip_reason_incomplete_price_data(self, job_manager): def test_skip_reason_incomplete_price_data(self, job_manager):
"""Test 'Incomplete price data' skip reason is stored.""" """Test 'Incomplete price data' skip reason is stored."""
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="test_config.json", config_path="test_config.json",
date_range=["2025-10-04"], date_range=["2025-10-04"],
models=["test-model"] models=["test-model"]
) )
job_id = job_result["job_id"]
job_manager.update_job_detail_status( job_manager.update_job_detail_status(
job_id=job_id, date="2025-10-04", model="test-model", job_id=job_id, date="2025-10-04", model="test-model",

View File

@@ -112,11 +112,12 @@ class TestModelDayExecutorExecution:
# Create job and job_detail # Create job and job_detail
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path=str(config_path), config_path=str(config_path),
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Mock agent execution # Mock agent execution
mock_agent = create_mock_agent( mock_agent = create_mock_agent(
@@ -156,11 +157,12 @@ class TestModelDayExecutorExecution:
# Create job # Create job
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Mock agent to raise error # Mock agent to raise error
with patch("api.model_day_executor.RuntimeConfigManager") as mock_runtime: with patch("api.model_day_executor.RuntimeConfigManager") as mock_runtime:
@@ -212,11 +214,12 @@ class TestModelDayExecutorDataPersistence:
# Create job # Create job
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path=str(config_path), config_path=str(config_path),
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Mock successful execution (no trades) # Mock successful execution (no trades)
mock_agent = create_mock_agent( mock_agent = create_mock_agent(
@@ -269,11 +272,12 @@ class TestModelDayExecutorDataPersistence:
# Create job # Create job
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
# Mock execution with reasoning # Mock execution with reasoning
mock_agent = create_mock_agent( mock_agent = create_mock_agent(
@@ -320,11 +324,12 @@ class TestModelDayExecutorCleanup:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
mock_agent = create_mock_agent( mock_agent = create_mock_agent(
session_result={"success": True} session_result={"success": True}
@@ -355,11 +360,12 @@ class TestModelDayExecutorCleanup:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
with patch("api.model_day_executor.RuntimeConfigManager") as mock_runtime: with patch("api.model_day_executor.RuntimeConfigManager") as mock_runtime:
mock_instance = Mock() mock_instance = Mock()

View File

@@ -41,11 +41,12 @@ class TestSimulationWorkerExecution:
# Create job with 2 dates and 2 models = 4 model-days # Create job with 2 dates and 2 models = 4 model-days
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5", "claude-3.7-sonnet"] models=["gpt-5", "claude-3.7-sonnet"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
@@ -73,11 +74,12 @@ class TestSimulationWorkerExecution:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5", "claude-3.7-sonnet"] models=["gpt-5", "claude-3.7-sonnet"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
@@ -118,11 +120,12 @@ class TestSimulationWorkerExecution:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
@@ -159,11 +162,12 @@ class TestSimulationWorkerExecution:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5", "claude-3.7-sonnet"] models=["gpt-5", "claude-3.7-sonnet"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
@@ -214,11 +218,12 @@ class TestSimulationWorkerErrorHandling:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5", "claude-3.7-sonnet", "gemini"] models=["gpt-5", "claude-3.7-sonnet", "gemini"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
@@ -259,11 +264,12 @@ class TestSimulationWorkerErrorHandling:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
@@ -289,11 +295,12 @@ class TestSimulationWorkerConcurrency:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16"], date_range=["2025-01-16"],
models=["gpt-5", "claude-3.7-sonnet"] models=["gpt-5", "claude-3.7-sonnet"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
@@ -335,11 +342,12 @@ class TestSimulationWorkerJobRetrieval:
from api.job_manager import JobManager from api.job_manager import JobManager
manager = JobManager(db_path=clean_db) manager = JobManager(db_path=clean_db)
job_id = manager.create_job( job_result = manager.create_job(
config_path="configs/test.json", config_path="configs/test.json",
date_range=["2025-01-16", "2025-01-17"], date_range=["2025-01-16", "2025-01-17"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=clean_db) worker = SimulationWorker(job_id=job_id, db_path=clean_db)
job_info = worker.get_job_info() job_info = worker.get_job_info()
@@ -469,11 +477,12 @@ class TestSimulationWorkerHelperMethods:
job_manager = JobManager(db_path=db_path) job_manager = JobManager(db_path=db_path)
# Create job # Create job
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="config.json", config_path="config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=db_path) worker = SimulationWorker(job_id=job_id, db_path=db_path)
@@ -498,11 +507,12 @@ class TestSimulationWorkerHelperMethods:
job_manager = JobManager(db_path=db_path) job_manager = JobManager(db_path=db_path)
# Create job # Create job
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="config.json", config_path="config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=db_path) worker = SimulationWorker(job_id=job_id, db_path=db_path)
@@ -545,11 +555,12 @@ class TestSimulationWorkerHelperMethods:
initialize_database(db_path) initialize_database(db_path)
job_manager = JobManager(db_path=db_path) job_manager = JobManager(db_path=db_path)
job_id = job_manager.create_job( job_result = job_manager.create_job(
config_path="config.json", config_path="config.json",
date_range=["2025-10-01"], date_range=["2025-10-01"],
models=["gpt-5"] models=["gpt-5"]
) )
job_id = job_result["job_id"]
worker = SimulationWorker(job_id=job_id, db_path=db_path) worker = SimulationWorker(job_id=job_id, db_path=db_path)