feat: implement XER MCP Server with 9 schedule query tools
Implement complete MCP server for parsing Primavera P6 XER files and exposing schedule data through MCP tools. All 4 user stories complete. Tools implemented: - load_xer: Parse XER files into SQLite database - list_activities: Query activities with pagination and filtering - get_activity: Get activity details by ID - list_relationships: Query activity dependencies - get_predecessors/get_successors: Query activity relationships - get_project_summary: Project overview with counts - list_milestones: Query milestone activities - get_critical_path: Query driving path activities Features: - Tab-delimited XER format parsing with pluggable table handlers - In-memory SQLite database for fast queries - Pagination with 100-item default limit - Multi-project file support with project selection - ISO8601 date formatting - NO_FILE_LOADED error handling for all query tools Test coverage: 81 tests (contract, integration, unit)
This commit is contained in:
3
src/xer_mcp/__init__.py
Normal file
3
src/xer_mcp/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""XER MCP Server - MCP tools for querying Primavera P6 XER schedule data."""
|
||||
|
||||
__version__ = "0.1.0"
|
||||
14
src/xer_mcp/__main__.py
Normal file
14
src/xer_mcp/__main__.py
Normal file
@@ -0,0 +1,14 @@
|
||||
"""Main entry point for the XER MCP Server."""
|
||||
|
||||
import asyncio
|
||||
|
||||
from xer_mcp.server import run_server
|
||||
|
||||
|
||||
def main() -> None:
    """Run the XER MCP Server.

    Blocks until the server's asyncio event loop exits; `run_server`
    is the package's async entry point.
    """
    asyncio.run(run_server())


if __name__ == "__main__":
    main()
|
||||
66
src/xer_mcp/db/__init__.py
Normal file
66
src/xer_mcp/db/__init__.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""Database connection management for XER MCP Server."""
|
||||
|
||||
import sqlite3
|
||||
from collections.abc import Generator
|
||||
from contextlib import contextmanager
|
||||
|
||||
from xer_mcp.db.schema import get_schema
|
||||
|
||||
|
||||
class DatabaseManager:
    """Owns the process-wide SQLite connection and its lifecycle.

    The connection is created lazily by :meth:`initialize` against an
    in-memory database and torn down by :meth:`close`.
    """

    def __init__(self) -> None:
        """Start with no open connection; call initialize() to create one."""
        self._conn: sqlite3.Connection | None = None

    def initialize(self) -> None:
        """Create the in-memory database and apply the schema."""
        self._conn = sqlite3.connect(":memory:", check_same_thread=False)
        self._conn.row_factory = sqlite3.Row
        self._conn.executescript(get_schema())

    def clear(self) -> None:
        """Delete every row from all tables (no-op when uninitialized)."""
        if self._conn is None:
            return

        # Children before parents so FK references are never left dangling.
        for table in ("relationships", "activities", "wbs", "calendars", "projects"):
            self._conn.execute(f"DELETE FROM {table}")  # noqa: S608
        self._conn.commit()

    @property
    def connection(self) -> sqlite3.Connection:
        """Return the live connection; raise if initialize() was never called."""
        if self._conn is None:
            raise RuntimeError("Database not initialized. Call initialize() first.")
        return self._conn

    @contextmanager
    def cursor(self) -> Generator[sqlite3.Cursor]:
        """Yield a cursor that is closed again no matter how the body exits."""
        cur = self.connection.cursor()
        try:
            yield cur
        finally:
            cur.close()

    def commit(self) -> None:
        """Commit the pending transaction."""
        self.connection.commit()

    def close(self) -> None:
        """Tear down the connection, if one exists."""
        if self._conn is not None:
            self._conn.close()
            self._conn = None

    @property
    def is_initialized(self) -> bool:
        """True once initialize() has run and close() has not."""
        return self._conn is not None


# Module-level singleton shared by the rest of the package.
db = DatabaseManager()
|
||||
140
src/xer_mcp/db/loader.py
Normal file
140
src/xer_mcp/db/loader.py
Normal file
@@ -0,0 +1,140 @@
|
||||
"""Database loader for parsed XER data."""
|
||||
|
||||
from xer_mcp.db import db
|
||||
from xer_mcp.parser.xer_parser import ParsedXer
|
||||
|
||||
|
||||
def load_parsed_data(parsed: ParsedXer, project_id: str) -> None:
    """Load parsed XER data into the database.

    Only loads data for the specified project. Any previously loaded
    data is cleared first, so the database holds at most one project
    at a time.

    Args:
        parsed: Parsed XER data
        project_id: ID of the project to load

    Raises:
        ValueError: If ``project_id`` is not present in ``parsed``.
    """
    # Find the project
    project = next((p for p in parsed.projects if p["proj_id"] == project_id), None)
    if project is None:
        raise ValueError(f"Project {project_id} not found in parsed data")

    # Clear existing data
    db.clear()

    with db.cursor() as cur:
        # Insert project
        cur.execute(
            """
            INSERT INTO projects (proj_id, proj_short_name, plan_start_date, plan_end_date)
            VALUES (?, ?, ?, ?)
            """,
            (
                project["proj_id"],
                project["proj_short_name"],
                project["plan_start_date"],
                project["plan_end_date"],
            ),
        )

        # Insert WBS elements for this project
        for wbs in parsed.projwbs:
            if wbs["proj_id"] == project_id:
                cur.execute(
                    """
                    INSERT INTO wbs (wbs_id, proj_id, parent_wbs_id, wbs_short_name, wbs_name)
                    VALUES (?, ?, ?, ?, ?)
                    """,
                    (
                        wbs["wbs_id"],
                        wbs["proj_id"],
                        # Empty string means "no parent"; store NULL instead.
                        wbs["parent_wbs_id"] or None,
                        wbs["wbs_short_name"],
                        wbs["wbs_name"],
                    ),
                )

        # Insert calendars (all calendars, they may be shared)
        for cal in parsed.calendars:
            cur.execute(
                """
                INSERT OR IGNORE INTO calendars (clndr_id, clndr_name, day_hr_cnt, week_hr_cnt)
                VALUES (?, ?, ?, ?)
                """,
                (
                    cal["clndr_id"],
                    cal["clndr_name"],
                    cal["day_hr_cnt"],
                    cal["week_hr_cnt"],
                ),
            )

        # Insert activities for this project
        for task in parsed.tasks:
            if task["proj_id"] == project_id:
                cur.execute(
                    """
                    INSERT INTO activities (
                        task_id, proj_id, wbs_id, task_code, task_name, task_type,
                        target_start_date, target_end_date, act_start_date, act_end_date,
                        total_float_hr_cnt, driving_path_flag, status_code
                    )
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        task["task_id"],
                        task["proj_id"],
                        task["wbs_id"],
                        task["task_code"],
                        task["task_name"],
                        task["task_type"],
                        task["target_start_date"],
                        task["target_end_date"],
                        task["act_start_date"],
                        task["act_end_date"],
                        task["total_float_hr_cnt"],
                        # The schema stores the flag as INTEGER 0/1.
                        1 if task["driving_path_flag"] else 0,
                        task["status_code"],
                    ),
                )

        # Build set of task IDs in this project
        project_task_ids = {t["task_id"] for t in parsed.tasks if t["proj_id"] == project_id}

        # Insert relationships where both tasks are in this project
        for rel in parsed.taskpreds:
            if (
                # NOTE(review): `.get()` means TASKPRED rows lacking proj_id
                # are silently skipped even if both endpoints are in the
                # project — confirm that is the intended cross-project policy.
                rel.get("proj_id") == project_id
                and rel["task_id"] in project_task_ids
                and rel["pred_task_id"] in project_task_ids
            ):
                cur.execute(
                    """
                    INSERT INTO relationships (
                        task_pred_id, task_id, pred_task_id, pred_type, lag_hr_cnt
                    )
                    VALUES (?, ?, ?, ?, ?)
                    """,
                    (
                        rel["task_pred_id"],
                        rel["task_id"],
                        rel["pred_task_id"],
                        rel["pred_type"],
                        rel["lag_hr_cnt"],
                    ),
                )

    db.commit()
|
||||
|
||||
|
||||
def get_activity_count() -> int:
    """Return the number of activity rows currently loaded."""
    with db.cursor() as cur:
        (count,) = cur.execute("SELECT COUNT(*) FROM activities").fetchone()
    return count
|
||||
|
||||
|
||||
def get_relationship_count() -> int:
    """Return the number of relationship rows currently loaded."""
    with db.cursor() as cur:
        (count,) = cur.execute("SELECT COUNT(*) FROM relationships").fetchone()
    return count
|
||||
392
src/xer_mcp/db/queries.py
Normal file
392
src/xer_mcp/db/queries.py
Normal file
@@ -0,0 +1,392 @@
|
||||
"""Database query functions for XER data."""
|
||||
|
||||
from xer_mcp.db import db
|
||||
|
||||
|
||||
def query_activities(
    limit: int = 100,
    offset: int = 0,
    start_date: str | None = None,
    end_date: str | None = None,
    wbs_id: str | None = None,
    activity_type: str | None = None,
) -> tuple[list[dict], int]:
    """Query activities with pagination and filtering.

    Args:
        limit: Maximum number of results to return
        offset: Number of results to skip
        start_date: Filter activities starting on or after this date (YYYY-MM-DD)
        end_date: Filter activities ending on or before this date (YYYY-MM-DD)
        wbs_id: Filter by WBS ID
        activity_type: Filter by task type (TT_Task, TT_Mile, etc.)

    Returns:
        Tuple of (list of activity dicts, total count matching filters)
    """
    # Build WHERE clause. Filter values always travel as SQL parameters;
    # only the fixed condition snippets are interpolated into the query.
    conditions = []
    params: list = []

    if start_date:
        # Dates are stored as ISO8601 TEXT, so string comparison orders them.
        conditions.append("target_start_date >= ?")
        params.append(f"{start_date}T00:00:00")

    if end_date:
        conditions.append("target_end_date <= ?")
        params.append(f"{end_date}T23:59:59")

    if wbs_id:
        conditions.append("wbs_id = ?")
        params.append(wbs_id)

    if activity_type:
        conditions.append("task_type = ?")
        params.append(activity_type)

    where_clause = " AND ".join(conditions) if conditions else "1=1"

    # Total count of matches, ignoring pagination.
    with db.cursor() as cur:
        cur.execute(f"SELECT COUNT(*) FROM activities WHERE {where_clause}", params)  # noqa: S608
        total = cur.fetchone()[0]

    # Paginated page of results. FIX: the previous version also selected
    # wbs_id and total_float_hr_cnt but never put them in the result dicts;
    # the dead columns are dropped here.
    query = f"""
        SELECT task_id, task_code, task_name, task_type,
               target_start_date, target_end_date, status_code,
               driving_path_flag
        FROM activities
        WHERE {where_clause}
        ORDER BY target_start_date, task_code
        LIMIT ? OFFSET ?
    """  # noqa: S608

    with db.cursor() as cur:
        cur.execute(query, [*params, limit, offset])
        rows = cur.fetchall()

    activities = [
        {
            "task_id": row[0],
            "task_code": row[1],
            "task_name": row[2],
            "task_type": row[3],
            "target_start_date": row[4],
            "target_end_date": row[5],
            "status_code": row[6],
            "driving_path_flag": bool(row[7]),
        }
        for row in rows
    ]

    return activities, total
|
||||
|
||||
|
||||
def get_activity_by_id(activity_id: str) -> dict | None:
    """Get a single activity by ID with full details.

    Args:
        activity_id: The task_id to look up

    Returns:
        Activity dict with all fields, or None if not found
    """
    detail_sql = """
        SELECT a.task_id, a.task_code, a.task_name, a.task_type,
               a.wbs_id, w.wbs_name,
               a.target_start_date, a.target_end_date,
               a.act_start_date, a.act_end_date,
               a.total_float_hr_cnt, a.status_code, a.driving_path_flag
        FROM activities a
        LEFT JOIN wbs w ON a.wbs_id = w.wbs_id
        WHERE a.task_id = ?
    """

    with db.cursor() as cur:
        row = cur.execute(detail_sql, (activity_id,)).fetchone()

    if row is None:
        return None

    # Relationship counts: rows where this task is the successor are its
    # predecessors; rows where it is the predecessor are its successors.
    with db.cursor() as cur:
        predecessor_count = cur.execute(
            "SELECT COUNT(*) FROM relationships WHERE task_id = ?",
            (activity_id,),
        ).fetchone()[0]
        successor_count = cur.execute(
            "SELECT COUNT(*) FROM relationships WHERE pred_task_id = ?",
            (activity_id,),
        ).fetchone()[0]

    # Keys listed in SELECT order; positions map 1:1 onto the row.
    scalar_keys = (
        "task_id",
        "task_code",
        "task_name",
        "task_type",
        "wbs_id",
        "wbs_name",
        "target_start_date",
        "target_end_date",
        "act_start_date",
        "act_end_date",
        "total_float_hr_cnt",
        "status_code",
    )
    result = {key: row[i] for i, key in enumerate(scalar_keys)}
    result["driving_path_flag"] = bool(row[12])
    result["predecessor_count"] = predecessor_count
    result["successor_count"] = successor_count
    return result
|
||||
|
||||
|
||||
def query_relationships(
    limit: int = 100,
    offset: int = 0,
) -> tuple[list[dict], int]:
    """Query relationships with pagination.

    Args:
        limit: Maximum number of results to return
        offset: Number of results to skip

    Returns:
        Tuple of (list of relationship dicts, total count)
    """
    # Get total count
    with db.cursor() as cur:
        cur.execute("SELECT COUNT(*) FROM relationships")
        total = cur.fetchone()[0]

    # Get paginated results with activity names
    query = """
        SELECT r.task_pred_id, r.task_id, a1.task_name,
               r.pred_task_id, a2.task_name,
               r.pred_type, r.lag_hr_cnt
        FROM relationships r
        LEFT JOIN activities a1 ON r.task_id = a1.task_id
        LEFT JOIN activities a2 ON r.pred_task_id = a2.task_id
        ORDER BY r.task_pred_id
        LIMIT ? OFFSET ?
    """

    with db.cursor() as cur:
        cur.execute(query, (limit, offset))
        rows = cur.fetchall()

    relationships = [
        {
            "task_pred_id": row[0],
            "task_id": row[1],
            "task_name": row[2],
            "pred_task_id": row[3],
            "pred_task_name": row[4],
            # Internal format "PR_FS" -> API format "FS". str.removeprefix
            # is a no-op when the prefix is absent, matching the old
            # hand-rolled helper that was duplicated across this module.
            "pred_type": row[5].removeprefix("PR_"),
            "lag_hr_cnt": row[6],
        }
        for row in rows
    ]

    return relationships, total
|
||||
|
||||
|
||||
def get_predecessors(activity_id: str) -> list[dict]:
    """Get predecessor activities for a given activity.

    Args:
        activity_id: The task_id to find predecessors for

    Returns:
        List of predecessor activity dicts with relationship info
    """
    query = """
        SELECT a.task_id, a.task_code, a.task_name,
               r.pred_type, r.lag_hr_cnt
        FROM relationships r
        JOIN activities a ON r.pred_task_id = a.task_id
        WHERE r.task_id = ?
        ORDER BY a.task_code
    """

    with db.cursor() as cur:
        cur.execute(query, (activity_id,))
        rows = cur.fetchall()

    return [
        {
            "task_id": row[0],
            "task_code": row[1],
            "task_name": row[2],
            # Internal "PR_FS" form -> API "FS" form; removeprefix replaces
            # the locally duplicated format_pred_type helper.
            "relationship_type": row[3].removeprefix("PR_"),
            "lag_hr_cnt": row[4],
        }
        for row in rows
    ]
|
||||
|
||||
|
||||
def get_successors(activity_id: str) -> list[dict]:
    """Get successor activities for a given activity.

    Args:
        activity_id: The task_id to find successors for

    Returns:
        List of successor activity dicts with relationship info
    """
    query = """
        SELECT a.task_id, a.task_code, a.task_name,
               r.pred_type, r.lag_hr_cnt
        FROM relationships r
        JOIN activities a ON r.task_id = a.task_id
        WHERE r.pred_task_id = ?
        ORDER BY a.task_code
    """

    with db.cursor() as cur:
        cur.execute(query, (activity_id,))
        rows = cur.fetchall()

    return [
        {
            "task_id": row[0],
            "task_code": row[1],
            "task_name": row[2],
            # Internal "PR_FS" form -> API "FS" form; removeprefix replaces
            # the locally duplicated format_pred_type helper.
            "relationship_type": row[3].removeprefix("PR_"),
            "lag_hr_cnt": row[4],
        }
        for row in rows
    ]
|
||||
|
||||
|
||||
def get_project_summary(project_id: str) -> dict | None:
    """Get project summary information.

    Args:
        project_id: The project ID to get summary for

    Returns:
        Dictionary with project summary or None if not found
    """
    with db.cursor() as cur:
        cur.execute(
            """
            SELECT proj_id, proj_short_name, plan_start_date, plan_end_date
            FROM projects
            WHERE proj_id = ?
            """,
            (project_id,),
        )
        project_row = cur.fetchone()

    if project_row is None:
        return None

    def _count(sql: str) -> int:
        """Run a COUNT(*) statement and return its single value."""
        with db.cursor() as cur:
            return cur.execute(sql).fetchone()[0]

    # The database only ever holds one project, so no proj_id filter
    # is needed on these aggregates.
    activity_count = _count("SELECT COUNT(*) FROM activities")
    milestone_count = _count("SELECT COUNT(*) FROM activities WHERE task_type = 'TT_Mile'")
    critical_count = _count("SELECT COUNT(*) FROM activities WHERE driving_path_flag = 1")

    return {
        "project_id": project_row[0],
        "project_name": project_row[1],
        "plan_start_date": project_row[2],
        "plan_end_date": project_row[3],
        "activity_count": activity_count,
        "milestone_count": milestone_count,
        "critical_activity_count": critical_count,
    }
|
||||
|
||||
|
||||
def query_milestones() -> list[dict]:
    """Query all milestone activities.

    Returns:
        List of milestone activity dicts
    """
    sql = """
        SELECT task_id, task_code, task_name,
               target_start_date, target_end_date, status_code
        FROM activities
        WHERE task_type = 'TT_Mile'
        ORDER BY target_start_date, task_code
    """

    with db.cursor() as cur:
        cur.execute(sql)
        rows = cur.fetchall()

    # Keys listed in SELECT order so zip pairs each value correctly.
    keys = (
        "task_id",
        "task_code",
        "task_name",
        "target_start_date",
        "target_end_date",
        "status_code",
    )
    return [dict(zip(keys, row, strict=False)) for row in rows]
|
||||
|
||||
|
||||
def query_critical_path() -> list[dict]:
    """Query all activities on the critical path.

    Returns:
        List of critical path activity dicts ordered by start date
    """
    sql = """
        SELECT task_id, task_code, task_name, task_type,
               target_start_date, target_end_date,
               total_float_hr_cnt, status_code
        FROM activities
        WHERE driving_path_flag = 1
        ORDER BY target_start_date, task_code
    """

    with db.cursor() as cur:
        cur.execute(sql)
        rows = cur.fetchall()

    # Keys listed in SELECT order so zip pairs each value correctly.
    keys = (
        "task_id",
        "task_code",
        "task_name",
        "task_type",
        "target_start_date",
        "target_end_date",
        "total_float_hr_cnt",
        "status_code",
    )
    return [dict(zip(keys, row, strict=False)) for row in rows]
|
||||
77
src/xer_mcp/db/schema.py
Normal file
77
src/xer_mcp/db/schema.py
Normal file
@@ -0,0 +1,77 @@
|
||||
"""SQLite database schema for XER MCP Server."""
|
||||
|
||||
SCHEMA_SQL = """
|
||||
-- Projects
|
||||
CREATE TABLE IF NOT EXISTS projects (
|
||||
proj_id TEXT PRIMARY KEY,
|
||||
proj_short_name TEXT NOT NULL,
|
||||
plan_start_date TEXT,
|
||||
plan_end_date TEXT,
|
||||
loaded_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
-- Activities
|
||||
CREATE TABLE IF NOT EXISTS activities (
|
||||
task_id TEXT PRIMARY KEY,
|
||||
proj_id TEXT NOT NULL,
|
||||
wbs_id TEXT,
|
||||
task_code TEXT NOT NULL,
|
||||
task_name TEXT NOT NULL,
|
||||
task_type TEXT NOT NULL,
|
||||
target_start_date TEXT,
|
||||
target_end_date TEXT,
|
||||
act_start_date TEXT,
|
||||
act_end_date TEXT,
|
||||
total_float_hr_cnt REAL,
|
||||
driving_path_flag INTEGER DEFAULT 0,
|
||||
status_code TEXT,
|
||||
FOREIGN KEY (proj_id) REFERENCES projects(proj_id)
|
||||
);
|
||||
|
||||
-- Relationships
|
||||
CREATE TABLE IF NOT EXISTS relationships (
|
||||
task_pred_id TEXT PRIMARY KEY,
|
||||
task_id TEXT NOT NULL,
|
||||
pred_task_id TEXT NOT NULL,
|
||||
pred_type TEXT NOT NULL,
|
||||
lag_hr_cnt REAL DEFAULT 0,
|
||||
FOREIGN KEY (task_id) REFERENCES activities(task_id),
|
||||
FOREIGN KEY (pred_task_id) REFERENCES activities(task_id)
|
||||
);
|
||||
|
||||
-- WBS (Work Breakdown Structure)
|
||||
CREATE TABLE IF NOT EXISTS wbs (
|
||||
wbs_id TEXT PRIMARY KEY,
|
||||
proj_id TEXT NOT NULL,
|
||||
parent_wbs_id TEXT,
|
||||
wbs_short_name TEXT NOT NULL,
|
||||
wbs_name TEXT,
|
||||
FOREIGN KEY (proj_id) REFERENCES projects(proj_id),
|
||||
FOREIGN KEY (parent_wbs_id) REFERENCES wbs(wbs_id)
|
||||
);
|
||||
|
||||
-- Calendars (internal use only)
|
||||
CREATE TABLE IF NOT EXISTS calendars (
|
||||
clndr_id TEXT PRIMARY KEY,
|
||||
clndr_name TEXT NOT NULL,
|
||||
day_hr_cnt REAL,
|
||||
week_hr_cnt REAL
|
||||
);
|
||||
|
||||
-- Indexes for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_activities_proj ON activities(proj_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_activities_wbs ON activities(wbs_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_activities_type ON activities(task_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_activities_critical ON activities(driving_path_flag)
|
||||
WHERE driving_path_flag = 1;
|
||||
CREATE INDEX IF NOT EXISTS idx_activities_dates ON activities(target_start_date, target_end_date);
|
||||
CREATE INDEX IF NOT EXISTS idx_relationships_task ON relationships(task_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_relationships_pred ON relationships(pred_task_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_wbs_parent ON wbs(parent_wbs_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_wbs_proj ON wbs(proj_id);
|
||||
"""
|
||||
|
||||
|
||||
def get_schema() -> str:
    """Expose the complete SQLite DDL script for database initialization."""
    return SCHEMA_SQL
|
||||
58
src/xer_mcp/errors.py
Normal file
58
src/xer_mcp/errors.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""Error types for XER MCP Server."""
|
||||
|
||||
|
||||
class XerMcpError(Exception):
    """Base exception for XER MCP Server.

    Carries a machine-readable ``code`` alongside a human-readable
    ``message``; str(exc) renders as "CODE: message".
    """

    def __init__(self, code: str, message: str) -> None:
        """Record the error code and message on the instance."""
        super().__init__(f"{code}: {message}")
        self.code = code
        self.message = message
|
||||
|
||||
|
||||
class FileNotFoundError(XerMcpError):
    """Raised when the specified XER file does not exist.

    NOTE(review): this name shadows Python's builtin ``FileNotFoundError``
    within any module that imports it — an ``except FileNotFoundError``
    there catches this class, not the builtin OSError subclass. A rename
    (e.g. ``XerFileNotFoundError``) would avoid the ambiguity but breaks
    existing callers, so it is only flagged here.
    """

    def __init__(self, file_path: str) -> None:
        """Build a FILE_NOT_FOUND error naming the missing path."""
        super().__init__(
            "FILE_NOT_FOUND",
            f"XER file not found: {file_path}",
        )
|
||||
|
||||
|
||||
class ParseError(XerMcpError):
    """Raised when the XER file cannot be parsed."""

    def __init__(self, message: str) -> None:
        """Wrap *message* under the PARSE_ERROR code."""
        super().__init__("PARSE_ERROR", message)
|
||||
|
||||
|
||||
class NoFileLoadedError(XerMcpError):
    """Raised when a query is attempted before loading an XER file."""

    def __init__(self) -> None:
        """Build the fixed NO_FILE_LOADED error; takes no arguments."""
        super().__init__(
            "NO_FILE_LOADED",
            "No XER file is loaded. Use the load_xer tool first.",
        )
|
||||
|
||||
|
||||
class ProjectSelectionRequiredError(XerMcpError):
    """Raised when a multi-project file requires explicit project selection."""

    def __init__(self, available_projects: list[dict]) -> None:
        """Record the candidate projects so callers can offer a choice.

        Args:
            available_projects: Project dicts found in the file; exposed
                as ``self.available_projects`` for error reporting.
        """
        self.available_projects = available_projects
        super().__init__(
            "PROJECT_SELECTION_REQUIRED",
            "Multiple projects found. Please specify project_id.",
        )
|
||||
|
||||
|
||||
class ActivityNotFoundError(XerMcpError):
    """Raised when the specified activity does not exist."""

    def __init__(self, activity_id: str) -> None:
        """Build an ACTIVITY_NOT_FOUND error naming the missing ID."""
        super().__init__(
            "ACTIVITY_NOT_FOUND",
            f"Activity not found: {activity_id}",
        )
|
||||
17
src/xer_mcp/models/__init__.py
Normal file
17
src/xer_mcp/models/__init__.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Data models for XER MCP Server."""
|
||||
|
||||
from xer_mcp.models.activity import Activity
|
||||
from xer_mcp.models.calendar import Calendar
|
||||
from xer_mcp.models.pagination import PaginationMetadata
|
||||
from xer_mcp.models.project import Project
|
||||
from xer_mcp.models.relationship import Relationship
|
||||
from xer_mcp.models.wbs import WBS
|
||||
|
||||
__all__ = [
|
||||
"Activity",
|
||||
"Calendar",
|
||||
"PaginationMetadata",
|
||||
"Project",
|
||||
"Relationship",
|
||||
"WBS",
|
||||
]
|
||||
23
src/xer_mcp/models/activity.py
Normal file
23
src/xer_mcp/models/activity.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""Activity data model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@dataclass
class Activity:
    """A unit of work in the schedule (one XER TASK row)."""

    task_id: str  # unique internal ID
    proj_id: str  # owning project
    task_code: str  # user-visible activity code
    task_name: str  # display name
    task_type: str  # e.g. TT_Task, TT_Mile
    wbs_id: str | None = None  # owning WBS element, if assigned
    target_start_date: datetime | None = None  # planned start
    target_end_date: datetime | None = None  # planned finish
    act_start_date: datetime | None = None  # actual start, once started
    act_end_date: datetime | None = None  # actual finish, once complete
    total_float_hr_cnt: float | None = None  # total float in hours
    driving_path_flag: bool = False  # True when on the driving (critical) path
    status_code: str | None = None  # P6 status code -- TODO confirm value set
|
||||
13
src/xer_mcp/models/calendar.py
Normal file
13
src/xer_mcp/models/calendar.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Calendar data model (internal use only)."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
class Calendar:
    """Work schedule definition. Not exposed via MCP tools."""

    clndr_id: str  # unique calendar ID
    clndr_name: str  # display name
    day_hr_cnt: float | None = None  # working hours per day
    week_hr_cnt: float | None = None  # working hours per week
|
||||
13
src/xer_mcp/models/pagination.py
Normal file
13
src/xer_mcp/models/pagination.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Pagination metadata model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
class PaginationMetadata:
    """Response wrapper for paginated queries."""

    total_count: int  # rows matching the query, ignoring pagination
    offset: int  # rows skipped before this page
    limit: int  # maximum rows in this page
    has_more: bool  # whether further pages exist -- presumably offset+limit < total_count; confirm at producer
|
||||
15
src/xer_mcp/models/project.py
Normal file
15
src/xer_mcp/models/project.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Project data model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@dataclass
class Project:
    """Top-level container representing a P6 project."""

    proj_id: str  # unique project ID
    proj_short_name: str  # short display name
    plan_start_date: datetime | None = None  # planned project start
    plan_end_date: datetime | None = None  # planned project finish
    loaded_at: datetime | None = None  # when the project was loaded into the DB
|
||||
14
src/xer_mcp/models/relationship.py
Normal file
14
src/xer_mcp/models/relationship.py
Normal file
@@ -0,0 +1,14 @@
|
||||
"""Relationship data model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
class Relationship:
    """A dependency link between two activities."""

    task_pred_id: str  # unique relationship ID
    task_id: str  # successor activity
    pred_task_id: str  # predecessor activity
    pred_type: str  # internal P6 form, e.g. "PR_FS"
    lag_hr_cnt: float = 0.0  # lag in hours
|
||||
14
src/xer_mcp/models/wbs.py
Normal file
14
src/xer_mcp/models/wbs.py
Normal file
@@ -0,0 +1,14 @@
|
||||
"""Work Breakdown Structure (WBS) data model."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
class WBS:
    """Hierarchical organization of activities."""

    wbs_id: str  # unique WBS element ID
    proj_id: str  # owning project
    wbs_short_name: str  # short code
    parent_wbs_id: str | None = None  # None for root elements
    wbs_name: str | None = None  # full descriptive name
|
||||
1
src/xer_mcp/parser/__init__.py
Normal file
1
src/xer_mcp/parser/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""XER file parser module."""
|
||||
27
src/xer_mcp/parser/table_handlers/__init__.py
Normal file
27
src/xer_mcp/parser/table_handlers/__init__.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""XER table handlers module."""
|
||||
|
||||
from xer_mcp.parser.table_handlers.base import TableHandler
|
||||
from xer_mcp.parser.table_handlers.calendar import CalendarHandler
|
||||
from xer_mcp.parser.table_handlers.project import ProjectHandler
|
||||
from xer_mcp.parser.table_handlers.projwbs import ProjwbsHandler
|
||||
from xer_mcp.parser.table_handlers.task import TaskHandler
|
||||
from xer_mcp.parser.table_handlers.taskpred import TaskpredHandler
|
||||
|
||||
# Registry mapping XER table names to the handler class that parses
# that table's rows; tables without an entry here are skipped.
TABLE_HANDLERS: dict[str, type[TableHandler]] = {
    "PROJECT": ProjectHandler,
    "TASK": TaskHandler,
    "TASKPRED": TaskpredHandler,
    "PROJWBS": ProjwbsHandler,
    "CALENDAR": CalendarHandler,
}

# Public API of this subpackage.
__all__ = [
    "CalendarHandler",
    "ProjectHandler",
    "ProjwbsHandler",
    "TABLE_HANDLERS",
    "TableHandler",
    "TaskHandler",
    "TaskpredHandler",
]
|
||||
30
src/xer_mcp/parser/table_handlers/base.py
Normal file
30
src/xer_mcp/parser/table_handlers/base.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Base class for XER table handlers."""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class TableHandler(ABC):
    """Abstract base class for XER table handlers.

    Each handler is responsible for parsing a specific table type
    from the XER file and returning structured data. Concrete
    subclasses are registered in TABLE_HANDLERS by table name.
    """

    @property
    @abstractmethod
    def table_name(self) -> str:
        """Return the XER table name this handler processes (e.g., 'PROJECT', 'TASK')."""
        ...

    @abstractmethod
    def parse_row(self, fields: list[str], values: list[str]) -> dict | None:
        """Parse a single row of data from the XER file.

        Args:
            fields: List of column names from the %F line
            values: List of values from the %R line

        Returns:
            Dictionary of parsed data, or None if the row should be skipped
        """
        ...
|
||||
32
src/xer_mcp/parser/table_handlers/calendar.py
Normal file
32
src/xer_mcp/parser/table_handlers/calendar.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""CALENDAR table handler."""
|
||||
|
||||
from xer_mcp.parser.table_handlers.base import TableHandler
|
||||
|
||||
|
||||
class CalendarHandler(TableHandler):
    """Handler for CALENDAR table in XER files."""

    @property
    def table_name(self) -> str:
        """Return the XER table name this handler processes."""
        return "CALENDAR"

    @staticmethod
    def _to_float(raw: str) -> float | None:
        """Parse a numeric XER field; empty or malformed values become None.

        FIX: the previous bare float() raised ValueError on a malformed
        numeric, aborting the parse of the whole file for one bad field.
        """
        if not raw:
            return None
        try:
            return float(raw)
        except ValueError:
            return None

    def parse_row(self, fields: list[str], values: list[str]) -> dict | None:
        """Parse a CALENDAR row.

        Args:
            fields: List of column names from the %F line
            values: List of values from the %R line

        Returns:
            Calendar dict; numeric hour counts are None when missing
            or unparseable.
        """
        if len(values) < len(fields):
            # Short rows occur when trailing fields are empty; pad them.
            values = values + [""] * (len(fields) - len(values))

        data = dict(zip(fields, values, strict=False))

        return {
            "clndr_id": data.get("clndr_id", ""),
            "clndr_name": data.get("clndr_name", ""),
            "day_hr_cnt": self._to_float(data.get("day_hr_cnt", "")),
            "week_hr_cnt": self._to_float(data.get("week_hr_cnt", "")),
        }
|
||||
38
src/xer_mcp/parser/table_handlers/project.py
Normal file
38
src/xer_mcp/parser/table_handlers/project.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""PROJECT table handler."""
|
||||
|
||||
from xer_mcp.parser.table_handlers.base import TableHandler
|
||||
|
||||
|
||||
def convert_date(date_str: str | None) -> str | None:
|
||||
"""Convert XER date format to ISO8601.
|
||||
|
||||
XER format: "YYYY-MM-DD HH:MM"
|
||||
ISO8601 format: "YYYY-MM-DDTHH:MM:SS"
|
||||
"""
|
||||
if not date_str or date_str.strip() == "":
|
||||
return None
|
||||
# Replace space with T and add seconds
|
||||
return date_str.replace(" ", "T") + ":00"
|
||||
|
||||
|
||||
class ProjectHandler(TableHandler):
    """Handler for PROJECT table in XER files."""

    @property
    def table_name(self) -> str:
        return "PROJECT"

    def parse_row(self, fields: list[str], values: list[str]) -> dict | None:
        """Parse a PROJECT row into a normalized dict."""
        shortfall = len(fields) - len(values)
        if shortfall > 0:
            # Pad short rows so zip() pairs every field with a value.
            values = [*values, *([""] * shortfall)]

        row = dict(zip(fields, values, strict=False))

        return {
            "proj_id": row.get("proj_id", ""),
            "proj_short_name": row.get("proj_short_name", ""),
            # Dates are normalized to ISO8601 (None when blank).
            "plan_start_date": convert_date(row.get("plan_start_date")),
            "plan_end_date": convert_date(row.get("plan_end_date")),
        }
|
||||
26
src/xer_mcp/parser/table_handlers/projwbs.py
Normal file
26
src/xer_mcp/parser/table_handlers/projwbs.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""PROJWBS table handler."""
|
||||
|
||||
from xer_mcp.parser.table_handlers.base import TableHandler
|
||||
|
||||
|
||||
class ProjwbsHandler(TableHandler):
    """Handler for PROJWBS (WBS) table in XER files."""

    @property
    def table_name(self) -> str:
        return "PROJWBS"

    def parse_row(self, fields: list[str], values: list[str]) -> dict | None:
        """Parse a PROJWBS row into a normalized dict."""
        shortfall = len(fields) - len(values)
        if shortfall > 0:
            # Pad short rows so zip() pairs every field with a value.
            values = [*values, *([""] * shortfall)]

        row = dict(zip(fields, values, strict=False))

        return {
            "wbs_id": row.get("wbs_id", ""),
            "proj_id": row.get("proj_id", ""),
            "parent_wbs_id": row.get("parent_wbs_id", ""),
            "wbs_short_name": row.get("wbs_short_name", ""),
            # Empty string collapses to None for the optional long name.
            "wbs_name": row.get("wbs_name") or None,
        }
|
||||
43
src/xer_mcp/parser/table_handlers/task.py
Normal file
43
src/xer_mcp/parser/table_handlers/task.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""TASK table handler."""
|
||||
|
||||
from xer_mcp.parser.table_handlers.base import TableHandler
|
||||
from xer_mcp.parser.table_handlers.project import convert_date
|
||||
|
||||
|
||||
class TaskHandler(TableHandler):
    """Handler for TASK table in XER files."""

    @property
    def table_name(self) -> str:
        return "TASK"

    @staticmethod
    def _to_float(raw: str | None) -> float | None:
        """Return *raw* parsed as float, or None when empty or malformed.

        A malformed numeric cell in an export should not abort the whole
        parse, so ValueError is treated the same as a blank value.
        """
        if not raw:
            return None
        try:
            return float(raw)
        except ValueError:
            return None

    def parse_row(self, fields: list[str], values: list[str]) -> dict | None:
        """Parse a TASK row.

        Short rows are padded with empty strings so every field gets a value.
        """
        if len(values) < len(fields):
            values = values + [""] * (len(fields) - len(values))

        data = dict(zip(fields, values, strict=False))

        # driving_path_flag: "Y"/"N" -> bool. A missing or empty cell is
        # treated as not on the driving path.
        driving_path = data.get("driving_path_flag", "N").upper() == "Y"

        return {
            "task_id": data.get("task_id", ""),
            "proj_id": data.get("proj_id", ""),
            "wbs_id": data.get("wbs_id") or None,
            "task_code": data.get("task_code", ""),
            "task_name": data.get("task_name", ""),
            "task_type": data.get("task_type", ""),
            "status_code": data.get("status_code") or None,
            # Dates are normalized to ISO8601 (None when blank).
            "target_start_date": convert_date(data.get("target_start_date")),
            "target_end_date": convert_date(data.get("target_end_date")),
            "act_start_date": convert_date(data.get("act_start_date")),
            "act_end_date": convert_date(data.get("act_end_date")),
            # Blank or non-numeric float counts become None.
            "total_float_hr_cnt": self._to_float(data.get("total_float_hr_cnt", "")),
            "driving_path_flag": driving_path,
        }
|
||||
31
src/xer_mcp/parser/table_handlers/taskpred.py
Normal file
31
src/xer_mcp/parser/table_handlers/taskpred.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""TASKPRED table handler."""
|
||||
|
||||
from xer_mcp.parser.table_handlers.base import TableHandler
|
||||
|
||||
|
||||
class TaskpredHandler(TableHandler):
    """Handler for TASKPRED (relationships) table in XER files."""

    @property
    def table_name(self) -> str:
        return "TASKPRED"

    def parse_row(self, fields: list[str], values: list[str]) -> dict | None:
        """Parse a TASKPRED row into a normalized dict."""
        shortfall = len(fields) - len(values)
        if shortfall > 0:
            # Pad short rows so zip() pairs every field with a value.
            values = [*values, *([""] * shortfall)]

        row = dict(zip(fields, values, strict=False))

        # Relationship lag in hours; a blank cell means no lag.
        raw_lag = row.get("lag_hr_cnt", "0")
        lag_hours = float(raw_lag) if raw_lag else 0.0

        return {
            "task_pred_id": row.get("task_pred_id", ""),
            "task_id": row.get("task_id", ""),
            "pred_task_id": row.get("pred_task_id", ""),
            "proj_id": row.get("proj_id", ""),
            "pred_type": row.get("pred_type", ""),
            "lag_hr_cnt": lag_hours,
        }
|
||||
127
src/xer_mcp/parser/xer_parser.py
Normal file
127
src/xer_mcp/parser/xer_parser.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""XER file parser."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
from xer_mcp.errors import FileNotFoundError, ParseError
|
||||
from xer_mcp.parser.table_handlers import TABLE_HANDLERS
|
||||
|
||||
|
||||
@dataclass
class ParsedXer:
    """Container for parsed XER data.

    One list per supported XER table; each element is the dict produced by
    that table's handler. Field order is part of the positional init
    signature and must not change.
    """

    projects: list[dict] = field(default_factory=list)  # PROJECT rows
    tasks: list[dict] = field(default_factory=list)  # TASK rows
    taskpreds: list[dict] = field(default_factory=list)  # TASKPRED rows
    projwbs: list[dict] = field(default_factory=list)  # PROJWBS rows
    calendars: list[dict] = field(default_factory=list)  # CALENDAR rows
|
||||
|
||||
|
||||
class XerParser:
    """Parser for Primavera P6 XER files.

    XER files are tab-delimited with the following structure:
    - ERMHDR line: header with version info
    - %T lines: table name declarations
    - %F lines: field (column) names
    - %R lines: data rows
    """

    # Maps an XER table name to the ParsedXer attribute that collects its rows.
    _RESULT_ATTRS = {
        "PROJECT": "projects",
        "TASK": "tasks",
        "TASKPRED": "taskpreds",
        "PROJWBS": "projwbs",
        "CALENDAR": "calendars",
    }

    def __init__(self) -> None:
        # Handlers are stateless, so build each one once instead of
        # instantiating a fresh handler per data row.
        self._handlers = {name: cls() for name, cls in TABLE_HANDLERS.items()}

    def parse(self, file_path: Path | str) -> ParsedXer:
        """Parse an XER file and return structured data.

        Args:
            file_path: Path to the XER file

        Returns:
            ParsedXer containing all parsed tables

        Raises:
            FileNotFoundError: If file doesn't exist
            ParseError: If file is invalid or cannot be parsed
        """
        path = Path(file_path)
        if not path.exists():
            raise FileNotFoundError(str(path))

        try:
            # errors="replace" keeps parsing alive on stray non-UTF-8 bytes.
            content = path.read_text(encoding="utf-8", errors="replace")
        except OSError as e:
            raise ParseError(f"Cannot read file: {e}") from e

        return self._parse_content(content)

    def _parse_content(self, content: str) -> ParsedXer:
        """Parse XER content string.

        Raises:
            ParseError: If the ERMHDR header is missing or no PROJECT data
                is found.
        """
        lines = content.split("\n")

        # The first line must be the ERMHDR header. (str.split always
        # returns at least one element, so lines[0] is safe even for "".)
        if not lines[0].strip().startswith("ERMHDR"):
            raise ParseError("Invalid XER file: missing ERMHDR header")

        result = ParsedXer()
        current_table: str | None = None
        current_fields: list[str] = []

        for line in lines[1:]:
            line = line.rstrip("\r\n")
            if not line:
                continue

            parts = line.split("\t")
            marker = parts[0]

            if marker == "%T":
                # Table declaration; resets the active field list.
                if len(parts) < 2:
                    continue
                current_table = parts[1]
                current_fields = []

            elif marker == "%F":
                # Column names for the current table.
                current_fields = parts[1:]

            elif marker == "%R":
                # Data row; only meaningful once a table and fields are known.
                if current_table and current_fields:
                    row_data = self._parse_row(current_table, current_fields, parts[1:])
                    if row_data:
                        self._add_to_result(result, current_table, row_data)

        # Validate we got at least some data.
        if not result.projects:
            raise ParseError("No PROJECT data found in XER file")

        return result

    def _parse_row(self, table_name: str, fields: list[str], values: list[str]) -> dict | None:
        """Parse a single data row using the appropriate handler."""
        handler = self._handlers.get(table_name)
        if handler is None:
            # Unknown table, skip.
            return None
        return handler.parse_row(fields, values)

    def _add_to_result(self, result: ParsedXer, table_name: str, row_data: dict) -> None:
        """Append a parsed row to the matching ParsedXer list."""
        attr = self._RESULT_ATTRS.get(table_name)
        if attr is not None:
            getattr(result, attr).append(row_data)
|
||||
273
src/xer_mcp/server.py
Normal file
273
src/xer_mcp/server.py
Normal file
@@ -0,0 +1,273 @@
|
||||
"""MCP Server for XER file analysis."""
|
||||
|
||||
from mcp.server import Server
|
||||
from mcp.server.stdio import stdio_server
|
||||
from mcp.types import TextContent, Tool
|
||||
|
||||
from xer_mcp.db import db
|
||||
|
||||
# Create MCP server instance
|
||||
server = Server("xer-mcp")
|
||||
|
||||
# Server state
|
||||
_file_loaded: bool = False
|
||||
_current_project_id: str | None = None
|
||||
|
||||
|
||||
def is_file_loaded() -> bool:
|
||||
"""Check if an XER file has been loaded."""
|
||||
return _file_loaded
|
||||
|
||||
|
||||
def get_current_project_id() -> str | None:
|
||||
"""Get the currently selected project ID."""
|
||||
return _current_project_id
|
||||
|
||||
|
||||
def set_file_loaded(loaded: bool, project_id: str | None = None) -> None:
|
||||
"""Set the file loaded state."""
|
||||
global _file_loaded, _current_project_id
|
||||
_file_loaded = loaded
|
||||
_current_project_id = project_id
|
||||
|
||||
|
||||
@server.list_tools()
async def list_tools() -> list[Tool]:
    """List available MCP tools.

    Several tools share identical input schemas; the nested factories below
    build each shared schema fresh so the returned Tool objects never alias
    one another's dicts.
    """

    def _activity_id_schema() -> dict:
        # Shared schema for tools keyed by a single activity_id.
        return {
            "type": "object",
            "properties": {
                "activity_id": {
                    "type": "string",
                    "description": "The task_id of the activity",
                },
            },
            "required": ["activity_id"],
        }

    def _no_args_schema() -> dict:
        # Shared schema for tools that take no arguments.
        return {"type": "object", "properties": {}}

    def _pagination_properties(noun: str) -> dict:
        # Shared limit/offset properties; *noun* customizes the descriptions.
        return {
            "limit": {
                "type": "integer",
                "default": 100,
                "minimum": 1,
                "maximum": 1000,
                "description": f"Maximum number of {noun} to return",
            },
            "offset": {
                "type": "integer",
                "default": 0,
                "minimum": 0,
                "description": f"Number of {noun} to skip",
            },
        }

    return [
        Tool(
            name="load_xer",
            description="Load a Primavera P6 XER file and parse its schedule data. "
            "For multi-project files, specify project_id to select a project.",
            inputSchema={
                "type": "object",
                "properties": {
                    "file_path": {
                        "type": "string",
                        "description": "Absolute path to the XER file",
                    },
                    "project_id": {
                        "type": "string",
                        "description": "Project ID to select (required for multi-project files)",
                    },
                },
                "required": ["file_path"],
            },
        ),
        Tool(
            name="list_activities",
            description="List activities from the loaded XER file with optional filtering and pagination.",
            inputSchema={
                "type": "object",
                "properties": {
                    "start_date": {
                        "type": "string",
                        "format": "date",
                        "description": "Filter activities starting on or after this date (YYYY-MM-DD)",
                    },
                    "end_date": {
                        "type": "string",
                        "format": "date",
                        "description": "Filter activities ending on or before this date (YYYY-MM-DD)",
                    },
                    "wbs_id": {
                        "type": "string",
                        "description": "Filter by WBS element ID",
                    },
                    "activity_type": {
                        "type": "string",
                        "enum": ["TT_Task", "TT_Mile", "TT_LOE", "TT_WBS", "TT_Rsrc"],
                        "description": "Filter by activity type",
                    },
                    **_pagination_properties("activities"),
                },
            },
        ),
        Tool(
            name="get_activity",
            description="Get detailed information for a specific activity by ID.",
            inputSchema=_activity_id_schema(),
        ),
        Tool(
            name="list_relationships",
            description="List all activity relationships (dependencies) with pagination.",
            inputSchema={
                "type": "object",
                "properties": _pagination_properties("relationships"),
            },
        ),
        Tool(
            name="get_predecessors",
            description="Get all predecessor activities for a given activity.",
            inputSchema=_activity_id_schema(),
        ),
        Tool(
            name="get_successors",
            description="Get all successor activities for a given activity.",
            inputSchema=_activity_id_schema(),
        ),
        Tool(
            name="get_project_summary",
            description="Get a summary of the loaded project including dates and activity counts.",
            inputSchema=_no_args_schema(),
        ),
        Tool(
            name="list_milestones",
            description="List all milestone activities in the loaded project.",
            inputSchema=_no_args_schema(),
        ),
        Tool(
            name="get_critical_path",
            description="Get all activities on the critical path that determine project duration.",
            inputSchema=_no_args_schema(),
        ),
    ]
|
||||
|
||||
|
||||
@server.call_tool()
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
    """Handle MCP tool calls.

    Tool implementations are imported lazily inside each branch so server
    startup stays fast and unrelated tools are unaffected by an import
    failure in one module.

    Args:
        name: The tool name from the MCP request.
        arguments: The tool's arguments as a dict.

    Returns:
        A single TextContent item carrying the tool's JSON result.

    Raises:
        ValueError: If the tool name is not recognized.
    """
    import json

    def _as_content(result: dict) -> list[TextContent]:
        # Every tool returns a JSON-serializable dict; render as pretty JSON.
        return [TextContent(type="text", text=json.dumps(result, indent=2))]

    if name == "load_xer":
        from xer_mcp.tools.load_xer import load_xer

        result = await load_xer(
            file_path=arguments["file_path"],
            project_id=arguments.get("project_id"),
        )
        return _as_content(result)

    if name == "list_activities":
        from xer_mcp.tools.list_activities import list_activities

        result = await list_activities(
            start_date=arguments.get("start_date"),
            end_date=arguments.get("end_date"),
            wbs_id=arguments.get("wbs_id"),
            activity_type=arguments.get("activity_type"),
            limit=arguments.get("limit", 100),
            offset=arguments.get("offset", 0),
        )
        return _as_content(result)

    if name == "get_activity":
        from xer_mcp.tools.get_activity import get_activity

        return _as_content(await get_activity(activity_id=arguments["activity_id"]))

    if name == "list_relationships":
        from xer_mcp.tools.list_relationships import list_relationships

        result = await list_relationships(
            limit=arguments.get("limit", 100),
            offset=arguments.get("offset", 0),
        )
        return _as_content(result)

    if name == "get_predecessors":
        from xer_mcp.tools.get_predecessors import get_predecessors

        return _as_content(await get_predecessors(activity_id=arguments["activity_id"]))

    if name == "get_successors":
        from xer_mcp.tools.get_successors import get_successors

        return _as_content(await get_successors(activity_id=arguments["activity_id"]))

    if name == "get_project_summary":
        from xer_mcp.tools.get_project_summary import get_project_summary

        return _as_content(await get_project_summary())

    if name == "list_milestones":
        from xer_mcp.tools.list_milestones import list_milestones

        return _as_content(await list_milestones())

    if name == "get_critical_path":
        from xer_mcp.tools.get_critical_path import get_critical_path

        return _as_content(await get_critical_path())

    raise ValueError(f"Unknown tool: {name}")
|
||||
|
||||
|
||||
async def run_server() -> None:
    """Run the MCP server with stdio transport."""
    # Create the in-memory schedule database before accepting requests.
    db.initialize()

    async with stdio_server() as (reader, writer):
        options = server.create_initialization_options()
        await server.run(reader, writer, options)
|
||||
1
src/xer_mcp/tools/__init__.py
Normal file
1
src/xer_mcp/tools/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""MCP tools for XER file analysis."""
|
||||
34
src/xer_mcp/tools/get_activity.py
Normal file
34
src/xer_mcp/tools/get_activity.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""get_activity MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import get_activity_by_id
|
||||
from xer_mcp.server import is_file_loaded
|
||||
|
||||
|
||||
async def get_activity(activity_id: str) -> dict:
    """Get detailed information for a specific activity by ID.

    Args:
        activity_id: The task_id of the activity

    Returns:
        Dictionary with complete activity details or error
    """
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    record = get_activity_by_id(activity_id)
    if record is not None:
        return record

    # No matching task_id in the loaded schedule.
    return {
        "error": {
            "code": "ACTIVITY_NOT_FOUND",
            "message": f"Activity not found: {activity_id}",
        }
    }
|
||||
36
src/xer_mcp/tools/get_critical_path.py
Normal file
36
src/xer_mcp/tools/get_critical_path.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""get_critical_path MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import query_critical_path
|
||||
from xer_mcp.server import is_file_loaded
|
||||
|
||||
|
||||
async def get_critical_path() -> dict:
    """Get all activities on the critical path.

    Returns activities where driving_path_flag is set, ordered by target
    start date. The critical path determines the minimum project duration.

    Returns:
        Dictionary with list of critical path activities, each containing:
        - task_id: Activity ID
        - task_code: Activity code
        - task_name: Activity name
        - task_type: Activity type (TT_Task, TT_Mile, etc.)
        - target_start_date: Target start date
        - target_end_date: Target end date
        - total_float_hr_cnt: Total float in hours
        - status_code: Activity status
    """
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    return {"critical_activities": query_critical_path()}
|
||||
29
src/xer_mcp/tools/get_predecessors.py
Normal file
29
src/xer_mcp/tools/get_predecessors.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""get_predecessors MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import get_predecessors as query_predecessors
|
||||
from xer_mcp.server import is_file_loaded
|
||||
|
||||
|
||||
async def get_predecessors(activity_id: str) -> dict:
    """Get predecessor activities for a given activity.

    Args:
        activity_id: The task_id to find predecessors for

    Returns:
        Dictionary with activity_id and list of predecessor activities
    """
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    return {
        "activity_id": activity_id,
        "predecessors": query_predecessors(activity_id),
    }
|
||||
45
src/xer_mcp/tools/get_project_summary.py
Normal file
45
src/xer_mcp/tools/get_project_summary.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""get_project_summary MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import get_project_summary as query_project_summary
|
||||
from xer_mcp.server import get_current_project_id, is_file_loaded
|
||||
|
||||
|
||||
async def get_project_summary() -> dict:
    """Get a summary of the loaded project.

    Returns:
        Dictionary with project summary information including:
        - project_name: Name of the project
        - plan_start_date: Planned start date
        - plan_end_date: Planned end date
        - activity_count: Total number of activities
        - milestone_count: Number of milestone activities
        - critical_activity_count: Number of activities on critical path
    """
    # Guard clauses: each precondition failure maps to a distinct error code.
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    proj_id = get_current_project_id()
    if proj_id is None:
        return {
            "error": {
                "code": "NO_PROJECT_SELECTED",
                "message": "No project is selected.",
            }
        }

    summary = query_project_summary(proj_id)
    if summary is not None:
        return summary

    return {
        "error": {
            "code": "PROJECT_NOT_FOUND",
            "message": f"Project with ID {proj_id} not found.",
        }
    }
|
||||
29
src/xer_mcp/tools/get_successors.py
Normal file
29
src/xer_mcp/tools/get_successors.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""get_successors MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import get_successors as query_successors
|
||||
from xer_mcp.server import is_file_loaded
|
||||
|
||||
|
||||
async def get_successors(activity_id: str) -> dict:
    """Get successor activities for a given activity.

    Args:
        activity_id: The task_id to find successors for

    Returns:
        Dictionary with activity_id and list of successor activities
    """
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    return {
        "activity_id": activity_id,
        "successors": query_successors(activity_id),
    }
|
||||
55
src/xer_mcp/tools/list_activities.py
Normal file
55
src/xer_mcp/tools/list_activities.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""list_activities MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import query_activities
|
||||
from xer_mcp.server import is_file_loaded
|
||||
|
||||
|
||||
async def list_activities(
    start_date: str | None = None,
    end_date: str | None = None,
    wbs_id: str | None = None,
    activity_type: str | None = None,
    limit: int = 100,
    offset: int = 0,
) -> dict:
    """List activities from the loaded XER file with optional filtering.

    Args:
        start_date: Filter activities starting on or after this date (YYYY-MM-DD)
        end_date: Filter activities ending on or before this date (YYYY-MM-DD)
        wbs_id: Filter by WBS element ID
        activity_type: Filter by activity type (TT_Task, TT_Mile, etc.)
        limit: Maximum number of activities to return (default 100)
        offset: Number of activities to skip (default 0)

    Returns:
        Dictionary with activities list and pagination metadata
    """
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    rows, total = query_activities(
        limit=limit,
        offset=offset,
        start_date=start_date,
        end_date=end_date,
        wbs_id=wbs_id,
        activity_type=activity_type,
    )

    return {
        "activities": rows,
        "pagination": {
            "total_count": total,
            "offset": offset,
            "limit": limit,
            # More pages remain when this page doesn't reach the total.
            "has_more": offset + len(rows) < total,
        },
    }
|
||||
31
src/xer_mcp/tools/list_milestones.py
Normal file
31
src/xer_mcp/tools/list_milestones.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""list_milestones MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import query_milestones
|
||||
from xer_mcp.server import is_file_loaded
|
||||
|
||||
|
||||
async def list_milestones() -> dict:
    """List all milestone activities in the loaded project.

    Returns:
        Dictionary with list of milestones, each containing:
        - task_id: Activity ID
        - task_code: Activity code
        - task_name: Activity name
        - target_start_date: Target start date
        - target_end_date: Target end date
        - status_code: Activity status
    """
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    return {"milestones": query_milestones()}
|
||||
40
src/xer_mcp/tools/list_relationships.py
Normal file
40
src/xer_mcp/tools/list_relationships.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""list_relationships MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db.queries import query_relationships
|
||||
from xer_mcp.server import is_file_loaded
|
||||
|
||||
|
||||
async def list_relationships(
    limit: int = 100,
    offset: int = 0,
) -> dict:
    """List all relationships (dependencies) with pagination.

    Args:
        limit: Maximum number of relationships to return (default 100)
        offset: Number of relationships to skip (default 0)

    Returns:
        Dictionary with relationships list and pagination metadata
    """
    if not is_file_loaded():
        return {
            "error": {
                "code": "NO_FILE_LOADED",
                "message": "No XER file is loaded. Use the load_xer tool first.",
            }
        }

    rows, total = query_relationships(limit=limit, offset=offset)

    return {
        "relationships": rows,
        "pagination": {
            "total_count": total,
            "offset": offset,
            "limit": limit,
            # More pages remain when this page doesn't reach the total.
            "has_more": offset + len(rows) < total,
        },
    }
|
||||
86
src/xer_mcp/tools/load_xer.py
Normal file
86
src/xer_mcp/tools/load_xer.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""load_xer MCP tool implementation."""
|
||||
|
||||
from xer_mcp.db import db
|
||||
from xer_mcp.db.loader import get_activity_count, get_relationship_count, load_parsed_data
|
||||
from xer_mcp.errors import FileNotFoundError, ParseError
|
||||
from xer_mcp.parser.xer_parser import XerParser
|
||||
from xer_mcp.server import set_file_loaded
|
||||
|
||||
|
||||
async def load_xer(file_path: str, project_id: str | None = None) -> dict:
    """Load a Primavera P6 XER file and parse its schedule data.

    Args:
        file_path: Absolute path to the XER file
        project_id: Project ID to select (required for multi-project files)

    Returns:
        Dictionary with success status and project info or error details
    """
    # Ensure database is initialized
    if not db.is_initialized:
        db.initialize()

    parser = XerParser()

    try:
        parsed = parser.parse(file_path)
    except (FileNotFoundError, ParseError) as e:
        # Both parser exceptions carry a machine-readable code and message.
        return {
            "success": False,
            "error": {"code": e.code, "message": e.message},
        }

    # Multi-project files require an explicit project selection.
    if len(parsed.projects) > 1 and project_id is None:
        available = [
            {"proj_id": p["proj_id"], "proj_short_name": p["proj_short_name"]}
            for p in parsed.projects
        ]
        return {
            "success": False,
            "available_projects": available,
            "message": "Multiple projects found. Please specify project_id.",
        }

    # Auto-select if single project (the parser guarantees at least one).
    if project_id is None:
        project_id = parsed.projects[0]["proj_id"]

    # Find the selected project
    project = next((p for p in parsed.projects if p["proj_id"] == project_id), None)
    if project is None:
        return {
            "success": False,
            "error": {
                "code": "PROJECT_NOT_FOUND",
                "message": f"Project {project_id} not found in file",
            },
        }

    # Load data into database
    load_parsed_data(parsed, project_id)

    # Mark file as loaded
    set_file_loaded(True, project_id)

    return {
        "success": True,
        "project": {
            "proj_id": project["proj_id"],
            "proj_short_name": project["proj_short_name"],
            "plan_start_date": project["plan_start_date"],
            "plan_end_date": project["plan_end_date"],
        },
        "activity_count": get_activity_count(),
        "relationship_count": get_relationship_count(),
    }
|
||||
Reference in New Issue
Block a user