"""Entry point for computing DPS metrics from TL combat logs."""
from __future__ import annotations
import argparse
import json
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Tuple
from time import perf_counter
from dps_logs.event_store import DuckDBEventStore, EventStoreError, build_event_rows
from dps_logs.metrics import build_summary, summarize_run
from dps_logs.parser import load_log_file, select_log_files
from dps_logs.reporting import build_markdown_report
ROOT_DIR = Path(__file__).parent
DEFAULT_SAMPLE_PATH = ROOT_DIR / "sample_data" / "example_log_01.txt"
SMOKE_OUTPUT_PATH = ROOT_DIR / "reports" / "packaged_smoke.json"
# Canned DuckDB queries replayed by the packaged smoke test. Each entry carries
# a machine name, a human-readable description, and the SQL run against the
# in-memory "events" table populated from the bundled sample log.
SMOKE_QUERIES: Tuple[Dict[str, str], ...] = (
    {
        "name": "top_damage_skills",
        "description": "Top 5 skills by total damage and hit counts.",
        "sql": """
SELECT
skill_name,
SUM(damage) AS total_damage,
COUNT(*) AS total_events,
SUM(CASE WHEN is_crit THEN 1 ELSE 0 END) AS crit_hits
FROM events
GROUP BY 1
ORDER BY total_damage DESC
LIMIT 5
""",
    },
    {
        "name": "best_runs",
        "description": "Highest DPS runs from the sample log.",
        "sql": """
WITH per_run AS (
SELECT
run_id,
MIN(ts) AS start_ts,
MAX(ts) AS end_ts,
SUM(damage) AS total_damage
FROM events
GROUP BY 1
)
SELECT
run_id,
total_damage,
ROUND(total_damage / NULLIF(DATEDIFF('second', start_ts, end_ts), 0), 3) AS dps
FROM per_run
ORDER BY dps DESC
LIMIT 3
""",
    },
)
# Optional hook for surfacing human-readable progress messages; callers pass
# a callable taking one string, or None to silence progress entirely.
ProgressCallback = Optional[Callable[[str], None]]
def analyze_logs_with_events(
    input_path: Path,
    *,
    limit_runs: int | None = None,
    progress_callback: ProgressCallback = None,
) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]:
    """Parse the selected log files and return (summary payload, flat event rows).

    Progress messages are routed through *progress_callback* when supplied;
    parse timing is always reported on stderr.
    """
    chosen = select_log_files(input_path, limit_runs=limit_runs)
    joined_names = ", ".join(log.name for log in chosen) or "(none)"
    _emit_progress(
        progress_callback,
        "Selected {count} files: {names}".format(count=len(chosen), names=joined_names),
    )
    runs: List[Dict[str, Any]] = []
    event_rows: List[Dict[str, Any]] = []
    parsed_events = 0
    started = perf_counter()
    file_total = len(chosen)
    for position, log_path in enumerate(chosen, start=1):
        _emit_progress(progress_callback, f"Parsing {position}/{file_total}: {log_path.name}")
        parsed = load_log_file(log_path)
        if not parsed:
            # Files that yield no events contribute neither a run nor rows.
            continue
        run_id = log_path.name
        runs.append(summarize_run(run_id, parsed))
        rows = build_event_rows(run_id, parsed)
        event_rows.extend(rows)
        parsed_events += len(rows)
    elapsed = perf_counter() - started
    print(f"Parsed {parsed_events} events in {elapsed:.3f}s", file=sys.stderr, flush=True)
    payload = {
        "generated_at": datetime.now(timezone.utc).isoformat(),
        "source": str(input_path.resolve()),
        "runs": runs,
        "summary": build_summary(runs),
    }
    return payload, event_rows
def analyze_logs(input_path: Path, *, limit_runs: int | None = None) -> Dict[str, Any]:
    """Convenience wrapper that runs the analysis and returns only the JSON payload."""
    summary_payload, _unused_rows = analyze_logs_with_events(input_path, limit_runs=limit_runs)
    return summary_payload
def _emit_progress(callback: ProgressCallback, message: str) -> None:
if callback and message:
callback(message)
def _resolve_input_path(args: argparse.Namespace) -> Path:
if args.sample:
return DEFAULT_SAMPLE_PATH
if args.input_path is not None:
return Path(args.input_path)
return DEFAULT_SAMPLE_PATH
def _write_text(path: Path, content: str) -> None:
parent = path.parent
if parent and not parent.exists():
parent.mkdir(parents=True, exist_ok=True)
path.write_text(content, encoding="utf-8")
def _build_parser() -> argparse.ArgumentParser:
    """Construct the CLI argument parser (extracted from main for readability)."""
    parser = argparse.ArgumentParser(description="Compute TL DPS metrics from log files.")
    parser.add_argument(
        "--input-path",
        type=Path,
        help="File or directory that contains TL combat logs (defaults to sample_data).",
    )
    parser.add_argument(
        "--sample",
        action="store_true",
        help="Force the analyzer to read from the bundled sample_data directory.",
    )
    parser.add_argument(
        "--pretty",
        action="store_true",
        help="Pretty-print the resulting JSON (indent=2).",
    )
    parser.add_argument(
        "--limit-runs",
        type=int,
        metavar="N",
        help="When INPUT_PATH is a directory, analyze only the newest N log files.",
    )
    parser.add_argument(
        "--output-json",
        type=Path,
        help="Optional path to write the JSON payload (always indented).",
    )
    parser.add_argument(
        "--output-md",
        type=Path,
        help="Optional path to write a human-readable Markdown report.",
    )
    parser.add_argument(
        "--smoke-packaged",
        action="store_true",
        help="Run the packaged smoke test (analysis + canned queries + coach self-test).",
    )
    return parser


def main() -> None:
    """CLI entry point: parse arguments, run the analysis, and emit requested outputs."""
    parser = _build_parser()
    args = parser.parse_args()
    # Smoke mode short-circuits everything else and exits with its own code.
    if args.smoke_packaged:
        sys.exit(run_smoke_packaged())
    if args.limit_runs is not None and args.limit_runs <= 0:
        parser.error("--limit-runs must be a positive integer")
    source = _resolve_input_path(args)
    payload = analyze_logs(source, limit_runs=args.limit_runs)
    # stdout always gets the payload; --pretty only controls indentation.
    print(json.dumps(payload, indent=2) if args.pretty else json.dumps(payload))
    if args.output_json:
        _write_text(Path(args.output_json), json.dumps(payload, indent=2))
    if args.output_md:
        _write_text(Path(args.output_md), build_markdown_report(payload))
def run_smoke_packaged(output_path: Optional[Path] = None) -> int:
    """Run the packaged smoke test; return 0 on success, 1 on any failure.

    Steps: analyze the bundled sample data, replay the canned DuckDB queries,
    persist the combined JSON artifact, then exercise the local coach model.
    Every failure is reported on stderr as "SMOKE FAIL".
    """
    destination = output_path if output_path is not None else SMOKE_OUTPUT_PATH
    try:
        summary_payload, rows = analyze_logs_with_events(DEFAULT_SAMPLE_PATH)
        store = DuckDBEventStore()
        store.refresh(rows)
        query_payloads: Dict[str, Any] = {}
        for spec in SMOKE_QUERIES:
            name = spec["name"]
            statement = spec["sql"]
            try:
                outcome = store.query(statement)
            except EventStoreError as exc:
                raise RuntimeError(f"Query '{name}' failed: {exc}") from exc
            query_payloads[name] = {
                "description": spec["description"],
                "sql": statement.strip(),
                "result": outcome.to_payload(),
            }
        destination.parent.mkdir(parents=True, exist_ok=True)
        artifact = {
            "payload": summary_payload,
            "queries": query_payloads,
        }
        _write_text(destination, json.dumps(artifact, indent=2))
        # Validate the bundled coach model responds deterministically.
        try:
            from app.model_manager import LocalModelManager

            LocalModelManager().run_self_test()
        except Exception as exc:  # pragma: no cover - requires llama-cpp runtime
            raise RuntimeError(f"Coach self-test failed: {exc}") from exc
    except Exception as exc:  # pragma: no cover - CLI smoke path
        print(f"SMOKE FAIL: {exc}", file=sys.stderr)
        return 1
    print("SMOKE PASS")
    return 0
# Allow the module to be executed directly as a script.
if __name__ == "__main__":
    main()