# app/checkpoint.py
from __future__ import annotations
from pathlib import Path
from datetime import datetime
from typing import Optional, Any, Dict, Sequence, Mapping
import json
import pandas as pd
from .models import PositionState, TradePlan
class CheckpointManager:
"""
Handles resumable state:
- checkpoint.json : stateful info (position, active plan, memory, etc.)
- *_log.parquet files : incremental logs (append only, deduped on write)
"""
def __init__(self, out_dir: Path):
self.out_dir = out_dir
self.out_dir.mkdir(parents=True, exist_ok=True)
self.ckpt_file = self.out_dir / "checkpoint.json"
self.trade_file = self.out_dir / "trade_log.parquet"
self.stats_file = self.out_dir / "stats_log.parquet"
self.expert_file = self.out_dir / "expert_log.parquet"
self.summary_file = self.out_dir / "summary_log.parquet"
# ---------- load ----------
def load(self):
"""
Returns:
last_ts: Optional[datetime]
state: PositionState
current_plan: Optional[TradePlan]
last_close_plan: Optional[TradePlan]
memory_str: str
logs: dict[str, pd.DataFrame] # already-written logs (so we can seed in-memory lists)
"""
trade_df = pd.read_parquet(self.trade_file) if self.trade_file.exists() else pd.DataFrame()
stats_df = pd.read_parquet(self.stats_file) if self.stats_file.exists() else pd.DataFrame()
expert_df = pd.read_parquet(self.expert_file) if self.expert_file.exists() else pd.DataFrame()
summary_df = pd.read_parquet(self.summary_file) if self.summary_file.exists() else pd.DataFrame()
if not self.ckpt_file.exists():
# fresh start: no open position, no plan, empty memory
return (
None,
PositionState(),
None,
None,
"No trade completed",
{
"trade": trade_df,
"stats": stats_df,
"expert": expert_df,
"summary": summary_df,
},
)
raw = json.loads(self.ckpt_file.read_text())
last_ts = (
datetime.fromisoformat(raw["last_timestamp_processed"])
if raw.get("last_timestamp_processed")
else None
)
# restore stateful objects
state_obj = PositionState.model_validate(raw["state"])
current_plan_obj = (
TradePlan.model_validate(raw["current_plan"])
if raw.get("current_plan") is not None
else None
)
last_close_plan_obj = (
TradePlan.model_validate(raw["last_close_plan"])
if raw.get("last_close_plan") is not None
else None
)
memory_str = raw.get("memory_str", "No trade completed")
return (
last_ts,
state_obj,
current_plan_obj,
last_close_plan_obj,
memory_str,
{
"trade": trade_df,
"stats": stats_df,
"expert": expert_df,
"summary": summary_df,
},
)
# ---------- save ----------
def save(
self,
*,
last_ts: datetime,
state: PositionState,
current_plan: Optional[TradePlan],
last_close_plan: Optional[TradePlan],
memory_str: str,
trade_log: Sequence[Mapping[str, Any]],
stats_log: Sequence[Mapping[str, Any]],
expert_log: Sequence[Mapping[str, Any]],
summary_log: Sequence[Mapping[str, Any]],
):
"""
Append new logs to parquet and write checkpoint.json with the latest state.
"""
def _append(df_path: Path, rows: Sequence[Mapping[str, Any]]):
if not rows:
return
# turn sequence of mappings into a DataFrame
new_df = pd.DataFrame(list(rows))
if df_path.exists():
old = pd.read_parquet(df_path)
merged = pd.concat([old, new_df], ignore_index=True)
merged = merged.drop_duplicates()
merged.to_parquet(df_path, index=False)
else:
new_df.to_parquet(df_path, index=False)
# append logs
_append(self.trade_file, trade_log)
_append(self.stats_file, stats_log)
_append(self.expert_file, expert_log)
_append(self.summary_file, summary_log)
payload = {
"last_timestamp_processed": last_ts.isoformat(),
"state": state.model_dump(),
"current_plan": current_plan.model_dump() if current_plan else None,
"last_close_plan": last_close_plan.model_dump() if last_close_plan else None,
"memory_str": memory_str,
}
self.ckpt_file.write_text(json.dumps(payload, indent=2, default=str))