File size: 5,106 Bytes
45a77a4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
from __future__ import annotations
from pathlib import Path
from datetime import datetime
from typing import Optional, Any, Dict, Sequence, Mapping

import json
import pandas as pd

from .models import PositionState, TradePlan

class CheckpointManager:
    """
    Handles resumable state:

    - checkpoint.json     : stateful info (position, active plan, memory, etc.)
    - *_log.parquet files : incremental logs (append only, deduped on write)
    """

    def __init__(self, out_dir: Path):
        """Create *out_dir* (if needed) and resolve the checkpoint/log file paths."""
        self.out_dir = out_dir
        self.out_dir.mkdir(parents=True, exist_ok=True)

        self.ckpt_file     = self.out_dir / "checkpoint.json"
        self.trade_file    = self.out_dir / "trade_log.parquet"
        self.stats_file    = self.out_dir / "stats_log.parquet"
        self.expert_file   = self.out_dir / "expert_log.parquet"
        self.summary_file  = self.out_dir / "summary_log.parquet"

    # ---------- load ----------
    def load(self):
        """
        Restore the last checkpointed state plus any already-written logs.

        Returns:
            last_ts: Optional[datetime]
                Last processed timestamp, or None on a fresh start.
            state: PositionState
            current_plan: Optional[TradePlan]
            last_close_plan: Optional[TradePlan]
            memory_str: str
            logs: dict[str, pd.DataFrame]
                Already-written logs (so we can seed in-memory lists).
        """
        def _read(path: Path) -> pd.DataFrame:
            # A missing file just means "nothing logged yet".
            return pd.read_parquet(path) if path.exists() else pd.DataFrame()

        logs = {
            "trade": _read(self.trade_file),
            "stats": _read(self.stats_file),
            "expert": _read(self.expert_file),
            "summary": _read(self.summary_file),
        }

        if not self.ckpt_file.exists():
            # fresh start: no open position, no plan, empty memory
            return None, PositionState(), None, None, "No trade completed", logs

        raw = json.loads(self.ckpt_file.read_text())

        last_ts = (
            datetime.fromisoformat(raw["last_timestamp_processed"])
            if raw.get("last_timestamp_processed")
            else None
        )

        # restore stateful objects (pydantic-style model_validate round-trip)
        state_obj = PositionState.model_validate(raw["state"])

        current_plan_obj = (
            TradePlan.model_validate(raw["current_plan"])
            if raw.get("current_plan") is not None
            else None
        )

        last_close_plan_obj = (
            TradePlan.model_validate(raw["last_close_plan"])
            if raw.get("last_close_plan") is not None
            else None
        )

        memory_str = raw.get("memory_str", "No trade completed")

        return (
            last_ts,
            state_obj,
            current_plan_obj,
            last_close_plan_obj,
            memory_str,
            logs,
        )

    # ---------- save ----------
    def save(
        self,
        *,
        last_ts: datetime,
        state: PositionState,
        current_plan: Optional[TradePlan],
        last_close_plan: Optional[TradePlan],
        memory_str: str,
        trade_log: Sequence[Mapping[str, Any]],
        stats_log: Sequence[Mapping[str, Any]],
        expert_log: Sequence[Mapping[str, Any]],
        summary_log: Sequence[Mapping[str, Any]],
    ):
        """
        Append new logs to parquet and write checkpoint.json with the latest state.

        The checkpoint is written atomically (temp file + rename) so a crash
        mid-write cannot leave a truncated/corrupt checkpoint.json behind.
        """
        def _append(df_path: Path, rows: Sequence[Mapping[str, Any]]):
            # No-op on empty input so we never create spurious empty files.
            if not rows:
                return

            # turn sequence of mappings into a DataFrame
            new_df = pd.DataFrame(list(rows))

            if df_path.exists():
                old = pd.read_parquet(df_path)
                merged = pd.concat([old, new_df], ignore_index=True)
                # NOTE(review): drop_duplicates raises on unhashable cell
                # values (lists/dicts) — assumes log rows hold scalars only.
                merged = merged.drop_duplicates()
                merged.to_parquet(df_path, index=False)
            else:
                new_df.to_parquet(df_path, index=False)

        # append logs
        _append(self.trade_file,   trade_log)
        _append(self.stats_file,   stats_log)
        _append(self.expert_file,  expert_log)
        _append(self.summary_file, summary_log)

        payload = {
            "last_timestamp_processed": last_ts.isoformat(),
            "state": state.model_dump(),
            "current_plan": current_plan.model_dump() if current_plan else None,
            "last_close_plan": last_close_plan.model_dump() if last_close_plan else None,
            "memory_str": memory_str,
        }
        # Atomic write: dump to a sibling temp file, then rename over the
        # checkpoint. Path.replace is atomic on POSIX and Windows.
        # default=str stringifies non-JSON types (e.g. datetimes) in dumps.
        tmp = self.ckpt_file.with_name(self.ckpt_file.name + ".tmp")
        tmp.write_text(json.dumps(payload, indent=2, default=str))
        tmp.replace(self.ckpt_file)