heisbuba committed on
Commit
415b289
·
verified ·
1 Parent(s): c932df8

Update src/services/journal_engine.py

Browse files
Files changed (1) hide show
  1. src/services/journal_engine.py +15 -23
src/services/journal_engine.py CHANGED
@@ -11,12 +11,13 @@ from googleapiclient.http import MediaIoBaseUpload, MediaIoBaseDownload
11
  from google.oauth2.credentials import Credentials
12
  from ..config import get_user_keys, update_user_keys
13
 
 
14
  SCOPES = ['https://www.googleapis.com/auth/drive.appdata']
15
 
16
  class JournalEngine:
17
  @staticmethod
18
  def get_flow():
19
- # [FIX] Essential for HF Spaces
20
  os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
21
  return Flow.from_client_config(
22
  client_config={
@@ -33,6 +34,7 @@ class JournalEngine:
33
 
34
  @staticmethod
35
  def get_creds(uid):
 
36
  user_data = get_user_keys(uid)
37
  token_json = user_data.get("google_token_json")
38
  if not token_json: return None
@@ -44,10 +46,12 @@ class JournalEngine:
44
 
45
  @staticmethod
46
  def get_drive_service(creds):
 
47
  return build('drive', 'v3', credentials=creds)
48
 
49
  @staticmethod
50
  def load_journal(service, file_id):
 
51
  try:
52
  request = service.files().get_media(fileId=file_id)
53
  fh = io.BytesIO()
@@ -63,6 +67,7 @@ class JournalEngine:
63
 
64
  @staticmethod
65
  def save_to_drive(service, file_id, journal_data):
 
66
  media = MediaIoBaseUpload(
67
  io.BytesIO(json.dumps(journal_data).encode('utf-8')),
68
  mimetype='application/json',
@@ -72,6 +77,7 @@ class JournalEngine:
72
 
73
  @staticmethod
74
  def initialize_journal(service):
 
75
  try:
76
  response = service.files().list(
77
  q="name='journal.json' and 'appDataFolder' in parents",
@@ -83,7 +89,6 @@ class JournalEngine:
83
  files = response.get('files', [])
84
  if files: return files[0]['id']
85
 
86
- # Create if missing
87
  file_metadata = {'name': 'journal.json', 'parents': ['appDataFolder']}
88
  media = MediaIoBaseUpload(
89
  io.BytesIO(json.dumps([]).encode('utf-8')),
@@ -98,15 +103,14 @@ class JournalEngine:
98
 
99
  @classmethod
100
  def save_trade(cls, service, file_id, trade_data):
101
- """Smart Update: Edits existing ID or Appends new UUID + Auto-Tags Date."""
102
  journal = cls.load_journal(service, file_id)
103
 
104
- # Temporal Injection
105
  if 'trade_date' in trade_data:
106
  try:
107
  dt = datetime.datetime.strptime(trade_data['trade_date'], "%Y-%m-%d")
108
- trade_data['week'] = dt.strftime("%Y-W%W") # e.g. 2026-W05
109
- trade_data['month'] = dt.strftime("%Y-%m") # e.g. 2026-02
110
  except ValueError:
111
  pass
112
 
@@ -130,10 +134,9 @@ class JournalEngine:
130
 
131
  @classmethod
132
  def delete_trade(cls, service, file_id, trade_id):
133
- """Removes a trade by UUID and saves the list."""
134
  journal = cls.load_journal(service, file_id)
135
 
136
- # Strict string comparison to avoid integer/string mismatch
137
  initial_len = len(journal)
138
  new_journal = [t for t in journal if str(t.get('id')) != str(trade_id)]
139
 
@@ -144,6 +147,7 @@ class JournalEngine:
144
 
145
  @staticmethod
146
  def parse_pnl(pnl_str):
 
147
  try:
148
  clean = re.sub(r'[^\d\.-]', '', str(pnl_str))
149
  return float(clean) if clean else 0.0
@@ -151,6 +155,7 @@ class JournalEngine:
151
 
152
  @classmethod
153
  def calculate_stats(cls, journal_data):
 
154
  if not journal_data: return {"winrate": "0%", "best_trade": "--", "bias": "Neutral"}
155
 
156
  wins = [t for t in journal_data if cls.parse_pnl(t.get('pnl', 0)) > 0]
@@ -159,7 +164,6 @@ class JournalEngine:
159
 
160
  best_trade = max(journal_data, key=lambda x: cls.parse_pnl(x.get('pnl', 0)), default={})
161
 
162
- # Handle conditional reviews as bias proxy if explicit bias missing
163
  biases = []
164
  for t in journal_data:
165
  if t.get('bias'):
@@ -177,44 +181,32 @@ class JournalEngine:
177
 
178
  @staticmethod
179
  def prepare_ai_payload(journal_data):
180
- """
181
- Surgically converts filtered journal trades into a Markdown table.
182
- Uses exact keys from trading_journal.html: trade_date, ticker, pnl, review, tags.
183
- """
184
  if not journal_data:
185
  return "No trading data available for the current filter."
186
 
187
  try:
188
- # 1. Convert to DataFrame
189
  df = pd.DataFrame(journal_data)
190
-
191
- # 2. Define the columns Gemini needs based on your HTML structure
192
- # We map 'review' instead of 'notes' to align with your dashboard
193
  essential_cols = [
194
  'trade_date', 'ticker', 'strategy', 'rrr',
195
  'pnl', 'rules_followed', 'review', 'tags'
196
  ]
197
 
198
- # 3. Filter to existing columns to prevent KeyError
199
  existing_cols = [c for c in essential_cols if c in df.columns]
200
  df_filtered = df[existing_cols].copy()
201
 
202
- # 4. Data Sanitization for AI Readability
203
  if 'tags' in df_filtered.columns:
204
- # Handle both list and string formats for tags
205
  df_filtered['tags'] = df_filtered['tags'].apply(
206
  lambda x: ", ".join(x) if isinstance(x, list) else x
207
  )
208
 
209
  if 'rules_followed' in df_filtered.columns:
210
- # Map boolean-like strings to human-readable terms
211
  df_filtered['rules_followed'] = df_filtered['rules_followed'].apply(
212
  lambda x: "Disciplined" if str(x).lower() == "true" else "Mistake"
213
  )
214
 
215
- # 5. Export to Markdown table
216
  return df_filtered.to_markdown(index=False)
217
 
218
  except Exception as e:
219
- print(f"Surgery Error in prepare_ai_payload: {e}")
220
  return f"Error preparing data for AI: {str(e)}"
 
11
  from google.oauth2.credentials import Credentials
12
  from ..config import get_user_keys, update_user_keys
13
 
14
+ # Drive scope for application-specific data
15
  SCOPES = ['https://www.googleapis.com/auth/drive.appdata']
16
 
17
  class JournalEngine:
18
  @staticmethod
19
  def get_flow():
20
+ # Allow OAuth over HTTP for local dev or specific hosting environments
21
  os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
22
  return Flow.from_client_config(
23
  client_config={
 
34
 
35
  @staticmethod
36
  def get_creds(uid):
37
+ # Load and parse user credentials from database
38
  user_data = get_user_keys(uid)
39
  token_json = user_data.get("google_token_json")
40
  if not token_json: return None
 
46
 
47
  @staticmethod
48
  def get_drive_service(creds):
49
+ # Initialize Google Drive API client
50
  return build('drive', 'v3', credentials=creds)
51
 
52
  @staticmethod
53
  def load_journal(service, file_id):
54
+ # Download journal.json from Drive and parse to list
55
  try:
56
  request = service.files().get_media(fileId=file_id)
57
  fh = io.BytesIO()
 
67
 
68
  @staticmethod
69
  def save_to_drive(service, file_id, journal_data):
70
+ # Upload current journal state to Drive
71
  media = MediaIoBaseUpload(
72
  io.BytesIO(json.dumps(journal_data).encode('utf-8')),
73
  mimetype='application/json',
 
77
 
78
  @staticmethod
79
  def initialize_journal(service):
80
+ # Find existing journal or create a new one in hidden app data folder
81
  try:
82
  response = service.files().list(
83
  q="name='journal.json' and 'appDataFolder' in parents",
 
89
  files = response.get('files', [])
90
  if files: return files[0]['id']
91
 
 
92
  file_metadata = {'name': 'journal.json', 'parents': ['appDataFolder']}
93
  media = MediaIoBaseUpload(
94
  io.BytesIO(json.dumps([]).encode('utf-8')),
 
103
 
104
  @classmethod
105
  def save_trade(cls, service, file_id, trade_data):
106
+ # Add new trade with ID/date tags or update existing record
107
  journal = cls.load_journal(service, file_id)
108
 
 
109
  if 'trade_date' in trade_data:
110
  try:
111
  dt = datetime.datetime.strptime(trade_data['trade_date'], "%Y-%m-%d")
112
+ trade_data['week'] = dt.strftime("%Y-W%W")
113
+ trade_data['month'] = dt.strftime("%Y-%m")
114
  except ValueError:
115
  pass
116
 
 
134
 
135
  @classmethod
136
  def delete_trade(cls, service, file_id, trade_id):
137
+ # Remove trade by ID and sync with Drive
138
  journal = cls.load_journal(service, file_id)
139
 
 
140
  initial_len = len(journal)
141
  new_journal = [t for t in journal if str(t.get('id')) != str(trade_id)]
142
 
 
147
 
148
  @staticmethod
149
  def parse_pnl(pnl_str):
150
+ # Clean PnL string and convert to float
151
  try:
152
  clean = re.sub(r'[^\d\.-]', '', str(pnl_str))
153
  return float(clean) if clean else 0.0
 
155
 
156
  @classmethod
157
  def calculate_stats(cls, journal_data):
158
+ # Compute winrate, best trade, and dominant bias
159
  if not journal_data: return {"winrate": "0%", "best_trade": "--", "bias": "Neutral"}
160
 
161
  wins = [t for t in journal_data if cls.parse_pnl(t.get('pnl', 0)) > 0]
 
164
 
165
  best_trade = max(journal_data, key=lambda x: cls.parse_pnl(x.get('pnl', 0)), default={})
166
 
 
167
  biases = []
168
  for t in journal_data:
169
  if t.get('bias'):
 
181
 
182
  @staticmethod
183
  def prepare_ai_payload(journal_data):
184
+ # Format journal entries into Markdown table for AI processing
 
 
 
185
  if not journal_data:
186
  return "No trading data available for the current filter."
187
 
188
  try:
 
189
  df = pd.DataFrame(journal_data)
 
 
 
190
  essential_cols = [
191
  'trade_date', 'ticker', 'strategy', 'rrr',
192
  'pnl', 'rules_followed', 'review', 'tags'
193
  ]
194
 
 
195
  existing_cols = [c for c in essential_cols if c in df.columns]
196
  df_filtered = df[existing_cols].copy()
197
 
 
198
  if 'tags' in df_filtered.columns:
 
199
  df_filtered['tags'] = df_filtered['tags'].apply(
200
  lambda x: ", ".join(x) if isinstance(x, list) else x
201
  )
202
 
203
  if 'rules_followed' in df_filtered.columns:
 
204
  df_filtered['rules_followed'] = df_filtered['rules_followed'].apply(
205
  lambda x: "Disciplined" if str(x).lower() == "true" else "Mistake"
206
  )
207
 
 
208
  return df_filtered.to_markdown(index=False)
209
 
210
  except Exception as e:
211
+ print(f"Error in prepare_ai_payload: {e}")
212
  return f"Error preparing data for AI: {str(e)}"