# tracker2/db_signals.py
# Author: Dooratre — commit 63162af ("Update db_signals.py", verified)
import requests
from bs4 import BeautifulSoup
import json
# NOTE: Keep this token safe. If you rotate it, update here.
# SECURITY WARNING(review): this is a live GitHub session cookie stored in
# plain source text — anyone with this file can act as the logged-in account
# (dotcom_user=omarnuwrar). Strongly consider loading it from an environment
# variable or secrets manager instead of committing it.
TOKEN = "_device_id=0038e28d4f7d4f9baf8f76b6b9fb8980; GHCC=Required:1-Analytics:1-SocialMedia:1-Advertising:1; MicrosoftApplicationsTelemetryDeviceId=c58113b4-9acb-4ba8-b9f2-4217bdef379a; MSFPC=GUID=79b87b010d464a8783fbf43e19eccddf&HASH=79b8&LV=202408&V=4&LU=1723654762596; _octo=GH1.1.1517954811.1753352111; cpu_bucket=lg; preferred_color_mode=dark; tz=Africa%2FTripoli; ai_session=v+6N2XPmhlxugZWyisQ+ZD|1753646990500|1753646990500; saved_user_sessions=155741452%3ASnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; user_session=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; __Host-user_session_same_site=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; tz=Africa%2FTripoli; color_mode=%7B%22color_mode%22%3A%22auto%22%2C%22light_theme%22%3A%7B%22name%22%3A%22light%22%2C%22color_mode%22%3A%22light%22%7D%2C%22dark_theme%22%3A%7B%22name%22%3A%22dark%22%2C%22color_mode%22%3A%22dark%22%7D%7D; logged_in=yes; dotcom_user=omarnuwrar; _gh_sess=7FzMK5K8ffnmyyh5LKKv%2FOXiqZJR4qLXxTdaV66E844ZCPq5qw%2FClaVmXHNfu8oc61N461wsjEr7d8vhEwrs0N0X7ITUed9Zj01RnwHGT8mMRUn6oYSv94LpIh2FwmotPwp8jkSQkZ%2BotdEpdYtp3ZoJZKfiZOcpHBtT7g2VwIPgoW2Qx5RpnKNdI3Hq31C6IIPaSzAqqny7O7c6L8nWv1nfx%2FAbF4UFSo7UfW%2F9JLUYF5lVJ2kXdYoesKOL7c2KItGDTaZCwjYr9cHKlHWD4E9wLo22GjFveVKxrEz5dgIrNdAj8WxWXuY5Ou4eYmxaBn2ovIhvnFz8%2F6qLURX81YxLLZbymGERA3MaRzDDzY3yE76U8y8lLPve0Duqc0lr34R3XUiMKE5A3%2FNPQ273e36yNlLsgBGDyuYIEcsQ84XLq2IQygBxX4y%2B6WSPwXAgOku6MiEP8Ro9ihF6scOhbJRrVCPp0toSY3RmJToUy6XRmBF2B0oyJstKbqLPfmZI8p%2B2bQo8DBKARHWWUzTJdjF%2BfgZtm%2Flb3qijcKT5I6SPU%2BiLMH%2Fl2GwHw73d1OFGUNy4tdLT5SO5vCFrf1GIiV7qUhhQdA21dXsAeQ4qoF5LHiGftyhOUBHto3ZZB%2FJ87uqACflXOfbbTCQCAYNa2u4o8I9iKQp9r2ripVxqQF1oyVu12FSIN%2BS%2Fd4Rm%2FN7E1tOw3tcVgYcsFEcbsOViUZBXXmo1Qfd9H%2B4IGnbv3hZe%2FPeJqb33SxWeQpamEWhLjVJL2hMCbZ8v79azeUL93QzkLXuryStKTXOdoyrbD2n93V36z5Sxhzi9Ku6OxVK1PCZW0R7JiYtQOWoeMAMd4oe3Bqrxyc%2BdAdb0sW3L%2FOD8J2nbvJ5gGA%3D%3D--Ngvrt5zzlDZazWNi--k%2F8wjhX57aMmLOJc8i6L7w%3D%3D"
# GitHub web-UI endpoints for signals2.json in the omarnuwrar/Trading repo:
EDIT_URL = "https://github.com/omarnuwrar/Trading/edit/main/signals2.json"  # yields CSRF token + commit OID
SAVE_URL = "https://github.com/omarnuwrar/Trading/tree-save/main/signals2.json"  # POST target for commits
BLOB_URL = "https://github.com/omarnuwrar/Trading/blob/main/signals2.json"  # read-only blob view
# ------------------------------------------------------------
# 1) Fetch authenticity_token and commitOid from the edit page
# ------------------------------------------------------------
def fetch_authenticity_token_and_commit_oid():
    """Fetch the CSRF token and head commit OID from the GitHub edit page.

    Scrapes EDIT_URL and reads the embedded React JSON blob
    (``react-app.embeddedData``) to extract the per-path CSRF token and the
    commit OID the edit would be based on.

    Returns:
        tuple: (authenticity_token, commit_oid) on success, or (None, None)
        on any failure (network error, non-200 response, missing script tag,
        unexpected JSON shape). Failures are reported via print().
    """
    headers = {
        "cookie": TOKEN,
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-github-target": "dotcom",
        "x-react-router": "json",
        "x-requested-with": "XMLHttpRequest",
    }
    try:
        # Explicit timeout: without it, requests can block forever on a
        # stalled connection.
        response = requests.get(EDIT_URL, headers=headers, timeout=30)
    except requests.exceptions.RequestException as e:
        print(f"Error: Request failed. Details: {e}")
        return None, None
    if response.status_code != 200:
        print(f"Error: Failed to fetch the page. Status code: {response.status_code}")
        return None, None
    soup = BeautifulSoup(response.text, 'html.parser')
    script_tag = soup.find("script", {"type": "application/json", "data-target": "react-app.embeddedData"})
    # script_tag.string can be None for an empty/nested tag; guard both so we
    # never call .strip() on None.
    if not script_tag or not script_tag.string:
        print("Error: Could not find the required <script> tag.")
        return None, None
    try:
        json_data = json.loads(script_tag.string.strip())
        # The CSRF token is keyed by the tree-save path it authorizes.
        authenticity_token = json_data["payload"]["csrf_tokens"]["/omarnuwrar/Trading/tree-save/main/signals2.json"]["post"]
        commit_oid = json_data["payload"]["webCommitInfo"]["commitOid"]
        return authenticity_token, commit_oid
    except (KeyError, json.JSONDecodeError) as e:
        print(f"Error: Failed to extract data. Details: {str(e)}")
        return None, None
# ------------------------------------------------------------
# 2) Update the signals2.json file with new content (string)
# ------------------------------------------------------------
def update_user_json_file(authenticity_token, commit_oid, new_content: str):
    """Commit *new_content* as the new body of signals2.json via GitHub's web UI.

    Mimics the browser's form POST to the tree-save endpoint, committing
    directly to ``main``.

    Args:
        authenticity_token: CSRF token from fetch_authenticity_token_and_commit_oid().
        commit_oid: Head commit SHA the edit is based on (optimistic lock —
            GitHub rejects the save if the branch has moved).
        new_content: Full replacement text for the file.

    Returns:
        dict: {"success": True, "message": ...} on HTTP 200, otherwise
        {"success": False, "message": ...} (with "details" holding the
        response body when a response was received).
    """
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "x-requested-with": "XMLHttpRequest",
        "github-verified-fetch": "true",
        "content-type": "application/x-www-form-urlencoded",
        "cookie": TOKEN,
    }
    payload = {
        "message": "Update signals2.json",
        "placeholder_message": "Update signals2.json",
        "description": "",
        "commit-choice": "direct",  # commit straight to the branch, no PR
        "target_branch": "main",
        "quick_pull": "",
        "guidance_task": "",
        "commit": commit_oid,
        "same_repo": "1",
        "pr": "",
        "content_changed": "true",
        "filename": "signals2.json",
        "new_filename": "signals2.json",
        "value": new_content,
        "authenticity_token": authenticity_token,
    }
    try:
        # Explicit timeout so a stalled connection cannot hang the caller;
        # return the documented failure dict instead of leaking the exception.
        response = requests.post(SAVE_URL, headers=headers, data=payload, timeout=30)
    except requests.exceptions.RequestException as e:
        return {"success": False, "message": f"Request error: {e}"}
    if response.status_code == 200:
        return {"success": True, "message": "signals2.json has been updated!"}
    return {"success": False, "message": f"Request failed with status code {response.status_code}", "details": response.text}
# ------------------------------------------------------------
# 3) Read the current signals2.json content (as text), robustly
# ------------------------------------------------------------
def get_user_json_file(authenticity_token: str = None, commit_oid: str = None) -> dict:
    """
    Returns:
        {"success": True, "content": "<raw json text>"} on success
        {"success": False, "message": "..."} on failure
    We scrape the blob page and extract the embedded rawLines, joining them safely.
    This works whether signals2.json is an object {"scenario": ...}, an array [ ... ], or [].

    Note: authenticity_token and commit_oid are accepted for call-site
    symmetry with the other helpers but are not needed for a read.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
        "Cookie": TOKEN,
    }
    try:
        # Explicit timeout: without it a stalled connection blocks forever.
        resp = requests.get(BLOB_URL, headers=headers, timeout=30)
        resp.raise_for_status()
        soup = BeautifulSoup(resp.text, 'html.parser')
        script_tag = soup.find('script', {'type': 'application/json', 'data-target': 'react-app.embeddedData'})
        # Guard .string too: it is None for empty/nested tags, and
        # json.loads(None) would raise TypeError.
        if not script_tag or not script_tag.string:
            return {"success": False, "message": "Embedded JSON script tag not found"}
        embedded_data = json.loads(script_tag.string)
        raw_lines = embedded_data.get("payload", {}).get("blob", {}).get("rawLines", [])
        if not raw_lines:
            # Empty file or cannot parse; treat as empty
            return {"success": True, "content": ""}
        # Join lines into full JSON text; rawLines is a list of strings comprising the file content lines.
        # In many cases signals2.json is one line, but we join generally.
        json_text = "\n".join(raw_lines)
        return {"success": True, "content": json_text}
    except requests.exceptions.RequestException as e:
        return {"success": False, "message": f"HTTP error fetching blob: {e}"}
    except json.JSONDecodeError as je:
        return {"success": False, "message": f"Error parsing embedded JSON: {je}"}
    except Exception as ex:
        # Last-resort catch-all so callers always get the documented dict shape.
        return {"success": False, "message": f"Unexpected error: {ex}"}
# ------------------------------------------------------------
# 4) Convenience: return parsed JSON (object or list), or defaults
# ------------------------------------------------------------
def read_signals_json():
    """Fetch and parse signals2.json from GitHub.

    Thin convenience wrapper over get_user_json_file() that parses the raw
    text into Python objects.

    Returns:
        tuple (ok: bool, data, note: str):
        - ok=True: data is the parsed JSON (dict or list); an empty file is
          reported as an empty list by convention.
        - ok=False: data is None and note explains why.
    """
    res = get_user_json_file()
    if not res.get("success"):
        return False, None, res.get("message", "Unknown error")
    raw = res.get("content", "")
    if not raw:
        # Empty file -> treat as empty array by convention
        return True, [], "Empty content"
    try:
        parsed = json.loads(raw)
    except ValueError as e:
        # json.JSONDecodeError subclasses ValueError; catching ValueError
        # (instead of bare Exception) avoids masking unrelated bugs.
        return False, None, f"JSON parse error: {e}"
    return True, parsed, "OK"