johnbridges committed on
Commit
d757694
·
1 Parent(s): 2e0a1a4
Files changed (1) hide show
  1. timesfm_backend.py +48 -0
timesfm_backend.py CHANGED
@@ -49,6 +49,54 @@ def _as_1d_float_tensor(series: List[float], device: str, dtype: torch.dtype) ->
49
  t = torch.tensor(series, dtype=torch.float32) # keep input parse stable
50
  return t.to(device=device, dtype=dtype)
51
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  # --------------------------------------------------------------------------------------
53
  # Fallback forecaster (naive)
54
  # --------------------------------------------------------------------------------------
 
49
  t = torch.tensor(series, dtype=torch.float32) # keep input parse stable
50
  return t.to(device=device, dtype=dtype)
51
 
52
+
53
async def stream(self, request: Dict[str, Any]):
    """
    Chat-compatible shim: run forecast() and yield a single OpenAI-style
    chat.completion.chunk whose assistant content is a compact JSON
    payload containing the forecast result.

    Args:
        request: Chat-style payload. Forecast inputs may be supplied
            directly ({series, horizon, freq, ...}), nested under a
            "data" dict, or nested under a "timeseries" dict.

    Yields:
        Exactly one chat.completion.chunk dict whose single choice
        carries the JSON-encoded forecast and finish_reason "stop".
    """
    import json  # function-local so the module's import surface is unchanged

    # Take ONE timestamp and reuse it: the original called time.time()
    # twice, so the response id and "created" could straddle a second
    # boundary and disagree.
    now = int(time.time())
    rid = f"chatcmpl-timesfm-{now}"

    # Map chat-ish payloads to our forecast inputs: accept both direct
    # {series, horizon, freq, ...} and nested {data: {...}}.
    payload = request or {}
    if isinstance(payload.get("data"), dict):
        payload = payload["data"]

    # Allow callers to stick series/forecast config under "timeseries" too;
    # its keys win over any same-named top-level keys.
    if isinstance(payload.get("timeseries"), dict):
        payload = {**payload, **payload["timeseries"]}

    # Run the forecast (async project method; returns a plain dict).
    result = await self.forecast(payload)

    # Minimal content: a compact JSON string so downstream consumers
    # (e.g. the .NET backend) can parse it easily. Missing keys serialize
    # as null, matching the original behavior.
    content = json.dumps(
        {
            "model": result.get("model"),
            "horizon": result.get("horizon"),
            "freq": result.get("freq"),
            "forecast": result.get("forecast"),
            "note": result.get("note"),
            "backend": "timesfm",
        },
        separators=(",", ":"),
        ensure_ascii=False,
    )

    yield {
        "id": rid,
        "object": "chat.completion.chunk",
        "created": now,
        "model": result.get("model"),
        "choices": [
            {
                "index": 0,
                "delta": {"role": "assistant", "content": content},
                "finish_reason": "stop",
            }
        ],
    }
99
+
100
  # --------------------------------------------------------------------------------------
101
  # Fallback forecaster (naive)
102
  # --------------------------------------------------------------------------------------