AdarshJi committed on
Commit
edcbcee
·
verified ·
1 Parent(s): 2fa9208

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +5 -5
main.py CHANGED
@@ -145,7 +145,7 @@ def FREEGPT(
145
  stream : bool = True,
146
  timeout: Optional[float] = None
147
  ):
148
- md = next((item["tag"] + "/" + item["model"] for item in MODELS if item["model"] == model), "@cf/meta/llama-3.2-1b-instruct")
149
 
150
  URL = f"https://llmchat.in/inference/stream?model={md}"
151
 
@@ -166,7 +166,7 @@ def FREEGPT(
166
  "messages": messages,
167
  "stream": stream,
168
  **({"max_tokens": max_token} if max_token is not None else {}),
169
- **({"max_tokens": next((item["max_tokens"] for item in MODELS if item["model"] == model and item["max_tokens"] is not None), None)} if next((True for item in MODELS if item["model"] == model and item["max_tokens"] is not None), None) else {})
170
  }
171
 
172
 
@@ -228,9 +228,9 @@ def QWEN(
228
 
229
  headers = {"Accept": "*/*","Content-Type": "application/json","Origin": "https://teichai-qwen3-4b-thinking-2507-claude-4-5-opus.hf.space","Referer": "https://teichai-qwen3-4b-thinking-2507-claude-4-5-opus.hf.space/","User-Agent": "python-requests/2.x"}
230
 
231
- c = t()
232
  RESPO = RQ.post(API_URL, headers=headers, json=payload, stream=stream, timeout=timeout)
233
- print(c-t())
234
  # print(RESPO)
235
  buffer_lines = []
236
  for raw in RESPO.iter_lines():
@@ -675,4 +675,4 @@ def info():
675
  # ---------------------------------------------------------------------
676
 
677
  # if __name__ == "__main__":
678
- # app.run(host="0.0.0.0", port=5550, threaded=True)
 
145
  stream : bool = True,
146
  timeout: Optional[float] = None
147
  ):
148
+ md = next((item["tag"] + "/" + item["model"] for item in M2 if item["model"] == model), "@cf/meta/llama-3.2-1b-instruct")
149
 
150
  URL = f"https://llmchat.in/inference/stream?model={md}"
151
 
 
166
  "messages": messages,
167
  "stream": stream,
168
  **({"max_tokens": max_token} if max_token is not None else {}),
169
+ **({"max_tokens": next((item["max_tokens"] for item in M2 if item["model"] == model and item["max_tokens"] is not None), None)} if next((True for item in M2 if item["model"] == model and item["max_tokens"] is not None), None) else {})
170
  }
171
 
172
 
 
228
 
229
  headers = {"Accept": "*/*","Content-Type": "application/json","Origin": "https://teichai-qwen3-4b-thinking-2507-claude-4-5-opus.hf.space","Referer": "https://teichai-qwen3-4b-thinking-2507-claude-4-5-opus.hf.space/","User-Agent": "python-requests/2.x"}
230
 
231
+
232
  RESPO = RQ.post(API_URL, headers=headers, json=payload, stream=stream, timeout=timeout)
233
+
234
  # print(RESPO)
235
  buffer_lines = []
236
  for raw in RESPO.iter_lines():
 
675
  # ---------------------------------------------------------------------
676
 
677
  # if __name__ == "__main__":
678
+ # app.run(host="0.0.0.0", port=5550, threaded=True)