ArthurCFR committed on
Commit
193419a
·
1 Parent(s): 92c7923

Fix GPT-4 Turbo max_tokens limit (4096 max)

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -78,7 +78,7 @@ def load_from_hf_space(repo_id=HF_REPO_ID, token=HF_TOKEN):
78
  return None
79
 
80
  # --- OpenAI API function ---
81
- def call_openai_api(prompt_text, max_tokens=5000):
82
  """
83
  Call OpenAI API to generate JSON response from the prompt template
84
  """
 
78
  return None
79
 
80
  # --- OpenAI API function ---
81
+ def call_openai_api(prompt_text, max_tokens=4000):
82
  """
83
  Call OpenAI API to generate JSON response from the prompt template
84
  """