NCTCMumbai committed on
Commit
3d567bc
·
verified ·
1 Parent(s): c0b241b

Update backend/query_llm.py

Browse files
Files changed (1) hide show
  1. backend/query_llm.py +8 -8
backend/query_llm.py CHANGED
@@ -18,16 +18,16 @@ repetition_penalty = 1.2
18
  OPENAI_KEY = getenv("OPENAI_API_KEY")
19
  HF_TOKEN = getenv("HUGGING_FACE_HUB_TOKEN")
20
 
21
- #hf_client = InferenceClient(
22
- # "mistralai/Mistral-7B-Instruct-v0.1",
23
- # token=HF_TOKEN
24
- # )
25
 
26
 
27
- hf_client = InferenceClient(
28
- "mistralai/Mixtral-8x7B-Instruct-v0.1",
29
- token=HF_TOKEN
30
- )
31
  def format_prompt(message: str, api_kind: str):
32
  """
33
  Formats the given message using a chat template.
 
18
  OPENAI_KEY = getenv("OPENAI_API_KEY")
19
  HF_TOKEN = getenv("HUGGING_FACE_HUB_TOKEN")
20
 
21
+ hf_client = InferenceClient(
22
+ "mistralai/Mistral-7B-Instruct-v0.1",
23
+ token=HF_TOKEN
24
+ )
25
 
26
 
27
+ # hf_client = InferenceClient(
28
+ # "mistralai/Mixtral-8x7B-Instruct-v0.1",
29
+ # token=HF_TOKEN
30
+ # )
31
  def format_prompt(message: str, api_kind: str):
32
  """
33
  Formats the given message using a chat template.