Cachoups committed on
Commit
278db2f
·
verified ·
1 Parent(s): 83ebb0e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -3
app.py CHANGED
@@ -92,8 +92,7 @@ llm = HuggingFaceHub(
92
  model_kwargs={
93
  "temperature": 0.1,
94
  "max_length": 10,
95
- "return_full_text": False,
96
- "stop_sequences": ["Explanation:", "Original question:", "\n"]
97
  }
98
  )
99
 
@@ -102,7 +101,7 @@ def ra(user_question):
102
  prompt = f"Rewrite the following question with correct grammar. ONLY return the corrected question. Do NOT include any explanation.\n\n{user_question}"
103
 
104
  # Pass the prompt to the LLM and get the response
105
- rephrased_query = llm(prompt) # Replace `llm` with the appropriate LLM function or API call
106
  new_query = rephrased_query.strip()
107
  return {'context' : retriever(new_query), 'question': new_query}
108
  # chain = RunnablePassthrough() | RunnableLambda(ra) | prompt_template | client.chat_completion() | StrOutputParser() for notebook
 
92
  model_kwargs={
93
  "temperature": 0.1,
94
  "max_length": 10,
95
+ "return_full_text": False
 
96
  }
97
  )
98
 
 
101
  prompt = f"Rewrite the following question with correct grammar. ONLY return the corrected question. Do NOT include any explanation.\n\n{user_question}"
102
 
103
  # Pass the prompt to the LLM and get the response
104
+ rephrased_query = llm.invoke(prompt) # Replace `llm` with the appropriate LLM function or API call
105
  new_query = rephrased_query.strip()
106
  return {'context' : retriever(new_query), 'question': new_query}
107
  # chain = RunnablePassthrough() | RunnableLambda(ra) | prompt_template | client.chat_completion() | StrOutputParser() for notebook