Umer797 commited on
Commit
11517f4
·
verified ·
1 Parent(s): 5a5c9b9

Update llm_node.py

Browse files
Files changed (1) hide show
  1. llm_node.py +4 -31
llm_node.py CHANGED
@@ -1,32 +1,5 @@
1
- import os
2
- from huggingface_hub import InferenceClient
3
-
4
  def llm_node(question, search_result):
5
- client = InferenceClient(token=os.getenv("HUGGINGFACEHUB_API_TOKEN"))
6
-
7
- model_id = "google/flan-t5-small" # ✅ Small free-tier model
8
-
9
- prompt = f"""You are solving a GAIA benchmark evaluation question.
10
-
11
- Here’s the question:
12
- {question}
13
-
14
- Here’s retrieved information:
15
- {search_result}
16
-
17
- ⚠️ VERY IMPORTANT:
18
- - ONLY return the final answer, exactly as required.
19
- - Do NOT include explanations, prefixes, or notes.
20
- - If the question asks for a list, give only the list, in the requested format.
21
-
22
- Your answer:"""
23
-
24
- response = client.text_generation(
25
- model=model_id,
26
- prompt=prompt,
27
- max_new_tokens=200, # smaller due to model limits
28
- temperature=0.1,
29
- top_p=0.9
30
- )
31
-
32
- return response.strip()
 
 
 
 
1
def llm_node(question, search_result):
    """Produce the final answer for a question from retrieved text.

    The heavy LLM call was removed in this revision; the search result is
    returned directly after trimming surrounding whitespace.

    Args:
        question: The benchmark question. Currently unused — kept so the
            signature stays compatible with existing callers.
        search_result: Retrieved text to be used as the answer; must
            support ``.strip()`` (i.e. be a string).

    Returns:
        str: ``search_result`` with leading/trailing whitespace removed.
    """
    # The original wrapped this in f"{...}", which is redundant:
    # str.strip() already returns a plain str.
    return search_result.strip()