pavan-d committed on
Commit
be4b3ca
·
verified ·
1 Parent(s): 76a7a2a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -8
app.py CHANGED
@@ -4,6 +4,9 @@ import requests
4
  import inspect
5
  import pandas as pd
6
 
 
 
 
7
  # (Keep Constants as is)
8
  # --- Constants ---
9
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
@@ -12,16 +15,31 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
12
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
13
class BasicAgent:
    """Toy agent that answers a couple of hard-coded questions.

    Anything outside the known patterns falls through to "I don't know".
    """

    def __init__(self):
        print("BasicAgent initialized.")

    def __call__(self, question: str) -> str:
        """Return a canned answer for known question patterns.

        Args:
            question: The question text to match against.

        Returns:
            "Paris", "4", or "I don't know" depending on the match.
        """
        print(f"Agent received question: {question}")

        # Simple hardcoded logic.
        # BUG FIX: the needle must be lowercase — the original compared
        # "capital of France" (uppercase F) against question.lower(),
        # which can never contain an uppercase letter, so it never matched.
        if "capital of france" in question.lower():
            return "Paris"
        if "2 + 2" in question:  # case/spacing-sensitive, as in the original
            return "4"
        return "I don't know"
26
 
27
  def run_and_submit_all( profile: gr.OAuthProfile | None):
 
4
  import inspect
5
  import pandas as pd
6
 
7
+ import torch
8
+ from transformers import pipeline
9
+
10
  # (Keep Constants as is)
11
  # --- Constants ---
12
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
15
  # ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
16
class BasicAgent:
    """Agent that answers questions with a local Hugging Face text-generation pipeline.

    Loads Mistral-7B-Instruct once at construction time; each call wraps the
    question in the Mistral instruction format and returns the generated answer.
    """

    def __init__(self):
        print("🔄 Loading HF LLM pipeline...")
        # BUG FIX: the original passed temperature=0, which transformers
        # rejects when sampling ("temperature must be strictly positive").
        # The intent — deterministic output — is expressed correctly with
        # greedy decoding via do_sample=False.
        self.llm = pipeline(
            "text-generation",
            model="mistralai/Mistral-7B-Instruct-v0.2",
            tokenizer="mistralai/Mistral-7B-Instruct-v0.2",
            max_new_tokens=200,
            do_sample=False,
            device=0 if torch.cuda.is_available() else -1,  # Use GPU if available
        )
        print("✅ LLM Loaded.")

    def __call__(self, question: str) -> str:
        """Generate an answer to *question*; returns "I don't know" on any error.

        Args:
            question: Free-form question text.

        Returns:
            The model's answer with the echoed prompt stripped, or the
            fallback string if generation raised.
        """
        print(f"🤖 Agent received question: {question}")

        # Mistral-instruct chat format: wrap the question in [INST] ... [/INST].
        prompt = f"[INST] {question} [/INST]"

        try:
            output = self.llm(prompt)[0]["generated_text"]
            # BUG FIX: the pipeline echoes the prompt at the start of the
            # output; strip only that leading prefix. The original used
            # str.replace, which would also delete any repetition of the
            # prompt text *inside* the answer.
            answer = output.removeprefix(prompt).strip()
            print(f"✅ Answer: {answer}")
            return answer
        except Exception as e:
            # Deliberate best-effort fallback so the grading loop keeps running.
            print(f"❌ Error: {e}")
            return "I don't know"
44
 
45
  def run_and_submit_all( profile: gr.OAuthProfile | None):