pathmohd123 committed on
Commit
235b2f6
·
verified ·
1 Parent(s): 6ca36fc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -10
app.py CHANGED
@@ -13,22 +13,22 @@ load_dotenv()
13
  hf_token = os.getenv("HF_TOKEN")
14
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
15
 
16
- # ──────────────── Load Mistral Model ────────────────
17
  try:
18
- pipe = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.1", token=hf_token)
19
  except Exception as e:
20
  raise RuntimeError(f"Model loading failed: {e}")
21
 
22
  # ──────────────── Agent Class ────────────────
23
- class MistralAgent:
24
  def __init__(self):
25
- print("✅ MistralAgent initialized.")
26
 
27
  def __call__(self, question: str) -> str:
28
  try:
29
- prompt = f"[INST] {question} [/INST]"
30
  output = pipe(prompt, max_new_tokens=100, do_sample=False)
31
- return output[0]["generated_text"].split("[/INST]")[-1].strip()
32
  except Exception as e:
33
  return f"LLM Error: {e}"
34
 
@@ -58,7 +58,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
58
  code_link = f"https://huggingface.co/spaces/{space_id}/tree/main" if space_id else ""
59
 
60
  try:
61
- agent = MistralAgent()
62
  except Exception as e:
63
  return f"❌ Error initializing agent: {e}", None
64
 
@@ -111,7 +111,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
111
 
112
  # ──────────────── Gradio UI ────────────────
113
  with gr.Blocks() as demo:
114
- gr.Markdown("# 🧠 Mistral-7B Agent Evaluation")
115
  gr.Markdown(
116
  """
117
  **Instructions:**
@@ -130,5 +130,4 @@ with gr.Blocks() as demo:
130
 
131
  if __name__ == "__main__":
132
  print("Launching Gradio Interface...")
133
- demo.launch(debug=True, server_name="0.0.0.0", server_port=7860)
134
-
 
13
  hf_token = os.getenv("HF_TOKEN")
14
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
15
 
16
+ # ──────────────── Load Falcon Model ────────────────
17
  try:
18
+ pipe = pipeline("text-generation", model="tiiuae/falcon-rw-1b", token=hf_token)
19
  except Exception as e:
20
  raise RuntimeError(f"Model loading failed: {e}")
21
 
22
  # ──────────────── Agent Class ────────────────
23
+ class FalconAgent:
24
  def __init__(self):
25
+ print("✅ FalconAgent initialized.")
26
 
27
  def __call__(self, question: str) -> str:
28
  try:
29
+ prompt = f"{question}"
30
  output = pipe(prompt, max_new_tokens=100, do_sample=False)
31
+ return output[0]["generated_text"].replace(prompt, "").strip()
32
  except Exception as e:
33
  return f"LLM Error: {e}"
34
 
 
58
  code_link = f"https://huggingface.co/spaces/{space_id}/tree/main" if space_id else ""
59
 
60
  try:
61
+ agent = FalconAgent()
62
  except Exception as e:
63
  return f"❌ Error initializing agent: {e}", None
64
 
 
111
 
112
  # ──────────────── Gradio UI ────────────────
113
  with gr.Blocks() as demo:
114
+ gr.Markdown("# 🧠 Falcon-RW-1B Agent Evaluation")
115
  gr.Markdown(
116
  """
117
  **Instructions:**
 
130
 
131
  if __name__ == "__main__":
132
  print("Launching Gradio Interface...")
133
+ demo.launch(debug=True, server_name="0.0.0.0", server_port=7860)