Alihamas212 committed on
Commit
f9fa566
·
verified ·
1 Parent(s): 37be719

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -23
app.py CHANGED
@@ -1,34 +1,26 @@
1
  import gradio as gr
2
  import os
3
- from openrouter import Client
4
 
5
- # --- Setup the OpenRouter Client ---
6
- # IMPORTANT: This gets the API key from Hugging Face "Secrets"
7
- # DO NOT paste your key here.
8
  api_key = os.environ.get("OPENROUTER_API_KEY")
9
 
10
- # Check if the API key is available
11
  if not api_key:
12
- # If running locally, you might use a different way to load keys
13
- # For this example, we'll raise an error if it's not set in the HF Space
14
  print("OPENROUTER_API_KEY not found in environment secrets!")
15
- # We can still run the app but show an error message
16
  client = None
17
  else:
18
- client = Client(api_key=api_key)
 
 
 
19
 
20
- # --- The Core Function ---
21
- # This is what Gradio will run when the user clicks "Submit"
22
  def compare_models(user_prompt):
23
  if not client:
24
  return "Error: API Key not configured.", "Error: API Key not configured.", "Error: API Key not configured."
25
 
26
- # Define the models you want to compare
27
- model_1 = "mistralai/mistral-7b-instruct" # Fast and cheap
28
- model_2 = "meta-llama/llama-3-8b-instruct" # Good all-rounder
29
- model_3 = "google/gemini-flash-1.5" # Fast and capable
30
 
31
- # --- Call Model 1 ---
32
  try:
33
  response_1 = client.chat.completions.create(
34
  model=model_1,
@@ -38,7 +30,6 @@ def compare_models(user_prompt):
38
  except Exception as e:
39
  output_1 = f"Error calling {model_1}: {e}"
40
 
41
- # --- Call Model 2 ---
42
  try:
43
  response_2 = client.chat.completions.create(
44
  model=model_2,
@@ -48,7 +39,6 @@ def compare_models(user_prompt):
48
  except Exception as e:
49
  output_2 = f"Error calling {model_2}: {e}"
50
 
51
- # --- Call Model 3 ---
52
  try:
53
  response_3 = client.chat.completions.create(
54
  model=model_3,
@@ -60,10 +50,6 @@ def compare_models(user_prompt):
60
 
61
  return output_1, output_2, output_3
62
 
63
- # --- Create the Gradio Interface ---
64
-
65
- # Use gr.Interface for a simple all-in-one UI
66
- # We use 'parallel' to show outputs side-by-side
67
  demo = gr.Interface(
68
  fn=compare_models,
69
  inputs=gr.Textbox(label="Enter your prompt", lines=3),
@@ -76,5 +62,4 @@ demo = gr.Interface(
76
  description="Enter one prompt and see the results from three different AI models side-by-side. (Powered by OpenRouter)"
77
  )
78
 
79
- # Launch the app!
80
  demo.launch()
 
1
  import gradio as gr
2
  import os
3
+ from openai import OpenAI
4
 
 
 
 
5
  api_key = os.environ.get("OPENROUTER_API_KEY")
6
 
 
7
  if not api_key:
 
 
8
  print("OPENROUTER_API_KEY not found in environment secrets!")
 
9
  client = None
10
  else:
11
+ client = OpenAI(
12
+ base_url="https://openrouter.ai/api/v1",
13
+ api_key=api_key,
14
+ )
15
 
 
 
16
  def compare_models(user_prompt):
17
  if not client:
18
  return "Error: API Key not configured.", "Error: API Key not configured.", "Error: API Key not configured."
19
 
20
+ model_1 = "mistralai/mistral-7b-instruct"
21
+ model_2 = "meta-llama/llama-3-8b-instruct"
22
+ model_3 = "google/gemini-flash-1.5"
 
23
 
 
24
  try:
25
  response_1 = client.chat.completions.create(
26
  model=model_1,
 
30
  except Exception as e:
31
  output_1 = f"Error calling {model_1}: {e}"
32
 
 
33
  try:
34
  response_2 = client.chat.completions.create(
35
  model=model_2,
 
39
  except Exception as e:
40
  output_2 = f"Error calling {model_2}: {e}"
41
 
 
42
  try:
43
  response_3 = client.chat.completions.create(
44
  model=model_3,
 
50
 
51
  return output_1, output_2, output_3
52
 
 
 
 
 
53
  demo = gr.Interface(
54
  fn=compare_models,
55
  inputs=gr.Textbox(label="Enter your prompt", lines=3),
 
62
  description="Enter one prompt and see the results from three different AI models side-by-side. (Powered by OpenRouter)"
63
  )
64
 
 
65
  demo.launch()