Spaces:
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -16,6 +16,25 @@ from langchain_core.messages import HumanMessage
|
|
| 16 |
# --- Constants ---
|
| 17 |
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
| 18 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 19 |
@tool
|
| 20 |
def add(a:int,b:int)->int:
|
| 21 |
"""
|
|
@@ -146,7 +165,7 @@ class BasicAgent:
|
|
| 146 |
model = InferenceClientModel(
|
| 147 |
model_id="deepseek-ai/deepseek-coder-6.7b-instruct", # Example OpenRouter model ID
|
| 148 |
token=token, # Set your Hugging Face token in the environment
|
| 149 |
-
provider="
|
| 150 |
)
|
| 151 |
self.agent= CodeAgent(
|
| 152 |
tools = [add, subtract, multiply, divide, web_search, image_generation_tool],
|
|
@@ -156,14 +175,20 @@ class BasicAgent:
|
|
| 156 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
| 157 |
# Inject system prompt + question
|
| 158 |
question_with_prompt = f"{self.system_prompt}\n\nContext: {context}\n\nQuestion: {question.strip()}"
|
| 159 |
-
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 167 |
|
| 168 |
def run_and_submit_all( profile: gr.OAuthProfile | None):
|
| 169 |
"""
|
|
|
|
| 16 |
# --- Constants ---
|
| 17 |
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
| 18 |
|
| 19 |
+
def openrouter_inference(prompt, model="deepseek-ai/deepseek-coder-6.7b-instruct", timeout=60):
    """Send *prompt* to the OpenRouter chat-completions API and return the reply text.

    Parameters
    ----------
    prompt : str
        User message forwarded verbatim as a single chat turn.
    model : str, optional
        OpenRouter model identifier to query.
    timeout : float, optional
        Seconds to wait for the HTTP response before aborting
        (new keyword; default keeps callers backward-compatible).

    Returns
    -------
    str
        The assistant message content of the first completion choice.

    Raises
    ------
    KeyError
        If the ``OPENROUTER_API_KEY`` environment variable is unset.
    requests.HTTPError
        If the API responds with a non-2xx status code.
    requests.Timeout
        If no response arrives within *timeout* seconds.
    """
    api_key = os.environ["OPENROUTER_API_KEY"]
    url = "https://openrouter.ai/api/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": model,
        "messages": [
            {"role": "user", "content": prompt}
        ],
    }
    # Fix: the original call had no timeout, so a network stall would hang
    # the agent forever — requests never times out by default.
    response = requests.post(url, headers=headers, json=payload, timeout=timeout)
    response.raise_for_status()
    data = response.json()
    # Extract the answer from the response
    return data["choices"][0]["message"]["content"]
|
| 37 |
+
|
| 38 |
@tool
|
| 39 |
def add(a:int,b:int)->int:
|
| 40 |
"""
|
|
|
|
| 165 |
model = InferenceClientModel(
|
| 166 |
model_id="deepseek-ai/deepseek-coder-6.7b-instruct", # Example OpenRouter model ID
|
| 167 |
token=token, # Set your Hugging Face token in the environment
|
| 168 |
+
provider="auto" # Use openrouter as the provider
|
| 169 |
)
|
| 170 |
self.agent= CodeAgent(
|
| 171 |
tools = [add, subtract, multiply, divide, web_search, image_generation_tool],
|
|
|
|
| 175 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
| 176 |
# Inject system prompt + question
|
| 177 |
question_with_prompt = f"{self.system_prompt}\n\nContext: {context}\n\nQuestion: {question.strip()}"
|
| 178 |
+
try:
|
| 179 |
+
answer = openrouter_inference(question_with_prompt)
|
| 180 |
+
except Exception as e:
|
| 181 |
+
print(f"Error calling OpenRouter: {e}")
|
| 182 |
+
answer = "Sorry, I couldn't get an answer from the model."
|
| 183 |
+
print(f"Agent returning answer: {answer.strip()}")
|
| 184 |
+
return answer.strip()
|
| 185 |
+
# # Fix: handle dict or string
|
| 186 |
+
# if isinstance(answer, dict) and "content" in answer:
|
| 187 |
+
# result = answer["content"]
|
| 188 |
+
# else:
|
| 189 |
+
# result = str(answer)
|
| 190 |
+
# print(f"Agent returning answer: {result.strip()}")
|
| 191 |
+
# return result.strip()
|
| 192 |
|
| 193 |
def run_and_submit_all( profile: gr.OAuthProfile | None):
|
| 194 |
"""
|