small bugfix + anthropic slow + try deepinframeta
Browse files
agent.py
CHANGED
|
@@ -77,18 +77,34 @@ class BoomBot:
|
|
| 77 |
|
| 78 |
elif self.provider == "deepinfra":
|
| 79 |
deepinfra_model = "Qwen/Qwen3-235B-A22B"
|
| 80 |
-
return OpenAIServerModel(
|
| 81 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 82 |
api_base="https://api.deepinfra.com/v1/openai",
|
| 83 |
-
api_key=os.getenv("
|
| 84 |
flatten_messages_as_text=True,
|
| 85 |
max_tokens=8192,
|
| 86 |
-
temperature=0.
|
| 87 |
)
|
| 88 |
elif self.provider == "meta":
|
| 89 |
meta_model = "meta-llama/Llama-3.3-70B-Instruct-Turbo"
|
| 90 |
-
return OpenAIServerModel(
|
| 91 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 92 |
api_base="https://api.deepinfra.com/v1/openai",
|
| 93 |
api_key=os.getenv("DEEPINFRA_API_KEY"),
|
| 94 |
flatten_messages_as_text=True,
|
|
@@ -286,9 +302,9 @@ class BoomBot:
|
|
| 286 |
return final_answer
|
| 287 |
|
| 288 |
|
| 289 |
-
#
|
| 290 |
-
|
| 291 |
-
|
| 292 |
-
|
| 293 |
-
|
| 294 |
-
|
|
|
|
| 77 |
|
| 78 |
elif self.provider == "deepinfra":
|
| 79 |
deepinfra_model = "Qwen/Qwen3-235B-A22B"
|
| 80 |
+
# return OpenAIServerModel(
|
| 81 |
+
# model_id=deepinfra_model,
|
| 82 |
+
# api_base="https://api.deepinfra.com/v1/openai",
|
| 83 |
+
# api_key=os.getenv("ANTHROPIC_API_KEY"),
|
| 84 |
+
# flatten_messages_as_text=True,
|
| 85 |
+
# max_tokens=8192,
|
| 86 |
+
# temperature=0.1,
|
| 87 |
+
# )
|
| 88 |
+
return LiteLLMModel(
|
| 89 |
+
model_id="deepinfra/"+ deepinfra_model,
|
| 90 |
api_base="https://api.deepinfra.com/v1/openai",
|
| 91 |
+
api_key=os.getenv("DEEPINFRA_API_KEY"),
|
| 92 |
flatten_messages_as_text=True,
|
| 93 |
max_tokens=8192,
|
| 94 |
+
temperature=0.7,
|
| 95 |
)
|
| 96 |
elif self.provider == "meta":
|
| 97 |
meta_model = "meta-llama/Llama-3.3-70B-Instruct-Turbo"
|
| 98 |
+
# return OpenAIServerModel(
|
| 99 |
+
# model_id=meta_model,
|
| 100 |
+
# api_base="https://api.deepinfra.com/v1/openai",
|
| 101 |
+
# api_key=os.getenv("DEEPINFRA_API_KEY"),
|
| 102 |
+
# flatten_messages_as_text=True,
|
| 103 |
+
# max_tokens=8192,
|
| 104 |
+
# temperature=0.7,
|
| 105 |
+
# )
|
| 106 |
+
return LiteLLMModel(
|
| 107 |
+
model_id="deepinfra/"+ meta_model,
|
| 108 |
api_base="https://api.deepinfra.com/v1/openai",
|
| 109 |
api_key=os.getenv("DEEPINFRA_API_KEY"),
|
| 110 |
flatten_messages_as_text=True,
|
|
|
|
| 302 |
return final_answer
|
| 303 |
|
| 304 |
|
| 305 |
+
# Example of how to use this code (commented out)
|
| 306 |
+
if __name__ == "__main__":
|
| 307 |
+
agent = BoomBot(provider="meta")
|
| 308 |
+
question = "In the year 2020, where were koi fish found in the watershed with the id 02040203? Give only the name of the pond, lake, or stream where the fish were found, and not the name of the city or county."
|
| 309 |
+
response = agent.run(question=question, task_id="1", to_download=False)
|
| 310 |
+
print(f"Response: {response}")
|
app.py
CHANGED
|
@@ -4,7 +4,7 @@ import os
|
|
| 4 |
import gradio as gr
|
| 5 |
import pandas as pd
|
| 6 |
import requests
|
| 7 |
-
|
| 8 |
from agent import BoomBot
|
| 9 |
|
| 10 |
# (Keep Constants as is)
|
|
@@ -20,10 +20,17 @@ load_dotenv()
|
|
| 20 |
class BasicAgent:
|
| 21 |
def __init__(self):
|
| 22 |
print("BasicAgent initialized.")
|
| 23 |
-
self.agent = BoomBot(provider="
|
| 24 |
|
| 25 |
def __call__(self, question: str, task_id: str, to_download) -> str:
|
| 26 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
return self.agent.run(question, task_id, to_download)
|
| 28 |
|
| 29 |
|
|
@@ -84,6 +91,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
|
|
| 84 |
for item in questions_data:
|
| 85 |
task_id = item.get("task_id")
|
| 86 |
question_text = item.get("question")
|
|
|
|
| 87 |
file_name = item.get("file_name", "")
|
| 88 |
|
| 89 |
if file_name.strip() != "":
|
|
@@ -106,6 +114,9 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
|
|
| 106 |
"Submitted Answer": submitted_answer,
|
| 107 |
}
|
| 108 |
)
|
|
|
|
|
|
|
|
|
|
| 109 |
except Exception as e:
|
| 110 |
print(f"Error running agent on task {task_id}: {e}")
|
| 111 |
results_log.append(
|
|
|
|
| 4 |
import gradio as gr
|
| 5 |
import pandas as pd
|
| 6 |
import requests
|
| 7 |
+
import time
|
| 8 |
from agent import BoomBot
|
| 9 |
|
| 10 |
# (Keep Constants as is)
|
|
|
|
| 20 |
class BasicAgent:
|
| 21 |
def __init__(self):
|
| 22 |
print("BasicAgent initialized.")
|
| 23 |
+
self.agent = BoomBot(provider="meta")
|
| 24 |
|
| 25 |
def __call__(self, question: str, task_id: str, to_download) -> str:
|
| 26 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
| 27 |
+
excluded = ["youtube", "video", "chess"]
|
| 28 |
+
q = question.lower()
|
| 29 |
+
# skip the question if it mentions any excluded term
|
| 30 |
+
if any(exc in q for exc in excluded):
|
| 31 |
+
llm_answer = "NOT ATTEMPTED"
|
| 32 |
+
return llm_answer
|
| 33 |
+
|
| 34 |
return self.agent.run(question, task_id, to_download)
|
| 35 |
|
| 36 |
|
|
|
|
| 91 |
for item in questions_data:
|
| 92 |
task_id = item.get("task_id")
|
| 93 |
question_text = item.get("question")
|
| 94 |
+
|
| 95 |
file_name = item.get("file_name", "")
|
| 96 |
|
| 97 |
if file_name.strip() != "":
|
|
|
|
| 114 |
"Submitted Answer": submitted_answer,
|
| 115 |
}
|
| 116 |
)
|
| 117 |
+
# Anthropic caps at 50k tokens per minute, so sleep between requests
|
| 118 |
+
if agent.agent.provider == "anthropic":
|
| 119 |
+
time.sleep(60)
|
| 120 |
except Exception as e:
|
| 121 |
print(f"Error running agent on task {task_id}: {e}")
|
| 122 |
results_log.append(
|
tools.py
CHANGED
|
@@ -641,10 +641,7 @@ class QueryVectorStoreTool(Tool):
|
|
| 641 |
def forward(self, query: str) -> str:
|
| 642 |
collection_name = "vectorstore"
|
| 643 |
|
| 644 |
-
|
| 645 |
-
k = 3
|
| 646 |
-
if k > 30:
|
| 647 |
-
k = 30
|
| 648 |
|
| 649 |
print(f"🔎 Querying vector store '{collection_name}' with: '{query}'")
|
| 650 |
try:
|
|
|
|
| 641 |
def forward(self, query: str) -> str:
|
| 642 |
collection_name = "vectorstore"
|
| 643 |
|
| 644 |
+
k = 5
|
|
|
|
|
|
|
|
|
|
| 645 |
|
| 646 |
print(f"🔎 Querying vector store '{collection_name}' with: '{query}'")
|
| 647 |
try:
|