Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,6 +1,7 @@
|
|
| 1 |
import gradio as gr
|
| 2 |
import os
|
| 3 |
|
|
|
|
| 4 |
from smolagents import InferenceClientModel, CodeAgent, MCPClient
|
| 5 |
|
| 6 |
|
|
@@ -10,15 +11,19 @@ try:
|
|
| 10 |
)
|
| 11 |
tools = mcp_client.get_tools()
|
| 12 |
|
| 13 |
-
model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"])
|
| 15 |
|
| 16 |
demo = gr.ChatInterface(
|
| 17 |
fn=lambda message, history: str(agent.run(message)),
|
| 18 |
type="messages",
|
| 19 |
-
examples=["What is the RS method of investing as taught by Premal Sir."],
|
| 20 |
title="RSWarriors - RS chatbot",
|
| 21 |
-
description="RS Chatbot - Remember AI makes mistakes.\n
|
| 22 |
)
|
| 23 |
|
| 24 |
demo.launch()
|
|
|
|
| 1 |
import gradio as gr
|
| 2 |
import os
|
| 3 |
|
| 4 |
+
from smolagents import OpenAIServerModel
|
| 5 |
from smolagents import InferenceClientModel, CodeAgent, MCPClient
|
| 6 |
|
| 7 |
|
|
|
|
| 11 |
)
|
| 12 |
tools = mcp_client.get_tools()
|
| 13 |
|
| 14 |
+
# model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
|
| 15 |
+
model = OpenAIServerModel(
|
| 16 |
+
model_id="openai/gpt-oss-120b",
|
| 17 |
+
api_base=os.getenv("LLM_BASSE_URL") #"https://openrouter.ai/api/v1",
|
| 18 |
+
api_key=os.getenv("LLM_API_TOKEN"),
|
| 19 |
+
)
|
| 20 |
# NOTE(review): in the complete file this section appears to sit inside a
# `try:` block opened above this chunk — confirm indentation against app.py.

# Hand every discovered MCP tool to a code-writing agent; the extra imports
# are the modules the agent's generated code is allowed to use.
agent = CodeAgent(
    tools=[*tools],
    model=model,
    additional_authorized_imports=["json", "ast", "urllib", "base64"],
)


def _respond(message, history):
    """Run the agent on the user's message and return its reply as plain text."""
    return str(agent.run(message))


# Minimal chat UI over the agent; history is managed by Gradio itself.
demo = gr.ChatInterface(
    fn=_respond,
    type="messages",
    title="RSWarriors - RS chatbot",
    description="RS Chatbot - Remember AI makes mistakes.\n!!!DO NOT TAKE INVESTMENT DECISIONS BASED ON THIS OUTPUT!!!",
)

demo.launch()
|