bharatcoder commited on
Commit
ec33f39
·
verified ·
1 Parent(s): e454a25

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -11
app.py CHANGED
@@ -1,34 +1,65 @@
1
  import gradio as gr
2
  import os
3
-
4
  from smolagents import OpenAIServerModel
5
- from smolagents import InferenceClientModel, CodeAgent, MCPClient
6
-
7
 
8
  try:
9
- system_prompt = """You are an expert of Relative Strength (RS) investing style, as taught by Premal Sir.
10
- You reply with facts on RS methodology, taking help from tools accessible with you."""
11
 
12
  mcp_client = MCPClient(
13
- {"url": "https://bharatcoder-rs-studies.hf.space/gradio_api/mcp/", "transport": "streamable-http",}
 
 
 
14
  )
 
15
  tools = mcp_client.get_tools()
16
-
17
  # model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
18
  model = OpenAIServerModel(
19
  model_id=os.getenv("LLM_MODEL_ID"),
20
  api_base=os.getenv("LLM_BASSE_URL"),
21
  api_key=os.getenv("LLM_API_TOKEN"),
22
  )
23
- agent = CodeAgent(tools=[*tools], model=model, instructions=system_prompt, additional_authorized_imports=["json", "ast", "urllib", "base64"])
24
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  demo = gr.ChatInterface(
26
- fn=lambda message, history: str(agent.run(message)),
27
  type="messages",
28
  title="RSWarriors - RS chatbot",
29
  description="RS Chatbot - Remember AI makes mistakes.\n!!!DO NOT TAKE INVESTMENT DECISIONS BASED ON THIS OUTPUT!!!",
30
  )
31
-
32
  demo.launch()
 
33
  finally:
34
  mcp_client.disconnect()
 
1
"""RSWarriors chat app.

A Gradio ChatInterface backed by a smolagents CodeAgent whose tools are
fetched from a remote MCP server, with token-streamed responses.
"""

import gradio as gr
import os

from smolagents import CodeAgent, MCPClient, OpenAIServerModel
from smolagents.gradio_ui import stream_to_gradio

# Sentinel so the `finally` block below can tell whether the client was ever
# created; without this, a failure inside MCPClient(...) would raise a
# NameError in `finally` and mask the original exception.
mcp_client = None

try:
    system_prompt = """You are an expert of Relative Strength (RS) investing style, as taught by Premal Sir. You reply with facts on RS methodology, taking help from tools accessible with you."""

    mcp_client = MCPClient(
        {
            "url": "https://bharatcoder-rs-studies.hf.space/gradio_api/mcp/",
            "transport": "streamable-http",
        }
    )
    tools = mcp_client.get_tools()

    # model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    model = OpenAIServerModel(
        model_id=os.getenv("LLM_MODEL_ID"),
        # Prefer the correctly spelled env var; fall back to the historical
        # misspelling "LLM_BASSE_URL" so existing deployments keep working.
        api_base=os.getenv("LLM_BASE_URL") or os.getenv("LLM_BASSE_URL"),
        api_key=os.getenv("LLM_API_TOKEN"),
    )

    # stream_outputs=True makes the agent emit partial outputs, which
    # stream_to_gradio turns into incremental chat messages.
    agent = CodeAgent(
        tools=[*tools],
        model=model,
        instructions=system_prompt,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
        stream_outputs=True,
    )

    def chat_with_agent(message, history):
        """Yield streaming responses from the agent for gr.ChatInterface.

        Args:
            message: The user's latest chat message.
            history: Prior chat turns supplied by ChatInterface (unused here;
                each call starts a fresh agent task).

        Yields:
            str: Incremental chunks of the agent's reply, or an error string.
        """
        try:
            for gradio_message in stream_to_gradio(
                agent=agent,
                task=message,
            ):
                # stream_to_gradio may yield ChatMessage objects or plain
                # values; extract displayable text either way.
                if hasattr(gradio_message, "content"):
                    yield gradio_message.content
                else:
                    yield str(gradio_message)
        except Exception as e:
            # Surface errors in-chat rather than crashing the UI worker.
            yield f"Error: {str(e)}"

    demo = gr.ChatInterface(
        fn=chat_with_agent,
        type="messages",
        title="RSWarriors - RS chatbot",
        description="RS Chatbot - Remember AI makes mistakes.\n!!!DO NOT TAKE INVESTMENT DECISIONS BASED ON THIS OUTPUT!!!",
    )

    demo.launch()
finally:
    # Disconnect only if the client was actually created (see sentinel above).
    if mcp_client is not None:
        mcp_client.disconnect()