File size: 2,431 Bytes
8524fc2
9bccd16
b1dce5c
ec33f39
 
8524fc2
9bccd16
e6bd6b2
 
 
 
 
043d1d9
9bccd16
ec33f39
 
 
 
9bccd16
ec33f39
9bccd16
ec33f39
b1dce5c
 
2f3cad9
6beecb1
b1dce5c
 
ec33f39
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9bccd16
ec33f39
9bccd16
 
b1dce5c
9bccd16
ec33f39
8524fc2
ec33f39
9bccd16
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
import gradio as gr
import os
from smolagents import OpenAIServerModel
from smolagents import CodeAgent, MCPClient
from smolagents.gradio_ui import stream_to_gradio

# Gradio chat app: a smolagents CodeAgent backed by an OpenAI-compatible LLM,
# with its tools discovered from a remote MCP (Model Context Protocol) server.
mcp_client = None  # pre-bind so the finally-clause guard below never hits an unbound name
try:
    # System prompt pinning the agent to the RS (Relative Strength) domain and
    # instructing it to refuse rather than fabricate answers.
    system_prompt = """You are an expert of Relative Strength (RS) investing style, as taught by Premal Sir. 
    You reply with facts on RS methodology, taking help from tools accessible with you.
    There can be situations when a clear answer is not revealed from the tool searches. In that case reply as: 
    \"No Clear Answer For The Request, Please Ask in RSWarriors Forum!!!\" and terminate the processing. You will
    never make up or assume."""
    
    # Connect to the remote MCP server that exposes the RS-studies tools.
    mcp_client = MCPClient(
        {
            "url": "https://bharatcoder-rs-studies.hf.space/gradio_api/mcp/",
            "transport": "streamable-http",
        }
    )
    
    tools = mcp_client.get_tools()
    
    # model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    # OpenAI-compatible endpoint; all configuration comes from the environment.
    # Prefer the correctly spelled LLM_BASE_URL but fall back to the legacy
    # misspelled LLM_BASSE_URL so existing deployments keep working.
    model = OpenAIServerModel(
        model_id=os.getenv("LLM_MODEL_ID"),
        api_base=os.getenv("LLM_BASE_URL") or os.getenv("LLM_BASSE_URL"),
        api_key=os.getenv("LLM_API_TOKEN"),
    )
    
    # Enable streaming in the agent so partial output reaches the UI as it is produced.
    agent = CodeAgent(
        tools=[*tools],
        model=model,
        instructions=system_prompt,
        additional_authorized_imports=["json", "ast", "urllib", "base64"],
        stream_outputs=True,  # Enable streaming
    )
    
    # Streaming function for ChatInterface
    def chat_with_agent(message, history):
        """Yield streaming responses from the agent for gr.ChatInterface.

        Parameters
        ----------
        message : str
            The user's latest chat message; passed to the agent as its task.
        history : list
            Prior chat turns supplied by Gradio (unused here — the agent is
            given only the current message).

        Yields
        ------
        The ``content`` of each streamed ChatMessage, or ``str(msg)`` for any
        payload without a ``content`` attribute.
        """
        try:
            # stream_to_gradio yields ChatMessage objects incrementally.
            for gradio_message in stream_to_gradio(
                agent=agent,
                task=message,
            ):
                # Surface the message content; fall back to str() for anything else.
                if hasattr(gradio_message, 'content'):
                    yield gradio_message.content
                else:
                    yield str(gradio_message)
        except Exception as e:
            # UI boundary: report the failure in-chat instead of crashing the app.
            yield f"Error: {str(e)}"
    
    demo = gr.ChatInterface(
        fn=chat_with_agent,
        type="messages",
        title="RSWarriors - RS chatbot",
        description="RS Chatbot - Remember AI makes mistakes.\n!!!DO NOT TAKE INVESTMENT DECISIONS BASED ON THIS OUTPUT!!!",
    )
    
    demo.launch()
    
finally:
    # Guard: if MCPClient construction (or anything before it) raised, the
    # original code died here with a NameError that masked the real exception.
    if mcp_client is not None:
        mcp_client.disconnect()