msmaje committed on
Commit
0d043af
·
verified ·
1 Parent(s): 114b9d5

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +133 -0
app.py ADDED
@@ -0,0 +1,133 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
from typing import Iterator, Optional

import gradio as gr
import requests
5
+
6
class OllamaChat:
    """Minimal streaming client for the Ollama ``/api/chat`` endpoint."""

    def __init__(self, model_name: str = "llama2", base_url: str = "http://localhost:11434"):
        # Model tag as known to the local Ollama daemon.
        self.model_name = model_name
        # Base URL of the Ollama HTTP API (no trailing slash expected).
        self.base_url = base_url

    def generate_response(self, message: str, history: Optional[list] = None) -> Iterator[str]:
        """Stream the assistant's reply to *message*, yielding the growing text.

        Args:
            message: The latest user utterance.
            history: Optional list of ``(user, assistant)`` pairs from earlier
                turns; ``None`` means a fresh conversation.

        Yields:
            The accumulated response text after each streamed chunk. On any
            failure, a single string prefixed with ``"[error]"`` is yielded
            instead of raising, so UI callers can display it.
        """
        try:
            messages = [{"role": "system", "content": "You are a helpful assistant. Please respond to the user queries."}]
            # Replay prior turns so the model has conversational context.
            for human_msg, ai_msg in history or []:
                messages.append({"role": "user", "content": human_msg})
                if ai_msg:
                    messages.append({"role": "assistant", "content": ai_msg})
            messages.append({"role": "user", "content": message})

            response = requests.post(
                f"{self.base_url}/api/chat",
                json={"model": self.model_name, "messages": messages, "stream": True},
                stream=True,
                timeout=60,
            )
            if response.status_code != 200:
                yield f"[error] Failed to connect to Ollama server (Status: {response.status_code})"
                return

            full_response = ""
            # Ollama streams one JSON object per line.
            for line in response.iter_lines():
                if not line:
                    continue
                try:
                    # json.loads accepts bytes directly; no manual decode needed.
                    data = json.loads(line)
                except json.JSONDecodeError:
                    continue  # skip malformed/partial lines
                if 'message' in data and 'content' in data['message']:
                    full_response += data['message']['content']
                    yield full_response
                if data.get('done', False):
                    break

        except requests.exceptions.RequestException as e:
            yield f"[error] Connection issue: {str(e)}"
        except Exception as e:
            # Last-resort guard: the UI gets a message instead of a traceback.
            yield f"[error] Unexpected error: {str(e)}"
48
+
49
def create_chat_interface():
    """Build and return the Gradio Blocks UI wired to a local Ollama backend.

    Returns:
        A ``gr.Blocks`` demo; callers are expected to invoke ``.launch()``.
    """
    ollama_chat = OllamaChat()

    def respond(message, history_state):
        """Handle one chat turn.

        Returns updates for (chatbot, message textbox, history state).
        """
        if not message.strip():
            # Ignore empty submissions; leave every component unchanged.
            return gr.update(), gr.update(), history_state

        final_response = ""
        for partial in ollama_chat.generate_response(message, history_state):
            if partial.startswith("[error]"):
                # Surface the failure in the chat instead of silently dropping
                # it, and keep the user's text in the box so they can retry.
                history_state.append((message, partial))
                return gr.update(value=history_state), gr.update(value=message), history_state
            final_response = partial

        history_state.append((message, final_response))
        # Clear the textbox on success; chatbot mirrors the history list.
        return gr.update(value=history_state), gr.update(value=""), history_state

    with gr.Blocks(title="LangChain Demo with Llama2", theme=gr.themes.Soft(), css="""
    .gr-block {
        max-width: 960px;
        margin: auto;
    }
    @media (max-width: 768px) {
        #chatbot { height: 300px !important; }
    }
    """) as demo:
        gr.Markdown("# 🦙 LangChain Demo with Llama2 API")
        gr.Markdown("Chat with Llama2 using LangChain and Ollama")

        # Per-session conversation history as (user, assistant) pairs.
        history_state = gr.State([])

        chatbot = gr.Chatbot(
            value=[],
            elem_id="chatbot",
            elem_classes="chatbot-box",
            bubble_full_width=False,
            height=500
        )

        with gr.Column():
            with gr.Row():
                msg = gr.Textbox(
                    placeholder="Enter your message here...",
                    container=False,
                    scale=6,
                    label="Your Message"
                )
                submit_btn = gr.Button("Send", scale=2, variant="primary")
                clear_btn = gr.Button("Clear", scale=2, variant="secondary")

        # Events: Enter and the Send button trigger the same handler;
        # Clear resets chatbot, textbox, and history in one go.
        msg.submit(respond, [msg, history_state], [chatbot, msg, history_state])
        submit_btn.click(respond, [msg, history_state], [chatbot, msg, history_state])
        clear_btn.click(lambda: ([], "", []), outputs=[chatbot, msg, history_state])

        gr.Examples(
            examples=[
                "What is artificial intelligence?",
                "Explain machine learning in simple terms",
                "Write a short poem about technology",
                "What are the benefits of renewable energy?"
            ],
            inputs=msg
        )

        gr.Markdown("""
        ### Instructions:
        1. Type your question in the text box above
        2. Click 'Send' or press Enter to get a response
        3. Use 'Clear' to reset the conversation

        **Note**: This demo requires Ollama to be running with the Llama2 model installed.
        """)

    return demo
126
+
127
if __name__ == "__main__":
    # Build the UI and serve it on all interfaces at the conventional
    # Gradio port, without creating a public share link.
    app = create_chat_interface()
    app.launch(server_name="0.0.0.0", server_port=7860, share=False)