anktechsol commited on
Commit
bd1ec04
·
verified ·
1 Parent(s): f690793

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +100 -0
app.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
from huggingface_hub import InferenceClient
import os

# Initialize the Inference Client.
# NOTE(review): if the HF_TOKEN env var is unset this falls back to
# anonymous access, which is heavily rate-limited — confirm the Space
# has the secret configured.
client = InferenceClient(token=os.environ.get("HF_TOKEN"))

# List of available models (you can add more)
AVAILABLE_MODELS = [
    "meta-llama/Llama-3.3-70B-Instruct",
    "Qwen/Qwen2.5-Coder-32B-Instruct",
    "mistralai/Mistral-7B-Instruct-v0.3",
    "google/gemma-2-9b-it",
    "microsoft/Phi-3.5-mini-instruct",
    "HuggingFaceH4/zephyr-7b-beta",
    "tiiuae/falcon-7b-instruct",
    "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
]
19
+
20
def chat_with_models(message, history, selected_models):
    """Send *message* to every selected model and append the combined replies.

    Args:
        message: The user's chat message (plain text).
        history: Chatbot history as a list of (user, assistant) tuples.
        selected_models: Model repo ids chosen in the checkbox group.

    Returns:
        A new history list (inputs are not mutated). Validation failures
        are reported inline as an assistant message rather than raised.
    """
    # Ignore empty/whitespace-only submissions instead of burning an
    # API call per selected model on a blank prompt.
    if not message or not message.strip():
        return history

    if not selected_models:
        return history + [(message, "⚠️ Please select at least one model first!")]

    if len(selected_models) > 5:
        return history + [(message, "⚠️ Please select maximum 5 models only!")]

    responses = []
    for model_name in selected_models:
        try:
            response = client.chat_completion(
                model=model_name,
                messages=[{"role": "user", "content": message}],
                max_tokens=500,
            )
            model_response = response.choices[0].message.content
            responses.append(f"**{model_name}:**\n{model_response}\n")
        except Exception as e:
            # Surface per-model failures inline so one unavailable or
            # rate-limited model doesn't break the whole comparison.
            responses.append(f"**{model_name}:**\n❌ Error: {str(e)}\n")

    combined_response = "\n---\n\n".join(responses)
    return history + [(message, combined_response)]
47
+
48
# --- Gradio UI --------------------------------------------------------------
with gr.Blocks(title="Anki-Chat: Multi-Model Chat", theme=gr.themes.Soft()) as demo:
    gr.Markdown(
        """
        # 🤖 Anki-Chat: Multi-Model Chat Interface

        Select up to 5 AI models and chat with them simultaneously to compare their responses!
        """
    )

    with gr.Row():
        # Left column: model picker (limit is enforced in chat_with_models).
        with gr.Column(scale=1):
            gr.Markdown("### Select Models (Max 5)")
            model_selector = gr.CheckboxGroup(
                choices=AVAILABLE_MODELS,
                label="Available Models",
                value=[AVAILABLE_MODELS[0]],  # pre-select the first model
            )

        # Right column: the shared conversation view.
        with gr.Column(scale=2):
            chat_display = gr.Chatbot(
                label="Chat",
                height=500,
            )

    with gr.Row():
        user_input = gr.Textbox(
            label="Your Message",
            placeholder="Type your message here...",
            scale=4,
        )
        submit_button = gr.Button("Send", scale=1, variant="primary")

    reset_button = gr.Button("Clear Chat")

    # Pressing Enter and clicking "Send" run the same pipeline:
    # dispatch the message, then blank out the input box.
    for trigger in (user_input.submit, submit_button.click):
        trigger(
            chat_with_models, [user_input, chat_display, model_selector], [chat_display]
        ).then(lambda: "", None, [user_input])

    reset_button.click(lambda: [], None, [chat_display])

    gr.Markdown(
        """
        ---
        **Note:** This app uses Hugging Face's Inference API. Some models may take longer to respond or may be rate-limited.
        """
    )
98
+
99
# Launch the app only when executed as a script (not when imported).
if __name__ == "__main__":
    demo.launch()