IniNLP247 committed on
Commit
6c92b85
·
verified ·
1 Parent(s): 6911cbd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +140 -53
app.py CHANGED
@@ -1,70 +1,157 @@
1
# NOTE(review): this is the pre-commit version of app.py that commit 6c92b85
# replaced (the removed lines of the diff), reproduced verbatim with comments.
import gradio as gr
from huggingface_hub import InferenceClient


def respond(
    message,
    history: list[dict[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    hf_token: gr.OAuthToken,
):
    """
    Stream a chat completion from the Hugging Face Inference API.

    For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
    """
    # The logged-in user's OAuth token authenticates the hosted inference call.
    client = InferenceClient(token=hf_token.token, model="openai/gpt-oss-20b")

    # Conversation is sent as OpenAI-style role/content message dicts:
    # system prompt first, then prior turns, then the new user message.
    messages = [{"role": "system", "content": system_message}]

    messages.extend(history)

    messages.append({"role": "user", "content": message})

    response = ""

    # Stream chunks and yield the accumulated text so the UI updates live.
    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        choices = message.choices
        token = ""
        # Some stream chunks carry no delta content (e.g. role-only frames).
        if len(choices) and choices[0].delta.content:
            token = choices[0].delta.content

        response += token
        yield response


"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
chatbot = gr.ChatInterface(
    respond,
    type="messages",
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)

# Wrap the chat interface in Blocks so a login button (needed for the OAuth
# token consumed by `respond`) can share the page.
with gr.Blocks() as demo:
    with gr.Sidebar():
        gr.LoginButton()
    chatbot.render()


if __name__ == "__main__":
    demo.launch()
 
1
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import torch

# Model setup: fine-tuned Llama-3 checkpoint hosted on the Hub.
model_name = "IniNLP247/Kenko-mental-health-llama-3-model"

print("🔄 Loading Kenko Mental Health Model...")

# Load tokenizer and model
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Llama tokenizers ship without a pad token; reuse EOS so the pipeline can pad.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token

# NOTE(review): float16 + device_map="auto" presumes accelerator hardware is
# available; on a CPU-only Space this may be slow or unsupported — confirm.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # Changed from 'dtype' to 'torch_dtype'
    device_map="auto"
)

# Create pipeline for easier inference.
# return_full_text=False makes pipe() return only the generated continuation,
# not the prompt — chat_with_kenko relies on this.
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    return_full_text=False,
    max_new_tokens=300,
    temperature=0.7,
    top_p=0.9,
    repetition_penalty=1.1,
    pad_token_id=tokenizer.pad_token_id
)

print("✅ Model loaded successfully!")
35
 
36
def chat_with_kenko(message, history):
    """Generate Kenko's reply to ``message`` given the prior chat ``history``.

    Args:
        message: The user's latest message text.
        history: List of ``(user_msg, bot_msg)`` tuples from the Gradio chatbot.

    Returns:
        The model's response text (stripped), or an apology string embedding
        the error message if generation fails.
    """
    # Build conversation context from the prior turns.
    conversation = "".join(
        f"User: {user_msg}\nKenko: {bot_msg}\n\n" for user_msg, bot_msg in history
    )

    # Create prompt in the instruction format the fine-tuned model expects.
    prompt = f"""### Instruction:
You are Kenko, a compassionate mental health therapist. Provide empathetic, helpful, and professional responses to support the user's mental wellbeing.

{conversation}User: {message}

### Response:
"""

    # Generate response
    try:
        response = pipe(prompt)[0]['generated_text']
        # Bug fix: with return_full_text=False the model can run past its
        # answer and start a fresh "### Instruction:" block; keep only the
        # text before the first "###" marker so that never leaks to the user.
        response = response.split("###", 1)[0]
        return response.strip()
    except Exception as e:
        # Best-effort UI: surface the failure as a chat message instead of
        # crashing the Gradio handler.
        return f"I'm sorry, I'm having trouble processing your message right now. Error: {str(e)}"
58
+
59
# Custom CSS for a calming interface
# (applies a neutral sans-serif font stack to the whole Gradio page;
# passed to gr.Blocks via its `css=` argument)
css = """
.gradio-container {
    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
}
"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
 
66
# Create Gradio interface
with gr.Blocks(
    title="Kenko - Mental Health Assistant",
    theme=gr.themes.Soft(),
    css=css
) as demo:

    # Page header with the assistant's disclaimer.
    gr.Markdown("""
    # 🧠💚 Kenko - Your Mental Health Assistant

    Welcome! I'm Kenko, an AI mental health therapist here to provide support, guidance, and a listening ear.
    Feel free to share what's on your mind - I'm here to help you through whatever you're experiencing.

    *Please remember: I'm an AI assistant and cannot replace professional mental health care. In crisis situations, please contact emergency services or a mental health professional.*
    """)

    # Chat display using (user, bot) tuple history — the same format
    # chat_with_kenko iterates over.
    # NOTE(review): tuple-style Chatbot history is deprecated in newer Gradio
    # releases in favor of type="messages" — confirm the pinned Gradio version.
    chatbot = gr.Chatbot(
        height=500,
        show_label=False,
        container=True,
        bubble_full_width=False,
        avatar_images=("👤", "🧠")
    )

    # Input row: textbox plus explicit send button.
    with gr.Row():
        msg = gr.Textbox(
            placeholder="Share what's on your mind... (press Enter to send)",
            container=False,
            scale=7,
            lines=2,
            max_lines=4
        )
        send_btn = gr.Button("Send 💬", scale=1, variant="primary")

    with gr.Row():
        clear_btn = gr.Button("🗑️ Clear Chat", scale=1, variant="secondary")
        examples_btn = gr.Button("💡 Example Topics", scale=1, variant="secondary")

    # Example prompts — hidden until the user clicks "Example Topics".
    with gr.Row(visible=False) as examples_row:
        gr.Examples(
            examples=[
                "I've been feeling really anxious lately and I don't know why.",
                "I'm having trouble sleeping and my mind won't stop racing.",
                "I feel overwhelmed with work and personal responsibilities.",
                "I'm struggling with low self-esteem and negative thoughts.",
                "I'm having difficulty in my relationships.",
                "I feel lonely and isolated.",
                "I'm dealing with grief and loss.",
                "I want to build better coping strategies."
            ],
            inputs=msg,
            label="Try these conversation starters:"
        )

    # Static informational panel, collapsed by default.
    with gr.Accordion("ℹ️ About Kenko", open=False):
        gr.Markdown("""
        **What I can help with:**
        - Active listening and emotional support
        - Coping strategies and stress management techniques
        - Guidance on anxiety, depression, and mood concerns
        - Relationship and communication advice
        - Mindfulness and self-care suggestions
        - Building healthy habits and routines

        **Important Notes:**
        - I'm an AI trained to provide mental health support
        - For immediate crisis support, contact emergency services (911) or crisis hotlines
        - Consider professional therapy for ongoing mental health needs
        - I don't diagnose conditions or prescribe medications

        **Privacy:** Your conversations are not stored or shared.
        """)

    def respond(message, chat_history):
        """Event handler: append the model's reply and clear the textbox."""
        # Ignore blank submissions; just clear the textbox.
        if not message.strip():
            return "", chat_history

        bot_response = chat_with_kenko(message, chat_history)
        chat_history.append((message, bot_response))
        return "", chat_history

    def toggle_examples():
        # NOTE(review): despite the name, this only ever SHOWS the row —
        # there is no path that hides it again once revealed.
        return gr.Row(visible=True)

    # Wire both Enter-to-submit and the Send button to the same handler.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    send_btn.click(respond, [msg, chatbot], [msg, chatbot])
    clear_btn.click(lambda: [], outputs=chatbot)
    examples_btn.click(toggle_examples, outputs=examples_row)
155
 
156
# Script entry point: launch the Gradio app when run directly.
if __name__ == "__main__":
    demo.launch()  # Simplified for HF Spaces - no need for server config