DronA23 committed on
Commit
13f0358
Β·
verified Β·
1 Parent(s): 96677e2

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -0
app.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

import gradio as gr
import torch
from transformers import pipeline

# ─── 1) SET UP PIPELINES ──────────────────────────────────────────────────────
# transformers pipelines take -1 for CPU, or a CUDA device index.
# Hoisted above BOTH pipelines so sentiment analysis also uses the GPU when
# available (originally it was computed after sentiment_pipe and never applied).
device = 0 if torch.cuda.is_available() else -1

# Sentiment analysis (local).
# NOTE(review): no model is pinned here — transformers falls back to its
# default SST-2 checkpoint and emits a warning; pin a model id if
# reproducibility matters.
sentiment_pipe = pipeline("sentiment-analysis", device=device)

# Text generation chat (local distilled GPT-2).
chat_pipe = pipeline(
    "text-generation",
    model="distilgpt2",
    tokenizer="distilgpt2",
    device=device,
    max_new_tokens=100,  # cap the length of each continuation
    do_sample=True,      # sample instead of greedy decoding
    temperature=0.7,     # moderately conservative randomness
)
21
+
22
def respond(message, chat_history):
    """Handle one chat turn.

    - If the user message starts with "Sentiment:" (case-insensitive), run
      sentiment analysis on the remainder and report label + score.
    - Otherwise fall back to GPT-2 text continuation.

    Args:
        message: raw text from the input textbox.
        chat_history: list of (user_message, bot_reply) tuples as used by
            gr.Chatbot; mutated in place.

    Returns:
        The updated chat_history (same list object), for the Chatbot output.
    """
    if message.lower().startswith("sentiment:"):
        text = message[len("sentiment:"):].strip()
        result = sentiment_pipe(text)[0]
        label = result["label"]
        score = result["score"]
        reply = f"🔍 Sentiment: **{label}** (score: {score:.3f})"
    else:
        # The text-generation pipeline returns
        # [{'generated_text': prompt + continuation}] — strip the echoed
        # prompt so the bot does not repeat the user's message back.
        generated = chat_pipe(message)[0]["generated_text"]
        reply = generated[len(message):].strip() or generated.strip()
    # gr.Chatbot renders each history item as a (user_message, bot_reply)
    # pair. The original appended ("You", message) and ("Bot", reply), which
    # displayed the literal strings "You"/"Bot" as messages.
    chat_history.append((message, reply))
    return chat_history
41
+
42
# ─── 2) BUILD THE UI ───────────────────────────────────────────────────────────
with gr.Blocks() as demo:
    # Fixed mojibake in the user-facing header (was "πŸ˜Š", UTF-8 bytes of 😊
    # decoded as Latin-1).
    gr.Markdown(
        "## 😊 Sentiment-&-Chat Bot\n"
        "_Type `Sentiment: <your text>` to analyze sentiment, or just chat!_"
    )
    chat = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type here…", show_label=False)

    # Pressing Enter in the textbox calls respond(); the Chatbot is both an
    # input (current history) and the sole output (updated history).
    msg.submit(respond, [msg, chat], [chat])

# queue() creates the `/api/predict` endpoint Spaces needs
demo = demo.queue()
53
+
54
+ if __name__ == "__main__":
55
+ port = int(os.environ.get("PORT", 7860))
56
+ demo.launch(server_name="0.0.0.0", server_port=port)