Spaces app source (the Space's build log reported a build error; the code below is the recovered `app.py`, with the table-wrapping artifacts removed).
| import os | |
| import gradio as gr | |
| import torch | |
| from transformers import pipeline | |
# ─── 1) SET UP PIPELINES ──────────────────────────────────────────────────────
# Sentiment analysis (local). The model is pinned explicitly — it is the same
# checkpoint transformers would pick by default, but pinning keeps Space builds
# reproducible and suppresses the "no model was supplied" warning.
sentiment_pipe = pipeline(
    "sentiment-analysis",
    model="distilbert-base-uncased-finetuned-sst-2-english",
)

# Text generation chat (local GPT-2). Use GPU 0 when available, else CPU (-1).
device = 0 if torch.cuda.is_available() else -1
chat_pipe = pipeline(
    "text-generation",
    model="distilgpt2",
    tokenizer="distilgpt2",
    device=device,
    max_new_tokens=100,  # cap the continuation length
    do_sample=True,      # sample rather than greedy-decode
    temperature=0.7,     # mild randomness
)
def respond(message, chat_history):
    """Route one user message to sentiment analysis or GPT-2 chat.

    - If the message starts with "Sentiment:" (case-insensitive), run
      sentiment analysis on the remainder of the text.
    - Otherwise fall back to GPT-2 chat continuation.

    Args:
        message: Raw text from the input textbox.
        chat_history: List of (user_text, bot_text) pairs, as rendered by
            gr.Chatbot. Mutated in place and also returned.

    Returns:
        The updated chat history.
    """
    if chat_history is None:  # first turn: Chatbot state may start out empty
        chat_history = []

    if message.lower().startswith("sentiment:"):
        text = message[len("sentiment:"):].strip()
        result = sentiment_pipe(text)[0]
        label = result["label"]
        score = result["score"]
        reply = f"π Sentiment: **{label}** (score: {score:.3f})"
    else:
        # GPT-2 continuation; the pipeline returns [{'generated_text': "..."}]
        out = chat_pipe(message)
        generated = out[0]["generated_text"]
        # distilgpt2 echoes the prompt at the start of generated_text; strip
        # it so the bot's reply contains only the new continuation.
        if generated.startswith(message):
            generated = generated[len(message):]
        reply = generated.strip()

    # BUG FIX: gr.Chatbot expects (user_message, bot_message) pairs. The old
    # code appended ("You", message) and ("Bot", reply) as two separate rows,
    # which rendered the labels in the user column and the text in the bot
    # column instead of a normal conversation.
    chat_history.append((message, reply))
    return chat_history
# ─── 2) BUILD THE UI ──────────────────────────────────────────────────────────
with gr.Blocks() as demo:
    gr.Markdown(
        "## π Sentiment-&-Chat Bot\n"
        "_Type `Sentiment: <your text>` to analyze sentiment, or just chat!_"
    )
    chat = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type hereβ¦", show_label=False)
    # On Enter, feed the textbox + current history to respond() and show the
    # returned history in the chatbot.
    msg.submit(fn=respond, inputs=[msg, chat], outputs=[chat])

# queue() creates the `/api/predict` endpoint Spaces needs
demo = demo.queue()
if __name__ == "__main__":
    # Spaces injects PORT into the environment; fall back to Gradio's
    # standard 7860 when running locally.
    server_port = int(os.environ.get("PORT", 7860))
    demo.launch(server_name="0.0.0.0", server_port=server_port)