Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| from transformers import pipeline | |
# Registry of Hugging Face checkpoints: registry key -> checkpoint + pipeline task.
models = {
    "text_generator": {"model": "EleutherAI/gpt-neo-1.3B", "task": "text-generation"},
    "sentiment": {"model": "cardiffnlp/twitter-roberta-base-sentiment-latest", "task": "sentiment-analysis"},
    "dialogue": {"model": "microsoft/DialoGPT-medium", "task": "text-generation"},
    "summarizer": {"model": "sshleifer/distilbart-cnn-12-6", "task": "summarization"},
    "ner": {"model": "dslim/bert-base-NER", "task": "ner"},
}

# Eagerly build one transformers pipeline per registry entry, keyed by the same name.
# NOTE(review): this downloads/loads all five models at import time — slow cold start.
pipelines = {name: pipeline(spec["task"], model=spec["model"]) for name, spec in models.items()}
def generate_text(prompt, max_length):
    """Generate one continuation of *prompt* with GPT-Neo.

    *max_length* arrives as a float from the Gradio slider, so it is
    coerced to int before being handed to the pipeline.
    """
    outputs = pipelines["text_generator"](
        prompt,
        max_length=int(max_length),
        num_return_sequences=1,
    )
    first = outputs[0]
    return first["generated_text"]
def analyze_sentiment(text):
    """Classify *text*; return a list of {"label", "score"} dicts,
    with the score pre-formatted as a two-decimal string for the JSON view.
    """
    def _entry(pred):
        return {"label": pred["label"], "score": f"{pred['score']:.2f}"}

    return [_entry(pred) for pred in pipelines["sentiment"](text)]
def converse(message, state):
    """Run one dialogue turn with DialoGPT-medium.

    Parameters
    ----------
    message : str
        The new user utterance.
    state : list[list[str]] | None
        Accumulated [user, bot] turn pairs; None on the first call
        (gr.State() starts empty).

    Returns
    -------
    tuple
        (pairs for the gr.Chatbot display, same pairs for the gr.State).

    Fixes vs. the original:
    * The callback's first output is wired to a gr.Chatbot, which expects a
      list of [user, bot] pairs — the original returned a bare string, so
      the chat never rendered. State is now kept as pairs.
    * DialoGPT's documented usage terminates every utterance with
      eos_token; the original prepended EOS to the message and never
      terminated it, degrading generation quality.
    """
    tokenizer = pipelines["dialogue"].tokenizer
    model = pipelines["dialogue"].model
    pairs = list(state or [])

    # Rebuild the token context: each past utterance terminated by EOS,
    # then the new message, also EOS-terminated so the model replies.
    context = "".join(
        user + tokenizer.eos_token + bot + tokenizer.eos_token
        for user, bot in pairs
    )
    input_ids = tokenizer.encode(
        context + message + tokenizer.eos_token, return_tensors="pt"
    )
    output_ids = model.generate(
        input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id
    )
    # Decode only the newly generated tokens (everything past the prompt).
    bot_response = tokenizer.decode(
        output_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True
    )

    pairs.append([message, bot_response])
    return pairs, pairs
def summarize_text(text, max_length, min_length):
    """Summarize *text* with DistilBART.

    Both length bounds come from Gradio sliders as floats and are coerced
    to int; sampling is disabled for deterministic summaries.
    """
    summary = pipelines["summarizer"](
        text,
        max_length=int(max_length),
        min_length=int(min_length),
        do_sample=False,
    )[0]
    return summary["summary_text"]
def ner_analysis(text):
    """Run named-entity recognition over *text*.

    Returns one "GROUP: word (score)" line per aggregated entity,
    joined with newlines for the plain-text output box.
    """
    entities = pipelines["ner"](text, aggregation_strategy="max")
    lines = []
    for ent in entities:
        lines.append(f"{ent['entity_group']}: {ent['word']} ({ent['score']:.2f})")
    return "\n".join(lines)
# ---- Gradio UI: one tab per model ----------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# AI Art Main Interface")
    gr.Markdown("Use various AI models below")

    # Tab 1: free-form text generation.
    with gr.Tab("Text Generator"):
        gr.Markdown("## Text Generator (EleutherAI/gpt-neo-1.3B)")
        prompt_in = gr.Textbox(label="Prompt")
        length_in = gr.Slider(10, 200, 50, label="Max Length")
        generate_btn = gr.Button()
        generated_out = gr.Textbox(label="Output")
        generate_btn.click(generate_text, [prompt_in, length_in], generated_out)

    # Tab 2: sentiment classification, rendered as JSON.
    with gr.Tab("Sentiment Analysis"):
        gr.Markdown("## Sentiment Analyzer (cardiffnlp/twitter-roberta-base-sentiment-latest)")
        sentiment_in = gr.Textbox(label="Text")
        sentiment_btn = gr.Button()
        sentiment_out = gr.JSON(label="Result")
        sentiment_btn.click(analyze_sentiment, sentiment_in, sentiment_out)

    # Tab 3: multi-turn chat; conversation history lives in a gr.State.
    with gr.Tab("Dialogue Bot"):
        gr.Markdown("## Dialogue Bot (microsoft/DialoGPT-medium)")
        history_state = gr.State()
        message_in = gr.Textbox(label="Message")
        send_btn = gr.Button()
        chat_display = gr.Chatbot()
        send_btn.click(converse, [message_in, history_state], [chat_display, history_state])

    # Tab 4: abstractive summarization with adjustable length bounds.
    with gr.Tab("Text Summarizer"):
        gr.Markdown("## Text Summarizer (sshleifer/distilbart-cnn-12-6)")
        article_in = gr.Textbox(lines=5, label="Text")
        max_len_in = gr.Slider(10, 200, 100, label="Max Length")
        min_len_in = gr.Slider(5, 50, 10, label="Min Length")
        summarize_btn = gr.Button()
        summary_out = gr.Textbox(label="Summary")
        summarize_btn.click(summarize_text, [article_in, max_len_in, min_len_in], summary_out)

    # Tab 5: named-entity recognition, plain-text output.
    with gr.Tab("Named Entity Recognition"):
        gr.Markdown("## Named Entity Recognition (dslim/bert-base-NER)")
        entity_in = gr.Textbox(lines=3, label="Text")
        entity_btn = gr.Button()
        entity_out = gr.Textbox(lines=5, label="Entities")
        entity_btn.click(ner_analysis, entity_in, entity_out)
if __name__ == "__main__":
    # Start the Gradio server only when executed as a script,
    # not when the module is imported.
    demo.launch()