Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| from transformers import pipeline | |
| from sentence_transformers import SentenceTransformer, util | |
# Model loading: two instruction-tuned text-generation pipelines to compare,
# plus a sentence-embedding model used to score how similar their outputs are.
# NOTE(review): both 7B models are loaded eagerly at module import time —
# presumably acceptable for a Spaces deployment; confirm the memory budget.
model_a = pipeline("text-generation", model="tiiuae/falcon-7b-instruct")
model_b = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.1")
similarity_model = SentenceTransformer("sentence-transformers/paraphrase-MiniLM-L6-v2")
def comparar_respostas(prompt):
    """Generate a response from each model and score their semantic similarity.

    Args:
        prompt: User prompt forwarded verbatim to both text-generation
            pipelines.

    Returns:
        Tuple ``(resp_a, resp_b, similarity)`` — the two stripped generated
        texts and the cosine similarity of their sentence embeddings,
        formatted to 4 decimal places as a string.
    """
    # return_full_text=False keeps the echoed prompt out of each output.
    # By default the text-generation pipeline prepends the prompt to
    # "generated_text", so both "responses" would share an identical prefix,
    # which inflates the cosine-similarity score and clutters the UI.
    resp_a = model_a(prompt, max_new_tokens=80, return_full_text=False)[0]["generated_text"]
    resp_b = model_b(prompt, max_new_tokens=80, return_full_text=False)[0]["generated_text"]
    # Embed only the generations, then compare them with cosine similarity.
    emb_a = similarity_model.encode(resp_a, convert_to_tensor=True)
    emb_b = similarity_model.encode(resp_b, convert_to_tensor=True)
    similaridade = util.cos_sim(emb_a, emb_b).item()
    return resp_a.strip(), resp_b.strip(), f"{similaridade:.4f}"
# Gradio UI: one prompt textbox in, three textboxes out (each model's answer
# plus the similarity score between them).
_saidas = [
    gr.Textbox(label="Resposta do Modelo A (Falcon)"),
    gr.Textbox(label="Resposta do Modelo B (Mistral)"),
    gr.Textbox(label="Similaridade entre as respostas"),
]
interface = gr.Interface(
    fn=comparar_respostas,
    inputs=gr.Textbox(label="Digite seu prompt"),
    outputs=_saidas,
    title="Comparador de Modelos LLM - Hugging Face",
)
interface.launch()