import gradio as gr
from transformers import pipeline
from sentence_transformers import SentenceTransformer, util
# Load the models once at startup: two instruction-tuned text-generation
# pipelines whose answers will be compared, plus a sentence-embedding model
# used to score how similar those answers are.
model_a = pipeline("text-generation", model="tiiuae/falcon-7b-instruct")
model_b = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.1")
similarity_model = SentenceTransformer("sentence-transformers/paraphrase-MiniLM-L6-v2")
def comparar_respostas(prompt):
    """Run *prompt* through both models and measure how similar the answers are.

    Parameters
    ----------
    prompt : str
        User prompt forwarded verbatim to both text-generation pipelines.

    Returns
    -------
    tuple[str, str, str]
        (model A output, model B output, cosine similarity formatted "0.1234").
    """
    resp_a = model_a(prompt, max_new_tokens=80)[0]["generated_text"]
    resp_b = model_b(prompt, max_new_tokens=80)[0]["generated_text"]
    # "text-generation" pipelines prepend the prompt to "generated_text" by
    # default, so both responses share the prompt as a common prefix. Strip it
    # before embedding: otherwise the shared prefix artificially inflates the
    # cosine similarity between the two answers.
    gen_a = resp_a.removeprefix(prompt)
    gen_b = resp_b.removeprefix(prompt)
    emb_a = similarity_model.encode(gen_a, convert_to_tensor=True)
    emb_b = similarity_model.encode(gen_b, convert_to_tensor=True)
    similaridade = util.cos_sim(emb_a, emb_b).item()
    # Displayed responses keep the full pipeline output, as before.
    return resp_a.strip(), resp_b.strip(), f"{similaridade:.4f}"
# Gradio UI: one prompt textbox in; both model answers and their
# similarity score out.
saida_a = gr.Textbox(label="Resposta do Modelo A (Falcon)")
saida_b = gr.Textbox(label="Resposta do Modelo B (Mistral)")
saida_sim = gr.Textbox(label="Similaridade entre as respostas")

interface = gr.Interface(
    fn=comparar_respostas,
    inputs=gr.Textbox(label="Digite seu prompt"),
    outputs=[saida_a, saida_b, saida_sim],
    title="Comparador de Modelos LLM - Hugging Face",
)
interface.launch()