import gradio as gr
from huggingface_hub import InferenceClient
from sentence_transformers import SentenceTransformer, util
from transformers import pipeline
# Models: two chat backends to compare.
# Zephyr-7b runs remotely via the HF Inference API; GPT-2 runs locally
# through a transformers pipeline (downloads weights on first use).
chat_model_zephyr = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
chat_model_gpt2 = pipeline("text-generation", model="gpt2", max_new_tokens=100)
# Semantic similarity: sentence embeddings used to score how close the two answers are.
similarity_model = SentenceTransformer("all-MiniLM-L6-v2")
def get_zephyr_response(question):
    """Ask Zephyr-7b the given question via the HF Inference API.

    Returns the assistant's reply as a stripped string.
    """
    chat_history = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": question},
    ]
    completion = chat_model_zephyr.chat_completion(
        chat_history,
        max_tokens=256,
        temperature=0.7,
        top_p=0.95,
    )
    reply = completion.choices[0].message.content
    return reply.strip()
def get_gpt2_response(question):
    """Generate a GPT-2 continuation for the question.

    Bug fix: the transformers ``text-generation`` pipeline returns the
    prompt concatenated with the continuation by default, so the raw
    ``generated_text`` echoed the question back. That echo inflated the
    semantic-similarity score against the other model's answer. Strip the
    prompt prefix so only GPT-2's own continuation is returned.
    """
    generated = chat_model_gpt2(question)[0]["generated_text"]
    # Drop the echoed prompt; keep only the model's continuation.
    if generated.startswith(question):
        generated = generated[len(question):]
    return generated.strip()
def compare_answers(answer1, answer2):
    """Score the semantic similarity of two answers.

    Embeds both strings with the sentence-transformer model and returns
    their cosine similarity, rounded to 3 decimal places.
    """
    embeddings = [
        similarity_model.encode(text, convert_to_tensor=True)
        for text in (answer1, answer2)
    ]
    score = util.cos_sim(embeddings[0], embeddings[1]).item()
    return round(score, 3)
def respond(question):
    """Gradio handler: answer the question with both models and compare.

    Returns a single formatted string with each model's answer followed
    by their semantic-similarity score.
    """
    zephyr_answer = get_zephyr_response(question)
    gpt2_answer = get_gpt2_response(question)
    score = compare_answers(zephyr_answer, gpt2_answer)
    sections = [
        f"🧠 Zephyr-7b:\n{zephyr_answer}\n\n",
        f"🤖 GPT-2:\n{gpt2_answer}\n\n",
        f"🔍 Similaridade Semântica: **{score}**",
    ]
    return "".join(sections)
# UI: one question box, a button, and a combined output area.
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 Comparador de Respostas (sem contexto)\nDigite uma pergunta e veja as respostas de dois modelos.")
    question_box = gr.Textbox(label="Pergunta")
    compare_btn = gr.Button("Comparar Respostas")
    result_box = gr.Textbox(label="Respostas e Similaridade", lines=15)
    # Wire the button to the handler: one input, one output.
    compare_btn.click(respond, inputs=question_box, outputs=result_box)

if __name__ == "__main__":
    demo.launch()
|