| import gradio as gr |
| from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline |
|
|
| |
| |
| |
# Model setup: load the Guanaco-7B GPTQ checkpoint and wrap it in a
# text-generation pipeline used by responder() below.
# NOTE(review): GPTQ checkpoints normally require the auto-gptq/optimum
# integration (and typically device_map="auto" on GPU) to load correctly
# through AutoModelForCausalLM — confirm this works in the target env.
model_name = "TheBloke/guanaco-7B-GPTQ"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
chat_pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
|
|
| |
| |
| |
# Load the CV text once at startup; responder() embeds it into every prompt.
# Raises FileNotFoundError if cv.txt is missing from the working directory.
with open("cv.txt", "r", encoding="utf-8") as f:
    cv_text = f.read()
|
|
| |
| |
| |
def responder(pregunta):
    """Answer *pregunta* in Spanish, first person, using only the CV text.

    Builds a prompt that embeds the full CV and asks the model for a brief,
    professional, first-person answer.

    Args:
        pregunta: The user's question (free-form text).

    Returns:
        The generated answer as a string (the echoed prompt is excluded).
    """
    prompt = f"""
Usa solo la información de este CV para responder en español, primera persona, de forma breve y profesional.
Pregunta: {pregunta}
CV:
{cv_text}
"""
    # max_new_tokens (not max_length): max_length counts the prompt too, and
    # a prompt containing the entire CV easily exceeds 200 tokens, which would
    # leave no budget for the answer itself.
    # return_full_text=False strips the echoed prompt (instructions + CV) from
    # the output so only the model's answer reaches the chat window.
    salida = chat_pipe(
        prompt,
        max_new_tokens=200,
        do_sample=False,
        return_full_text=False,
    )
    return salida[0]["generated_text"]
|
|
| |
| |
| |
# --- Gradio UI ---------------------------------------------------------
with gr.Blocks() as demo:

    # Avatar image shown above the chat; not user-editable.
    gr.Image(value="marianobot.png", interactive=False)

    gr.Markdown("<h2>🤖 MarianoBot – ¡Descubre y pregunta todo lo que quieras!</h2>", elem_id="titulo")

    # Chat history as OpenAI-style message dicts, seeded with a greeting.
    chatbot = gr.Chatbot(type="messages", value=[{"role":"assistant","content":"¡Hola! ¡Pregúntame para conocer más sobre mí!"}])

    question_input = gr.Textbox(
        label="Escribe tu pregunta...",
        placeholder="Pregunta sobre mi experiencia, habilidades o trayectoria",
        lines=1
    )

    submit_button = gr.Button("Hacer pregunta", elem_id="boton-naranja")

    def enviar(input_text, history):
        """Append the user question and the model's answer to the chat.

        Returns the updated history and an empty string so the textbox
        is cleared after each submission.
        """
        # Guard: ignore blank submissions instead of sending an empty
        # question through the model pipeline.
        if not input_text or not input_text.strip():
            return history, ""
        answer = responder(input_text)
        history.append({"role": "user", "content": input_text})
        history.append({"role": "assistant", "content": answer})
        return history, ""

    # Pressing Enter in the textbox and clicking the button both route
    # through the same handler with the same inputs/outputs.
    question_input.submit(enviar, [question_input, chatbot], [chatbot, question_input])
    submit_button.click(enviar, [question_input, chatbot], [chatbot, question_input])


demo.launch()
|
|