Spaces: Sleeping
from huggingface_hub import InferenceClient
import gradio as gr
import re

# Shared inference client for the hosted Mixtral instruct model;
# every generation request in this app goes through it.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
def format_prompt(message, history):
    """Build a Mixtral-instruct prompt from the chat history and the new message.

    Args:
        message: The latest user message.
        history: Iterable of (user_prompt, bot_response) pairs from prior turns.

    Returns:
        A single prompt string in Mixtral's ``[INST] ... [/INST]`` format,
        seeded with the assistant's Spanish welcome message.
    """
    # BUG FIX: the welcome text was mojibake (UTF-8 Spanish mis-decoded as GBK,
    # e.g. "Panam谩", "aqu铆"); restored to proper Spanish.
    WELCOME_MESSAGE = (
        "Hola, soy un asistente creado por el equipo de Desarrollo de PosperPanamá. "
        "Estoy aquí para ayudarte a generar consultas SQL y comandos de configuración. "
        "¿Cómo puedo ayudarte hoy?"
    )
    prompt = f"<s> {WELCOME_MESSAGE}</s>"
    # Replay each prior turn in the instruct format the model was trained on.
    for user_prompt, bot_response in history:
        prompt += f" [INST] {user_prompt} [/INST] {bot_response}"
    # Finally append the new user message, leaving the response open-ended.
    prompt += f" [INST] {message} [/INST]"
    return prompt
def replace_variables_with_placeholders(query):
    """Return *query* with single-quoted literal values swapped for '?' marks.

    Any ``= '...'`` comparison (e.g. in a WHERE clause) becomes ``= ?`` so the
    statement can be used as a parameterized query.
    """
    literal_pattern = re.compile(r"= '.*?'")
    return literal_pattern.sub("= ?", query)
def generate(prompt, history, temperature=0.2, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
    """Stream an answer for *prompt* from the Mixtral client, restricted to SQL
    and configuration topics.

    Args:
        prompt: Latest user message.
        history: Prior (user, bot) turns, forwarded to ``format_prompt``.
        temperature: Sampling temperature; clamped to a minimum of 0.01.
        max_new_tokens: Generation length cap.
        top_p: Nucleus-sampling threshold.
        repetition_penalty: Penalty applied to repeated tokens.

    Yields:
        A refusal message for off-topic prompts, otherwise the generated text
        reduced to its SQL lines with quoted literals replaced by '?'.
    """
    # Guard: only serve prompts about SQL or configuration.
    # BUG FIX: the keyword was mojibake ("configuraci贸n"), so the accented word
    # users actually type never matched; the unaccented spelling is also
    # accepted for robustness.
    keywords = ("sql", "consulta", "configuración", "configuracion", "comando")
    if not any(keyword in prompt.lower() for keyword in keywords):
        # BUG FIX: refusal text was mojibake; restored to proper Spanish.
        yield ("Lo siento, no puedo realizar esa tarea. Mi función es generar "
               "consultas SQL y comandos de configuración.")
        return
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2  # the inference API rejects temperatures at/near zero
    top_p = float(top_p)
    generate_kwargs = {
        "temperature": temperature,
        "max_new_tokens": max_new_tokens,
        "top_p": top_p,
        "repetition_penalty": repetition_penalty,
        "do_sample": True,
        "seed": 42,  # fixed seed for reproducible sampling
    }
    formatted_prompt = format_prompt(prompt, history)
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False
    )
    # Accumulate the streamed tokens into one string.
    output = ""
    for response in stream:
        output += response.token.text
    # Replace quoted literals with '?' placeholders.
    final_output = replace_variables_with_placeholders(output)
    # If the model produced a SELECT query, keep only the SQL lines.
    # BUG FIX: the original first truncated final_output to its first line
    # (dead code) and then filtered the raw `output`, silently discarding the
    # placeholder substitution done just above; filter `final_output` instead.
    if "SELECT" in final_output.upper() and "FROM" in final_output.upper():
        final_output = "\n".join(
            line for line in final_output.split("\n")
            if line.strip().startswith(("SELECT", "FROM", "JOIN", "WHERE"))
        )
    yield final_output.strip()
# Chat widget configuration: custom avatars, a copy-to-clipboard button,
# and like/dislike voting on responses.
mychatbot = gr.Chatbot(
    avatar_images=["./user.png", "./botm.png"],  # [user avatar, bot avatar]
    bubble_full_width=False,
    show_label=False,
    show_copy_button=True,
    likeable=True,
)
# Wire the streaming generate() function into a ready-made chat UI.
demo = gr.ChatInterface(
    fn=generate,
    chatbot=mychatbot,
    # BUG FIX: the title was mojibake ("configuraci贸n"); restored to Spanish.
    title="Asistente de configuración y consultas",
    retry_btn=None,  # hide the retry button
    undo_btn=None,   # hide the undo button
    theme="huggingface",
)

# queue() enables streaming/concurrent responses; show_api exposes the REST API.
demo.queue().launch(show_api=True)