Spaces:
Sleeping
Sleeping
se realiza cambio de nombre microsoft/Phi-3-mini-4k-instruct - CHATBOT
Browse files
app.py
CHANGED
|
@@ -9,14 +9,14 @@ hf_token = os.getenv("HF_API_TOKEN")
|
|
| 9 |
# Clase para manejar múltiples modelos
|
| 10 |
class ModelHandler:
|
| 11 |
def __init__(self, model_names, token):
|
| 12 |
-
self.clients = {
|
| 13 |
-
self.current_model = model_names[0]
|
| 14 |
|
| 15 |
-
def switch_model(self,
|
| 16 |
-
if
|
| 17 |
-
self.current_model =
|
| 18 |
else:
|
| 19 |
-
raise ValueError(f"Modelo {
|
| 20 |
|
| 21 |
def generate_response(self, input_text):
|
| 22 |
prompt = f"Debes de responder a cualquier pregunta:\nPregunta: {input_text}"
|
|
@@ -31,10 +31,10 @@ class ModelHandler:
|
|
| 31 |
except Exception as e:
|
| 32 |
return f"Error al realizar la inferencia: {e}"
|
| 33 |
|
| 34 |
-
# Lista de modelos disponibles
|
| 35 |
-
model_names =
|
| 36 |
-
"microsoft/Phi-3-mini-4k-instruct"
|
| 37 |
-
|
| 38 |
|
| 39 |
# Inicializa el manejador de modelos
|
| 40 |
model_handler = ModelHandler(model_names, hf_token)
|
|
@@ -68,8 +68,8 @@ with gr.Blocks(title="Multi-Model LLM Chatbot with Image Generation") as demo:
|
|
| 68 |
)
|
| 69 |
with gr.Row():
|
| 70 |
model_dropdown = gr.Dropdown(
|
| 71 |
-
choices=model_names + ["Generación de Imágenes"],
|
| 72 |
-
value=
|
| 73 |
label="Seleccionar Acción/Modelo",
|
| 74 |
interactive=True
|
| 75 |
)
|
|
|
|
| 9 |
# Clase para manejar múltiples modelos
|
| 10 |
class ModelHandler:
|
| 11 |
def __init__(self, model_names, token):
|
| 12 |
+
self.clients = {model_key: InferenceClient(model_name, token=token) for model_key, model_name in model_names.items()}
|
| 13 |
+
self.current_model = list(model_names.keys())[0]
|
| 14 |
|
| 15 |
+
def switch_model(self, model_key):
|
| 16 |
+
if model_key in self.clients:
|
| 17 |
+
self.current_model = model_key
|
| 18 |
else:
|
| 19 |
+
raise ValueError(f"Modelo {model_key} no está disponible.")
|
| 20 |
|
| 21 |
def generate_response(self, input_text):
|
| 22 |
prompt = f"Debes de responder a cualquier pregunta:\nPregunta: {input_text}"
|
|
|
|
| 31 |
except Exception as e:
|
| 32 |
return f"Error al realizar la inferencia: {e}"
|
| 33 |
|
| 34 |
+
# Lista de modelos disponibles (con nombres amigables para la interfaz)
|
| 35 |
+
model_names = {
|
| 36 |
+
"CHATBOT": "microsoft/Phi-3-mini-4k-instruct"
|
| 37 |
+
}
|
| 38 |
|
| 39 |
# Inicializa el manejador de modelos
|
| 40 |
model_handler = ModelHandler(model_names, hf_token)
|
|
|
|
| 68 |
)
|
| 69 |
with gr.Row():
|
| 70 |
model_dropdown = gr.Dropdown(
|
| 71 |
+
choices=list(model_names.keys()) + ["Generación de Imágenes"],
|
| 72 |
+
value="CHATBOT",
|
| 73 |
label="Seleccionar Acción/Modelo",
|
| 74 |
interactive=True
|
| 75 |
)
|