Update app.py
Browse files
app.py
CHANGED
|
@@ -131,6 +131,18 @@ def predict_for_engine(prompt: str) -> str:
|
|
| 131 |
def generate_advanced(prompt: str, system: str, max_new_tokens: int, temperature: float, top_p: float) -> str:
    """Forward the full set of advanced sampling settings to ``_generate``.

    Thin endpoint wrapper: exposes system prompt, token budget, temperature
    and nucleus top-p as one callable surface without adding any logic.
    """
    settings = {
        "prompt": prompt,
        "system": system,
        "max_new_tokens": max_new_tokens,
        "temperature": temperature,
        "top_p": top_p,
    }
    return _generate(**settings)
|
| 133 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 134 |
# ------------------- HTTP (opcional, clientes puros) -------------------
|
| 135 |
# Si quieres, puedes añadir un endpoint HTTP POST /generate (FastAPI),
|
| 136 |
# pero con Gradio Client es suficiente para engine/local.
|
|
|
|
| 131 |
def generate_advanced(prompt: str, system: str, max_new_tokens: int, temperature: float, top_p: float) -> str:
    """Delegate an advanced-generation request straight to ``_generate``.

    All parameters are passed through unchanged; this function exists only
    to give the UI/API a named entry point with the full sampling surface.
    """
    return _generate(
        prompt=prompt,
        system=system,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        top_p=top_p,
    )
|
| 133 |
|
| 134 |
+
def salamandra_chat_endpoint(prompt: str) -> Dict[str, Any]:
    """Run one chat turn through the shared Salamandra client.

    Lazily builds the module-level ``_salamandra`` singleton on first use,
    then forwards *prompt* to it. Failures are reported in-band: the error
    description is returned as the response text instead of being raised,
    so callers always receive a well-formed payload.

    Args:
        prompt: The user message to send to the model.

    Returns:
        A dict with a single ``"text"`` key containing the model reply,
        or an error description if the client call failed.
    """
    global _salamandra
    if _salamandra is None:
        # One-time lazy construction of the shared client instance.
        _salamandra = SalamandraClient()

    try:
        reply = _salamandra.chat(prompt)
    except Exception as exc:
        # Best-effort endpoint: surface the failure as the reply text.
        reply = f"Error ejecutando SalamandraClient: {str(exc)}"

    return {"text": reply}
|
| 145 |
+
|
| 146 |
# ------------------- HTTP (opcional, clientes puros) -------------------
|
| 147 |
# Si quieres, puedes añadir un endpoint HTTP POST /generate (FastAPI),
|
| 148 |
# pero con Gradio Client es suficiente para engine/local.
|