Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -38,16 +38,26 @@ class GeminiLLM(LLM):
|
|
| 38 |
data = {
|
| 39 |
"contents": [
|
| 40 |
{
|
| 41 |
-
"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 42 |
}
|
| 43 |
],
|
| 44 |
"generationConfig": {
|
| 45 |
"temperature": self.temperature
|
| 46 |
}
|
| 47 |
}
|
| 48 |
-
|
| 49 |
return requests.post(url, headers=headers, json=data)
|
| 50 |
|
|
|
|
|
|
|
| 51 |
def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
|
| 52 |
"""Envía el prompt a la API de Gemini y devuelve la respuesta.
|
| 53 |
Si la cuota se supera, intenta con la API key alternativa."""
|
|
|
|
| 38 |
data = {
|
| 39 |
"contents": [
|
| 40 |
{
|
| 41 |
+
"role": "system",
|
| 42 |
+
"content": {
|
| 43 |
+
"parts": ["You are an agent. Please respond concisely **only with the answer**, no extra explanations."]
|
| 44 |
+
}
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"role": "user",
|
| 48 |
+
"content": {
|
| 49 |
+
"parts": [prompt]
|
| 50 |
+
}
|
| 51 |
}
|
| 52 |
],
|
| 53 |
"generationConfig": {
|
| 54 |
"temperature": self.temperature
|
| 55 |
}
|
| 56 |
}
|
|
|
|
| 57 |
return requests.post(url, headers=headers, json=data)
|
| 58 |
|
| 59 |
+
|
| 60 |
+
|
| 61 |
def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
|
| 62 |
"""Envía el prompt a la API de Gemini y devuelve la respuesta.
|
| 63 |
Si la cuota se supera, intenta con la API key alternativa."""
|