Spaces:
Runtime error
Runtime error
Fixing GPT NEO app.py
Browse files
app.py
CHANGED
|
@@ -1,6 +1,12 @@
|
|
|
|
|
| 1 |
from fastapi import FastAPI
|
| 2 |
from transformers import pipeline
|
| 3 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
app = FastAPI()
|
| 5 |
|
| 6 |
# Cargar el modelo de GPT-Neo
|
|
@@ -9,4 +15,4 @@ generator = pipeline('text-generation', model='EleutherAI/gpt-neo-2.7B')
|
|
| 9 |
@app.post("/generate")
|
| 10 |
async def generate(prompt: str):
|
| 11 |
response = generator(prompt, max_length=100)
|
| 12 |
-
return {"text": response[0]["generated_text"]}
|
|
|
|
| 1 |
+
import os

# Configure the Hugging Face cache directory BEFORE importing transformers:
# transformers resolves TRANSFORMERS_CACHE when the library is imported, so
# setting the environment variable after `from transformers import pipeline`
# (as the original did) had no effect on where models are actually cached.
os.makedirs('/app/cache', exist_ok=True)
os.environ['TRANSFORMERS_CACHE'] = '/app/cache'

from fastapi import FastAPI
from transformers import pipeline

app = FastAPI()

# Load the GPT-Neo model
|
|
|
|
| 15 |
@app.post("/generate")
def generate(prompt: str):
    """Generate up to 100 tokens of text continuing *prompt*.

    Declared as a plain ``def`` (not ``async def``) on purpose: the
    transformers pipeline call below is blocking, and FastAPI runs
    sync endpoints in its threadpool, so the event loop is not stalled
    for the duration of the inference.
    """
    # NOTE(review): a bare `prompt: str` on a POST is interpreted by
    # FastAPI as a *query* parameter, not a request body — confirm that
    # is the intended API shape for clients.
    response = generator(prompt, max_length=100)
    # The pipeline returns a list of dicts; take the first candidate.
    return {"text": response[0]["generated_text"]}
|