File size: 571 Bytes
1382469
 
 
 
 
 
2cfebfe
 
 
 
 
 
 
 
 
 
1382469
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
from fastapi import FastAPI

import os

# Configure the Hugging Face / Torch cache locations BEFORE importing
# transformers or loading any model: the pipeline() call below downloads
# weights into whatever cache path is active at that moment. The original
# code set these variables (and created ./cache) only AFTER the pipeline
# had already been built, so the redirection never took effect.
os.environ['HF_HOME'] = './cache'
os.environ['TRANSFORMERS_CACHE'] = './cache'
os.environ['TORCH_HOME'] = './cache'

# Ensure the cache directory exists before the model download writes to it.
os.makedirs('./cache', exist_ok=True)

# Use a pipeline as a high-level helper
from transformers import pipeline

# Load the translation model once at import time so individual requests
# don't pay the model-loading cost.
pipe = pipeline("translation", model="google-t5/t5-base")
app = FastAPI()


@app.get("/")
def home():
    """Root endpoint: return a static greeting payload (useful as a liveness check)."""
    payload = {"Hello": "World"}
    return payload


@app.get('/ask')
def ask(prompt: str):
    """Run the module-level translation pipeline on *prompt*.

    Returns the first entry of the pipeline's result list.
    NOTE(review): T5 translation pipelines usually expect a task prefix
    (e.g. 'translate English to German: ') inside the prompt — confirm
    that callers supply one.
    """
    translations = pipe(prompt)
    first_translation = translations[0]
    return first_translation