AlicanA committed on
Commit
4e639bf
·
1 Parent(s): da74fe2
Files changed (3) hide show
  1. Dockerfile +13 -0
  2. app.py +29 -0
  3. requirements.txt +6 -0
Dockerfile ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Hugging Face Spaces-style image: install dependencies as root,
# then run the app as a non-root user on port 7860.
FROM python:3.9

WORKDIR /code

# Copy and install requirements first so this layer stays cached
# unless requirements.txt itself changes.
COPY ./requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Create a non-root user (uid 1000, the uid Spaces expects) and switch to it.
RUN useradd -m -u 1000 user
USER user

# Fix: the continuation must be a single backslash — the original "\\"
# escapes the backslash, breaking the ENV instruction so the next line
# would be parsed as an unknown "PATH" instruction.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

WORKDIR $HOME/app

# Copy the application source, owned by the non-root user.
COPY --chown=user . $HOME/app

CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from fastapi import FastAPI
from transformers import pipeline

# FastAPI application instance; uvicorn serves this object ("app:app").
app = FastAPI()

# Text-to-text generation pipeline, constructed once at startup so the
# model is loaded a single time and reused across requests.
pipe = pipeline(
    "text2text-generation",
    model="google/flan-t5-small",
)
@app.get("/generate")
def generate(text: str):
    """Generate text from the query parameter *text*.

    Runs the module-level `text2text-generation` pipeline
    (`google/flan-t5-small`, see
    <https://huggingface.co/google/flan-t5-small>) on the input and
    returns a JSON body of the form ``{"output": <generated text>}``.
    """
    # The pipeline returns a list of candidate dicts; the response uses
    # the first candidate's "generated_text" field.
    candidates = pipe(text)
    first = candidates[0]
    return {"output": first["generated_text"]}
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ fastapi==0.74.*
2
+ requests==2.27.*
3
+ uvicorn[standard]==0.17.*
4
+ sentencepiece==0.1.*
5
+ torch==1.11.*
6
+ transformers==4.*