hafsa09 commited on
Commit
4a32c5c
·
verified ·
1 Parent(s): ca81dc6

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +29 -0
  2. app.py +24 -0
  3. requirements.txt +6 -0
Dockerfile ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
### Use the official Python 3.12 image
FROM python:3.12

# Set the working directory to /code
WORKDIR /code

# Copy only the dependency list first so the pip-install layer is cached
# (fixed: original `COPY . /requirements.txt /code/requirements.txt /` was a
# broken multi-source COPY)
COPY ./requirements.txt /code/requirements.txt

# Install any needed packages specified in requirements.txt
# (fixed: `--upgrade-r` was missing the space between the two flags)
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

## Set up a new non-root user; -m creates /home/user, which $HOME relies on
RUN useradd -m user
# Switch to the new user so the app does not run as root
USER user

# Set HOME to the user's home directory and put its local bin on PATH
# (fixed: ENV does not allow spaces around `=`)
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Set the working directory to the user's home app directory
WORKDIR $HOME/app

# Copy the current directory contents into $HOME/app, owned by the new user
COPY --chown=user . $HOME/app

# Start the FastAPI app. Hugging Face Spaces routes traffic to port 7860;
# the original "7680" was a digit transposition.
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Text-generation GenAI app (text-to-text).
from fastapi import FastAPI
# `pipeline` is the high-level helper that lets us load any Hugging Face
# model by task name without writing model-specific code.
from transformers import pipeline

# Create the FastAPI application instance.
app = FastAPI()

# Initialize the text-to-text generation pipeline with a small seq2seq model.
pipe = pipeline("text2text-generation", model="google/flan-t5-small")
# Root endpoint: simple landing route confirming the service is up.
@app.get("/")
def home():
    """Return a welcome message for the application's root route."""
    return {"message": "Welcome to the Text Generation App"}
# /generate endpoint: run the pipeline on the caller-supplied query text.
@app.get("/generate")
def generate(text: str):
    """Generate text from the given input string.

    The pipeline returns a list of result dicts; the model's response is
    the 'generated_text' field of the first entry.
    """
    results = pipe(text)
    return {"output": results[0]['generated_text']}
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ fastapi
2
+ requests
3
+ uvicorn
4
+ sentencepiece
5
+ torch
6
+ transformers