petergits commited on
Commit
09c909e
·
1 Parent(s): 3d0bf54

Committing changes to main.py that work locally.

Browse files
Files changed (10) hide show
  1. .python-version +1 -0
  2. .vscode/extensions.json +5 -0
  3. Dockerfile +10 -5
  4. app.py +8 -0
  5. main.py +20 -0
  6. pyproject.toml +14 -0
  7. requirements.txt +6 -2
  8. static/index.html +24 -0
  9. static/script.js +17 -0
  10. uv.lock +0 -0
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.11
.vscode/extensions.json ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ {
2
+ "recommendations": [
3
+ "ms-azuretools.vscode-docker"
4
+ ]
5
+ }
Dockerfile CHANGED
@@ -1,16 +1,21 @@
1
- # Read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
2
  # you will also find guides on how best to write your Dockerfile
3
 
4
  FROM python:3.9
5
 
 
 
6
  RUN useradd -m -u 1000 user
7
- USER user
8
- ENV PATH="/home/user/.local/bin:$PATH"
9
-
10
  WORKDIR /app
11
 
12
  COPY --chown=user ./requirements.txt requirements.txt
13
  RUN pip install --no-cache-dir --upgrade -r requirements.txt
14
 
15
  COPY --chown=user . /app
16
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
 
 
 
 
 
 
 
1
# read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
# you will also find guides on how best to write your Dockerfile

# NOTE(review): base is 3.9 but .python-version and pyproject.toml pin 3.11 —
# confirm which interpreter is intended before bumping the image.
FROM python:3.9

# The two following lines are requirements for the Dev Mode to be functional
# Learn more about the Dev Mode at https://huggingface.co/dev-mode-explorers
RUN useradd -m -u 1000 user
WORKDIR /app

# Copy only requirements first so the pip layer is cached unless the
# dependency list itself changes.
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Application code changes invalidate only the layers below this point.
COPY --chown=user . /app

# Drop root for the runtime process; files above were chown'd to this user.
USER user

ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Serve main.py's FastAPI app (not app.py) on the Spaces default port 7860.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
app.py CHANGED
@@ -1,8 +1,16 @@
1
  from fastapi import FastAPI
 
2
 
3
  app = FastAPI()
 
4
 
5
  @app.get("/")
6
  def greet_json():
7
  return {"Hello": "World!"}
8
 
 
 
 
 
 
 
 
1
"""Minimal FastAPI app exposing a health-check route and a Flan-T5 endpoint."""
from fastapi import FastAPI
from transformers import pipeline

app = FastAPI()
# Loaded once at import time so the (slow) model download/initialization
# happens at startup, not per request.
pipe_flan = pipeline("text2text-generation", model="google/flan-t5-small")

@app.get("/")
def greet_json():
    """Trivial JSON response; doubles as a liveness check."""
    return {"Hello": "World!"}


@app.get("/infer_t5")
def t5(input: str):
    """Run text2text generation on the `input` query parameter.

    The `: str` annotation (added to match main.py) lets FastAPI validate
    the query parameter and document it in the OpenAPI schema. The name
    `input` shadows the builtin but is kept: FastAPI derives the public
    query-parameter name from it, and the frontend calls ?input=...
    """
    output = pipe_flan(input)
    return {"output": output[0]["generated_text"]}
main.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""FastAPI entry point served by the Dockerfile's CMD (uvicorn main:app).

Serves the static frontend and a Flan-T5 text2text-generation endpoint.
"""
from fastapi import FastAPI
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from transformers import pipeline

app = FastAPI()

# Serve static files at /static
app.mount("/static", StaticFiles(directory="static", html=True), name="static")

# Loaded once at import time; first startup downloads the model weights.
pipe_flan = pipeline("text2text-generation", model="google/flan-t5-small")

@app.get("/")
def index():
    """Serve the frontend page at the site root."""
    return FileResponse("static/index.html", media_type="text/html")

@app.get("/infer_t5")
def t5(input: str):
    """Generate text from the `input` query parameter via Flan-T5.

    `input` shadows the builtin, but renaming it would change the public
    query-parameter name the frontend (static/script.js) relies on.
    """
    output = pipe_flan(input)
    return {"output": output[0]["generated_text"]}
pyproject.toml ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
[project]
name = "hf-mcp-server"
version = "0.1.0"
# TODO: replace the template placeholder with a real description.
description = "Add your description here"
readme = "README.md"
# Matches .python-version (3.11). NOTE(review): the Dockerfile base is
# python:3.9 — one of the two should be aligned.
requires-python = ">=3.11"
# Keep this list in sync with requirements.txt (the Docker build installs
# from requirements.txt, not from this file).
dependencies = [
    "fastapi==0.74.*",
    "requests==2.27.*",
    "sentencepiece==0.1.*",
    "torch>1.12.0",
    "transformers==4.*",
    "uvicorn[standard]==0.17.*",
]
requirements.txt CHANGED
@@ -1,3 +1,7 @@
 
 
 
 
 
 
1
  fastapi
2
- uvicorn[standard]
3
-
 
1
# Runtime dependencies installed by the Dockerfile; mirrors pyproject.toml.
# (Removed a trailing unpinned `fastapi` line that duplicated the pin below.)
fastapi==0.74.*
requests==2.27.*
sentencepiece==0.1.*
torch>1.12.0
transformers==4.*
uvicorn[standard]==0.17.*
 
 
static/index.html ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="utf-8" />
    <title>Text generation using Flan T5</title>
  </head>
  <body>
    <main>
      <section id="text-gen">
        <h2>Text generation using Flan T5</h2>
        <p>
          Model:
          <a
            href="https://huggingface.co/google/flan-t5-small"
            rel="noreferrer"
            target="_blank"
            >google/flan-t5-small
          </a>
        </p>
        <form class="text-gen-form">
          <label for="text-gen-input">Text prompt</label>
          <input
            id="text-gen-input"
            type="text"
            value="German: There are many ducks"
          />
          <button id="text-gen-submit">Submit</button>
          <p class="text-gen-output"></p>
        </form>
      </section>
    </main>
    <!-- Without this tag the form handler in static/script.js never loads
         and submitting the form would just reload the page. -->
    <script src="/static/script.js"></script>
  </body>
</html>
static/script.js ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
const textGenForm = document.querySelector(".text-gen-form");

// Call the backend /infer_t5 endpoint (relative URL, so it works under any
// mount path) and return the generated text.
const translateText = async (text) => {
  // encodeURIComponent: prompts containing &, #, + or ? would otherwise
  // corrupt the query string and truncate or alter the input server-side.
  const inferResponse = await fetch(`infer_t5?input=${encodeURIComponent(text)}`);
  const inferJson = await inferResponse.json();

  return inferJson.output;
};

textGenForm.addEventListener("submit", async (event) => {
  // Keep the browser from performing a full-page form submission.
  event.preventDefault();

  const textGenInput = document.getElementById("text-gen-input");
  const textGenParagraph = document.querySelector(".text-gen-output");

  textGenParagraph.textContent = await translateText(textGenInput.value);
});
uv.lock ADDED
The diff for this file is too large to render. See raw diff