Fatitommy committed on
Commit
c8398fb
·
verified ·
1 Parent(s): 4a9c5f6

Upload 4 files

Browse files
Files changed (4) hide show
  1. Dockerfile +17 -0
  2. README (1).md +8 -0
  3. app.py +81 -0
  4. requirements.txt +5 -0
Dockerfile ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Lightweight Python base image for the FastAPI translation service.
FROM python:3.10-slim

WORKDIR /app

# build-essential and git are required to pip-install fairseq (builds from source).
RUN apt-get update && apt-get install -y \
    build-essential \
    git \
    && rm -rf /var/lib/apt/lists/*

# Install dependencies in their own layer so Docker caches it across code-only changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY app.py .

# HuggingFace Spaces routes traffic to port 7860 by convention.
EXPOSE 7860

CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
README (1).md ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: VoiceAura Translation API
3
+ emoji: 🌐
4
+ colorFrom: blue
5
+ colorTo: green
6
+ sdk: docker
7
+ pinned: false
8
+ ---
app.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""
VoiceAura Translation API
Deploy on a HuggingFace Space — uses the SLPG English→Urdu model.
"""

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
import os
import requests

app = FastAPI()

# Allow any origin/method/header so browser clients can call the API directly.
# NOTE(review): "*" origins is acceptable for a public demo; tighten for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# ── Correct file names (these match the HF repo) ────────
MODEL_URL = "https://huggingface.co/SLPG/English_to_Urdu_Unsupervised_MT/resolve/main/checkpoint_8_96000.pt"
DICT_EN_URL = "https://huggingface.co/SLPG/English_to_Urdu_Unsupervised_MT/resolve/main/dict.en.txt"
DICT_UR_URL = "https://huggingface.co/SLPG/English_to_Urdu_Unsupervised_MT/resolve/main/dict.ur.txt"

# Local paths the artifacts are downloaded to (current working directory).
MODEL_PATH = "checkpoint_8_96000.pt"  # correct checkpoint name
DICT_EN_PATH = "dict.en.txt"
DICT_UR_PATH = "dict.ur.txt"

# Lazily-initialized fairseq TransformerModel; stays None until load_model() runs.
en_ur_model = None
def download_file(url: str, path: str) -> None:
    """Download *url* to *path*, skipping the download if *path* already exists.

    Streams the response in 8 KiB chunks and writes to a temporary
    ``<path>.part`` file first, renaming only on success. This fixes a latent
    bug in the original: an interrupted download left a partial file behind,
    which the ``os.path.exists`` guard then mistook for a complete one on the
    next startup.

    Raises:
        requests.HTTPError: on a non-2xx response.
        requests.Timeout / requests.ConnectionError: on network failure.
    """
    if os.path.exists(path):
        print(f"[βœ“] Already exists: {path}")
        return
    print(f"[↓] Downloading: {path}...")
    tmp_path = path + ".part"
    # timeout=(connect, read) guards against a hung connection (the original
    # had no timeout at all); stream=True avoids loading the multi-hundred-MB
    # checkpoint into memory.
    with requests.get(url, stream=True, timeout=(10, 300)) as r:
        r.raise_for_status()
        with open(tmp_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
    # Atomic rename: *path* only ever exists when the download completed.
    os.replace(tmp_path, path)
    print(f"[βœ“] Done: {path}")
def load_model():
    """Download the model artifacts (if missing) and load the fairseq model.

    Populates the module-level ``en_ur_model`` singleton; subsequent calls
    are no-ops once the model is loaded.
    """
    global en_ur_model
    if en_ur_model is not None:
        return  # already loaded — nothing to do

    # Fetch checkpoint + vocabulary dictionaries into the working directory.
    download_file(MODEL_URL, MODEL_PATH)
    download_file(DICT_EN_URL, DICT_EN_PATH)
    download_file(DICT_UR_URL, DICT_UR_PATH)

    # Imported lazily: fairseq is heavy and only needed here, once.
    from fairseq.models.transformer import TransformerModel
    en_ur_model = TransformerModel.from_pretrained(
        ".",
        checkpoint_file=MODEL_PATH,  # correct checkpoint name
        data_name_or_path="."
    )
    en_ur_model.eval()  # inference mode (disables dropout etc.)
    print("[βœ“] Model ready!")
# NOTE(review): @app.on_event is deprecated in newer FastAPI in favor of
# lifespan handlers; kept as-is since it still works on the pinned stack.
@app.on_event("startup")
async def startup():
    # Eagerly download + load the model before the server accepts traffic.
    # load_model() is synchronous, so startup blocks until it finishes —
    # intentional here: the API is useless without the model.
    load_model()
class Req(BaseModel):
    """Request body for POST /translate."""

    text: str  # English source text to translate
@app.get("/")
def root():
    """Health check: report API status and whether the model has loaded."""
    model_ready = en_ur_model is not None
    return {
        "status": "VoiceAura API βœ“",
        "model_loaded": model_ready,
    }
@app.post("/translate")
def translate(req: Req):
    """Translate English text to Urdu.

    Returns a JSON object:
        success:     bool — whether translation succeeded.
        translation: str  — the Urdu output, empty string for blank input,
                            or an error message on failure.
    """
    text = req.text.strip()  # strip once, reuse (original stripped twice)
    if not text:
        # Empty / whitespace-only input: nothing to translate.
        return {"success": False, "translation": ""}
    if en_ur_model is None:
        # Startup hook hasn't finished (or failed). Report this clearly
        # instead of surfacing the AttributeError text the call below
        # would otherwise produce.
        return {"success": False, "translation": "Model not loaded yet"}
    try:
        result = en_ur_model.translate(text)
        return {"success": True, "translation": result}
    except Exception as e:
        # Boundary handler: keep the response contract stable on model errors.
        # NOTE(review): str(e) can leak internals; consider a generic message.
        return {"success": False, "translation": str(e)}
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
# Web framework + ASGI server
fastapi
uvicorn[standard]
# Translation model runtime (fairseq checkpoint; torch backend)
# NOTE(review): versions are unpinned — consider pinning for reproducible builds.
fairseq
torch
# Used to download model artifacts from the HF Hub
requests