Perth0603 commited on
Commit
2a3df01
·
verified ·
1 Parent(s): ae52dda
Files changed (4) hide show
  1. Dockerfile +22 -0
  2. README.md +35 -0
  3. app.py +50 -0
  4. requirements.txt +6 -0
Dockerfile ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.10-slim

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1

WORKDIR /app

# System deps (optional but helps with torch wheels)
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential git && \
    rm -rf /var/lib/apt/lists/*

COPY requirements.txt /app/requirements.txt
RUN pip install -r /app/requirements.txt

COPY app.py /app/app.py

# Hugging Face Docker Spaces run the container as a non-root user (uid 1000).
# Create that user and point the HF cache at a directory it can write to,
# otherwise the model download at first request fails with a permission error.
RUN useradd -m -u 1000 user && chown -R user:user /app
USER user
ENV HOME=/home/user \
    HF_HOME=/home/user/.cache/huggingface

EXPOSE 7860
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
README.md ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
---
title: PhishWatch Proxy
emoji: 🛡️
sdk: docker
---

# Hugging Face Space - Phishing Text Classifier (Docker + FastAPI)

This Space exposes a minimal `/predict` endpoint for your MobileBERT phishing model so the Flutter app can call it reliably.

## Files
- Dockerfile - builds a small FastAPI server image.
- app.py - FastAPI app that loads the model and returns `{ label, score }`.
- requirements.txt - Python dependencies.

## How to deploy
1. Create a new Space on Hugging Face (type: Docker).
2. Upload the contents of this `hf_space/` folder to the Space root (including the Dockerfile).
3. In Space Settings → Variables, add:
   - MODEL_ID = Perth0603/phishing-email-mobilebert
4. Wait for the Space to build and become green. Test:
   - GET `/` should return `{ status: ok, model: ... }`
   - POST `/predict` with `{ "inputs": "Win an iPhone! Click here" }`

## Flutter app config
Set the Space URL in your env file so the app targets the Space instead of the Hosted Inference API:

```
{"HF_SPACE_URL":"https://<your-space>.hf.space"}
```

Run the app:
```
flutter run --dart-define-from-file=hf.env.json
```
app.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

import torch
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Model repo to serve; override with the MODEL_ID environment variable.
MODEL_ID = os.getenv("MODEL_ID", "Perth0603/phishing-email-mobilebert")

app = FastAPI(title="Phishing Text Classifier", version="1.0.0")
11
+
12
+
13
class PredictPayload(BaseModel):
    """Request body for POST /predict: the raw text to classify."""

    inputs: str
15
+
16
+
17
# Module-level lazy singletons: the tokenizer and model are loaded on the
# first /predict call (see _load_model) rather than at import time.
_tokenizer = None
_model = None
20
+
21
+
22
def _load_model():
    """Load the tokenizer and model on first use and cache them at module level.

    Lazy loading keeps container startup fast; the first /predict request
    pays the download/initialization cost instead. Subsequent calls are
    no-ops once both singletons are populated.
    """
    global _tokenizer, _model
    if _tokenizer is None or _model is None:
        _tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
        _model = AutoModelForSequenceClassification.from_pretrained(MODEL_ID)
        # eval() disables dropout and other train-time layers; without it,
        # repeated predictions on the same input can differ.
        _model.eval()
        # Warm-up forward pass so the first real request isn't slow.
        with torch.no_grad():
            _ = _model(**_tokenizer(["warm up"], return_tensors="pt")).logits
30
+
31
+
32
@app.get("/")
def root():
    """Health check: report service status and the configured model id."""
    return dict(status="ok", model=MODEL_ID)
35
+
36
+
37
@app.post("/predict")
def predict(payload: PredictPayload):
    """Classify *payload.inputs* and return ``{"label": ..., "score": ...}``.

    label is "LEGIT" or "PHISH"; score is the softmax probability of the
    predicted class.
    """
    _load_model()
    with torch.no_grad():
        # truncation=True: without it, texts longer than the model's max
        # sequence length raise a runtime error inside the forward pass.
        encoded = _tokenizer([payload.inputs], return_tensors="pt", truncation=True)
        logits = _model(**encoded).logits
    probs = torch.softmax(logits, dim=-1)[0]
    score, idx = torch.max(probs, dim=0)

    # Map common ids to labels (kept generic; your config also has these).
    # NOTE(review): the Flutter client presumably matches on these exact
    # strings — keep them in sync if the model's label set changes.
    id2label = {0: "LEGIT", 1: "PHISH"}
    label = id2label.get(int(idx), str(int(idx)))
    return {"label": label, "score": float(score)}
49
+
50
+
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ fastapi==0.115.0
2
+ uvicorn==0.30.6
3
+ transformers==4.46.3
4
+ torch==2.3.1
5
+ accelerate>=0.33.0
6
+