ClergeF committed on
Commit
13c4bb6
·
verified ·
1 Parent(s): 2ebb489

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +89 -71
app.py CHANGED
@@ -2,128 +2,146 @@ import json
2
  import numpy as np
3
  from fastapi import FastAPI
4
  from pydantic import BaseModel
5
- from sentence_transformers import SentenceTransformer
6
  from huggingface_hub import hf_hub_download
7
- import os
8
-
9
- app = FastAPI()
10
-
11
- # ============================================================
12
- # Load Embedder (NO MORE CUSTOM REPO — USE BASE MODEL)
13
- # ============================================================
14
-
15
- print("Loading embedder: all-MiniLM-L6-v2 ...")
16
- embedder = SentenceTransformer("all-MiniLM-L6-v2")
17
 
18
  # ============================================================
19
- # Model Registry — maps category → HF repo + file
20
  # ============================================================
21
 
22
  HF_USER = "ClergeF"
23
 
 
24
  MODEL_REPOS = {
25
- "value": "value-impact-model",
26
- "impact": "impact-model",
27
- "family": "family-model",
28
- "community": "community-model",
29
- "education": "education-model",
30
- "health": "health-model",
31
  "environment": "environment-model",
32
- "business": "business-model",
33
- "finance": "finance-model",
34
- "history": "history-model",
35
- "spirituality":"spirituality-model",
36
- "innovation": "innovation-model"
37
  }
38
 
 
39
  MODEL_FILES = {
40
- "value": "value_impact.json",
41
- "impact": "impact.json",
42
- "family": "family_level.json",
43
- "community": "community_level.json",
44
- "education": "education_level.json",
45
- "health": "health_level.json",
46
  "environment": "environment_level.json",
47
- "business": "business_level.json",
48
- "finance": "finance_level.json",
49
- "history": "history_level.json",
50
- "spirituality":"spirituality_level.json",
51
- "innovation": "innovation_level.json"
52
  }
53
 
54
  # ============================================================
55
- # Load all category models into memory
56
  # ============================================================
57
 
58
- loaded_models = {}
 
 
59
 
60
- def load_single_model(category: str):
61
- """Download & load one model's JSON coefficients."""
62
- repo = MODEL_REPOS[category]
63
- file = MODEL_FILES[category]
 
 
 
 
64
 
65
  path = hf_hub_download(
66
- repo_id=f"{HF_USER}/{repo}",
67
- filename=file
68
  )
69
 
70
  with open(path, "r") as f:
71
- data = json.load(f)
 
 
 
 
 
72
 
73
- model = {
74
- "weights": np.array(data["weights"]),
75
- "bias": float(data["bias"])
76
- }
77
- return model
78
 
 
 
 
 
79
 
80
- print("Loading all 12 models...")
81
- for cat in MODEL_REPOS:
82
- loaded_models[cat] = load_single_model(cat)
83
- print("All models loaded successfully.")
84
 
85
 
86
  # ============================================================
87
- # Input schema
88
  # ============================================================
89
 
90
- class InputText(BaseModel):
91
- text: str
92
 
 
93
 
94
- # ============================================================
95
- # Predict function per model
96
- # ============================================================
 
 
 
97
 
98
- def predict_single(text: str, model_dict):
99
- embedding = embedder.encode([text])[0] # vector
100
- score = float(np.dot(embedding, model_dict["weights"]) + model_dict["bias"])
101
- return max(0.0, min(1.0, score)) # clamp 0–1
102
 
103
 
104
  # ============================================================
105
- # API Routes
106
  # ============================================================
107
 
 
 
 
 
 
 
 
108
  @app.get("/")
109
  def root():
110
- return {"message": "MVT Category API is running."}
111
 
112
 
 
 
 
 
113
  @app.post("/predict")
114
  def predict(payload: InputText):
115
  text = payload.text
 
116
 
117
- results = {}
118
 
119
- for category, model in loaded_models.items():
120
- results[category] = predict_single(text, model)
 
 
121
 
122
- # Also return categories >= 0.85
123
- high_cats = [c for c, s in results.items() if s >= 0.85]
 
 
 
124
 
125
  return {
126
  "input": text,
127
- "scores": results,
128
- "high_confidence_categories": high_cats
129
  }
 
2
  import numpy as np
3
  from fastapi import FastAPI
4
  from pydantic import BaseModel
 
5
  from huggingface_hub import hf_hub_download
6
+ from sentence_transformers import SentenceTransformer
 
 
 
 
 
 
 
 
 
7
 
8
# ============================================================
# CONFIG
# ============================================================

# Hugging Face account that hosts every model repo below.
HF_USER = "ClergeF"

# Your 12 individual HF repos: logical category key -> repo name.
MODEL_REPOS = {
    "value_impact": "value-impact-model",
    "impact": "impact-model",
    "family": "family-model",
    "community": "community-model",
    "education": "education-model",
    "health": "health-model",
    "environment": "environment-model",
    "business": "business-model",
    "finance": "finance-model",
    "history": "history-model",
    "spirituality": "spirituality-model",
    "innovation": "innovation-model",
}

# Model file names inside each repo; keys mirror MODEL_REPOS exactly.
MODEL_FILES = {
    "value_impact": "value_impact.json",
    "impact": "impact.json",
    "family": "family_level.json",
    "community": "community_level.json",
    "education": "education_level.json",
    "health": "health_level.json",
    "environment": "environment_level.json",
    "business": "business_level.json",
    "finance": "finance_level.json",
    "history": "history_level.json",
    "spirituality": "spirituality_level.json",
    "innovation": "innovation_level.json",
}
45
 
46
# ============================================================
# LOAD EMBEDDER
# ============================================================

# Keep it light + consistent for Spaces
# Loaded once at import time; all /predict calls reuse this instance.
# NOTE(review): first run downloads the model from the HF Hub — cold
# start on Spaces will be slow; confirm that is acceptable.
print("Loading embedder: all-MiniLM-L6-v2 …")
embedder = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
53
 
54
+
55
+ # ============================================================
56
+ # MODEL LOADING HELPERS
57
+ # ============================================================
58
+
59
def load_model_from_hf(repo_name: str, filename: str):
    """Download a model's JSON coefficient file from the Hugging Face Hub.

    Args:
        repo_name: repo under HF_USER, e.g. "family-model".
        filename: JSON file inside that repo, e.g. "family_level.json".

    Returns:
        The parsed JSON content as a Python dict.
    """
    # Fix: the log line previously printed a literal "(unknown)"
    # placeholder instead of the file actually being fetched.
    print(f"↳ Loading {filename} from {repo_name} …")

    path = hf_hub_download(
        repo_id=f"{HF_USER}/{repo_name}",
        filename=filename,
    )

    # Read as UTF-8 explicitly so the platform's default encoding
    # cannot break JSON parsing.
    with open(path, "r", encoding="utf-8") as f:
        return json.load(f)
70
+
71
+
72
def embed(text: str):
    """Encode *text* into a single sentence embedding.

    Returns the first (only) row of the embedder's batch output —
    a 384-dim vector for all-MiniLM-L6-v2.
    """
    batch = embedder.encode([text])
    return batch[0]
75
 
 
 
 
 
 
76
 
77
def linear_predict(model_json, vec):
    """Linear model forward pass using coef + intercept.

    Args:
        model_json: dict with "coef" (1-D or 2-D list) and "intercept"
            (scalar or list) — presumably exported from a scikit-learn
            linear model (TODO confirm against the training code).
        vec: 1-D feature vector (the sentence embedding).

    Returns:
        For a 2-D coef (multi-output model) an ndarray of scores,
        otherwise a plain float.
    """
    coef = np.asarray(model_json["coef"])
    intercept = np.asarray(model_json["intercept"])

    if coef.ndim == 2:  # multi-output: one coef row per output
        return coef @ vec + intercept

    # Single output. np.squeeze tolerates intercept stored either as a
    # scalar or as a 1-element list; float() on a shape-(1,) array is
    # an error on NumPy >= 2.0.
    return float(np.dot(coef, vec) + np.squeeze(intercept))
86
 
87
 
88
# ============================================================
# LOAD ALL 12 MODELS AT STARTUP
# ============================================================

print("Loading all 12 models…")

# One download per category; keys follow MODEL_REPOS insertion order.
loaded_models = {
    key: load_model_from_hf(MODEL_REPOS[key], MODEL_FILES[key])
    for key in MODEL_REPOS
}

print("✔ All models loaded successfully.")
 
 
 
104
 
105
 
106
# ============================================================
# FASTAPI APP
# ============================================================

app = FastAPI(title="MVT Community Value API (Production)")


class InputText(BaseModel):
    """Request body for /predict: the raw text to score."""
    text: str
115
+
116
+
117
@app.get("/")
def root():
    """Liveness probe: confirms the API process is up."""
    status = {"status": "ok", "message": "MVT Category & Value API running!"}
    return status
120
 
121
 
122
# ============================================================
# PREDICT ROUTE
# ============================================================

@app.post("/predict")
def predict(payload: InputText):
    """Score the input text against every loaded category model.

    Returns the original text plus a "predictions" dict containing the
    multi-output value/impact scores and one score per remaining category.
    """
    text = payload.text
    vec = embed(text)

    # The value_impact model is multi-output: (estimated value, impact level).
    value_pred, impact_pred = linear_predict(loaded_models["value_impact"], vec)

    results = {
        "estimated_value": float(value_pred),
        "impact_level": float(impact_pred),
    }

    # Every remaining model is single-output.
    for key in MODEL_REPOS:
        if key == "value_impact":
            continue  # already handled above
        results[key] = float(linear_predict(loaded_models[key], vec))

    return {
        "input": text,
        "predictions": results
    }