File size: 1,550 Bytes
5d10a46
 
3425760
 
 
5d10a46
3425760
5d10a46
 
 
6f96073
5d10a46
 
6f96073
5d10a46
 
3425760
bd545c2
 
5d10a46
bd545c2
 
5d10a46
 
bd545c2
3425760
bd545c2
 
6f96073
bd545c2
3425760
5d10a46
 
 
 
6f96073
bd545c2
 
 
6f96073
5d10a46
6f96073
5d10a46
 
fa7292c
6f96073
 
5d10a46
3425760
6f96073
5d10a46
 
 
 
bd545c2
5d10a46
3425760
5d10a46
bd545c2
 
 
3425760
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
import json
import numpy as np
from fastapi import FastAPI
from pydantic import BaseModel
from huggingface_hub import hf_hub_download
from sentence_transformers import SentenceTransformer

# Hugging Face Hub coordinates for the serialized linear impact model.
HF_USER = "ClergeF"
IMPACT_REPO = "impact-model"
IMPACT_FILE = "impact.json"

print("Loading embedder: all-MiniLM-L6-v2 …")
# Sentence embedder is loaded once at import time and reused for every request.
embedder = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

def embed(text: str):
    """Encode a single string and return its embedding vector."""
    vectors = embedder.encode([text])
    return vectors[0]

def load_model():
    """Download the impact-model JSON from the Hugging Face Hub.

    Returns the parsed model dict with presentation-only fields removed,
    so the stored model carries just what prediction needs.
    """
    print(f"Loading {IMPACT_REPO}/{IMPACT_FILE}")
    path = hf_hub_download(
        repo_id=f"{HF_USER}/{IMPACT_REPO}",
        filename=IMPACT_FILE,
    )
    with open(path, "r") as f:
        data = json.load(f)
    # Drop the unwanted field before the model dict is handed to callers.
    data.pop("matched_keyword", None)
    return data

def linear_predict(model_json, vec):
    """Score *vec* with a linear model stored as JSON.

    Parameters
    ----------
    model_json : dict
        Must contain "coef" (sequence of weights) and "intercept"
        (a scalar or, as sklearn serializes it, a 1-element sequence).
    vec : sequence of float
        Feature/embedding vector, same length as "coef".

    Returns
    -------
    float
        coef · vec + intercept as a plain Python float.
    """
    coef = np.asarray(model_json["coef"], dtype=float)
    intercept = np.asarray(model_json["intercept"], dtype=float)
    score = np.dot(coef, vec) + intercept
    # `intercept` may be a 1-element array (sklearn convention), in which
    # case `score` is a size-1 array. float() on a size-1 ndarray is
    # deprecated (NumPy >= 1.25), so reduce to a scalar explicitly.
    return float(np.asarray(score).reshape(-1)[0])

print("Loading impact model...")
# Fetched once at import time so every request reuses the same model dict.
impact_model = load_model()
print("✔ Impact model loaded!")

app = FastAPI(title="Impact Rating API")

class InputText(BaseModel):
    """Request body for /rate: the raw text to score."""
    text: str

@app.get("/")
def home():
    """Health-check endpoint confirming the API is up."""
    status_payload = {"status": "ok", "message": "Impact API running"}
    return status_payload

@app.post("/rate")
def rate(payload: InputText):
    """Embed the input text and score it with the linear impact model.

    Returns the echoed input plus a single numeric "impact_score";
    internal fields (e.g. matched keywords) are never exposed.
    """
    text = payload.text
    embedding = embed(text)
    score = linear_predict(impact_model, embedding)
    return {
        "input": text,
        "result": {"impact_score": score},
    }