File size: 2,699 Bytes
648285b
 
 
 
7bafbe7
648285b
 
7bafbe7
648285b
7bafbe7
 
 
 
 
648285b
7bafbe7
 
 
648285b
7bafbe7
 
 
 
 
 
151e905
7c6d3a5
 
6eb8735
 
da08cd8
05326ba
f419b15
732258d
648285b
 
7bafbe7
648285b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7bafbe7
 
648285b
 
7bafbe7
648285b
 
d16406b
7bafbe7
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
from fastapi import FastAPI
from pydantic import BaseModel
from sentence_transformers import SentenceTransformer
from huggingface_hub import hf_hub_download
import xgboost as xgb

# -----------------------------
# Helper: Load XGBoost Booster (.json)
# -----------------------------
def load_xgb_model(repo_id: str, filename: str):
    """Fetch an XGBoost model file from the Hugging Face Hub and load it.

    Args:
        repo_id: Hub repository id (e.g. ``"mjpsm/Griot-xgb-model"``).
        filename: Name of the ``.json`` model file inside that repo.

    Returns:
        An ``xgboost.Booster`` restored from the downloaded file.
    """
    model_path = hf_hub_download(repo_id=repo_id, filename=filename)
    model = xgb.Booster()
    model.load_model(model_path)
    return model

# -----------------------------
# Load Soulprint models (all JSON now)
# -----------------------------
# Every archetype model lives at mjpsm/<Name>-xgb-model with the file
# <Name>_xgb_model.json, so the whole registry is one uniform comprehension.
available_models = {
    name: load_xgb_model(f"mjpsm/{name}-xgb-model", f"{name}_xgb_model.json")
    for name in (
        "Griot", "Kinara", "Ubuntu", "Jali", "Kuumba",
        "Sankofa", "Imani", "Maji", "Nzinga", "Bisa",
        "Zamani", "Tamu", "Shujaa", "Ayo", "Ujamaa",
    )
}

# Canonical list of all 15 archetypes. The snapshot endpoint iterates this
# list; any name missing from available_models scores 0.0. NOTE(review):
# every name here currently has a loaded model above, so the 0.0 fallback
# never fires — the "placeholder" path is for future, not-yet-trained models.
all_archetypes = [
    "Griot", "Kinara", "Ubuntu", "Jali", "Sankofa", "Imani", "Maji",
    "Nzinga", "Bisa", "Zamani", "Tamu", "Shujaa", "Ayo", "Ujamaa", "Kuumba"
]

# Shared sentence embedder: one instance serves every request. Its output is
# the feature vector fed to each booster (presumably the boosters were trained
# on all-mpnet-base-v2 embeddings — verify against the training pipeline).
embedder = SentenceTransformer("all-mpnet-base-v2")

# FastAPI application instance; routes are registered on it below.
app = FastAPI()

class TextInput(BaseModel):
    """Request body for /soulprint_snapshot: the raw text to be scored."""
    text: str

@app.post("/soulprint_snapshot")
def soulprint_snapshot(input: TextInput):
    """Score the submitted text against every Soulprint archetype.

    The text is embedded once with the shared sentence-transformer and
    scored by each archetype's XGBoost booster. Archetypes without a
    loaded model fall back to a 0.0 placeholder score.

    Returns:
        ``{"soulprint_snapshot": {archetype_name: score, ...}}`` with one
        float entry per name in ``all_archetypes``.
    """
    embedding = embedder.encode([input.text]).reshape(1, -1)

    # The DMatrix wraps the same embedding for every model, so build it
    # once here instead of re-creating it inside the loop (15x per request).
    dmatrix = xgb.DMatrix(embedding)

    snapshot = {}
    for name in all_archetypes:
        if name in available_models:
            snapshot[name] = float(available_models[name].predict(dmatrix)[0])
        else:
            snapshot[name] = 0.0  # placeholder until model is trained

    return {"soulprint_snapshot": snapshot}