# NOTE: removed "Spaces: Build error" banner text pasted in from the
# Hugging Face Spaces UI — it is not Python and broke the module.
import datetime
import re
import shelve

from huggingface_hub import hf_hub_download
from llama_cpp import Llama
class MairaBrain:
    """Chat persona ("Maira") backed by a local llama.cpp model.

    Per-user state (chat history, facts, metrics) is persisted in a
    `shelve` database on disk, keyed by user id.
    """

    def __init__(self, repo_id, filename):
        """Download the model weights from the Hugging Face Hub and load them.

        repo_id  -- Hub repository id that hosts the model file.
        filename -- model filename inside that repository (e.g. a .gguf).
        """
        # Alpine needs very specific loading
        model_path = hf_hub_download(repo_id=repo_id, filename=filename)
        self.llm = Llama(
            model_path=model_path,
            n_ctx=2048,  # 2048 is safer for first boot on Alpine
            n_threads=8,
        )
        self.db_path = "maira_universe.db"

    def get_response(self, user_id, user_input):
        """Generate a reply for `user_id` and record the exchange.

        Creates the user's persistent record on first contact, runs the
        model on a ChatML-style prompt, appends both sides of the exchange
        to the stored history, and returns the stripped model reply.
        """
        with shelve.open(self.db_path) as db:
            # First contact: initialise the user's persistent record.
            if user_id not in db:
                db[user_id] = {"history": [], "facts": {}, "metrics": {"loyalty": 50}}
            u = db[user_id]
            # Simple identity logic
            prompt = (
                "<|im_start|>system\nYou are Maira, a dope high-status lady.<|im_end|>\n"
                f"<|im_start|>user\n{user_input}<|im_end|>\n<|im_start|>assistant\n"
            )
            output = self.llm(prompt, max_tokens=200, stop=["<|im_end|>"])
            response = output["choices"][0]["text"].strip()
            u["history"].append(f"User: {user_input}")
            u["history"].append(f"Maira: {response}")
            # Reassign explicitly instead of opening with writeback=True:
            # writeback caches every accessed entry and rewrites the whole
            # cache on close; this persists exactly the one record we touched.
            db[user_id] = u
            return response