# Configuration module for the Canadian healthcare operations copilot.
import os
from typing import Dict, Any, List
# --- Deployment & credentials -------------------------------------------
# Data-residency flag: deployments are expected to stay in Canada.
# Enforcement happens in the infrastructure, not in this module.
CANADA_RESIDENCY_REQUIRED = True
CLOUD_REGION = os.environ.get("CLOUD_REGION", "ca-central-1")

# API credential (empty string when the env var is absent).
COHERE_API_KEY = os.environ.get("COHERE_API_KEY", "")

# Cohere model ids; contact Cohere for larger / custom healthcare models.
COHERE_MODEL_PRIMARY = os.environ.get("COHERE_MODEL_PRIMARY", "command-r")
COHERE_EMBED_MODEL = os.environ.get("COHERE_EMBED_MODEL", "embed-english-v3.0")
# Open-weight fallback models, ordered largest-first; tried in sequence
# until one loads successfully.
OPEN_LLM_CANDIDATES: List[str] = [
    "Qwen/Qwen2.5-72B-Instruct",                # large & open weights
    "meta-llama/Meta-Llama-3.1-70B-Instruct",   # large (requires strong GPUs)
    "mistralai/Mixtral-8x22B-Instruct-v0.1",    # MoE, open
    "NousResearch/Meta-Llama-3.1-8B-Instruct",  # smaller safety net
    "mistralai/Mistral-7B-Instruct-v0.3",
]

# Generation cap for the local fallback models.
LOCAL_MAX_NEW_TOKENS = int(os.environ.get("LOCAL_MAX_NEW_TOKENS", "1200"))
# Generation knobs shared by all backends; each value can be overridden
# through its environment variable (string), parsed to the right type here.
MODEL_SETTINGS: Dict[str, Any] = dict(
    temperature=float(os.environ.get("TEMP", "0.3")),
    top_p=float(os.environ.get("TOP_P", "0.9")),
    repetition_penalty=float(os.environ.get("REP_PEN", "1.12")),
    max_new_tokens=int(os.environ.get("MAX_NEW_TOKENS", "1500")),
)
# Upload handling and healthcare-topic detection configuration.
# `healthcare_keywords` is the vocabulary used to decide whether a request
# should be routed to the healthcare flow rather than general conversation.
HEALTHCARE_SETTINGS = dict(
    supported_file_types=[".csv", ".txt", ".md", ".pdf"],
    healthcare_keywords=[
        "hospital", "clinic", "surgery", "surgical", "wait time", "consult", "triage",
        "beds", "occupancy", "icu", "ambulatory", "perioperative", "zone", "health authority",
        "province", "nova scotia", "iwk",
    ],
)
# Prompt for ordinary (non-healthcare) chat turns.
GENERAL_CONVERSATION_PROMPT = "You are a helpful, concise assistant."

# Healthcare-mode system prompt: arithmetic is delegated to the deterministic
# scenario engine, so the model is explicitly told never to fabricate figures.
HEALTHCARE_SYSTEM_PROMPT = (
    "You are a Canadian healthcare operations copilot. You must follow the "
    "scenario tasks exactly; calculations are done deterministically. Do not "
    "invent numbers."
)
# Feature flags
def env_flag(name: str, default: str = "1") -> bool:
    """Parse a boolean feature flag from the environment.

    Case-insensitive and whitespace-tolerant: "0", "false", "no", and "off"
    (any casing, with surrounding spaces) read as False; every other value —
    including an unset variable's *default* — reads as True.  This fixes the
    old check, which recognized only the exact strings "0"/"false"/"False"
    and so treated e.g. "FALSE" as enabled.

    :param name: environment variable to read.
    :param default: string used when the variable is unset.
    :return: the parsed boolean.
    """
    return os.getenv(name, default).strip().lower() not in ("0", "false", "no", "off")

# Scenario engine on/off switch (defaults to enabled).
USE_SCENARIO_ENGINE = env_flag("USE_SCENARIO_ENGINE", "1")

# Storage locations (each overridable via its environment variable).
DATA_DIR = os.getenv("DATA_DIR", "./data")                  # uploaded/source data
RAG_INDEX_DIR = os.getenv("RAG_INDEX_DIR", "./rag_index")   # retrieval index files
PERSIST_CONTENT = False                                     # never persist user content
SNAPSHOT_PATH = os.getenv("SNAPSHOT_PATH", "./snapshots")   # scenario snapshots