# exwiseapi/api/apps.py
# Author: devnamdev2003
# Commit: "Deploy Django API with Docker Build Caching" (dd4466b)
import os
import sys

import torch
from django.apps import AppConfig
from transformers import pipeline
class ApiConfig(AppConfig):
    """Django app config for ``api``.

    On startup, loads a HuggingFace zero-shot classification pipeline
    (used to categorize expenses) and the list of candidate category
    labels, exposing them as ``self.classifier`` / ``self.categories``.
    """

    default_auto_field = "django.db.models.BigAutoField"
    name = "api"

    def ready(self):
        """Load the expense-categorizer model exactly once.

        Under ``manage.py runserver`` Django's autoreloader spawns two
        processes; only the reloader child has ``RUN_MAIN == "true"``.
        We bail out in the parent so the expensive model load happens
        once in development. Under Gunicorn / Kubernetes, ``runserver``
        is not on the command line, so the model always loads there.
        """
        # NOTE: the original guard ended in `pass`, so it never skipped
        # anything and the model loaded twice in dev. Returning early in
        # the autoreload parent is the actual fix.
        if "runserver" in sys.argv and os.environ.get("RUN_MAIN") != "true":
            return

        print("Loading Expense Categorizer AI from cache...")
        # device=0 selects the first GPU; -1 tells the pipeline to use CPU.
        device = 0 if torch.cuda.is_available() else -1
        # Model weights are expected to be pre-downloaded into the HF cache
        # during the Docker image build, so this load hits the local cache.
        self.classifier = pipeline(
            "zero-shot-classification",
            model="valhalla/distilbart-mnli-12-3",
            device=device,
        )
        # Candidate labels passed to the zero-shot classifier per expense.
        self.categories = [
            "Food & Drinks",
            "Groceries",
            "Shopping",
            "Bills & Utilities",
            "Entertainment",
            "Health",
            "Education",
            "Subscriptions",
            "Travel",
            "Rent",
            "Family & Friends",
            "Miscellaneous",
            "Gifts",
            "Party",
            "Personal Care",
            "Home & Hygiene",
            "Others",
            "Recharge",
        ]
        print("AI Model loaded successfully!")