File size: 964 Bytes
1182571
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b25f064
a147390
b25f064
 
1182571
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
import os
from dotenv import load_dotenv

# Load environment variables from a .env file (if present) into os.environ,
# so the Config class below can read them via os.getenv at import time.
load_dotenv()

class Config:
    """Central application configuration.

    Values are read from environment variables (optionally loaded from a
    .env file) at import time, with sensible defaults where appropriate.
    """

    # Directory where models are stored/cached
    MODELS_DIR: str = os.getenv("MODELS_DIR", "saved_models")

    # Inference batch size (defaults to 8 when BATCH_SIZE is unset)
    BATCH_SIZE: int = int(os.getenv("BATCH_SIZE", "8"))

    # API Key — None when unset; callers must handle the missing-key case
    API_KEY = os.getenv("API_KEY")

    # FastText Model
    FASTTEXT_FILENAME: str = "lid.176.ftz"

    # AI Detector Model
    # AI_DETECTOR_MODEL_NAME = "yuchuantian/AIGC_detector_env3short" # Deprecated in favor of local quantized
    AI_DETECTOR_FILENAME: str = "aigc_detector_int8.pt"

    @staticmethod
    def get_model_path(filename: str) -> str:
        """Return the full path for *filename* inside the models directory."""
        return os.path.join(Config.MODELS_DIR, filename)

    @staticmethod
    def ensure_models_dir() -> None:
        """Ensure the models directory exists.

        Uses exist_ok=True instead of an exists()-then-makedirs() check,
        which avoids the TOCTOU race where the directory is created by
        another process between the check and the call.
        """
        os.makedirs(Config.MODELS_DIR, exist_ok=True)