Spaces:
Running
Running
| """ | |
| Application settings loaded from environment. | |
| Shared by: dataset pipeline, vector index build, and future API/LLM. | |
| """ | |
| import os | |
| from pathlib import Path | |
| from typing import Optional | |
| from dotenv import load_dotenv | |
# Resolve the repository root (two directories above this file, i.e. the
# parent of photo_editor) and seed os.environ from a .env file there.
_PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent
load_dotenv(_PROJECT_ROOT / ".env")
| def _str(name: str, default: Optional[str] = None) -> str: | |
| v = os.getenv(name, default) | |
| return v.strip() if v else (default or "") | |
| def _int(name: str, default: int = 0) -> int: | |
| v = os.getenv(name) | |
| if v is None or v.strip() == "": | |
| return default | |
| try: | |
| return int(v.strip()) | |
| except ValueError: | |
| return default | |
def _path(name: str, default: Optional[Path] = None) -> Path:
    """Read a filesystem-path environment variable.

    Unset/blank variables yield *default* (or the project root when no
    default is given); relative values are anchored at _PROJECT_ROOT.
    """
    raw = _str(name)
    if not raw:
        return default or _PROJECT_ROOT
    candidate = Path(raw)
    return candidate if candidate.is_absolute() else _PROJECT_ROOT / candidate
class Settings:
    """Environment-driven application configuration.

    Obtain instances through get_settings() so the whole process shares a
    single object; all values are read from os.environ (seeded from .env)
    at construction time.
    """

    def __init__(self) -> None:
        self.project_root = _PROJECT_ROOT

        # --- MIT-Adobe FiveK dataset ---
        self.fivek_subset_size = _int("FIVEK_SUBSET_SIZE", 500)
        self.fivek_dataset_dir = _path("FIVEK_DATASET_DIR", _PROJECT_ROOT / "fivek_dataset")
        self.fivek_lrcat_path = _path(
            "FIVEK_LRCAT_PATH",
            _PROJECT_ROOT / "fivek_dataset" / "raw_photos" / "fivek.lrcat",
        )
        self.fivek_raw_photos_dir = _path(
            "FIVEK_RAW_PHOTOS_DIR",
            _PROJECT_ROOT / "fivek_dataset" / "raw_photos",
        )
        # Derived path; always lives inside the dataset directory.
        self.fivek_file_list = self.fivek_dataset_dir / "filesAdobe.txt"

        # --- Azure AI Search (vector index) ---
        self.azure_search_endpoint = _str("AZURE_SEARCH_ENDPOINT")
        self.azure_search_key = _str("AZURE_SEARCH_KEY")
        self.azure_search_index_name = _str("AZURE_SEARCH_INDEX_NAME", "fivek-vectors")

        # --- Embedding model (name plus vector dimension for the index schema) ---
        self.embedding_model = _str(
            "EMBEDDING_MODEL",
            "openai/clip-vit-base-patch32",
        )
        self.embedding_dim = _int("EMBEDDING_DIM", 512)

        # --- Optional: Azure AI Vision multimodal embeddings (instead of local CLIP) ---
        self.azure_vision_endpoint = _str("AZURE_VISION_ENDPOINT")
        self.azure_vision_key = _str("AZURE_VISION_KEY")
        self.azure_vision_model_version = _str("AZURE_VISION_MODEL_VERSION", "2023-04-15")

        # --- Azure OpenAI (LLM used by the pipeline) ---
        self.azure_openai_endpoint = _str("AZURE_OPENAI_ENDPOINT")
        self.azure_openai_key = _str("AZURE_OPENAI_KEY")
        self.azure_openai_deployment = _str("AZURE_OPENAI_DEPLOYMENT", "gpt-4o")
        self.azure_openai_api_version = _str("AZURE_OPENAI_API_VERSION", "2024-12-01-preview")

        # --- Optional external editing API; when unset the pipeline applies edits locally ---
        self.editing_api_url = _str("EDITING_API_URL")

    def azure_search_configured(self) -> bool:
        """True when both the Search endpoint and key are present."""
        return bool(self.azure_search_endpoint) and bool(self.azure_search_key)

    def azure_vision_configured(self) -> bool:
        """True when both the Vision endpoint and key are present."""
        return bool(self.azure_vision_endpoint) and bool(self.azure_vision_key)

    def azure_openai_configured(self) -> bool:
        """True when both the Azure OpenAI endpoint and key are present."""
        return bool(self.azure_openai_endpoint) and bool(self.azure_openai_key)

    def editing_api_configured(self) -> bool:
        """True when an external editing API URL was supplied."""
        return bool(self.editing_api_url)
# Cached singleton; populated on first call to get_settings().
_settings: Optional[Settings] = None


def get_settings() -> Settings:
    """Return the process-wide Settings instance, creating it lazily."""
    global _settings
    if _settings is not None:
        return _settings
    _settings = Settings()
    return _settings


# Module-level convenience handle: importing this module materializes settings.
settings = get_settings()