Spaces:
Runtime error
Runtime error
ParaAIV3.0
Browse files- .env.local +70 -0
- Dockerfile +47 -15
- api/config.py +393 -0
- docker-compose.yaml +156 -0
- docker-compose.yml +0 -47
- requirements.txt +64 -18
- seed.sql +83 -0
.env.local
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================================================
|
| 2 |
+
# Variáveis de Ambiente - DESENVOLVIMENTO LOCAL
|
| 3 |
+
# para.AI API v3.0
|
| 4 |
+
# ============================================================================
|
| 5 |
+
# Este arquivo é para desenvolvimento local com docker-compose
|
| 6 |
+
# Copie para .env.local e ajuste conforme necessário
|
| 7 |
+
|
| 8 |
+
# ============================================================================
|
| 9 |
+
# APP SETTINGS
|
| 10 |
+
# ============================================================================
|
| 11 |
+
APP_ENV=development
|
| 12 |
+
DEBUG=true
|
| 13 |
+
|
| 14 |
+
# ============================================================================
|
| 15 |
+
# DATABASE - PostgreSQL Local (Docker)
|
| 16 |
+
# ============================================================================
|
| 17 |
+
DATABASE_URL=postgresql://para_ai:para_ai_dev_2026@localhost:5432/para_ai
|
| 18 |
+
|
| 19 |
+
# Ou se estiver usando docker-compose (conectando do host):
|
| 20 |
+
# DATABASE_URL=postgresql://para_ai:para_ai_dev_2026@db:5432/para_ai
|
| 21 |
+
|
| 22 |
+
DB_POOL_SIZE=10
|
| 23 |
+
DB_MAX_OVERFLOW=20
|
| 24 |
+
|
| 25 |
+
# ============================================================================
|
| 26 |
+
# LLM PROVIDERS - SUAS API KEYS REAIS
|
| 27 |
+
# ============================================================================
|
| 28 |
+
# Obtenha suas keys em:
|
| 29 |
+
# - Groq: https://console.groq.com/
|
| 30 |
+
# - OpenAI: https://platform.openai.com/api-keys
|
| 31 |
+
# - Anthropic: https://console.anthropic.com/
|
| 32 |
+
|
| 33 |
+
GROQ_API_KEY=gsk_sua_key_aqui
|
| 34 |
+
# OPENAI_API_KEY=sk-sua_key_aqui
|
| 35 |
+
# ANTHROPIC_API_KEY=sk-ant-sua_key_aqui
|
| 36 |
+
|
| 37 |
+
DEFAULT_LLM_PROVIDER=groq
|
| 38 |
+
DEFAULT_MODEL_TYPE=balanced
|
| 39 |
+
|
| 40 |
+
# ============================================================================
|
| 41 |
+
# CACHE - Redis Local (Docker)
|
| 42 |
+
# ============================================================================
|
| 43 |
+
ENABLE_CACHE=true
|
| 44 |
+
REDIS_URL=redis://localhost:6379/0
|
| 45 |
+
|
| 46 |
+
# ============================================================================
|
| 47 |
+
# PROCESSING
|
| 48 |
+
# ============================================================================
|
| 49 |
+
ENABLE_PARALLEL=true
|
| 50 |
+
MAX_CONCURRENT_PROCESSES=3
|
| 51 |
+
PROCESS_TIMEOUT_SECONDS=600
|
| 52 |
+
|
| 53 |
+
# ============================================================================
|
| 54 |
+
# LOGGING
|
| 55 |
+
# ============================================================================
|
| 56 |
+
LOG_LEVEL=DEBUG
|
| 57 |
+
LOG_FILE_ENABLED=true
|
| 58 |
+
|
| 59 |
+
# ============================================================================
|
| 60 |
+
# SECURITY - Desenvolvimento (permissivo)
|
| 61 |
+
# ============================================================================
|
| 62 |
+
CORS_ORIGINS=*
|
| 63 |
+
REQUIRE_API_KEY=false
|
| 64 |
+
|
| 65 |
+
# ============================================================================
|
| 66 |
+
# DEVELOPMENT
|
| 67 |
+
# ============================================================================
|
| 68 |
+
DEV_MODE=true
|
| 69 |
+
MOCK_LLM_RESPONSES=false
|
| 70 |
+
SAVE_DEBUG_FILES=true
|
Dockerfile
CHANGED
|
@@ -1,28 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
FROM python:3.11-slim
|
| 2 |
|
| 3 |
-
|
| 4 |
-
|
|
|
|
|
|
|
| 5 |
|
|
|
|
| 6 |
WORKDIR /app
|
| 7 |
|
| 8 |
-
#
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
COPY requirements.txt .
|
|
|
|
|
|
|
| 17 |
RUN pip install --no-cache-dir -r requirements.txt
|
| 18 |
|
| 19 |
-
#
|
| 20 |
COPY . .
|
| 21 |
|
| 22 |
-
#
|
| 23 |
-
RUN mkdir -p /app/data/
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
|
|
|
|
| 25 |
EXPOSE 7860
|
| 26 |
|
| 27 |
-
#
|
| 28 |
-
CMD ["uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "7860"]
|
|
|
|
| 1 |
+
# ============================================================================
|
| 2 |
+
# Dockerfile para Hugging Face Spaces - para.AI API v3.0
|
| 3 |
+
# ============================================================================
|
| 4 |
+
|
| 5 |
FROM python:3.11-slim
|
| 6 |
|
| 7 |
+
# Metadados
|
| 8 |
+
LABEL maintainer="para.AI Team"
|
| 9 |
+
LABEL version="3.0.0"
|
| 10 |
+
LABEL description="API de análise jurisprudencial com IA"
|
| 11 |
|
| 12 |
+
# Definir diretório de trabalho
|
| 13 |
WORKDIR /app
|
| 14 |
|
| 15 |
+
# Variáveis de ambiente padrão (sobrescritas pelas Secrets do HF Spaces)
|
| 16 |
+
ENV PYTHONUNBUFFERED=1 \
|
| 17 |
+
PYTHONDONTWRITEBYTECODE=1 \
|
| 18 |
+
PIP_NO_CACHE_DIR=1 \
|
| 19 |
+
PIP_DISABLE_PIP_VERSION_CHECK=1 \
|
| 20 |
+
APP_ENV=production \
|
| 21 |
+
HOST=0.0.0.0 \
|
| 22 |
+
PORT=7860
|
| 23 |
+
|
| 24 |
+
# Instalar dependências do sistema
|
| 25 |
+
RUN apt-get update && apt-get install -y \
|
| 26 |
+
gcc \
|
| 27 |
+
g++ \
|
| 28 |
+
postgresql-client \
|
| 29 |
+
libpq-dev \
|
| 30 |
+
curl \
|
| 31 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 32 |
+
|
| 33 |
+
# Copiar requirements primeiro (para cache do Docker)
|
| 34 |
COPY requirements.txt .
|
| 35 |
+
|
| 36 |
+
# Instalar dependências Python
|
| 37 |
RUN pip install --no-cache-dir -r requirements.txt
|
| 38 |
|
| 39 |
+
# Copiar código da aplicação
|
| 40 |
COPY . .
|
| 41 |
|
| 42 |
+
# Criar diretórios necessários
|
| 43 |
+
RUN mkdir -p /app/data/uploads \
|
| 44 |
+
/app/data/outputs \
|
| 45 |
+
/app/data/temp \
|
| 46 |
+
/app/data/backups \
|
| 47 |
+
/app/logs
|
| 48 |
+
|
| 49 |
+
# Permissões
|
| 50 |
+
RUN chmod -R 755 /app
|
| 51 |
+
|
| 52 |
+
# Health check
|
| 53 |
+
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
| 54 |
+
CMD curl -f http://localhost:7860/api/v1/health || exit 1
|
| 55 |
|
| 56 |
+
# Expor porta (HF Spaces usa 7860)
|
| 57 |
EXPOSE 7860
|
| 58 |
|
| 59 |
+
# Comando de inicialização
|
| 60 |
+
CMD ["uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "2"]
|
api/config.py
ADDED
|
@@ -0,0 +1,393 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Configurações centralizadas da API para.AI v3.0
|
| 3 |
+
Gerencia todas as variáveis de ambiente e configurações do sistema
|
| 4 |
+
"""
|
| 5 |
+
import os
|
| 6 |
+
from typing import Optional, List
|
| 7 |
+
from pydantic_settings import BaseSettings
|
| 8 |
+
from pydantic import Field
|
| 9 |
+
from functools import lru_cache
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Settings(BaseSettings):
    """
    Application settings for para.AI.

    Every setting can be overridden via environment variables; values are
    also loaded automatically from a `.env` file when present (see `Config`).

    NOTE(review): the `env=` keyword on `Field(...)` is a pydantic-v1 style
    hint. With `pydantic_settings` (v2) the environment variable is matched
    by field name (case-sensitive here), so `env=` is redundant but harmless;
    confirm against the pinned pydantic version.
    """

    # ========================================================================
    # API SETTINGS
    # ========================================================================
    APP_NAME: str = "para.AI API"
    APP_VERSION: str = "3.0.0"
    APP_DESCRIPTION: str = "API para análise automatizada de acórdãos jurisprudenciais"
    APP_ENV: str = Field(default="production", env="APP_ENV")
    DEBUG: bool = Field(default=False, env="DEBUG")

    # ========================================================================
    # SERVER SETTINGS
    # ========================================================================
    HOST: str = Field(default="0.0.0.0", env="HOST")
    PORT: int = Field(default=8000, env="PORT")
    WORKERS: int = Field(default=4, env="WORKERS")
    RELOAD: bool = Field(default=False, env="RELOAD")

    # ========================================================================
    # DATABASE SETTINGS
    # ========================================================================
    DATABASE_URL: str = Field(
        default="postgresql://para_ai:para_ai@localhost:5432/para_ai",
        env="DATABASE_URL",
        description="PostgreSQL connection string"
    )
    DB_POOL_SIZE: int = Field(default=20, env="DB_POOL_SIZE")
    DB_MAX_OVERFLOW: int = Field(default=40, env="DB_MAX_OVERFLOW")
    DB_POOL_TIMEOUT: int = Field(default=30, env="DB_POOL_TIMEOUT")
    DB_POOL_RECYCLE: int = Field(default=3600, env="DB_POOL_RECYCLE")
    SQL_ECHO: bool = Field(default=False, env="SQL_ECHO")

    # ========================================================================
    # FILES & STORAGE SETTINGS
    # ========================================================================
    FILES_BASE_PATH: str = Field(default="./data/files", env="FILES_BASE_PATH")
    UPLOAD_PATH: str = Field(default="./data/uploads", env="UPLOAD_PATH")
    OUTPUT_PATH: str = Field(default="./data/outputs", env="OUTPUT_PATH")
    TEMP_PATH: str = Field(default="./data/temp", env="TEMP_PATH")
    BACKUP_PATH: str = Field(default="./data/backups", env="BACKUP_PATH")

    MAX_UPLOAD_SIZE_MB: int = Field(default=500, env="MAX_UPLOAD_SIZE_MB")
    ALLOWED_EXTENSIONS: List[str] = [".jsonl", ".json", ".txt"]

    # ========================================================================
    # LLM PROVIDERS API KEYS
    # ========================================================================
    GROQ_API_KEY: Optional[str] = Field(default=None, env="GROQ_API_KEY")
    OPENAI_API_KEY: Optional[str] = Field(default=None, env="OPENAI_API_KEY")
    ANTHROPIC_API_KEY: Optional[str] = Field(default=None, env="ANTHROPIC_API_KEY")

    DEFAULT_LLM_PROVIDER: str = Field(default="groq", env="DEFAULT_LLM_PROVIDER")
    DEFAULT_MODEL_TYPE: str = Field(default="balanced", env="DEFAULT_MODEL_TYPE")

    # LLM request defaults
    LLM_DEFAULT_TEMPERATURE: float = Field(default=0.7, env="LLM_DEFAULT_TEMPERATURE")
    LLM_DEFAULT_MAX_TOKENS: int = Field(default=2048, env="LLM_DEFAULT_MAX_TOKENS")
    LLM_TIMEOUT_SECONDS: int = Field(default=60, env="LLM_TIMEOUT_SECONDS")

    # ========================================================================
    # PROCESSING SETTINGS
    # ========================================================================
    MAX_CONCURRENT_PROCESSES: int = Field(default=5, env="MAX_CONCURRENT_PROCESSES")
    PROCESS_TIMEOUT_SECONDS: int = Field(default=600, env="PROCESS_TIMEOUT_SECONDS")
    # NOTE: the env var name (ENABLE_PARALLEL) intentionally differs from the
    # field name here; with case_sensitive matching, verify which one the
    # installed pydantic version actually honors.
    ENABLE_PARALLEL_PROCESSING: bool = Field(default=True, env="ENABLE_PARALLEL")
    DEFAULT_MAX_WORKERS: int = Field(default=3, env="DEFAULT_MAX_WORKERS")

    # Batch processing
    BATCH_SIZE: int = Field(default=10, env="BATCH_SIZE")
    BATCH_DELAY_MS: int = Field(default=100, env="BATCH_DELAY_MS")

    # ========================================================================
    # LOGGING SETTINGS
    # ========================================================================
    LOG_LEVEL: str = Field(default="INFO", env="LOG_LEVEL")
    LOG_FORMAT: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    LOG_FILE_ENABLED: bool = Field(default=True, env="LOG_FILE_ENABLED")
    LOG_FILE_PATH: str = Field(default="./logs", env="LOG_FILE_PATH")
    LOG_FILE_MAX_BYTES: int = Field(default=10485760, env="LOG_FILE_MAX_BYTES")  # 10MB
    LOG_FILE_BACKUP_COUNT: int = Field(default=5, env="LOG_FILE_BACKUP_COUNT")

    # ========================================================================
    # SECURITY SETTINGS
    # ========================================================================
    # CORS — wide open by default; restrict to real domains in production.
    CORS_ORIGINS: List[str] = ["*"]
    CORS_ALLOW_CREDENTIALS: bool = True
    CORS_ALLOW_METHODS: List[str] = ["*"]
    CORS_ALLOW_HEADERS: List[str] = ["*"]

    # API keys
    API_KEY_HEADER: str = Field(default="X-API-Key", env="API_KEY_HEADER")
    REQUIRE_API_KEY: bool = Field(default=False, env="REQUIRE_API_KEY")
    VALID_API_KEYS: List[str] = []  # populated in __init__ from a comma-separated env var

    # Rate limiting
    RATE_LIMIT_ENABLED: bool = Field(default=False, env="RATE_LIMIT_ENABLED")
    RATE_LIMIT_PER_MINUTE: int = Field(default=60, env="RATE_LIMIT_PER_MINUTE")

    # ========================================================================
    # CACHE SETTINGS
    # ========================================================================
    ENABLE_CACHE: bool = Field(default=False, env="ENABLE_CACHE")
    CACHE_TTL_SECONDS: int = Field(default=3600, env="CACHE_TTL_SECONDS")
    CACHE_BACKEND: str = Field(default="memory", env="CACHE_BACKEND")  # memory/redis
    REDIS_URL: Optional[str] = Field(default=None, env="REDIS_URL")

    # ========================================================================
    # MONITORING & METRICS
    # ========================================================================
    ENABLE_METRICS: bool = Field(default=True, env="ENABLE_METRICS")
    METRICS_EXPORT_INTERVAL: int = Field(default=60, env="METRICS_EXPORT_INTERVAL")

    # Prometheus
    PROMETHEUS_ENABLED: bool = Field(default=False, env="PROMETHEUS_ENABLED")
    PROMETHEUS_PORT: int = Field(default=9090, env="PROMETHEUS_PORT")

    # ========================================================================
    # PROCESSORS SETTINGS
    # ========================================================================
    # Which processors (1-9) are enabled.
    ENABLED_PROCESSORS: List[int] = [1, 2, 3, 4, 5, 6, 7, 8, 9]

    # Confidence thresholds
    MIN_CONFIDENCE_THRESHOLD: float = Field(default=0.7, env="MIN_CONFIDENCE_THRESHOLD")

    # ========================================================================
    # DEVELOPMENT & DEBUG
    # ========================================================================
    DEV_MODE: bool = Field(default=False, env="DEV_MODE")
    MOCK_LLM_RESPONSES: bool = Field(default=False, env="MOCK_LLM_RESPONSES")
    SAVE_DEBUG_FILES: bool = Field(default=False, env="SAVE_DEBUG_FILES")

    # ========================================================================
    # ADVANCED SETTINGS
    # ========================================================================
    # Request tracking
    ENABLE_REQUEST_ID: bool = Field(default=True, env="ENABLE_REQUEST_ID")
    REQUEST_ID_HEADER: str = "X-Request-ID"

    # Compression
    ENABLE_GZIP: bool = Field(default=True, env="ENABLE_GZIP")
    GZIP_MIN_SIZE: int = Field(default=1000, env="GZIP_MIN_SIZE")

    # Timeouts
    HTTP_TIMEOUT_SECONDS: int = Field(default=300, env="HTTP_TIMEOUT_SECONDS")

    # Task retention
    TASK_RETENTION_HOURS: int = Field(default=24, env="TASK_RETENTION_HOURS")
    AUTO_CLEANUP_ENABLED: bool = Field(default=True, env="AUTO_CLEANUP_ENABLED")

    class Config:
        """Pydantic config: load `.env` (UTF-8), match env names case-sensitively."""
        env_file = ".env"
        env_file_encoding = "utf-8"
        case_sensitive = True

    def __init__(self, **kwargs):
        """Initialize settings, then normalize comma-separated env strings.

        CORS_ORIGINS and VALID_API_KEYS may arrive from the environment as a
        single comma-separated string; both are split into stripped lists.
        """
        super().__init__(**kwargs)

        # Parse CORS_ORIGINS if it came through as a raw string.
        if isinstance(self.CORS_ORIGINS, str):
            self.CORS_ORIGINS = [origin.strip() for origin in self.CORS_ORIGINS.split(",")]

        # Parse VALID_API_KEYS straight from the environment (read once).
        raw_keys = os.getenv("VALID_API_KEYS")
        if raw_keys:
            self.VALID_API_KEYS = [key.strip() for key in raw_keys.split(",")]

    @property
    def is_production(self) -> bool:
        """True when APP_ENV is 'production' (case-insensitive)."""
        return self.APP_ENV.lower() == "production"

    @property
    def is_development(self) -> bool:
        """True when APP_ENV is 'development' or 'dev' (case-insensitive)."""
        return self.APP_ENV.lower() in ["development", "dev"]

    @property
    def database_url_masked(self) -> str:
        """Return DATABASE_URL with the credential part replaced by ``***``.

        Splits on the LAST '@' (rsplit) so that passwords containing '@'
        are still masked in full; the host portion is everything after the
        final '@'.
        """
        if "@" in self.DATABASE_URL:
            _, host_part = self.DATABASE_URL.rsplit("@", 1)
            return f"***@{host_part}"
        return self.DATABASE_URL

    def get_llm_providers_status(self) -> dict:
        """Return a {provider: configured?} map based on which API keys are set."""
        return {
            "groq": bool(self.GROQ_API_KEY),
            "openai": bool(self.OPENAI_API_KEY),
            "anthropic": bool(self.ANTHROPIC_API_KEY)
        }

    def validate_paths(self) -> None:
        """Create all configured data/log directories if they do not exist."""
        from pathlib import Path

        paths = [
            self.FILES_BASE_PATH,
            self.UPLOAD_PATH,
            self.OUTPUT_PATH,
            self.TEMP_PATH,
            self.BACKUP_PATH,
            self.LOG_FILE_PATH
        ]

        for path_str in paths:
            path = Path(path_str)
            path.mkdir(parents=True, exist_ok=True)

    def to_dict(self) -> dict:
        """Convert settings to a dict with secret values masked.

        NOTE(review): ``.dict()`` is deprecated under pydantic v2 in favor of
        ``model_dump()``; kept as-is to stay compatible with the pinned version.
        """
        data = self.dict()

        # Mask sensitive values (strings become a placeholder; lists are
        # replaced element-by-element so their length stays visible).
        sensitive_keys = [
            "GROQ_API_KEY",
            "OPENAI_API_KEY",
            "ANTHROPIC_API_KEY",
            "DATABASE_URL",
            "VALID_API_KEYS",
            "REDIS_URL"
        ]

        for key in sensitive_keys:
            if key in data and data[key]:
                if isinstance(data[key], str):
                    data[key] = "***HIDDEN***"
                elif isinstance(data[key], list):
                    data[key] = ["***HIDDEN***"] * len(data[key])

        return data
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
# ============================================================================
|
| 256 |
+
# SINGLETON INSTANCE
|
| 257 |
+
# ============================================================================
|
| 258 |
+
|
| 259 |
+
@lru_cache()
def get_settings() -> Settings:
    """
    Return the process-wide singleton `Settings` instance.

    `lru_cache` guarantees `Settings` is constructed exactly once per
    process; every later call returns the same cached object. Required
    data/log directories are created as a side effect of first access.

    Returns:
        Settings: the configured, cached instance
    """
    instance = Settings()

    # Ensure all configured directories exist before anything uses them.
    instance.validate_paths()

    return instance
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
# ============================================================================
|
| 279 |
+
# HELPER FUNCTIONS
|
| 280 |
+
# ============================================================================
|
| 281 |
+
|
| 282 |
+
def get_env(key: str, default: Any = None) -> Any:
    """
    Helper to read an environment variable.

    Fixed: the annotations previously used the builtin function ``any``
    as a type; the correct annotation is ``typing.Any``.

    Args:
        key: Name of the environment variable
        default: Value returned when the variable is not set

    Returns:
        The variable's value, or ``default`` when unset
    """
    return os.getenv(key, default)
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
def is_production() -> bool:
    """Module-level convenience: True when the cached settings report production."""
    settings = get_settings()
    return settings.is_production
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
def is_development() -> bool:
    """Module-level convenience: True when the cached settings report development."""
    settings = get_settings()
    return settings.is_development
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
# ============================================================================
|
| 307 |
+
# CONFIGURAÇÃO DE EXEMPLO (.env)
|
| 308 |
+
# ============================================================================
|
| 309 |
+
|
| 310 |
+
ENV_EXAMPLE = """
|
| 311 |
+
# ============================================================================
|
| 312 |
+
# para.AI API - Configuração de Ambiente
|
| 313 |
+
# ============================================================================
|
| 314 |
+
|
| 315 |
+
# API Settings
|
| 316 |
+
APP_ENV=production
|
| 317 |
+
DEBUG=false
|
| 318 |
+
|
| 319 |
+
# Server
|
| 320 |
+
HOST=0.0.0.0
|
| 321 |
+
PORT=8000
|
| 322 |
+
WORKERS=4
|
| 323 |
+
|
| 324 |
+
# Database
|
| 325 |
+
DATABASE_URL=postgresql://para_ai:SUA_SENHA@localhost:5432/para_ai
|
| 326 |
+
DB_POOL_SIZE=20
|
| 327 |
+
DB_MAX_OVERFLOW=40
|
| 328 |
+
|
| 329 |
+
# LLM Providers (configure pelo menos um)
|
| 330 |
+
GROQ_API_KEY=gsk_...
|
| 331 |
+
OPENAI_API_KEY=sk-...
|
| 332 |
+
ANTHROPIC_API_KEY=sk-ant-...
|
| 333 |
+
|
| 334 |
+
DEFAULT_LLM_PROVIDER=groq
|
| 335 |
+
DEFAULT_MODEL_TYPE=balanced
|
| 336 |
+
|
| 337 |
+
# Files & Storage
|
| 338 |
+
FILES_BASE_PATH=./data/files
|
| 339 |
+
UPLOAD_PATH=./data/uploads
|
| 340 |
+
OUTPUT_PATH=./data/outputs
|
| 341 |
+
MAX_UPLOAD_SIZE_MB=500
|
| 342 |
+
|
| 343 |
+
# Processing
|
| 344 |
+
MAX_CONCURRENT_PROCESSES=5
|
| 345 |
+
PROCESS_TIMEOUT_SECONDS=600
|
| 346 |
+
ENABLE_PARALLEL=true
|
| 347 |
+
DEFAULT_MAX_WORKERS=3
|
| 348 |
+
|
| 349 |
+
# Logging
|
| 350 |
+
LOG_LEVEL=INFO
|
| 351 |
+
LOG_FILE_ENABLED=true
|
| 352 |
+
|
| 353 |
+
# Security (IMPORTANTE EM PRODUÇÃO!)
|
| 354 |
+
REQUIRE_API_KEY=false
|
| 355 |
+
VALID_API_KEYS=key1,key2,key3
|
| 356 |
+
CORS_ORIGINS=https://seudominio.com,https://app.seudominio.com
|
| 357 |
+
|
| 358 |
+
# Cache (opcional - requer Redis)
|
| 359 |
+
ENABLE_CACHE=false
|
| 360 |
+
REDIS_URL=redis://localhost:6379/0
|
| 361 |
+
|
| 362 |
+
# Monitoring
|
| 363 |
+
ENABLE_METRICS=true
|
| 364 |
+
PROMETHEUS_ENABLED=false
|
| 365 |
+
"""
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def generate_env_file(path: str = ".env.example"):
|
| 369 |
+
"""
|
| 370 |
+
Gera arquivo .env.example com todas as variáveis.
|
| 371 |
+
|
| 372 |
+
Args:
|
| 373 |
+
path: Caminho onde salvar o arquivo
|
| 374 |
+
"""
|
| 375 |
+
with open(path, "w") as f:
|
| 376 |
+
f.write(ENV_EXAMPLE)
|
| 377 |
+
print(f"✅ Arquivo {path} criado com sucesso!")
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
if __name__ == "__main__":
    # Direct execution: emit .env.example, then dump a summary of the
    # currently effective configuration (secrets masked).
    generate_env_file()

    settings = get_settings()
    separator = "=" * 70
    print("\n" + separator)
    print("CONFIGURAÇÕES ATUAIS - para.AI API")
    print(separator)
    print(f"Environment: {settings.APP_ENV}")
    print(f"Debug: {settings.DEBUG}")
    print(f"Database: {settings.database_url_masked}")
    print(f"LLM Providers: {settings.get_llm_providers_status()}")
    print(separator)
|
docker-compose.yaml
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================================================
|
| 2 |
+
# Docker Compose para desenvolvimento local - para.AI API v3.0
|
| 3 |
+
# ============================================================================
|
| 4 |
+
#
|
| 5 |
+
# Uso:
|
| 6 |
+
# docker-compose up -d # Iniciar todos os serviços
|
| 7 |
+
# docker-compose logs -f api # Ver logs da API
|
| 8 |
+
# docker-compose down # Parar todos os serviços
|
| 9 |
+
# docker-compose down -v # Parar e remover volumes
|
| 10 |
+
#
|
| 11 |
+
# ============================================================================
|
| 12 |
+
|
| 13 |
+
version: '3.8'
|
| 14 |
+
|
| 15 |
+
services:
|
| 16 |
+
# ==========================================================================
|
| 17 |
+
# PostgreSQL Database
|
| 18 |
+
# ==========================================================================
|
| 19 |
+
db:
|
| 20 |
+
image: postgres:15-alpine
|
| 21 |
+
container_name: para_ai_db
|
| 22 |
+
restart: unless-stopped
|
| 23 |
+
environment:
|
| 24 |
+
POSTGRES_USER: para_ai
|
| 25 |
+
POSTGRES_PASSWORD: para_ai_dev_2026
|
| 26 |
+
POSTGRES_DB: para_ai
|
| 27 |
+
POSTGRES_INITDB_ARGS: "--encoding=UTF8 --locale=pt_BR.UTF-8"
|
| 28 |
+
ports:
|
| 29 |
+
- "5432:5432"
|
| 30 |
+
volumes:
|
| 31 |
+
- postgres_data:/var/lib/postgresql/data
|
| 32 |
+
- ./database/init_db.sql:/docker-entrypoint-initdb.d/01-init.sql
|
| 33 |
+
- ./scripts/seed_data.sql:/docker-entrypoint-initdb.d/02-seed.sql
|
| 34 |
+
healthcheck:
|
| 35 |
+
test: ["CMD-SHELL", "pg_isready -U para_ai -d para_ai"]
|
| 36 |
+
interval: 10s
|
| 37 |
+
timeout: 5s
|
| 38 |
+
retries: 5
|
| 39 |
+
networks:
|
| 40 |
+
- para_ai_network
|
| 41 |
+
|
| 42 |
+
# ==========================================================================
|
| 43 |
+
# Redis (opcional - para cache)
|
| 44 |
+
# ==========================================================================
|
| 45 |
+
redis:
|
| 46 |
+
image: redis:7-alpine
|
| 47 |
+
container_name: para_ai_redis
|
| 48 |
+
restart: unless-stopped
|
| 49 |
+
ports:
|
| 50 |
+
- "6379:6379"
|
| 51 |
+
volumes:
|
| 52 |
+
- redis_data:/data
|
| 53 |
+
command: redis-server --appendonly yes
|
| 54 |
+
healthcheck:
|
| 55 |
+
test: ["CMD", "redis-cli", "ping"]
|
| 56 |
+
interval: 10s
|
| 57 |
+
timeout: 5s
|
| 58 |
+
retries: 5
|
| 59 |
+
networks:
|
| 60 |
+
- para_ai_network
|
| 61 |
+
|
| 62 |
+
# ==========================================================================
|
| 63 |
+
# API Application
|
| 64 |
+
# ==========================================================================
|
| 65 |
+
api:
|
| 66 |
+
build:
|
| 67 |
+
context: .
|
| 68 |
+
dockerfile: Dockerfile
|
| 69 |
+
container_name: para_ai_api
|
| 70 |
+
restart: unless-stopped
|
| 71 |
+
depends_on:
|
| 72 |
+
db:
|
| 73 |
+
condition: service_healthy
|
| 74 |
+
redis:
|
| 75 |
+
condition: service_healthy
|
| 76 |
+
environment:
|
| 77 |
+
# App
|
| 78 |
+
APP_ENV: development
|
| 79 |
+
DEBUG: "true"
|
| 80 |
+
|
| 81 |
+
# Database
|
| 82 |
+
DATABASE_URL: postgresql://para_ai:para_ai_dev_2026@db:5432/para_ai
|
| 83 |
+
DB_POOL_SIZE: 10
|
| 84 |
+
DB_MAX_OVERFLOW: 20
|
| 85 |
+
|
| 86 |
+
# LLM Providers (use suas keys reais aqui ou via .env)
|
| 87 |
+
GROQ_API_KEY: ${GROQ_API_KEY:-}
|
| 88 |
+
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
|
| 89 |
+
ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-}
|
| 90 |
+
|
| 91 |
+
# Cache
|
| 92 |
+
ENABLE_CACHE: "true"
|
| 93 |
+
REDIS_URL: redis://redis:6379/0
|
| 94 |
+
|
| 95 |
+
# Processing
|
| 96 |
+
ENABLE_PARALLEL: "true"
|
| 97 |
+
MAX_CONCURRENT_PROCESSES: 3
|
| 98 |
+
|
| 99 |
+
# Logging
|
| 100 |
+
LOG_LEVEL: DEBUG
|
| 101 |
+
ports:
|
| 102 |
+
- "8000:7860"
|
| 103 |
+
volumes:
|
| 104 |
+
- ./:/app
|
| 105 |
+
- api_data:/app/data
|
| 106 |
+
- api_logs:/app/logs
|
| 107 |
+
healthcheck:
|
| 108 |
+
test: ["CMD", "curl", "-f", "http://localhost:7860/api/v1/health"]
|
| 109 |
+
interval: 30s
|
| 110 |
+
timeout: 10s
|
| 111 |
+
retries: 3
|
| 112 |
+
start_period: 40s
|
| 113 |
+
networks:
|
| 114 |
+
- para_ai_network
|
| 115 |
+
|
| 116 |
+
# ==========================================================================
|
| 117 |
+
# pgAdmin (opcional - interface web para PostgreSQL)
|
| 118 |
+
# ==========================================================================
|
| 119 |
+
pgadmin:
|
| 120 |
+
image: dpage/pgadmin4:latest
|
| 121 |
+
container_name: para_ai_pgadmin
|
| 122 |
+
restart: unless-stopped
|
| 123 |
+
environment:
|
| 124 |
+
PGADMIN_DEFAULT_EMAIL: admin@para.ai
|
| 125 |
+
PGADMIN_DEFAULT_PASSWORD: admin123
|
| 126 |
+
PGADMIN_CONFIG_SERVER_MODE: 'False'
|
| 127 |
+
ports:
|
| 128 |
+
- "5050:80"
|
| 129 |
+
volumes:
|
| 130 |
+
- pgadmin_data:/var/lib/pgadmin
|
| 131 |
+
depends_on:
|
| 132 |
+
- db
|
| 133 |
+
networks:
|
| 134 |
+
- para_ai_network
|
| 135 |
+
|
| 136 |
+
# ============================================================================
|
| 137 |
+
# VOLUMES
|
| 138 |
+
# ============================================================================
|
| 139 |
+
volumes:
|
| 140 |
+
postgres_data:
|
| 141 |
+
driver: local
|
| 142 |
+
redis_data:
|
| 143 |
+
driver: local
|
| 144 |
+
api_data:
|
| 145 |
+
driver: local
|
| 146 |
+
api_logs:
|
| 147 |
+
driver: local
|
| 148 |
+
pgadmin_data:
|
| 149 |
+
driver: local
|
| 150 |
+
|
| 151 |
+
# ============================================================================
|
| 152 |
+
# NETWORKS
|
| 153 |
+
# ============================================================================
|
| 154 |
+
networks:
|
| 155 |
+
para_ai_network:
|
| 156 |
+
driver: bridge
|
docker-compose.yml
DELETED
|
@@ -1,47 +0,0 @@
|
|
| 1 |
-
version: "3.9"
|
| 2 |
-
|
| 3 |
-
services:
|
| 4 |
-
app:
|
| 5 |
-
build:
|
| 6 |
-
context: .
|
| 7 |
-
dockerfile: Dockerfile
|
| 8 |
-
container_name: para_ai_app
|
| 9 |
-
depends_on:
|
| 10 |
-
- db
|
| 11 |
-
environment:
|
| 12 |
-
APP_ENV: ${APP_ENV:-dev}
|
| 13 |
-
DEBUG: ${DEBUG:-true}
|
| 14 |
-
LOG_LEVEL: ${LOG_LEVEL:-INFO}
|
| 15 |
-
DATABASE_URL: ${DATABASE_URL:-postgresql://para_ai:para_ai@db:5432/para_ai}
|
| 16 |
-
FILES_BASE_PATH: ${FILES_BASE_PATH:-/app/data/files}
|
| 17 |
-
GROQ_API_KEY: ${GROQ_API_KEY:-}
|
| 18 |
-
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
|
| 19 |
-
ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-}
|
| 20 |
-
volumes:
|
| 21 |
-
- .:/app
|
| 22 |
-
- para_ai_files:/app/data
|
| 23 |
-
ports:
|
| 24 |
-
- "7860:7860"
|
| 25 |
-
working_dir: /app
|
| 26 |
-
command: >
|
| 27 |
-
uvicorn main:app
|
| 28 |
-
--host 0.0.0.0
|
| 29 |
-
--port 7860
|
| 30 |
-
--reload
|
| 31 |
-
|
| 32 |
-
db:
|
| 33 |
-
image: postgres:16
|
| 34 |
-
container_name: para_ai_db
|
| 35 |
-
environment:
|
| 36 |
-
POSTGRES_USER: ${POSTGRES_USER:-para_ai}
|
| 37 |
-
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-para_ai}
|
| 38 |
-
POSTGRES_DB: ${POSTGRES_DB:-para_ai}
|
| 39 |
-
volumes:
|
| 40 |
-
- para_ai_db_data:/var/lib/postgresql/data
|
| 41 |
-
- ./database/init_db.sql:/docker-entrypoint-initdb.d/00_init_db.sql:ro
|
| 42 |
-
ports:
|
| 43 |
-
- "5432:5432"
|
| 44 |
-
|
| 45 |
-
volumes:
|
| 46 |
-
para_ai_db_data:
|
| 47 |
-
para_ai_files:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
requirements.txt
CHANGED
|
@@ -1,21 +1,67 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
|
| 14 |
# LLM Providers
|
| 15 |
-
groq
|
| 16 |
-
openai
|
| 17 |
-
anthropic
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
|
| 19 |
-
#
|
| 20 |
-
|
| 21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
|
| 3 |
+
# ============================================================================
|
| 4 |
+
# para.AI API v3.0 - Dependências Python
|
| 5 |
+
# ============================================================================
|
| 6 |
+
|
| 7 |
+
# Core Framework
|
| 8 |
+
fastapi==0.109.0
|
| 9 |
+
uvicorn[standard]==0.27.0
|
| 10 |
+
python-multipart==0.0.6
|
| 11 |
+
|
| 12 |
+
# Pydantic (validação)
|
| 13 |
+
pydantic==2.5.3
|
| 14 |
+
pydantic-settings==2.1.0
|
| 15 |
+
|
| 16 |
+
# Database
|
| 17 |
+
sqlalchemy==2.0.25
|
| 18 |
+
psycopg2-binary==2.9.9
|
| 19 |
+
alembic==1.13.1
|
| 20 |
|
| 21 |
# LLM Providers
|
| 22 |
+
groq==0.4.1
|
| 23 |
+
openai==1.10.0
|
| 24 |
+
anthropic==0.8.1
|
| 25 |
+
|
| 26 |
+
# Utilities
|
| 27 |
+
python-dotenv==1.0.0
|
| 28 |
+
psutil==5.9.8
|
| 29 |
+
python-json-logger==2.0.7
|
| 30 |
+
|
| 31 |
+
# HTTP & Async
|
| 32 |
+
httpx==0.26.0
|
| 33 |
+
aiofiles==23.2.1
|
| 34 |
+
|
| 35 |
+
# Data Processing
|
| 36 |
+
python-dateutil==2.8.2
|
| 37 |
+
|
| 38 |
+
# Security
|
| 39 |
+
python-jose[cryptography]==3.3.0
|
| 40 |
+
passlib[bcrypt]==1.7.4
|
| 41 |
+
|
| 42 |
+
# Testing (opcional - desenvolvimento)
|
| 43 |
+
pytest==7.4.3
|
| 44 |
+
pytest-asyncio==0.21.1
|
| 45 |
+
pytest-cov==4.1.0
|
| 46 |
+
httpx==0.26.0
|
| 47 |
+
|
| 48 |
+
# Production Server (opcional)
|
| 49 |
+
gunicorn==21.2.0
|
| 50 |
+
|
| 51 |
+
# Monitoring (opcional)
|
| 52 |
+
prometheus-client==0.19.0
|
| 53 |
|
| 54 |
+
# ============================================================================
|
| 55 |
+
# INSTALAÇÃO
|
| 56 |
+
# ============================================================================
|
| 57 |
+
#
|
| 58 |
+
# Instalação básica (produção):
|
| 59 |
+
# pip install -r requirements.txt
|
| 60 |
+
#
|
| 61 |
+
# Instalação completa (desenvolvimento):
|
| 62 |
+
# pip install -r requirements.txt
|
| 63 |
+
#
|
| 64 |
+
# Atualizar dependências:
|
| 65 |
+
# pip install --upgrade -r requirements.txt
|
| 66 |
+
#
|
| 67 |
+
# ============================================================================
|
seed.sql
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
-- ============================================================================
|
| 2 |
+
-- Seed Data para desenvolvimento local - para.AI
|
| 3 |
+
-- ============================================================================
|
| 4 |
+
-- Este script insere dados de teste para desenvolvimento
|
| 5 |
+
|
| 6 |
+
-- Inserir tribunal de teste
|
| 7 |
+
INSERT INTO tribunais (id, nome, sigla, uf, tipo, url_base, created_at, updated_at)
|
| 8 |
+
VALUES
|
| 9 |
+
(gen_random_uuid(), 'Tribunal de Justiça do Paraná', 'TJPR', 'PR', 'Estadual', 'https://www.tjpr.jus.br', NOW(), NOW()),
|
| 10 |
+
(gen_random_uuid(), 'Tribunal de Justiça de São Paulo', 'TJSP', 'SP', 'Estadual', 'https://www.tjsp.jus.br', NOW(), NOW()),
|
| 11 |
+
(gen_random_uuid(), 'Superior Tribunal de Justiça', 'STJ', 'DF', 'Superior', 'https://www.stj.jus.br', NOW(), NOW())
|
| 12 |
+
ON CONFLICT (sigla) DO NOTHING;
|
| 13 |
+
|
| 14 |
+
-- Inserir usuário de teste
|
| 15 |
+
INSERT INTO usuarios (id, email, nome, senha_hash, ativo, role, created_at, updated_at)
|
| 16 |
+
VALUES
|
| 17 |
+
(gen_random_uuid(), 'admin@para.ai', 'Administrador', '$2b$12$dummy_hash_for_development', TRUE, 'admin', NOW(), NOW()),
|
| 18 |
+
(gen_random_uuid(), 'dev@para.ai', 'Desenvolvedor', '$2b$12$dummy_hash_for_development', TRUE, 'user', NOW(), NOW())
|
| 19 |
+
ON CONFLICT (email) DO NOTHING;
|
| 20 |
+
|
| 21 |
+
-- Inserir acórdão de teste
|
| 22 |
+
DO $$
|
| 23 |
+
DECLARE
|
| 24 |
+
v_tribunal_id VARCHAR(36);
|
| 25 |
+
v_acordao_id VARCHAR(36);
|
| 26 |
+
BEGIN
|
| 27 |
+
-- Pegar ID do TJPR
|
| 28 |
+
SELECT id INTO v_tribunal_id FROM tribunais WHERE sigla = 'TJPR' LIMIT 1;
|
| 29 |
+
|
| 30 |
+
-- Inserir acórdão de teste
|
| 31 |
+
INSERT INTO acordaos (
|
| 32 |
+
id, tribunal_id, numero, ano, data_julgamento,
|
| 33 |
+
ementa, relator, orgao_julgador,
|
| 34 |
+
hash_conteudo, status_processamento,
|
| 35 |
+
created_at, updated_at
|
| 36 |
+
)
|
| 37 |
+
VALUES (
|
| 38 |
+
gen_random_uuid(),
|
| 39 |
+
v_tribunal_id,
|
| 40 |
+
'0000001-00.2025.8.16.0000',
|
| 41 |
+
2025,
|
| 42 |
+
'2025-01-15 14:30:00',
|
| 43 |
+
'APELAÇÃO CÍVEL. DIREITO DO CONSUMIDOR. RESPONSABILIDADE CIVIL. DANO MORAL. CDC ART. 14. RECURSO PROVIDO.',
|
| 44 |
+
'Des. Teste da Silva',
|
| 45 |
+
'1ª Câmara Cível',
|
| 46 |
+
encode(sha256('teste_acordao_1'::bytea), 'hex'),
|
| 47 |
+
'pendente',
|
| 48 |
+
NOW(),
|
| 49 |
+
NOW()
|
| 50 |
+
)
|
| 51 |
+
ON CONFLICT (hash_conteudo) DO NOTHING
|
| 52 |
+
RETURNING id INTO v_acordao_id;
|
| 53 |
+
|
| 54 |
+
-- Inserir palavras-chave de teste
|
| 55 |
+
INSERT INTO palavras_chave (id, termo, categoria, frequencia, ativo, created_at)
|
| 56 |
+
VALUES
|
| 57 |
+
(gen_random_uuid(), 'responsabilidade civil', 'direito_civil', 100, TRUE, NOW()),
|
| 58 |
+
(gen_random_uuid(), 'dano moral', 'direito_civil', 150, TRUE, NOW()),
|
| 59 |
+
(gen_random_uuid(), 'consumidor', 'direito_consumidor', 200, TRUE, NOW()),
|
| 60 |
+
(gen_random_uuid(), 'CDC', 'legislacao', 180, TRUE, NOW())
|
| 61 |
+
ON CONFLICT (termo) DO NOTHING;
|
| 62 |
+
|
| 63 |
+
END $$;
|
| 64 |
+
|
| 65 |
+
-- Criar índices adicionais úteis
|
| 66 |
+
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_acordao_ano_tribunal ON acordaos(ano, tribunal_id);
|
| 67 |
+
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_acordao_status_data ON acordaos(status_processamento, data_julgamento DESC);
|
| 68 |
+
|
| 69 |
+
-- Vacuum e analyze
|
| 70 |
+
VACUUM ANALYZE acordaos;
|
| 71 |
+
VACUUM ANALYZE tribunais;
|
| 72 |
+
VACUUM ANALYZE usuarios;
|
| 73 |
+
|
| 74 |
+
-- Mensagem de sucesso
|
| 75 |
+
DO $$
|
| 76 |
+
BEGIN
|
| 77 |
+
RAISE NOTICE '✅ Seed data inserido com sucesso!';
|
| 78 |
+
RAISE NOTICE '📊 Estatísticas:';
|
| 79 |
+
RAISE NOTICE ' - Tribunais: %', (SELECT COUNT(*) FROM tribunais);
|
| 80 |
+
RAISE NOTICE ' - Usuários: %', (SELECT COUNT(*) FROM usuarios);
|
| 81 |
+
RAISE NOTICE ' - Acórdãos: %', (SELECT COUNT(*) FROM acordaos);
|
| 82 |
+
RAISE NOTICE ' - Palavras-chave: %', (SELECT COUNT(*) FROM palavras_chave);
|
| 83 |
+
END $$;
|