Commit Β·
82fcb44
1
Parent(s): 3de7757
Check in service
Browse files- .env.example +24 -0
- .gitignore +76 -0
- README.md +272 -11
- app/core/config.py +73 -2
- app/core/database.py +7 -0
- app/core/logging.py +56 -0
- app/dependencies/__init__.py +3 -0
- app/dependencies/auth.py +85 -0
- app/main.py +170 -10
- app/nosql.py +87 -0
- app/postgres.py +175 -0
- app/tracker/attendance/__init__.py +3 -0
- app/tracker/attendance/constants.py +16 -0
- app/tracker/attendance/models.py +48 -0
- app/tracker/attendance/router.py +138 -0
- app/tracker/attendance/schemas.py +88 -0
- app/tracker/attendance/service.py +248 -0
- migrate_attendance.py +156 -0
- migrate_attendance_fix_ids.py +148 -0
- migrate_attendance_revert_to_uuid.py +127 -0
- requirements.txt +17 -0
- setup.sh +55 -0
- test_checkin.py +239 -0
- test_request.json +6 -0
.env.example
CHANGED
|
@@ -4,3 +4,27 @@ DEBUG=false
|
|
| 4 |
LOG_LEVEL=INFO
|
| 5 |
PORT=8003
|
| 6 |
ROOT_PATH=
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
LOG_LEVEL=INFO
|
| 5 |
PORT=8003
|
| 6 |
ROOT_PATH=
|
| 7 |
+
|
| 8 |
+
# MongoDB Configuration
|
| 9 |
+
MONGODB_URI=mongodb+srv://username:password@cluster0.2shrc.mongodb.net/?retryWrites=true&w=majority
|
| 10 |
+
MONGODB_DB_NAME=cuatrolabs
|
| 11 |
+
|
| 12 |
+
# PostgreSQL Configuration
|
| 13 |
+
DB_PROTOCOL=postgresql+asyncpg
|
| 14 |
+
DB_USER=postgres
|
| 15 |
+
DB_PASSWORD=your-db-password
|
| 16 |
+
DB_HOST=localhost
|
| 17 |
+
DB_PORT=5432
|
| 18 |
+
DB_NAME=cuatrolabs
|
| 19 |
+
DB_MIN_POOL_SIZE=5
|
| 20 |
+
DB_MAX_POOL_SIZE=20
|
| 21 |
+
DB_SSLMODE=disable
|
| 22 |
+
DATABASE_URL=postgresql+asyncpg://postgres:your-db-password@localhost:5432/cuatrolabs
|
| 23 |
+
|
| 24 |
+
# JWT Configuration
|
| 25 |
+
SECRET_KEY=your-secret-key-here-change-in-production
|
| 26 |
+
ALGORITHM=HS256
|
| 27 |
+
TOKEN_EXPIRATION_HOURS=8
|
| 28 |
+
|
| 29 |
+
# CORS Settings
|
| 30 |
+
CORS_ORIGINS=["http://localhost:3000","http://localhost:8000","http://localhost:8003"]
|
.gitignore
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Python
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
*.so
|
| 6 |
+
.Python
|
| 7 |
+
build/
|
| 8 |
+
develop-eggs/
|
| 9 |
+
dist/
|
| 10 |
+
downloads/
|
| 11 |
+
eggs/
|
| 12 |
+
.eggs/
|
| 13 |
+
lib/
|
| 14 |
+
lib64/
|
| 15 |
+
parts/
|
| 16 |
+
sdist/
|
| 17 |
+
var/
|
| 18 |
+
wheels/
|
| 19 |
+
*.egg-info/
|
| 20 |
+
.installed.cfg
|
| 21 |
+
*.egg
|
| 22 |
+
|
| 23 |
+
# Virtual Environment
|
| 24 |
+
venv/
|
| 25 |
+
env/
|
| 26 |
+
ENV/
|
| 27 |
+
.venv
|
| 28 |
+
|
| 29 |
+
# Environment variables
|
| 30 |
+
.env.local
|
| 31 |
+
|
| 32 |
+
# IDE
|
| 33 |
+
.vscode/
|
| 34 |
+
.idea/
|
| 35 |
+
*.swp
|
| 36 |
+
*.swo
|
| 37 |
+
*~
|
| 38 |
+
|
| 39 |
+
# Testing
|
| 40 |
+
.pytest_cache/
|
| 41 |
+
.coverage
|
| 42 |
+
htmlcov/
|
| 43 |
+
.hypothesis/
|
| 44 |
+
|
| 45 |
+
# Logs
|
| 46 |
+
*.log
|
| 47 |
+
logs/
|
| 48 |
+
|
| 49 |
+
# OS
|
| 50 |
+
.DS_Store
|
| 51 |
+
Thumbs.db
|
| 52 |
+
|
| 53 |
+
# Database
|
| 54 |
+
*.db
|
| 55 |
+
*.sqlite
|
| 56 |
+
*.sqlite3
|
| 57 |
+
.env
|
| 58 |
+
|
| 59 |
+
# Debug and test scripts
|
| 60 |
+
debug_*.py
|
| 61 |
+
test_conversion.py
|
| 62 |
+
test_dependency.py
|
| 63 |
+
test_endpoint_direct.py
|
| 64 |
+
update_system_users.py
|
| 65 |
+
update_system_users.js
|
| 66 |
+
|
| 67 |
+
# MinIO data directory
|
| 68 |
+
minio-data/
|
| 69 |
+
|
| 70 |
+
#MD Files
|
| 71 |
+
*.md
|
| 72 |
+
app/insightfy_utils-0.1.0-py3-none-any.whl
|
| 73 |
+
test_token.txt
|
| 74 |
+
get-pip.py
|
| 75 |
+
generate_test_token.py
|
| 76 |
+
generate_test_request.py
|
README.md
CHANGED
|
@@ -1,11 +1,272 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
-
|
| 10 |
-
|
| 11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Tracker Microservice
|
| 2 |
+
|
| 3 |
+
Employee tracking and attendance management microservice for the Cuatro Labs platform.
|
| 4 |
+
|
| 5 |
+
## Features
|
| 6 |
+
|
| 7 |
+
### Attendance Management
|
| 8 |
+
- Employee check-in with GPS coordinates
|
| 9 |
+
- Location tracking consent validation
|
| 10 |
+
- Geofence detection support
|
| 11 |
+
- Duplicate check-in prevention
|
| 12 |
+
- Daily attendance records
|
| 13 |
+
|
| 14 |
+
## Architecture
|
| 15 |
+
|
| 16 |
+
### Technology Stack
|
| 17 |
+
- **Framework**: FastAPI 0.104.1
|
| 18 |
+
- **Database**:
|
| 19 |
+
- MongoDB (employee data, location settings)
|
| 20 |
+
- PostgreSQL (attendance records)
|
| 21 |
+
- **Authentication**: JWT-based
|
| 22 |
+
- **Server**: Uvicorn with async support
|
| 23 |
+
|
| 24 |
+
### Project Structure
|
| 25 |
+
```
|
| 26 |
+
app/
├── core/
│   ├── config.py       # Configuration settings
│   ├── logging.py      # Logging setup
│   └── database.py     # SQLAlchemy base
├── dependencies/
│   └── auth.py         # JWT authentication
├── tracker/
│   └── attendance/
│       ├── models.py   # SQLAlchemy models
│       ├── schemas.py  # Pydantic schemas
│       ├── service.py  # Business logic
│       ├── router.py   # API endpoints
│       └── constants.py # Constants
├── main.py             # Application entry point
├── nosql.py            # MongoDB connection
└── postgres.py         # PostgreSQL connection pool
|
| 43 |
+
```
|
| 44 |
+
|
| 45 |
+
## API Endpoints
|
| 46 |
+
|
| 47 |
+
### Attendance Endpoints
|
| 48 |
+
|
| 49 |
+
#### Check-In
|
| 50 |
+
**POST** `/api/v1/attendance/check-in`
|
| 51 |
+
|
| 52 |
+
Mark the start of an employee's working day.
|
| 53 |
+
|
| 54 |
+
**Request Body:**
|
| 55 |
+
```json
|
| 56 |
+
{
|
| 57 |
+
"timestamp": 1708156800000,
|
| 58 |
+
"latitude": 19.0760,
|
| 59 |
+
"longitude": 72.8777,
|
| 60 |
+
"location_id": "loc_mumbai_office_001"
|
| 61 |
+
}
|
| 62 |
+
```
|
| 63 |
+
|
| 64 |
+
**Response:**
|
| 65 |
+
```json
|
| 66 |
+
{
|
| 67 |
+
"success": true,
|
| 68 |
+
"id": "550e8400-e29b-41d4-a716-446655440000",
|
| 69 |
+
"message": "Check-in successful"
|
| 70 |
+
}
|
| 71 |
+
```
|
| 72 |
+
|
| 73 |
+
**Rules:**
|
| 74 |
+
- Can check-in only once per day
|
| 75 |
+
- Location coordinates are mandatory
|
| 76 |
+
- GPS tracking must be enabled (checked from MongoDB)
|
| 77 |
+
- Optional location_id if inside a geofence
|
| 78 |
+
|
| 79 |
+
**Edge Cases:**
|
| 80 |
+
- Duplicate check-in → 400 error
- GPS disabled → 400 error
|
| 82 |
+
|
| 83 |
+
## Database Schema
|
| 84 |
+
|
| 85 |
+
### PostgreSQL - trans.scm_attendance
|
| 86 |
+
|
| 87 |
+
```sql
|
| 88 |
+
CREATE TABLE trans.scm_attendance (
|
| 89 |
+
id UUID PRIMARY KEY,
|
| 90 |
+
merchant_id UUID NOT NULL,
|
| 91 |
+
employee_id UUID NOT NULL,
|
| 92 |
+
work_date DATE NOT NULL,
|
| 93 |
+
check_in_time BIGINT,
|
| 94 |
+
check_in_lat DOUBLE PRECISION,
|
| 95 |
+
check_in_lon DOUBLE PRECISION,
|
| 96 |
+
check_in_geofence_id UUID,
|
| 97 |
+
check_out_time BIGINT,
|
| 98 |
+
check_out_lat DOUBLE PRECISION,
|
| 99 |
+
check_out_lon DOUBLE PRECISION,
|
| 100 |
+
total_minutes INTEGER,
|
| 101 |
+
created_at TIMESTAMP DEFAULT now(),
|
| 102 |
+
updated_at TIMESTAMP DEFAULT now(),
|
| 103 |
+
UNIQUE (employee_id, work_date)
|
| 104 |
+
);
|
| 105 |
+
|
| 106 |
+
CREATE INDEX idx_scm_attendance_work_date
|
| 107 |
+
ON trans.scm_attendance (employee_id, work_date);
|
| 108 |
+
|
| 109 |
+
CREATE INDEX idx_scm_attendance_merchant
|
| 110 |
+
ON trans.scm_attendance (merchant_id, work_date);
|
| 111 |
+
```
|
| 112 |
+
|
| 113 |
+
### MongoDB - scm_employees
|
| 114 |
+
|
| 115 |
+
Location tracking consent is checked from:
|
| 116 |
+
```
|
| 117 |
+
scm_employees.location_settings.location_tracking_consent
|
| 118 |
+
```
|
| 119 |
+
|
| 120 |
+
## Environment Configuration
|
| 121 |
+
|
| 122 |
+
Copy `.env.example` to `.env` and configure:
|
| 123 |
+
|
| 124 |
+
```bash
|
| 125 |
+
# Application
|
| 126 |
+
APP_NAME=Tracker Microservice
|
| 127 |
+
APP_VERSION=1.0.0
|
| 128 |
+
DEBUG=false
|
| 129 |
+
LOG_LEVEL=INFO
|
| 130 |
+
PORT=8003
|
| 131 |
+
|
| 132 |
+
# MongoDB
|
| 133 |
+
MONGODB_URI=mongodb+srv://username:password@cluster0.2shrc.mongodb.net/?retryWrites=true&w=majority
|
| 134 |
+
MONGODB_DB_NAME=cuatrolabs
|
| 135 |
+
|
| 136 |
+
# PostgreSQL
|
| 137 |
+
DB_HOST=localhost
|
| 138 |
+
DB_PORT=5432
|
| 139 |
+
DB_NAME=cuatrolabs
|
| 140 |
+
DB_USER=postgres
|
| 141 |
+
DB_PASSWORD=your-password
|
| 142 |
+
DATABASE_URL=postgresql+asyncpg://postgres:password@localhost:5432/cuatrolabs
|
| 143 |
+
|
| 144 |
+
# JWT
|
| 145 |
+
SECRET_KEY=your-secret-key-change-in-production
|
| 146 |
+
ALGORITHM=HS256
|
| 147 |
+
TOKEN_EXPIRATION_HOURS=8
|
| 148 |
+
```
|
| 149 |
+
|
| 150 |
+
## Getting Started
|
| 151 |
+
|
| 152 |
+
### Prerequisites
|
| 153 |
+
- Python 3.11+
|
| 154 |
+
- MongoDB (Atlas or local)
|
| 155 |
+
- PostgreSQL
|
| 156 |
+
|
| 157 |
+
### Local Development
|
| 158 |
+
|
| 159 |
+
1. Create and activate virtual environment:
|
| 160 |
+
```bash
|
| 161 |
+
python -m venv venv
|
| 162 |
+
source venv/bin/activate # On Windows: venv\Scripts\activate
|
| 163 |
+
```
|
| 164 |
+
|
| 165 |
+
2. Install dependencies:
|
| 166 |
+
```bash
|
| 167 |
+
pip install -r requirements.txt
|
| 168 |
+
```
|
| 169 |
+
|
| 170 |
+
3. Configure environment:
|
| 171 |
+
```bash
|
| 172 |
+
cp .env.example .env
|
| 173 |
+
# Edit .env with your configuration
|
| 174 |
+
```
|
| 175 |
+
|
| 176 |
+
4. Run the service:
|
| 177 |
+
```bash
|
| 178 |
+
uvicorn app.main:app --host 0.0.0.0 --port 8003 --reload
|
| 179 |
+
```
|
| 180 |
+
|
| 181 |
+
5. Access API documentation:
|
| 182 |
+
- Swagger UI: http://localhost:8003/docs
|
| 183 |
+
- ReDoc: http://localhost:8003/redoc
|
| 184 |
+
|
| 185 |
+
### Docker Deployment
|
| 186 |
+
|
| 187 |
+
```bash
|
| 188 |
+
# Build the image
|
| 189 |
+
docker build -t tracker-microservice .
|
| 190 |
+
|
| 191 |
+
# Run the container
|
| 192 |
+
docker run -p 8003:8003 --env-file .env tracker-microservice
|
| 193 |
+
```
|
| 194 |
+
|
| 195 |
+
## Testing
|
| 196 |
+
|
| 197 |
+
### Manual Testing with curl
|
| 198 |
+
|
| 199 |
+
```bash
|
| 200 |
+
# Health check
|
| 201 |
+
curl http://localhost:8003/health
|
| 202 |
+
|
| 203 |
+
# Check-in (requires JWT token)
|
| 204 |
+
curl -X POST http://localhost:8003/api/v1/attendance/check-in \
|
| 205 |
+
-H "Authorization: Bearer YOUR_JWT_TOKEN" \
|
| 206 |
+
-H "Content-Type: application/json" \
|
| 207 |
+
-d '{
|
| 208 |
+
"timestamp": 1708156800000,
|
| 209 |
+
"latitude": 19.0760,
|
| 210 |
+
"longitude": 72.8777,
|
| 211 |
+
"location_id": "loc_mumbai_office_001"
|
| 212 |
+
}'
|
| 213 |
+
```
|
| 214 |
+
|
| 215 |
+
## Error Handling
|
| 216 |
+
|
| 217 |
+
The service provides structured error responses:
|
| 218 |
+
|
| 219 |
+
**400 Bad Request** - Business logic errors
|
| 220 |
+
```json
|
| 221 |
+
{
|
| 222 |
+
"success": false,
|
| 223 |
+
"error": "Already checked in today",
|
| 224 |
+
"detail": "You have already checked in for today"
|
| 225 |
+
}
|
| 226 |
+
```
|
| 227 |
+
|
| 228 |
+
**401 Unauthorized** - Invalid or missing JWT token
|
| 229 |
+
```json
|
| 230 |
+
{
|
| 231 |
+
"success": false,
|
| 232 |
+
"error": "Unauthorized",
|
| 233 |
+
"detail": "Invalid or expired token"
|
| 234 |
+
}
|
| 235 |
+
```
|
| 236 |
+
|
| 237 |
+
**422 Validation Error** - Invalid request data
|
| 238 |
+
```json
|
| 239 |
+
{
|
| 240 |
+
"success": false,
|
| 241 |
+
"error": "Validation Error",
|
| 242 |
+
"errors": [
|
| 243 |
+
{
|
| 244 |
+
"field": "latitude",
|
| 245 |
+
"message": "Latitude must be between -90 and 90",
|
| 246 |
+
"type": "value_error"
|
| 247 |
+
}
|
| 248 |
+
]
|
| 249 |
+
}
|
| 250 |
+
```
|
| 251 |
+
|
| 252 |
+
**500 Internal Server Error** - Unexpected errors
|
| 253 |
+
```json
|
| 254 |
+
{
|
| 255 |
+
"success": false,
|
| 256 |
+
"error": "Internal Server Error",
|
| 257 |
+
"detail": "An unexpected error occurred"
|
| 258 |
+
}
|
| 259 |
+
```
|
| 260 |
+
|
| 261 |
+
## Logging
|
| 262 |
+
|
| 263 |
+
The service uses structured JSON logging with the following levels:
|
| 264 |
+
- DEBUG: Detailed diagnostic information
|
| 265 |
+
- INFO: General informational messages
|
| 266 |
+
- WARNING: Warning messages for recoverable issues
|
| 267 |
+
- ERROR: Error messages for failures
|
| 268 |
+
- CRITICAL: Critical errors requiring immediate attention
|
| 269 |
+
|
| 270 |
+
## License
|
| 271 |
+
|
| 272 |
+
Part of the Cuatro Labs platform.
|
app/core/config.py
CHANGED
|
@@ -2,7 +2,9 @@
|
|
| 2 |
Configuration settings for Tracker microservice.
|
| 3 |
Loads environment variables and provides application settings.
|
| 4 |
"""
|
| 5 |
-
|
|
|
|
|
|
|
| 6 |
from pydantic_settings import BaseSettings, SettingsConfigDict
|
| 7 |
|
| 8 |
|
|
@@ -14,6 +16,69 @@ class Settings(BaseSettings):
|
|
| 14 |
APP_VERSION: str = "1.0.0"
|
| 15 |
DEBUG: bool = False
|
| 16 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
# Logging
|
| 18 |
LOG_LEVEL: str = "INFO"
|
| 19 |
|
|
@@ -21,6 +86,7 @@ class Settings(BaseSettings):
|
|
| 21 |
CORS_ORIGINS: List[str] = [
|
| 22 |
"http://localhost:3000",
|
| 23 |
"http://localhost:8000",
|
|
|
|
| 24 |
]
|
| 25 |
|
| 26 |
# Pydantic v2 config
|
|
@@ -28,7 +94,12 @@ class Settings(BaseSettings):
|
|
| 28 |
env_file=".env",
|
| 29 |
env_file_encoding="utf-8",
|
| 30 |
case_sensitive=True,
|
| 31 |
-
extra="allow",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
)
|
| 33 |
|
| 34 |
|
|
|
|
| 2 |
Configuration settings for Tracker microservice.
|
| 3 |
Loads environment variables and provides application settings.
|
| 4 |
"""
|
| 5 |
+
import os
|
| 6 |
+
from typing import Optional, List
|
| 7 |
+
from pydantic import model_validator, Field
|
| 8 |
from pydantic_settings import BaseSettings, SettingsConfigDict
|
| 9 |
|
| 10 |
|
|
|
|
| 16 |
APP_VERSION: str = "1.0.0"
|
| 17 |
DEBUG: bool = False
|
| 18 |
|
| 19 |
+
# MongoDB Configuration
|
| 20 |
+
MONGODB_URI: str = "mongodb://localhost:27017"
|
| 21 |
+
MONGODB_DB_NAME: str = "cuatrolabs"
|
| 22 |
+
|
| 23 |
+
# PostgreSQL Configuration
|
| 24 |
+
# Let Pydantic handle environment variables - don't use os.getenv() here!
|
| 25 |
+
# Use Field with alias to map DB_HOST -> POSTGRES_HOST, etc.
|
| 26 |
+
POSTGRES_HOST: str = Field(default="localhost", validation_alias="DB_HOST")
|
| 27 |
+
POSTGRES_PORT: int = Field(default=5432, validation_alias="DB_PORT")
|
| 28 |
+
POSTGRES_DB: str = Field(default="cuatrolabs", validation_alias="DB_NAME")
|
| 29 |
+
POSTGRES_USER: str = Field(default="postgres", validation_alias="DB_USER")
|
| 30 |
+
POSTGRES_PASSWORD: str = Field(default="", validation_alias="DB_PASSWORD")
|
| 31 |
+
POSTGRES_MIN_POOL_SIZE: int = 5
|
| 32 |
+
POSTGRES_MAX_POOL_SIZE: int = 20
|
| 33 |
+
POSTGRES_CONNECT_MAX_RETRIES: int = 20
|
| 34 |
+
POSTGRES_CONNECT_INITIAL_DELAY_MS: int = 500
|
| 35 |
+
POSTGRES_CONNECT_BACKOFF_MULTIPLIER: float = 1.5
|
| 36 |
+
POSTGRES_SSL_MODE: str = Field(default="disable", validation_alias="DB_SSLMODE")
|
| 37 |
+
POSTGRES_SSL_ROOT_CERT: Optional[str] = None
|
| 38 |
+
POSTGRES_SSL_CERT: Optional[str] = None
|
| 39 |
+
POSTGRES_SSL_KEY: Optional[str] = None
|
| 40 |
+
POSTGRES_URI: Optional[str] = None
|
| 41 |
+
|
| 42 |
+
@model_validator(mode='after')
def assemble_db_connection(self) -> 'Settings':
    """Populate ``POSTGRES_URI`` once all other fields are loaded.

    Resolution order:
      1. An explicit ``DATABASE_URL`` / ``DATABASE_URI`` environment variable.
      2. A DSN assembled from the individual ``POSTGRES_*`` components.
      3. ``None`` when required components are missing (error printed).
    """
    from urllib.parse import quote_plus
    # Prefer DATABASE_URL and DATABASE_URI
    env_url = (os.getenv("DATABASE_URL") or os.getenv("DATABASE_URI") or "").strip()
    if env_url:
        self.POSTGRES_URI = env_url
        print(f"[CONFIG] Using provided DATABASE_URL/URI")
        return self
    # Build DSN from individual parts
    # NOTE(review): an empty POSTGRES_PASSWORD makes all([...]) falsy, so a
    # password-less local database falls into the error branch — confirm
    # whether that is intended.
    if all([self.POSTGRES_USER, self.POSTGRES_PASSWORD, self.POSTGRES_HOST, self.POSTGRES_DB]):
        protocol = os.getenv("DB_PROTOCOL", "postgresql+asyncpg")
        # Ensure no spaces in connection components
        user = self.POSTGRES_USER.strip()
        host = self.POSTGRES_HOST.strip()
        port = str(self.POSTGRES_PORT).strip()
        db = self.POSTGRES_DB.strip()
        # quote_plus protects special characters (e.g. '@', ':') in the password
        self.POSTGRES_URI = f"{protocol}://{user}:{quote_plus(self.POSTGRES_PASSWORD)}@{host}:{port}/{db}"
        print(f"[CONFIG] Built POSTGRES_URI from components")
        print(f"[CONFIG] Protocol: {protocol}")
        print(f"[CONFIG] User: {self.POSTGRES_USER}")
        print(f"[CONFIG] Host: {self.POSTGRES_HOST}")
        print(f"[CONFIG] Port: {self.POSTGRES_PORT}")
        print(f"[CONFIG] Database: {self.POSTGRES_DB}")
        print(f"[CONFIG] Password: {'SET' if self.POSTGRES_PASSWORD else 'EMPTY'}")
        print(f"[CONFIG] SSL Mode: {self.POSTGRES_SSL_MODE}")
    else:
        self.POSTGRES_URI = None
        print(f"[CONFIG] ERROR: Cannot build POSTGRES_URI - missing required components")
        print(f"[CONFIG] POSTGRES_USER: {'SET' if self.POSTGRES_USER else 'MISSING'}")
        print(f"[CONFIG] POSTGRES_PASSWORD: {'SET' if self.POSTGRES_PASSWORD else 'MISSING'}")
        print(f"[CONFIG] POSTGRES_HOST: {'SET' if self.POSTGRES_HOST else 'MISSING'}")
        print(f"[CONFIG] POSTGRES_DB: {'SET' if self.POSTGRES_DB else 'MISSING'}")
    return self
|
| 76 |
+
|
| 77 |
+
# JWT Configuration
|
| 78 |
+
SECRET_KEY: str = "your-secret-key-change-in-production"
|
| 79 |
+
ALGORITHM: str = "HS256"
|
| 80 |
+
TOKEN_EXPIRATION_HOURS: int = 8
|
| 81 |
+
|
| 82 |
# Logging
|
| 83 |
LOG_LEVEL: str = "INFO"
|
| 84 |
|
|
|
|
| 86 |
CORS_ORIGINS: List[str] = [
|
| 87 |
"http://localhost:3000",
|
| 88 |
"http://localhost:8000",
|
| 89 |
+
"http://localhost:8003",
|
| 90 |
]
|
| 91 |
|
| 92 |
# Pydantic v2 config
|
|
|
|
| 94 |
env_file=".env",
|
| 95 |
env_file_encoding="utf-8",
|
| 96 |
case_sensitive=True,
|
| 97 |
+
extra="allow", # allows extra environment variables without error
|
| 98 |
+
# Priority order (highest to lowest):
|
| 99 |
+
# 1. OS environment variables (Docker, shell exports)
|
| 100 |
+
# 2. .env file (local development)
|
| 101 |
+
# 3. Default values (fallback)
|
| 102 |
+
env_prefix="", # No prefix, use exact names
|
| 103 |
)
|
| 104 |
|
| 105 |
|
app/core/database.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Shared database configuration and base classes.
|
| 3 |
+
"""
|
| 4 |
+
from sqlalchemy.orm import declarative_base
|
| 5 |
+
|
| 6 |
+
# Shared Base for all SQLAlchemy models
|
| 7 |
+
Base = declarative_base()
|
app/core/logging.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Logging configuration for Tracker microservice.
|
| 3 |
+
"""
|
| 4 |
+
import logging
|
| 5 |
+
import sys
|
| 6 |
+
from typing import Optional
|
| 7 |
+
|
| 8 |
+
try:
|
| 9 |
+
from insightfy_utils.logging import get_logger as _insight_get_logger
|
| 10 |
+
from insightfy_utils.logging import setup_logging as _insight_setup_logging
|
| 11 |
+
except Exception:
|
| 12 |
+
_insight_get_logger = None
|
| 13 |
+
_insight_setup_logging = None
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def setup_logging(
    level: str = "INFO",
    format_type: str = "colored",
    app_name: Optional[str] = None,
    include_correlation: bool = True
) -> None:
    """Configure application-wide logging.

    Delegates to insightfy_utils when that package was importable at module
    load time; otherwise falls back to stdlib ``logging.basicConfig`` writing
    to stdout, and silences chatty third-party loggers.
    """
    if _insight_setup_logging:
        _insight_setup_logging(
            level=level,
            format_type=format_type,
            app_name=app_name or "tracker-microservice",
            include_correlation=include_correlation
        )
        return

    # Fallback to standard logging; unknown level names default to INFO.
    logging.basicConfig(
        level=getattr(logging, level.upper(), logging.INFO),
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[logging.StreamHandler(sys.stdout)]
    )

    # Raise the threshold of noisy library loggers to WARNING.
    for noisy_logger in (
        "motor",
        "pymongo",
        "asyncpg",
        "httpx",
        "httpcore",
        "uvicorn.access",
    ):
        logging.getLogger(noisy_logger).setLevel(logging.WARNING)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
def get_logger(name: str) -> logging.Logger:
    """Return a named logger, preferring the insightfy_utils implementation
    when it is available and falling back to the stdlib otherwise."""
    return _insight_get_logger(name) if _insight_get_logger else logging.getLogger(name)
|
app/dependencies/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Dependencies module for Tracker microservice.
|
| 3 |
+
"""
|
app/dependencies/auth.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Authentication dependencies for FastAPI.
|
| 3 |
+
Validates JWT tokens and provides user context.
|
| 4 |
+
"""
|
| 5 |
+
from typing import Optional
|
| 6 |
+
from fastapi import Depends, HTTPException, status
|
| 7 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 8 |
+
from jose import JWTError, jwt
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
|
| 11 |
+
from app.core.config import settings
|
| 12 |
+
|
| 13 |
+
security = HTTPBearer()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TokenUser(BaseModel):
    """User context extracted from a validated JWT payload."""
    user_id: str
    username: str
    role: str
    merchant_id: str
    merchant_type: Optional[str] = None
    metadata: Optional[dict] = None

    def has_role(self, *roles: str) -> bool:
        """Return True when the user's role equals any of *roles*."""
        return any(self.role == candidate for candidate in roles)

    def is_admin(self) -> bool:
        """Return True when the role contains "admin" (case-insensitive)."""
        return "admin" in self.role.lower()
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(security)
) -> TokenUser:
    """
    Get current authenticated user from JWT token.

    Decodes the bearer token, checks the mandatory claims
    (``sub``, ``username``, ``merchant_id``) and returns a TokenUser.

    Raises:
        HTTPException: 401 when the token is invalid/expired, a
            mandatory claim is missing, or the payload cannot be
            turned into a TokenUser.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        # Decode and verify JWT token (signature, expiry, algorithm)
        payload = jwt.decode(
            credentials.credentials,
            settings.SECRET_KEY,
            algorithms=[settings.ALGORITHM]
        )
    except JWTError:
        # Malformed token, bad signature, expired, wrong algorithm, ...
        raise credentials_exception

    user_id: str = payload.get("sub")
    username: str = payload.get("username")
    role: str = payload.get("role")
    merchant_id: str = payload.get("merchant_id")
    merchant_type: str = payload.get("merchant_type")
    metadata: dict = payload.get("metadata")

    # These claims are required for every downstream service call.
    # Raised outside any try block so it propagates directly instead of
    # being caught and re-raised by a blanket except (original behavior
    # was equivalent but masked unexpected errors indistinguishably).
    if user_id is None or username is None or merchant_id is None:
        raise credentials_exception

    try:
        return TokenUser(
            user_id=user_id,
            username=username,
            role=role or "user",
            merchant_id=merchant_id,
            merchant_type=merchant_type,
            metadata=metadata
        )
    except Exception:
        # e.g. pydantic validation failure on unexpected claim types
        raise credentials_exception
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
async def get_current_active_user(
    current_user: TokenUser = Depends(get_current_user)
) -> TokenUser:
    """Get current active user.

    NOTE: no active/disabled check is performed yet — this currently just
    returns the user produced by ``get_current_user``.
    """
    return current_user
app/main.py
CHANGED
|
@@ -1,64 +1,224 @@
|
|
| 1 |
"""
|
| 2 |
Main FastAPI application for Tracker Microservice.
|
| 3 |
"""
|
| 4 |
-
import logging
|
| 5 |
import os
|
| 6 |
-
from fastapi import FastAPI
|
| 7 |
from fastapi.middleware.cors import CORSMiddleware
|
|
|
|
|
|
|
|
|
|
| 8 |
|
| 9 |
from app.core.config import settings
|
| 10 |
-
from app.
|
|
|
|
|
|
|
|
|
|
| 11 |
|
| 12 |
-
|
| 13 |
-
|
|
|
|
|
|
|
| 14 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
app = FastAPI(
|
| 16 |
title=settings.APP_NAME,
|
| 17 |
-
description="Employee Tracker -
|
| 18 |
version=settings.APP_VERSION,
|
| 19 |
docs_url="/docs",
|
| 20 |
redoc_url="/redoc",
|
| 21 |
root_path=os.getenv("ROOT_PATH", ""),
|
| 22 |
)
|
| 23 |
|
|
|
|
| 24 |
app.add_middleware(
|
| 25 |
CORSMiddleware,
|
| 26 |
allow_origins=settings.CORS_ORIGINS,
|
| 27 |
allow_credentials=True,
|
| 28 |
allow_methods=["*"],
|
| 29 |
allow_headers=["*"],
|
|
|
|
| 30 |
)
|
| 31 |
|
| 32 |
|
|
|
|
| 33 |
@app.on_event("startup")
|
| 34 |
async def startup_event():
|
|
|
|
| 35 |
logger.info("Starting Tracker Microservice")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
|
| 37 |
|
| 38 |
@app.on_event("shutdown")
|
| 39 |
async def shutdown_event():
|
|
|
|
| 40 |
logger.info("Shutting down Tracker Microservice")
|
|
|
|
|
|
|
|
|
|
| 41 |
|
| 42 |
|
|
|
|
| 43 |
@app.get("/health", tags=["health"])
|
| 44 |
async def health_check():
|
|
|
|
| 45 |
return {
|
| 46 |
"status": "healthy",
|
| 47 |
"service": "tracker-microservice",
|
| 48 |
-
"version": settings.APP_VERSION
|
| 49 |
}
|
| 50 |
|
| 51 |
|
| 52 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 53 |
|
| 54 |
|
| 55 |
if __name__ == "__main__":
|
| 56 |
import uvicorn
|
| 57 |
-
|
| 58 |
uvicorn.run(
|
| 59 |
"app.main:app",
|
| 60 |
host="0.0.0.0",
|
| 61 |
port=int(os.getenv("PORT", "8003")),
|
| 62 |
reload=True,
|
| 63 |
-
log_level=
|
| 64 |
)
|
|
|
|
| 1 |
"""
|
| 2 |
Main FastAPI application for Tracker Microservice.
|
| 3 |
"""
|
|
|
|
| 4 |
import os
|
| 5 |
+
from fastapi import FastAPI, Request, status
|
| 6 |
from fastapi.middleware.cors import CORSMiddleware
|
| 7 |
+
from fastapi.responses import JSONResponse
|
| 8 |
+
from fastapi.exceptions import RequestValidationError
|
| 9 |
+
from jose import JWTError
|
| 10 |
|
| 11 |
from app.core.config import settings
|
| 12 |
+
from app.core.logging import setup_logging, get_logger
|
| 13 |
+
from app.nosql import connect_to_mongo, close_mongo_connection
|
| 14 |
+
from app.postgres import connect_to_postgres, close_postgres_connection
|
| 15 |
+
from app.tracker.attendance.router import router as attendance_router
|
| 16 |
|
| 17 |
+
# Initialize logging first so that startup messages are captured.
# Unrecognised LOG_LEVEL values fall back to INFO.
log_level = getattr(settings, 'LOG_LEVEL', 'INFO').strip().upper()
if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
    log_level = 'INFO'

setup_logging(
    level=log_level,
    format_type="colored",
    app_name="tracker-microservice",
    include_correlation=True
)
logger = get_logger(__name__)

# Create FastAPI app.
# ROOT_PATH supports deployment behind a reverse proxy with a path prefix.
app = FastAPI(
    title=settings.APP_NAME,
    description="Employee Tracker - Attendance and Location Tracking",
    version=settings.APP_VERSION,
    docs_url="/docs",
    redoc_url="/redoc",
    root_path=os.getenv("ROOT_PATH", ""),
)

# CORS middleware — allowed origins come from settings.CORS_ORIGINS
# (see app/core/config.py).
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["*"],
)
|
| 49 |
|
| 50 |
|
| 51 |
+
# Startup and shutdown events
|
| 52 |
@app.on_event("startup")
async def startup_event():
    """Initialize connections on startup.

    Connects to MongoDB and PostgreSQL, then provisions the ``trans``
    schema, the ``trans.scm_attendance`` table and its indexes. Any
    provisioning failure is logged and re-raised, aborting startup.
    """
    logger.info("Starting Tracker Microservice")
    await connect_to_mongo()
    await connect_to_postgres()

    # Create schema and tables (idempotent — safe on every restart).
    try:
        await _ensure_attendance_schema()
    except Exception as e:
        logger.error("Failed to create database schema", exc_info=e)
        raise

    logger.info("Tracker Microservice started successfully")


async def _ensure_attendance_schema() -> None:
    """Create the trans schema, attendance table and indexes if missing."""
    # Imported locally to avoid import-order issues at module load time.
    from app.postgres import get_postgres_connection, release_postgres_connection

    conn = await get_postgres_connection()
    try:
        # Create trans schema
        await conn.execute("CREATE SCHEMA IF NOT EXISTS trans")
        logger.info("TRANS schema exists")

        # One row per employee per work day; times are epoch milliseconds.
        create_table_sql = """
            CREATE TABLE IF NOT EXISTS trans.scm_attendance (
                id UUID PRIMARY KEY,
                merchant_id UUID NOT NULL,
                employee_id UUID NOT NULL,
                work_date DATE NOT NULL,
                check_in_time BIGINT,
                check_in_lat DOUBLE PRECISION,
                check_in_lon DOUBLE PRECISION,
                check_in_geofence_id UUID,
                check_out_time BIGINT,
                check_out_lat DOUBLE PRECISION,
                check_out_lon DOUBLE PRECISION,
                total_minutes INTEGER,
                created_at TIMESTAMP DEFAULT now(),
                updated_at TIMESTAMP DEFAULT now(),
                UNIQUE (employee_id, work_date)
            )
        """
        await conn.execute(create_table_sql)
        logger.info("scm_attendance table created/verified")

        # Indexes for the two common query shapes: per-employee and per-merchant.
        await conn.execute("""
            CREATE INDEX IF NOT EXISTS idx_scm_attendance_work_date
            ON trans.scm_attendance (employee_id, work_date)
        """)
        await conn.execute("""
            CREATE INDEX IF NOT EXISTS idx_scm_attendance_merchant
            ON trans.scm_attendance (merchant_id, work_date)
        """)
        logger.info("Indexes created/verified")
    finally:
        await release_postgres_connection(conn)
|
| 117 |
|
| 118 |
|
| 119 |
@app.on_event("shutdown")
async def shutdown_event():
    """Close connections on shutdown.

    Closes the MongoDB client, then the PostgreSQL pool. Both close
    helpers tolerate a never-initialized connection.
    """
    logger.info("Shutting down Tracker Microservice")
    await close_mongo_connection()
    await close_postgres_connection()
    logger.info("Tracker Microservice shut down successfully")
|
| 126 |
|
| 127 |
|
| 128 |
+
# Health check endpoint
|
| 129 |
@app.get("/health", tags=["health"])
async def health_check():
    """Liveness probe: report service identity, status, and version."""
    payload = dict(
        status="healthy",
        service="tracker-microservice",
    )
    payload["version"] = settings.APP_VERSION
    return payload
|
| 137 |
|
| 138 |
|
| 139 |
+
# Include routers — attendance endpoints are served under /api/v1/attendance.
app.include_router(attendance_router, prefix="/api/v1")
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
# Global exception handlers
|
| 144 |
+
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Convert request-validation failures into a uniform 422 envelope."""
    errors = []
    for error in exc.errors():
        field_path = " -> ".join(str(loc) for loc in error["loc"])
        errors.append({
            "field": field_path,
            "message": error["msg"],
            "type": error["type"]
        })

    log_context = {
        "path": request.url.path,
        "method": request.method,
        "error_count": len(errors),
        "errors": errors
    }
    logger.warning("Validation error", extra=log_context)

    body = {
        "success": False,
        "error": "Validation Error",
        "errors": errors
    }
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content=body
    )
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
@app.exception_handler(JWTError)
async def jwt_exception_handler(request: Request, exc: JWTError):
    """Map any JWT decode/verification failure to a uniform 401 response."""
    client_ip = request.client.host if request.client else None
    logger.warning(
        "JWT authentication failed",
        extra={
            "path": request.url.path,
            "error": str(exc),
            "client_ip": client_ip
        }
    )
    body = {
        "success": False,
        "error": "Unauthorized",
        "detail": "Invalid or expired token"
    }
    return JSONResponse(status_code=status.HTTP_401_UNAUTHORIZED, content=body)
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
@app.exception_handler(Exception)
async def general_exception_handler(request: Request, exc: Exception):
    """Last-resort handler: log full details, return an opaque 500 body."""
    client_ip = request.client.host if request.client else None
    details = {
        "method": request.method,
        "path": request.url.path,
        "error": str(exc),
        "error_type": type(exc).__name__,
        "client_ip": client_ip
    }
    logger.error("Unhandled exception", extra=details, exc_info=True)

    return JSONResponse(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        content={
            "success": False,
            "error": "Internal Server Error",
            "detail": "An unexpected error occurred"
        }
    )
|
| 214 |
|
| 215 |
|
| 216 |
if __name__ == "__main__":
    import uvicorn
    # Dev entrypoint: auto-reload is on; PORT defaults to 8003 (see .env).
    uvicorn.run(
        "app.main:app",
        host="0.0.0.0",
        port=int(os.getenv("PORT", "8003")),
        reload=True,
        log_level=log_level.lower()
    )
|
app/nosql.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
MongoDB connection and database instance.
|
| 3 |
+
Provides a singleton database connection for the application.
|
| 4 |
+
"""
|
| 5 |
+
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
|
| 6 |
+
from app.core.logging import get_logger
|
| 7 |
+
from app.core.config import settings
|
| 8 |
+
|
| 9 |
+
logger = get_logger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class DatabaseConnection:
    """Singleton holder for the application's MongoDB connection.

    The client and database handles live as class attributes so all
    callers share a single connection pool. ``connect()`` must run
    (during startup) before ``get_database()`` is usable.
    """
    # Shared AsyncIOMotorClient / AsyncIOMotorDatabase; None until connect().
    # (Plain assignments: the previous non-Optional annotations were wrong.)
    _client = None
    _db = None

    @classmethod
    def get_database(cls) -> AsyncIOMotorDatabase:
        """
        Get the database instance.

        Returns:
            MongoDB database instance

        Raises:
            RuntimeError: if connect() has not run, or close() already did.
        """
        if cls._db is None:
            raise RuntimeError("Database not connected. Call connect_to_mongo() first.")
        return cls._db

    @classmethod
    async def connect(cls):
        """
        Establish connection to MongoDB.
        Called during application startup.

        Raises:
            Exception: propagated when the server is unreachable or the
                initial ping fails.
        """
        try:
            mongodb_uri = settings.MONGODB_URI.strip()

            logger.info("Connecting to MongoDB", extra={
                "database": settings.MONGODB_DB_NAME
            })

            # "standard" UUID representation lets uuid.UUID values
            # round-trip as BSON binary subtype 4.
            cls._client = AsyncIOMotorClient(
                mongodb_uri,
                uuidRepresentation="standard"
            )

            cls._db = cls._client[settings.MONGODB_DB_NAME]

            # Fail fast: verify the server is actually reachable.
            await cls._client.admin.command('ping')

            logger.info("Successfully connected to MongoDB", extra={
                "database": settings.MONGODB_DB_NAME
            })
        except Exception as e:
            logger.error("Failed to connect to MongoDB", exc_info=e)
            raise

    @classmethod
    async def close(cls):
        """
        Close MongoDB connection.
        Called during application shutdown.
        """
        if cls._client:
            logger.info("Closing MongoDB connection")
            cls._client.close()
            # Reset handles so get_database() raises instead of handing
            # out a database bound to a closed client.
            cls._client = None
            cls._db = None
            logger.info("MongoDB connection closed")
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
# Public API
|
| 75 |
+
async def connect_to_mongo():
    """Establish the shared MongoDB connection (startup hook)."""
    await DatabaseConnection.connect()
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
async def close_mongo_connection():
    """Close the shared MongoDB connection (shutdown hook)."""
    await DatabaseConnection.close()
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def get_database() -> AsyncIOMotorDatabase:
    """Return the shared database handle; raises RuntimeError when not connected."""
    return DatabaseConnection.get_database()
|
app/postgres.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
PostgreSQL connection pool management.
|
| 3 |
+
Provides async connection pool for PostgreSQL operations.
|
| 4 |
+
"""
|
| 5 |
+
import asyncpg
|
| 6 |
+
import ssl
|
| 7 |
+
from typing import Optional, Dict, Any
|
| 8 |
+
from app.core.logging import get_logger
|
| 9 |
+
from app.core.config import settings
|
| 10 |
+
import time
|
| 11 |
+
|
| 12 |
+
logger = get_logger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class PostgreSQLConnectionPool:
    """Singleton manager for the application's asyncpg connection pool.

    Every acquired connection is probed with ``SELECT 1``; a dead
    connection is released and replaced once before the error surfaces.
    Simple counters are kept in ``_metrics`` for observability.
    """
    _pool: Optional[asyncpg.Pool] = None

    # Observability counters; acquisition_times holds the most recent
    # 1000 acquisition latencies, in milliseconds.
    _metrics = {
        "connections_acquired": 0,
        "connections_released": 0,
        "connections_failed": 0,
        "health_check_failures": 0,
        "acquisition_times": [],
    }

    @classmethod
    async def initialize(cls) -> None:
        """Initialize PostgreSQL connection pool (no-op when already up).

        Raises:
            Exception: when pool creation or the initial probe fails.
        """
        if cls._pool is not None:
            logger.warning("PostgreSQL connection pool already initialized")
            return

        try:
            logger.info("Initializing PostgreSQL connection pool", extra={
                "host": settings.POSTGRES_HOST,
                "port": settings.POSTGRES_PORT,
                "database": settings.POSTGRES_DB,
                "user": settings.POSTGRES_USER,
                "min_pool_size": settings.POSTGRES_MIN_POOL_SIZE,
                "max_pool_size": settings.POSTGRES_MAX_POOL_SIZE
            })

            # Optional SSL context: "verify-full" validates certificate and
            # hostname; any other non-"disable" mode encrypts without
            # verifying the server (comparable to libpq "require").
            ssl_context = None
            mode = (settings.POSTGRES_SSL_MODE or "disable").lower()
            if mode != "disable":
                if mode == "verify-full":
                    ssl_context = ssl.create_default_context()
                    ssl_context.check_hostname = True
                    ssl_context.verify_mode = ssl.CERT_REQUIRED
                else:
                    ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
                    ssl_context.check_hostname = False
                    ssl_context.verify_mode = ssl.CERT_NONE
                logger.info("PostgreSQL pool SSL enabled", extra={"ssl_mode": settings.POSTGRES_SSL_MODE})

            # Create connection pool
            cls._pool = await asyncpg.create_pool(
                host=settings.POSTGRES_HOST,
                port=settings.POSTGRES_PORT,
                database=settings.POSTGRES_DB,
                user=settings.POSTGRES_USER,
                password=settings.POSTGRES_PASSWORD,
                min_size=settings.POSTGRES_MIN_POOL_SIZE,
                max_size=settings.POSTGRES_MAX_POOL_SIZE,
                command_timeout=30.0,
                timeout=30.0,
                ssl=ssl_context,
            )

            # Fail fast if the server is unreachable.
            async with cls._pool.acquire() as conn:
                await conn.fetchval("SELECT 1")

            logger.info("PostgreSQL connection pool initialized successfully")

        except Exception as e:
            logger.error("Failed to initialize PostgreSQL connection pool", exc_info=e)
            raise

    @classmethod
    async def get_connection(cls) -> asyncpg.Connection:
        """Acquire a connection from the pool, retrying once on a dead one.

        Raises:
            RuntimeError: if the pool was never initialized.
            Exception: when no healthy connection can be acquired.
        """
        if cls._pool is None:
            raise RuntimeError("PostgreSQL connection pool not initialized. Call initialize() first.")

        start_time = time.time()
        conn = None

        try:
            conn = await cls._pool.acquire()

            # Health check: verify the connection is alive before handing it out.
            try:
                await conn.fetchval("SELECT 1")
            except Exception as health_check_error:
                logger.warning("Connection health check failed, releasing dead connection",
                               exc_info=health_check_error)
                cls._metrics["health_check_failures"] += 1
                await cls._pool.release(conn)
                conn = None  # released — avoid double-release in the handler below
                conn = await cls._pool.acquire()
                await conn.fetchval("SELECT 1")

            acquisition_time = (time.time() - start_time) * 1000
            cls._metrics["connections_acquired"] += 1
            cls._metrics["acquisition_times"].append(acquisition_time)

            # Keep only the most recent 1000 latency samples.
            if len(cls._metrics["acquisition_times"]) > 1000:
                cls._metrics["acquisition_times"] = cls._metrics["acquisition_times"][-1000:]

            return conn

        except Exception as e:
            cls._metrics["connections_failed"] += 1
            # BUGFIX: a connection acquired for the retry was previously
            # leaked when its probe failed — release it before re-raising.
            if conn is not None:
                try:
                    await cls._pool.release(conn)
                except Exception:
                    logger.warning("Could not release connection after acquisition failure")
            logger.error("Failed to acquire PostgreSQL connection", exc_info=e)
            raise

    @classmethod
    async def release_connection(cls, conn: asyncpg.Connection) -> None:
        """Release a connection back to the pool.

        Raises:
            RuntimeError: if the pool was never initialized.
        """
        if cls._pool is None:
            raise RuntimeError("PostgreSQL connection pool not initialized")

        try:
            await cls._pool.release(conn)
            cls._metrics["connections_released"] += 1
        except Exception as e:
            # Best-effort: a failed release is logged, not raised.
            logger.error("Failed to release PostgreSQL connection", exc_info=e)

    @classmethod
    async def close(cls) -> None:
        """Close all connections in the pool; the handle is always reset."""
        if cls._pool is None:
            logger.warning("PostgreSQL connection pool not initialized, nothing to close")
            return

        try:
            logger.info("Closing PostgreSQL connection pool")
            await cls._pool.close()
            cls._pool = None
            logger.info("PostgreSQL connection pool closed successfully")
        except Exception as e:
            logger.error("Error closing PostgreSQL connection pool", exc_info=e)
            cls._pool = None

    @classmethod
    def is_initialized(cls) -> bool:
        """Check if connection pool is initialized."""
        return cls._pool is not None
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
# Public API
|
| 153 |
+
async def connect_to_postgres() -> None:
    """Initialize the shared PostgreSQL connection pool (startup hook)."""
    await PostgreSQLConnectionPool.initialize()
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
async def close_postgres_connection() -> None:
    """Close the shared PostgreSQL connection pool (shutdown hook)."""
    await PostgreSQLConnectionPool.close()
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
async def get_postgres_connection() -> asyncpg.Connection:
    """Acquire a health-checked connection; the caller must release it."""
    return await PostgreSQLConnectionPool.get_connection()
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
async def release_postgres_connection(conn: asyncpg.Connection) -> None:
    """Return a connection obtained via get_postgres_connection() to the pool."""
    await PostgreSQLConnectionPool.release_connection(conn)
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def is_postgres_connected() -> bool:
    """Report whether the pool has been initialized (not a live probe)."""
    return PostgreSQLConnectionPool.is_initialized()
|
app/tracker/attendance/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Attendance module for employee check-in/check-out tracking.
|
| 3 |
+
"""
|
app/tracker/attendance/constants.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Constants for attendance module.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
# Collection names
|
| 6 |
+
EMPLOYEES_COLLECTION = "scm_employees"
|
| 7 |
+
|
| 8 |
+
# Error messages
|
| 9 |
+
ERROR_DUPLICATE_CHECKIN = "Already checked in today"
|
| 10 |
+
ERROR_GPS_DISABLED = "GPS tracking is disabled for this employee"
|
| 11 |
+
ERROR_LOCATION_REQUIRED = "Location coordinates are required"
|
| 12 |
+
ERROR_EMPLOYEE_NOT_FOUND = "Employee not found"
|
| 13 |
+
ERROR_INVALID_COORDINATES = "Invalid GPS coordinates"
|
| 14 |
+
|
| 15 |
+
# Success messages
|
| 16 |
+
SUCCESS_CHECKIN = "Check-in successful"
|
app/tracker/attendance/models.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
SQLAlchemy models for attendance tracking.
|
| 3 |
+
"""
|
| 4 |
+
from sqlalchemy import Column, String, Integer, Float, Date, BigInteger, TIMESTAMP, Index
|
| 5 |
+
from sqlalchemy.dialects.postgresql import UUID
|
| 6 |
+
from sqlalchemy.sql import func
|
| 7 |
+
import uuid
|
| 8 |
+
|
| 9 |
+
from app.core.database import Base
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ScmAttendance(Base):
    """
    Attendance tracking model.
    One row per employee per day.

    check_in_time / check_out_time are Unix epoch timestamps in
    milliseconds (BIGINT), not TIMESTAMP columns; total_minutes is the
    derived worked duration. NOTE(review): the bootstrap DDL in app.main
    also declares UNIQUE (employee_id, work_date), which is not mirrored
    here — confirm whether a UniqueConstraint belongs in __table_args__.
    """
    __tablename__ = "scm_attendance"
    __table_args__ = (
        Index('idx_scm_attendance_work_date', 'employee_id', 'work_date'),
        Index('idx_scm_attendance_merchant', 'merchant_id', 'work_date'),
        {'schema': 'trans'}
    )

    # Identity and ownership
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    merchant_id = Column(UUID(as_uuid=True), nullable=False)
    employee_id = Column(UUID(as_uuid=True), nullable=False)
    work_date = Column(Date, nullable=False)

    # Check-in details
    check_in_time = Column(BigInteger, nullable=True)  # Unix timestamp in milliseconds
    check_in_lat = Column(Float, nullable=True)
    check_in_lon = Column(Float, nullable=True)
    check_in_geofence_id = Column(UUID(as_uuid=True), nullable=True)

    # Check-out details
    check_out_time = Column(BigInteger, nullable=True)
    check_out_lat = Column(Float, nullable=True)
    check_out_lon = Column(Float, nullable=True)

    # Calculated fields
    total_minutes = Column(Integer, nullable=True)

    # Timestamps (database-side defaults)
    created_at = Column(TIMESTAMP, server_default=func.now(), nullable=False)
    updated_at = Column(TIMESTAMP, server_default=func.now(), onupdate=func.now(), nullable=False)

    def __repr__(self):
        return f"<ScmAttendance(id={self.id}, employee_id={self.employee_id}, work_date={self.work_date})>"
|
app/tracker/attendance/router.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
API router for attendance endpoints.
|
| 3 |
+
"""
|
| 4 |
+
from fastapi import APIRouter, Depends, HTTPException, status
|
| 5 |
+
from app.core.logging import get_logger
|
| 6 |
+
from app.dependencies.auth import get_current_user, TokenUser
|
| 7 |
+
from app.nosql import get_database
|
| 8 |
+
from app.tracker.attendance.service import AttendanceService, get_attendance_service
|
| 9 |
+
from app.tracker.attendance.schemas import CheckInRequest, CheckInResponse, ErrorResponse
|
| 10 |
+
|
| 11 |
+
logger = get_logger(__name__)
|
| 12 |
+
|
| 13 |
+
router = APIRouter(prefix="/attendance", tags=["Attendance"])
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@router.post(
    "/check-in",
    response_model=CheckInResponse,
    status_code=status.HTTP_201_CREATED,
    responses={
        201: {"description": "Check-in successful"},
        400: {"model": ErrorResponse, "description": "Bad request - duplicate check-in or GPS disabled"},
        401: {"description": "Unauthorized"},
        422: {"description": "Validation error"},
        500: {"model": ErrorResponse, "description": "Internal server error"}
    },
    summary="Employee Check-In",
    description="""
    Mark the start of an employee's working day.

    **Rules:**
    - Can check-in only once per day
    - Location coordinates are mandatory
    - GPS tracking must be enabled for the employee
    - Optional location_id if inside a geofence

    **Edge Cases:**
    - Duplicate check-in β 400 error
    - GPS disabled β 400 error (checked from MongoDB: scm_employees.location_settings.location_tracking_consent)

    **Data Storage:**
    - Stores timestamp, coordinates, and geofence match (if any) in PostgreSQL
    - Table: trans.scm_attendance
    """
)
async def check_in(
    payload: CheckInRequest,
    current_user: TokenUser = Depends(get_current_user)
) -> CheckInResponse:
    """
    Create a check-in record for the authenticated employee.

    Args:
        payload: Check-in request with timestamp, latitude, longitude, and optional location_id
        current_user: Authenticated user from JWT token

    Returns:
        CheckInResponse with success status and attendance record ID

    Raises:
        HTTPException 400: If duplicate check-in or GPS disabled (service
            signals these via ValueError)
        HTTPException 500: If internal error occurs
    """
    try:
        # Get MongoDB database (raises RuntimeError if startup never connected)
        mongo_db = get_database()

        # Create service instance (per-request)
        service = get_attendance_service(mongo_db)

        # Create check-in; business-rule rejections surface as ValueError
        result = await service.create_checkin(
            employee_id=current_user.user_id,
            merchant_id=current_user.merchant_id,
            payload=payload
        )

        logger.info(
            "Check-in successful",
            extra={
                "employee_id": current_user.user_id,
                "merchant_id": current_user.merchant_id,
                "attendance_id": result.id
            }
        )

        return result

    except ValueError as e:
        # Business logic errors (duplicate check-in, GPS disabled)
        error_message = str(e)
        logger.warning(
            f"Check-in validation failed: {error_message}",
            extra={
                "employee_id": current_user.user_id,
                "error": error_message
            }
        )
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail={
                "success": False,
                "error": error_message,
                "detail": error_message
            }
        )
    except Exception as e:
        # Unexpected errors: log with traceback, hide details from the client
        logger.error(
            "Check-in failed with unexpected error",
            extra={
                "employee_id": current_user.user_id,
                "error": str(e)
            },
            exc_info=e
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail={
                "success": False,
                "error": "Internal server error",
                "detail": "An unexpected error occurred during check-in"
            }
        )
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
@router.get(
    "/health",
    summary="Attendance Module Health Check",
    description="Check if the attendance module is operational"
)
async def attendance_health():
    """Liveness probe for the attendance sub-module."""
    info = dict(status="healthy", module="attendance")
    info["version"] = "1.0.0"
    return info
|
app/tracker/attendance/schemas.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Pydantic schemas for attendance module.
|
| 3 |
+
"""
|
| 4 |
+
from typing import Optional
|
| 5 |
+
from pydantic import BaseModel, Field, field_validator
|
| 6 |
+
from datetime import datetime
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class CheckInRequest(BaseModel):
    """Request schema for check-in.

    NOTE(review): the latitude/longitude validators repeat the ge/le
    bounds already declared on the Fields — presumably defensive;
    confirm whether they can be dropped.
    """
    timestamp: int = Field(..., description="Unix timestamp in milliseconds")
    latitude: float = Field(..., description="GPS latitude", ge=-90, le=90)
    longitude: float = Field(..., description="GPS longitude", ge=-180, le=180)
    location_id: Optional[str] = Field(None, description="Geofence location ID if inside a geofence")

    @field_validator('timestamp')
    @classmethod
    def validate_timestamp(cls, v):
        """Validate timestamp is positive and within 24h of server time."""
        if v <= 0:
            raise ValueError("Timestamp must be positive")
        # Check if timestamp is not too far in the past or future (within 24 hours).
        # Both sides are epoch milliseconds, so the comparison is timezone-independent.
        now_ms = int(datetime.now().timestamp() * 1000)
        diff_hours = abs(now_ms - v) / (1000 * 60 * 60)
        if diff_hours > 24:
            raise ValueError("Timestamp is too far from current time")
        return v

    @field_validator('latitude')
    @classmethod
    def validate_latitude(cls, v):
        """Validate latitude range"""
        if not -90 <= v <= 90:
            raise ValueError("Latitude must be between -90 and 90")
        return v

    @field_validator('longitude')
    @classmethod
    def validate_longitude(cls, v):
        """Validate longitude range"""
        if not -180 <= v <= 180:
            raise ValueError("Longitude must be between -180 and 180")
        return v

    model_config = {
        "json_schema_extra": {
            "example": {
                "timestamp": 1708156800000,
                "latitude": 19.0760,
                "longitude": 72.8777,
                "location_id": "loc_mumbai_office_001"
            }
        }
    }
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class CheckInResponse(BaseModel):
    """Response schema for a successful check-in.

    ``id`` is the attendance record identifier returned by the service
    layer (a UUID rendered as a string — see the example).
    """
    success: bool = Field(..., description="Whether check-in was successful")
    id: str = Field(..., description="Attendance record ID")
    message: Optional[str] = Field(None, description="Success or error message")

    model_config = {
        "json_schema_extra": {
            "example": {
                "success": True,
                "id": "550e8400-e29b-41d4-a716-446655440000",
                "message": "Check-in successful"
            }
        }
    }
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class ErrorResponse(BaseModel):
    """Error response schema shared by 400/500 responses of this module."""
    success: bool = Field(False, description="Always false for errors")
    error: str = Field(..., description="Error message")
    detail: Optional[str] = Field(None, description="Detailed error information")

    model_config = {
        "json_schema_extra": {
            "example": {
                "success": False,
                "error": "Already checked in today",
                "detail": "You have already checked in for today"
            }
        }
    }
|
app/tracker/attendance/service.py
ADDED
|
@@ -0,0 +1,248 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Service layer for attendance operations.
|
| 3 |
+
Handles business logic for check-in/check-out.
|
| 4 |
+
"""
|
| 5 |
+
from typing import Optional, Dict, Any
|
| 6 |
+
from datetime import datetime, date
|
| 7 |
+
import uuid
|
| 8 |
+
from motor.motor_asyncio import AsyncIOMotorDatabase
|
| 9 |
+
|
| 10 |
+
from app.core.logging import get_logger
|
| 11 |
+
from app.postgres import get_postgres_connection, release_postgres_connection
|
| 12 |
+
from app.tracker.attendance.schemas import CheckInRequest, CheckInResponse
|
| 13 |
+
from app.tracker.attendance.constants import (
|
| 14 |
+
EMPLOYEES_COLLECTION,
|
| 15 |
+
ERROR_DUPLICATE_CHECKIN,
|
| 16 |
+
ERROR_GPS_DISABLED,
|
| 17 |
+
ERROR_EMPLOYEE_NOT_FOUND,
|
| 18 |
+
SUCCESS_CHECKIN
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
logger = get_logger(__name__)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class AttendanceService:
    """Business logic for attendance check-in/check-out.

    Employee consent flags are read from MongoDB; attendance rows are
    persisted in PostgreSQL (``trans.scm_attendance``).
    """

    def __init__(self, mongo_db: AsyncIOMotorDatabase):
        self.mongo_db = mongo_db
        # Employee profiles; each document carries a 'location_settings'
        # sub-document with the tracking-consent flag.
        self.employees_collection = mongo_db[EMPLOYEES_COLLECTION]

    async def check_location_tracking_consent(self, employee_id: str) -> bool:
        """
        Check if employee has location tracking enabled.

        Args:
            employee_id: Employee UUID (matched against the 'user_id' field)

        Returns:
            True if location tracking is enabled, False otherwise

        Raises:
            ValueError: If employee not found
        """
        try:
            # Projection keeps the query cheap: only the consent flag is fetched.
            employee = await self.employees_collection.find_one(
                {"user_id": employee_id},
                {"location_settings.location_tracking_consent": 1}
            )

            if not employee:
                logger.warning(f"Employee not found: {employee_id}")
                raise ValueError(ERROR_EMPLOYEE_NOT_FOUND)

            # A missing settings document or flag defaults to "no consent".
            location_settings = employee.get("location_settings", {})
            consent = location_settings.get("location_tracking_consent", False)

            logger.info(f"Location tracking consent for {employee_id}: {consent}")
            return consent

        except ValueError:
            # Domain error (employee not found) — propagate unchanged.
            raise
        except Exception as e:
            logger.error(f"Error checking location tracking consent: {e}", exc_info=e)
            raise

    async def check_duplicate_checkin(
        self,
        employee_id: str,
        work_date: date
    ) -> bool:
        """
        Check if employee has already checked in today.

        Best-effort pre-check only: the UNIQUE (employee_id, work_date)
        constraint on the table is the authoritative guard (see
        :meth:`create_checkin`).

        Args:
            employee_id: Employee UUID (as string)
            work_date: Date to check

        Returns:
            True if already checked in, False otherwise
        """
        conn = None
        try:
            conn = await get_postgres_connection()

            query = """
                SELECT id FROM trans.scm_attendance
                WHERE employee_id = $1::uuid AND work_date = $2
            """

            result = await conn.fetchval(query, employee_id, work_date)

            return result is not None

        except Exception as e:
            logger.error(f"Error checking duplicate check-in: {e}", exc_info=e)
            raise
        finally:
            if conn:
                await release_postgres_connection(conn)

    async def create_checkin(
        self,
        employee_id: str,
        merchant_id: str,
        payload: CheckInRequest
    ) -> CheckInResponse:
        """
        Create a check-in record.

        Args:
            employee_id: Employee UUID
            merchant_id: Merchant UUID
            payload: Check-in request data (epoch-milliseconds timestamp + GPS)

        Returns:
            CheckInResponse with attendance record ID

        Raises:
            ValueError: If consent is disabled, the employee is unknown, or a
                check-in already exists for the work date
        """
        # Timestamp arrives in epoch milliseconds.
        # NOTE(review): fromtimestamp() derives the date in the server's local
        # timezone — confirm this matches the intended business-day boundary.
        work_date = datetime.fromtimestamp(payload.timestamp / 1000).date()

        # 1. GPS consent is mandatory for check-in.
        has_consent = await self.check_location_tracking_consent(employee_id)
        if not has_consent:
            logger.warning(f"GPS disabled for employee {employee_id}")
            raise ValueError(ERROR_GPS_DISABLED)

        # 2. Fast-path duplicate detection (racy; step 3 has the real guard).
        already_checked_in = await self.check_duplicate_checkin(employee_id, work_date)
        if already_checked_in:
            logger.warning(f"Duplicate check-in attempt for employee {employee_id} on {work_date}")
            raise ValueError(ERROR_DUPLICATE_CHECKIN)

        # 3. Insert the attendance row.
        conn = None
        try:
            conn = await get_postgres_connection()

            attendance_id = uuid.uuid4()

            query = """
                INSERT INTO trans.scm_attendance (
                    id,
                    merchant_id,
                    employee_id,
                    work_date,
                    check_in_time,
                    check_in_lat,
                    check_in_lon,
                    check_in_geofence_id,
                    created_at,
                    updated_at
                ) VALUES ($1, $2::uuid, $3::uuid, $4, $5, $6, $7, $8::uuid, NOW(), NOW())
                RETURNING id
            """

            result = await conn.fetchval(
                query,
                attendance_id,
                merchant_id,
                employee_id,
                work_date,
                payload.timestamp,
                payload.latitude,
                payload.longitude,
                payload.location_id
            )

            logger.info(
                "Check-in created successfully",
                extra={
                    "attendance_id": str(result),
                    "employee_id": employee_id,
                    "merchant_id": merchant_id,
                    "work_date": str(work_date),
                    "geofence_id": payload.location_id if payload.location_id else None
                }
            )

            return CheckInResponse(
                success=True,
                id=str(result),
                message=SUCCESS_CHECKIN
            )

        except Exception as e:
            # The step-2 pre-check is racy: two concurrent requests can both
            # pass it. The UNIQUE (employee_id, work_date) constraint is the
            # real guard — translate its violation (SQLSTATE 23505, exposed as
            # the driver exception's 'sqlstate' attribute) into the same
            # domain error the pre-check raises.
            if getattr(e, "sqlstate", None) == "23505":
                logger.warning(
                    f"Duplicate check-in (constraint) for employee {employee_id} on {work_date}"
                )
                raise ValueError(ERROR_DUPLICATE_CHECKIN) from e
            logger.error(f"Error creating check-in: {e}", exc_info=e)
            raise
        finally:
            if conn:
                await release_postgres_connection(conn)

    async def get_attendance_by_id(self, attendance_id: str) -> Optional[Dict[str, Any]]:
        """
        Get attendance record by ID.

        Args:
            attendance_id: Attendance UUID (as string)

        Returns:
            Attendance record as a plain dict, or None if not found
        """
        conn = None
        try:
            conn = await get_postgres_connection()

            query = """
                SELECT
                    id,
                    merchant_id,
                    employee_id,
                    work_date,
                    check_in_time,
                    check_in_lat,
                    check_in_lon,
                    check_in_geofence_id,
                    check_out_time,
                    check_out_lat,
                    check_out_lon,
                    total_minutes,
                    created_at,
                    updated_at
                FROM trans.scm_attendance
                WHERE id = $1::uuid
            """

            row = await conn.fetchrow(query, attendance_id)

            if not row:
                return None

            return dict(row)

        except Exception as e:
            logger.error(f"Error fetching attendance: {e}", exc_info=e)
            raise
        finally:
            if conn:
                await release_postgres_connection(conn)
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def get_attendance_service(mongo_db: AsyncIOMotorDatabase) -> AttendanceService:
    """Build an AttendanceService bound to the given MongoDB handle."""
    service = AttendanceService(mongo_db)
    return service
|
migrate_attendance.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Migration script to create attendance table and indexes.
|
| 3 |
+
Run this script to set up the database schema for attendance tracking.
|
| 4 |
+
"""
|
| 5 |
+
import asyncio
|
| 6 |
+
import asyncpg
|
| 7 |
+
import os
|
| 8 |
+
from dotenv import load_dotenv
|
| 9 |
+
|
| 10 |
+
# Load environment variables
|
| 11 |
+
load_dotenv()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
async def run_migration():
    """Create the trans.scm_attendance table and its supporting indexes.

    Connection parameters are read from environment variables
    (DB_HOST/DB_PORT/DB_NAME/DB_USER/DB_PASSWORD). Progress and a
    verification report are printed to stdout; raises on any failure.
    """

    # Get database connection details
    db_host = os.getenv("DB_HOST", "localhost")
    db_port = int(os.getenv("DB_PORT", "5432"))
    db_name = os.getenv("DB_NAME", "cuatrolabs")
    db_user = os.getenv("DB_USER", "postgres")
    db_password = os.getenv("DB_PASSWORD", "")

    print("=" * 80)
    print("ATTENDANCE TABLE MIGRATION")
    print("=" * 80)
    print(f"Host: {db_host}")
    print(f"Port: {db_port}")
    print(f"Database: {db_name}")
    print(f"User: {db_user}")
    print("=" * 80)

    conn = None
    try:
        # Connect to database
        print("\n[1/4] Connecting to PostgreSQL...")
        conn = await asyncpg.connect(
            host=db_host,
            port=db_port,
            database=db_name,
            user=db_user,
            password=db_password
        )
        print("✅ Connected successfully")

        # Create trans schema
        print("\n[2/4] Creating trans schema...")
        await conn.execute("CREATE SCHEMA IF NOT EXISTS trans")
        print("✅ Schema 'trans' created/verified")

        # Create attendance table
        print("\n[3/4] Creating scm_attendance table...")
        create_table_sql = """
            CREATE TABLE IF NOT EXISTS trans.scm_attendance (
                id UUID PRIMARY KEY,
                merchant_id UUID NOT NULL,
                employee_id UUID NOT NULL,
                work_date DATE NOT NULL,
                check_in_time BIGINT,
                check_in_lat DOUBLE PRECISION,
                check_in_lon DOUBLE PRECISION,
                check_in_geofence_id UUID,
                check_out_time BIGINT,
                check_out_lat DOUBLE PRECISION,
                check_out_lon DOUBLE PRECISION,
                total_minutes INTEGER,
                created_at TIMESTAMP DEFAULT now(),
                updated_at TIMESTAMP DEFAULT now(),
                UNIQUE (employee_id, work_date)
            )
        """
        await conn.execute(create_table_sql)
        print("✅ Table 'trans.scm_attendance' created/verified")

        # Create indexes
        print("\n[4/4] Creating indexes...")

        # Index for employee + work_date lookups
        await conn.execute("""
            CREATE INDEX IF NOT EXISTS idx_scm_attendance_work_date
            ON trans.scm_attendance (employee_id, work_date)
        """)
        print("✅ Index 'idx_scm_attendance_work_date' created")

        # Index for merchant + work_date lookups
        await conn.execute("""
            CREATE INDEX IF NOT EXISTS idx_scm_attendance_merchant
            ON trans.scm_attendance (merchant_id, work_date)
        """)
        print("✅ Index 'idx_scm_attendance_merchant' created")

        # Verify table structure
        print("\n[VERIFICATION] Checking table structure...")
        columns = await conn.fetch("""
            SELECT column_name, data_type, is_nullable
            FROM information_schema.columns
            WHERE table_schema = 'trans'
            AND table_name = 'scm_attendance'
            ORDER BY ordinal_position
        """)

        print("\nTable Structure:")
        print("-" * 80)
        for col in columns:
            nullable = "NULL" if col['is_nullable'] == 'YES' else "NOT NULL"
            print(f"  {col['column_name']:<25} {col['data_type']:<20} {nullable}")
        print("-" * 80)

        # Check constraints
        constraints = await conn.fetch("""
            SELECT constraint_name, constraint_type
            FROM information_schema.table_constraints
            WHERE table_schema = 'trans'
            AND table_name = 'scm_attendance'
        """)

        print("\nConstraints:")
        print("-" * 80)
        for constraint in constraints:
            print(f"  {constraint['constraint_name']:<40} {constraint['constraint_type']}")
        print("-" * 80)

        # Check indexes
        indexes = await conn.fetch("""
            SELECT indexname, indexdef
            FROM pg_indexes
            WHERE schemaname = 'trans'
            AND tablename = 'scm_attendance'
        """)

        print("\nIndexes:")
        print("-" * 80)
        for index in indexes:
            print(f"  {index['indexname']}")
        print("-" * 80)

        print("\n" + "=" * 80)
        print("✅ MIGRATION COMPLETED SUCCESSFULLY")
        print("=" * 80)
        print("\nNext steps:")
        print("1. Start the Tracker microservice")
        print("2. Test the check-in endpoint: POST /api/v1/attendance/check-in")
        print("3. Check the API documentation: http://localhost:8003/docs")
        print("=" * 80)

    except Exception as e:
        print(f"\n❌ Migration failed: {e}")
        raise
    finally:
        # Fix: the connection was previously closed only on the success path,
        # leaking it whenever any step failed. Always release it here.
        if conn is not None:
            await conn.close()


if __name__ == "__main__":
    asyncio.run(run_migration())
|
migrate_attendance_fix_ids.py
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Migration script to change UUID columns to VARCHAR to support custom ID formats.
|
| 3 |
+
This allows the attendance table to work with custom user IDs like 'usr_xxx' instead of UUIDs.
|
| 4 |
+
"""
|
| 5 |
+
import asyncio
|
| 6 |
+
import asyncpg
|
| 7 |
+
import os
|
| 8 |
+
from dotenv import load_dotenv
|
| 9 |
+
|
| 10 |
+
# Load environment variables
|
| 11 |
+
load_dotenv()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
async def run_migration():
    """Recreate trans.scm_attendance with VARCHAR ID columns.

    Destructive: drops the existing table, then recreates it so that
    merchant_id / employee_id / check_in_geofence_id accept custom ID
    formats such as 'usr_xxx'. Prints progress; raises on failure.
    """

    # Get database connection details
    db_host = os.getenv("DB_HOST", "localhost")
    db_port = int(os.getenv("DB_PORT", "5432"))
    db_name = os.getenv("DB_NAME", "cuatrolabs")
    db_user = os.getenv("DB_USER", "postgres")
    db_password = os.getenv("DB_PASSWORD", "")

    print("=" * 80)
    print("ATTENDANCE TABLE ID FIX MIGRATION")
    print("=" * 80)
    print(f"Host: {db_host}")
    print(f"Port: {db_port}")
    print(f"Database: {db_name}")
    print(f"User: {db_user}")
    print("=" * 80)

    conn = None
    try:
        # Connect to database
        print("\n[1/5] Connecting to PostgreSQL...")
        conn = await asyncpg.connect(
            host=db_host,
            port=db_port,
            database=db_name,
            user=db_user,
            password=db_password
        )
        print("✅ Connected successfully")

        # Check if table exists
        print("\n[2/5] Checking if table exists...")
        table_exists = await conn.fetchval("""
            SELECT EXISTS (
                SELECT FROM information_schema.tables
                WHERE table_schema = 'trans'
                AND table_name = 'scm_attendance'
            )
        """)

        if not table_exists:
            print("❌ Table trans.scm_attendance does not exist. Run migrate_attendance.py first.")
            # Connection is released by the finally block below.
            return

        print("✅ Table exists")

        # Drop and recreate table with VARCHAR columns
        print("\n[3/5] Dropping existing table...")
        await conn.execute("DROP TABLE IF EXISTS trans.scm_attendance CASCADE")
        print("✅ Table dropped")

        print("\n[4/5] Creating table with VARCHAR ID columns...")
        create_table_sql = """
            CREATE TABLE trans.scm_attendance (
                id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                merchant_id VARCHAR(100) NOT NULL,
                employee_id VARCHAR(100) NOT NULL,
                work_date DATE NOT NULL,
                check_in_time BIGINT,
                check_in_lat DOUBLE PRECISION,
                check_in_lon DOUBLE PRECISION,
                check_in_geofence_id VARCHAR(100),
                check_out_time BIGINT,
                check_out_lat DOUBLE PRECISION,
                check_out_lon DOUBLE PRECISION,
                total_minutes INTEGER,
                created_at TIMESTAMP DEFAULT now(),
                updated_at TIMESTAMP DEFAULT now(),
                UNIQUE (employee_id, work_date)
            )
        """
        await conn.execute(create_table_sql)
        print("✅ Table created with VARCHAR ID columns")

        # Create indexes
        print("\n[5/5] Creating indexes...")

        # Index for employee + work_date lookups
        await conn.execute("""
            CREATE INDEX idx_scm_attendance_work_date
            ON trans.scm_attendance (employee_id, work_date)
        """)
        print("✅ Index 'idx_scm_attendance_work_date' created")

        # Index for merchant + work_date lookups
        await conn.execute("""
            CREATE INDEX idx_scm_attendance_merchant
            ON trans.scm_attendance (merchant_id, work_date)
        """)
        print("✅ Index 'idx_scm_attendance_merchant' created")

        # Verify table structure
        print("\n[VERIFICATION] Checking table structure...")
        columns = await conn.fetch("""
            SELECT column_name, data_type, character_maximum_length, is_nullable
            FROM information_schema.columns
            WHERE table_schema = 'trans'
            AND table_name = 'scm_attendance'
            ORDER BY ordinal_position
        """)

        print("\nTable Structure:")
        print("-" * 80)
        for col in columns:
            nullable = "NULL" if col['is_nullable'] == 'YES' else "NOT NULL"
            data_type = col['data_type']
            if col['character_maximum_length']:
                data_type += f"({col['character_maximum_length']})"
            print(f"  {col['column_name']:<25} {data_type:<20} {nullable}")
        print("-" * 80)

        print("\n" + "=" * 80)
        print("✅ MIGRATION COMPLETED SUCCESSFULLY")
        print("=" * 80)
        print("\nChanges:")
        print("  - merchant_id: UUID → VARCHAR(100)")
        print("  - employee_id: UUID → VARCHAR(100)")
        print("  - check_in_geofence_id: UUID → VARCHAR(100)")
        print("\nThis allows custom ID formats like 'usr_xxx', 'mkt_xxx', etc.")
        print("=" * 80)

    except Exception as e:
        print(f"\n❌ Migration failed: {e}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        # Fix: the connection was previously closed only on the success and
        # table-missing paths, leaking it on any other failure.
        if conn is not None:
            await conn.close()


if __name__ == "__main__":
    asyncio.run(run_migration())
|
migrate_attendance_revert_to_uuid.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Migration script to revert ID columns back to UUID type.
|
| 3 |
+
"""
|
| 4 |
+
import asyncio
|
| 5 |
+
import asyncpg
|
| 6 |
+
import os
|
| 7 |
+
from dotenv import load_dotenv
|
| 8 |
+
|
| 9 |
+
# Load environment variables
|
| 10 |
+
load_dotenv()
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
async def run_migration():
    """Recreate trans.scm_attendance with UUID ID columns.

    Destructive: drops the existing table, then recreates it with UUID
    columns for merchant_id / employee_id / check_in_geofence_id.
    Prints progress; raises on failure.
    """

    # Get database connection details
    db_host = os.getenv("DB_HOST", "localhost")
    db_port = int(os.getenv("DB_PORT", "5432"))
    db_name = os.getenv("DB_NAME", "cuatrolabs")
    db_user = os.getenv("DB_USER", "postgres")
    db_password = os.getenv("DB_PASSWORD", "")

    print("=" * 80)
    print("ATTENDANCE TABLE REVERT TO UUID MIGRATION")
    print("=" * 80)
    print(f"Host: {db_host}")
    print(f"Port: {db_port}")
    print(f"Database: {db_name}")
    print(f"User: {db_user}")
    print("=" * 80)

    conn = None
    try:
        # Connect to database
        print("\n[1/4] Connecting to PostgreSQL...")
        conn = await asyncpg.connect(
            host=db_host,
            port=db_port,
            database=db_name,
            user=db_user,
            password=db_password
        )
        print("✅ Connected successfully")

        # Drop and recreate table with UUID columns
        print("\n[2/4] Dropping existing table...")
        await conn.execute("DROP TABLE IF EXISTS trans.scm_attendance CASCADE")
        print("✅ Table dropped")

        print("\n[3/4] Creating table with UUID columns...")
        create_table_sql = """
            CREATE TABLE trans.scm_attendance (
                id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                merchant_id UUID NOT NULL,
                employee_id UUID NOT NULL,
                work_date DATE NOT NULL,
                check_in_time BIGINT,
                check_in_lat DOUBLE PRECISION,
                check_in_lon DOUBLE PRECISION,
                check_in_geofence_id UUID,
                check_out_time BIGINT,
                check_out_lat DOUBLE PRECISION,
                check_out_lon DOUBLE PRECISION,
                total_minutes INTEGER,
                created_at TIMESTAMP DEFAULT now(),
                updated_at TIMESTAMP DEFAULT now(),
                UNIQUE (employee_id, work_date)
            )
        """
        await conn.execute(create_table_sql)
        print("✅ Table created with UUID columns")

        # Create indexes
        print("\n[4/4] Creating indexes...")

        # Index for employee + work_date lookups
        await conn.execute("""
            CREATE INDEX idx_scm_attendance_work_date
            ON trans.scm_attendance (employee_id, work_date)
        """)
        print("✅ Index 'idx_scm_attendance_work_date' created")

        # Index for merchant + work_date lookups
        await conn.execute("""
            CREATE INDEX idx_scm_attendance_merchant
            ON trans.scm_attendance (merchant_id, work_date)
        """)
        print("✅ Index 'idx_scm_attendance_merchant' created")

        # Verify table structure
        print("\n[VERIFICATION] Checking table structure...")
        columns = await conn.fetch("""
            SELECT column_name, data_type, is_nullable
            FROM information_schema.columns
            WHERE table_schema = 'trans'
            AND table_name = 'scm_attendance'
            ORDER BY ordinal_position
        """)

        print("\nTable Structure:")
        print("-" * 80)
        for col in columns:
            nullable = "NULL" if col['is_nullable'] == 'YES' else "NOT NULL"
            print(f"  {col['column_name']:<25} {col['data_type']:<20} {nullable}")
        print("-" * 80)

        print("\n" + "=" * 80)
        print("✅ MIGRATION COMPLETED SUCCESSFULLY")
        print("=" * 80)
        print("\nReverted to UUID columns:")
        print("  - merchant_id: UUID")
        print("  - employee_id: UUID")
        print("  - check_in_geofence_id: UUID")
        print("\nMake sure JWT tokens contain valid UUID format for user_id and merchant_id")
        print("=" * 80)

    except Exception as e:
        print(f"\n❌ Migration failed: {e}")
        import traceback
        traceback.print_exc()
        raise
    finally:
        # Fix: the connection was previously closed only on the success path,
        # leaking it whenever any step failed.
        if conn is not None:
            await conn.close()


if __name__ == "__main__":
    asyncio.run(run_migration())
|
requirements.txt
CHANGED
|
@@ -3,3 +3,20 @@ uvicorn[standard]==0.24.0
|
|
| 3 |
pydantic>=2.12.5,<3.0.0
|
| 4 |
pydantic-settings>=2.0.0
|
| 5 |
python-dotenv==1.0.0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
pydantic>=2.12.5,<3.0.0
|
| 4 |
pydantic-settings>=2.0.0
|
| 5 |
python-dotenv==1.0.0
|
| 6 |
+
python-multipart==0.0.6
|
| 7 |
+
|
| 8 |
+
# Database
|
| 9 |
+
motor==3.3.2
|
| 10 |
+
pymongo==4.6.0
|
| 11 |
+
asyncpg==0.31.0
|
| 12 |
+
sqlalchemy[asyncio]==2.0.36
|
| 13 |
+
|
| 14 |
+
# Authentication
|
| 15 |
+
python-jose[cryptography]==3.3.0
|
| 16 |
+
passlib[bcrypt]==1.7.4
|
| 17 |
+
|
| 18 |
+
# Utilities
|
| 19 |
+
insightfy-utils>=0.1.0
|
| 20 |
+
|
| 21 |
+
# Logging
|
| 22 |
+
python-json-logger==2.0.7
|
setup.sh
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/bin/bash

echo "=========================================="
echo "Tracker Microservice Setup"
echo "=========================================="

# Report the interpreter used for every step below.
echo ""
echo "Checking Python version..."
python3 --version

# The bundled wheel goes in first so the 'insightfy-utils' requirement
# in requirements.txt resolves against it.
echo ""
echo "Installing insightfy-utils..."
python3 -m pip install --user app/insightfy_utils-0.1.0-py3-none-any.whl

echo ""
echo "Installing dependencies..."
python3 -m pip install --user -r requirements.txt

# Seed a .env from the template when none is present yet.
echo ""
if [ ! -f ".env" ]; then
    echo "⚠️  .env file not found. Creating from .env.example..."
    cp .env.example .env
    echo "✅ Created .env file. Please edit it with your credentials."
else
    echo "✅ .env file exists"
fi

# Offer to run the schema migration right away.
echo ""
echo "Do you want to run the database migration now? (y/n)"
read -r response
case "$response" in
    [yY][eE][sS]|[yY])
        echo "Running migration..."
        python3 migrate_attendance.py
        ;;
    *)
        echo "Skipping migration. Run 'python3 migrate_attendance.py' when ready."
        ;;
esac

echo ""
echo "=========================================="
echo "✅ Setup Complete!"
echo "=========================================="
echo ""
echo "Next steps:"
echo "1. Edit .env file with your database credentials"
echo "2. Run migration: python3 migrate_attendance.py"
echo "3. Start service: python3 -m uvicorn app.main:app --host 0.0.0.0 --port 8003 --reload"
echo "4. Or press F5 in VS Code to debug"
echo ""
echo "API Documentation: http://localhost:8003/docs"
echo "=========================================="
|
test_checkin.py
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Test script for attendance check-in API.
|
| 3 |
+
Generates a JWT token and tests the check-in endpoint.
|
| 4 |
+
"""
|
| 5 |
+
import json
import os
from datetime import datetime, timedelta, timezone

import requests
from dotenv import load_dotenv
from jose import jwt
| 11 |
+
|
| 12 |
+
load_dotenv()
|
| 13 |
+
|
| 14 |
+
# Configuration
|
| 15 |
+
BASE_URL = "http://localhost:8003"
|
| 16 |
+
SECRET_KEY = os.getenv("SECRET_KEY", "your-secret-key-change-in-production")
|
| 17 |
+
ALGORITHM = os.getenv("ALGORITHM", "HS256")
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def generate_test_token(
    user_id: str = "550e8400-e29b-41d4-a716-446655440000",
    username: str = "test.employee@example.com",
    merchant_id: str = "660e8400-e29b-41d4-a716-446655440000",
    role: str = "employee"
) -> str:
    """Generate a signed JWT for exercising the attendance API.

    Args:
        user_id: UUID placed in the standard ``sub`` claim.
        username: Email-style username claim.
        merchant_id: Tenant identifier claim.
        role: Role claim (presumably checked by the service's auth layer —
            confirm against app/dependencies/auth.py).

    Returns:
        The encoded JWT, signed with SECRET_KEY/ALGORITHM, expiring in 8 hours.
    """
    # datetime.utcnow() is deprecated (Python 3.12+) and returns a *naive*
    # datetime; use an explicitly UTC-aware timestamp for the exp claim so
    # the epoch conversion done by jose is unambiguous.
    payload = {
        "sub": user_id,
        "username": username,
        "merchant_id": merchant_id,
        "role": role,
        "exp": datetime.now(timezone.utc) + timedelta(hours=8)
    }

    token = jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM)
    return token
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def test_health_check():
    """Service-level /health endpoint must answer 200."""
    banner = "=" * 80
    print("\n" + banner)
    print("TEST 1: Health Check")
    print(banner)

    resp = requests.get(f"{BASE_URL}/health")
    print(f"Status Code: {resp.status_code}")
    print(f"Response: {json.dumps(resp.json(), indent=2)}")

    assert resp.status_code == 200
    print("✅ Health check passed")
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def test_attendance_health():
    """Attendance router's own /health endpoint must answer 200."""
    banner = "=" * 80
    print("\n" + banner)
    print("TEST 2: Attendance Module Health Check")
    print(banner)

    resp = requests.get(f"{BASE_URL}/api/v1/attendance/health")
    print(f"Status Code: {resp.status_code}")
    print(f"Response: {json.dumps(resp.json(), indent=2)}")

    assert resp.status_code == 200
    print("✅ Attendance health check passed")
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def test_checkin_without_auth():
    """A check-in with no bearer token must be rejected with 403."""
    banner = "=" * 80
    print("\n" + banner)
    print("TEST 3: Check-in Without Authentication (Should Fail)")
    print(banner)

    body = {
        "timestamp": int(datetime.now().timestamp() * 1000),
        "latitude": 19.0760,
        "longitude": 72.8777,
        "location_id": "loc_mumbai_office_001"
    }

    resp = requests.post(f"{BASE_URL}/api/v1/attendance/check-in", json=body)

    print(f"Status Code: {resp.status_code}")
    print(f"Response: {json.dumps(resp.json(), indent=2)}")

    # FastAPI's HTTPBearer dependency answers 403 when the header is absent.
    assert resp.status_code == 403
    print("✅ Correctly rejected unauthenticated request")
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def test_checkin_with_auth():
    """Happy-path check-in with a freshly minted JWT.

    Does not assert a fixed status: a repeat run legitimately yields 400
    ("Already checked in"), and a merchant config issue yields a GPS error.
    """
    banner = "=" * 80
    print("\n" + banner)
    print("TEST 4: Check-in With Authentication")
    print(banner)

    # Mint a token for the default test employee.
    token = generate_test_token()
    print(f"Generated JWT Token: {token[:50]}...")

    body = {
        "timestamp": int(datetime.now().timestamp() * 1000),
        "latitude": 19.0760,
        "longitude": 72.8777,
        "location_id": "loc_mumbai_office_001"
    }

    print(f"\nRequest Payload:")
    print(json.dumps(body, indent=2))

    resp = requests.post(
        f"{BASE_URL}/api/v1/attendance/check-in",
        json=body,
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json"
        },
    )

    print(f"\nStatus Code: {resp.status_code}")
    print(f"Response: {json.dumps(resp.json(), indent=2)}")

    if resp.status_code == 201:
        print("✅ Check-in successful")
        return

    if resp.status_code != 400:
        print(f"❌ Unexpected status code: {resp.status_code}")
        return

    # 400: inspect the error detail to distinguish the known failure modes.
    detail = resp.json().get("detail", {})
    detail_text = str(detail)
    if "Already checked in" in detail_text:
        print("⚠️ Already checked in today (expected if running multiple times)")
    elif "GPS" in detail_text:
        print("⚠️ GPS tracking disabled for this employee")
        print(" To fix: Update MongoDB scm_employees collection")
        print(" Set location_settings.location_tracking_consent = true")
    else:
        print(f"❌ Check-in failed: {detail}")
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def test_checkin_invalid_coordinates():
    """An out-of-range latitude must trip schema validation (422)."""
    banner = "=" * 80
    print("\n" + banner)
    print("TEST 5: Check-in With Invalid Coordinates (Should Fail)")
    print(banner)

    token = generate_test_token()

    # Latitude far outside [-90, 90]; longitude left valid.
    body = {
        "timestamp": int(datetime.now().timestamp() * 1000),
        "latitude": 999.0,
        "longitude": 72.8777
    }

    resp = requests.post(
        f"{BASE_URL}/api/v1/attendance/check-in",
        json=body,
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json"
        },
    )

    print(f"Status Code: {resp.status_code}")
    print(f"Response: {json.dumps(resp.json(), indent=2)}")

    # Pydantic validation failures surface as 422 Unprocessable Entity.
    assert resp.status_code == 422
    print("✅ Correctly rejected invalid coordinates")
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
def test_checkin_missing_fields():
    """Omitting required coordinates must trip schema validation (422)."""
    banner = "=" * 80
    print("\n" + banner)
    print("TEST 6: Check-in With Missing Fields (Should Fail)")
    print(banner)

    token = generate_test_token()

    # latitude/longitude deliberately omitted.
    body = {"timestamp": int(datetime.now().timestamp() * 1000)}

    resp = requests.post(
        f"{BASE_URL}/api/v1/attendance/check-in",
        json=body,
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json"
        },
    )

    print(f"Status Code: {resp.status_code}")
    print(f"Response: {json.dumps(resp.json(), indent=2)}")

    # Missing required fields also surface as 422 Unprocessable Entity.
    assert resp.status_code == 422
    print("✅ Correctly rejected request with missing fields")
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
def main():
    """Run every check in order, reporting connection failures nicely."""
    banner = "=" * 80
    print("\n" + banner)
    print("ATTENDANCE CHECK-IN API TEST SUITE")
    print(banner)
    print(f"Base URL: {BASE_URL}")
    print(f"Time: {datetime.now().isoformat()}")
    print(banner)

    # Ordered suite; the authenticated check-in goes last because it is the
    # only one that may mutate state (an actual successful check-in).
    suite = (
        test_health_check,
        test_attendance_health,
        test_checkin_without_auth,
        test_checkin_invalid_coordinates,
        test_checkin_missing_fields,
        test_checkin_with_auth,
    )

    try:
        for check in suite:
            check()

        print("\n" + banner)
        print("✅ ALL TESTS COMPLETED")
        print(banner)

    except requests.exceptions.ConnectionError:
        print("\n❌ ERROR: Could not connect to the server")
        print(f" Make sure the service is running at {BASE_URL}")
        print(" Run: uvicorn app.main:app --host 0.0.0.0 --port 8003")
    except Exception as e:
        print(f"\n❌ ERROR: {e}")
        raise
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
if __name__ == "__main__":
|
| 239 |
+
main()
|
test_request.json
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"timestamp": 1771332328203,
|
| 3 |
+
"latitude": 13.0827,
|
| 4 |
+
"longitude": 80.2707,
|
| 5 |
+
"location_id": null
|
| 6 |
+
}
|