Initial deployment
Browse files- REMB_Production_Final_Reliable.ipynb +0 -0
- algorithms/.env.production +17 -0
- algorithms/DEPLOYMENT.md +386 -0
- algorithms/Makefile +79 -0
- algorithms/QUICK_START.md +255 -0
- algorithms/README.md +54 -3
- algorithms/RUNNING.md +5 -3
- algorithms/backend/.dockerignore +59 -0
- algorithms/backend/Dockerfile +58 -0
- algorithms/backend/README_HF.md +87 -0
- algorithms/backend/analyze_dxf_entities.py +74 -0
- algorithms/backend/compare_methods.py +40 -0
- algorithms/backend/debug_ezdxf.py +40 -0
- algorithms/backend/debug_validation.py +71 -0
- algorithms/backend/requirements.txt +3 -1
- algorithms/backend/test_additional_dxf.py +47 -0
- algorithms/backend/test_belair_dxf.py +38 -0
- algorithms/backend/test_dxf.py +30 -0
- algorithms/backend/utils/dxf_utils.py +219 -58
- algorithms/docker-compose.yml +48 -0
- algorithms/frontend/.streamlit/config.toml +20 -0
- algorithms/frontend/app.py +42 -29
- algorithms/frontend/requirements.txt +1 -0
- examples/663409.dxf +0 -0
- examples/930300.dxf +2962 -0
- examples/Lot Plan Bel air Technical Description.dxf +0 -0
REMB_Production_Final_Reliable.ipynb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
algorithms/.env.production
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Production Environment Configuration
|
| 2 |
+
# Copy to .env for local production testing
|
| 3 |
+
|
| 4 |
+
# API Server Configuration
|
| 5 |
+
API_HOST=0.0.0.0
|
| 6 |
+
API_PORT=7860
|
| 7 |
+
|
| 8 |
+
# CORS Configuration (comma-separated origins)
|
| 9 |
+
# Use * for development, specific domains for production
|
| 10 |
+
CORS_ORIGINS=*
|
| 11 |
+
|
| 12 |
+
# Logging
|
| 13 |
+
LOG_LEVEL=INFO
|
| 14 |
+
|
| 15 |
+
# Application Settings
|
| 16 |
+
APP_TITLE="Land Redistribution Algorithm API"
|
| 17 |
+
APP_VERSION=2.0.0
|
algorithms/DEPLOYMENT.md
ADDED
|
@@ -0,0 +1,386 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Deployment Guide
|
| 2 |
+
|
| 3 |
+
Complete guide for deploying the Land Redistribution Algorithm to production.
|
| 4 |
+
|
| 5 |
+
## Table of Contents
|
| 6 |
+
|
| 7 |
+
- [Prerequisites](#prerequisites)
|
| 8 |
+
- [Backend Deployment (Hugging Face Spaces)](#backend-deployment-hugging-face-spaces)
|
| 9 |
+
- [Frontend Deployment (Streamlit Cloud)](#frontend-deployment-streamlit-cloud)
|
| 10 |
+
- [Local Docker Testing](#local-docker-testing)
|
| 11 |
+
- [Environment Variables](#environment-variables)
|
| 12 |
+
- [Troubleshooting](#troubleshooting)
|
| 13 |
+
|
| 14 |
+
## Prerequisites
|
| 15 |
+
|
| 16 |
+
### For All Deployments
|
| 17 |
+
- Git installed on your machine
|
| 18 |
+
- GitHub account (for Streamlit Cloud)
|
| 19 |
+
- Hugging Face account (for backend deployment)
|
| 20 |
+
|
| 21 |
+
### For Local Testing
|
| 22 |
+
- Docker and Docker Compose installed
|
| 23 |
+
- Python 3.11+ (for non-Docker development)
|
| 24 |
+
- Make (optional, for convenience commands)
|
| 25 |
+
|
| 26 |
+
## Backend Deployment (Hugging Face Spaces)
|
| 27 |
+
|
| 28 |
+
Hugging Face Spaces provides free hosting for ML applications with Docker support.
|
| 29 |
+
|
| 30 |
+
### Step 1: Create a New Space
|
| 31 |
+
|
| 32 |
+
1. Go to [Hugging Face Spaces](https://huggingface.co/spaces)
|
| 33 |
+
2. Click **"Create new Space"**
|
| 34 |
+
3. Configure:
|
| 35 |
+
- **Space name**: `land-redistribution-api` (or your choice)
|
| 36 |
+
- **License**: MIT
|
| 37 |
+
- **Select the Space SDK**: Docker
|
| 38 |
+
- **Visibility**: Public or Private
|
| 39 |
+
|
| 40 |
+
### Step 2: Prepare Backend Files
|
| 41 |
+
|
| 42 |
+
The backend directory is already configured with:
|
| 43 |
+
- ✅ `Dockerfile` - Multi-stage production build
|
| 44 |
+
- ✅ `README_HF.md` - Hugging Face metadata
|
| 45 |
+
- ✅ `requirements.txt` - Python dependencies
|
| 46 |
+
- ✅ `.dockerignore` - Build optimization
|
| 47 |
+
|
| 48 |
+
### Step 3: Deploy to Hugging Face
|
| 49 |
+
|
| 50 |
+
#### Option A: Git Push (Recommended)
|
| 51 |
+
|
| 52 |
+
```bash
|
| 53 |
+
# Navigate to backend directory
|
| 54 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms/backend
|
| 55 |
+
|
| 56 |
+
# Initialize git (if not already)
|
| 57 |
+
git init
|
| 58 |
+
|
| 59 |
+
# Add Hugging Face remote using your space name
|
| 60 |
+
git remote add hf https://huggingface.co/spaces/<YOUR_USERNAME>/<SPACE_NAME>
|
| 61 |
+
|
| 62 |
+
# Rename README for Hugging Face
|
| 63 |
+
cp README_HF.md README.md
|
| 64 |
+
|
| 65 |
+
# Add and commit files
|
| 66 |
+
git add .
|
| 67 |
+
git commit -m "Initial deployment"
|
| 68 |
+
|
| 69 |
+
# Push to Hugging Face
|
| 70 |
+
git push hf main
|
| 71 |
+
```
|
| 72 |
+
|
| 73 |
+
#### Option B: Web Upload
|
| 74 |
+
|
| 75 |
+
1. In your Space, click **"Files and versions"**
|
| 76 |
+
2. Upload all files from `backend/` directory
|
| 77 |
+
3. Ensure `README_HF.md` is renamed to `README.md`
|
| 78 |
+
|
| 79 |
+
### Step 4: Wait for Build
|
| 80 |
+
|
| 81 |
+
- Hugging Face will automatically build your Docker image
|
| 82 |
+
- Build time: ~5-10 minutes
|
| 83 |
+
- Monitor progress in the "Logs" tab
|
| 84 |
+
|
| 85 |
+
### Step 5: Test Your Backend API
|
| 86 |
+
|
| 87 |
+
Once deployed, your API will be available at:
|
| 88 |
+
```
|
| 89 |
+
https://<YOUR_USERNAME>-<SPACE_NAME>.hf.space
|
| 90 |
+
```
|
| 91 |
+
|
| 92 |
+
Test endpoints:
|
| 93 |
+
```bash
|
| 94 |
+
# Health check
|
| 95 |
+
curl https://<YOUR_USERNAME>-<SPACE_NAME>.hf.space/health
|
| 96 |
+
|
| 97 |
+
# API documentation
|
| 98 |
+
open https://<YOUR_USERNAME>-<SPACE_NAME>.hf.space/docs
|
| 99 |
+
```
|
| 100 |
+
|
| 101 |
+
## Frontend Deployment (Streamlit Cloud)
|
| 102 |
+
|
| 103 |
+
### Step 1: Push Frontend to GitHub
|
| 104 |
+
|
| 105 |
+
```bash
|
| 106 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms/frontend
|
| 107 |
+
|
| 108 |
+
# Initialize git repository (if not already)
|
| 109 |
+
git init
|
| 110 |
+
|
| 111 |
+
# Add GitHub remote
|
| 112 |
+
git remote add origin https://github.com/<YOUR_USERNAME>/land-redistribution-ui.git
|
| 113 |
+
|
| 114 |
+
# Add all files
|
| 115 |
+
git add .
|
| 116 |
+
git commit -m "Initial commit"
|
| 117 |
+
|
| 118 |
+
# Push to GitHub
|
| 119 |
+
git branch -M main
|
| 120 |
+
git push -u origin main
|
| 121 |
+
```
|
| 122 |
+
|
| 123 |
+
### Step 2: Deploy on Streamlit Cloud
|
| 124 |
+
|
| 125 |
+
1. Go to [Streamlit Cloud](https://streamlit.io/cloud)
|
| 126 |
+
2. Sign in with GitHub
|
| 127 |
+
3. Click **"New app"**
|
| 128 |
+
4. Configure:
|
| 129 |
+
- **Repository**: Select your frontend repository
|
| 130 |
+
- **Branch**: `main`
|
| 131 |
+
- **Main file path**: `app.py`
|
| 132 |
+
|
| 133 |
+
### Step 3: Configure Environment Variables
|
| 134 |
+
|
| 135 |
+
In Streamlit Cloud, add secrets:
|
| 136 |
+
|
| 137 |
+
1. Go to your app settings
|
| 138 |
+
2. Click **"Secrets"**
|
| 139 |
+
3. Add:
|
| 140 |
+
```toml
|
| 141 |
+
API_URL = "https://<YOUR_HF_USERNAME>-<SPACE_NAME>.hf.space"
|
| 142 |
+
```
|
| 143 |
+
|
| 144 |
+
### Step 4: Deploy
|
| 145 |
+
|
| 146 |
+
- Click **"Deploy"**
|
| 147 |
+
- Streamlit Cloud will install dependencies and launch your app
|
| 148 |
+
- Your app will be available at: `https://<APP_NAME>.streamlit.app`
|
| 149 |
+
|
| 150 |
+
## Local Docker Testing
|
| 151 |
+
|
| 152 |
+
Before deploying to production, test locally with Docker Compose.
|
| 153 |
+
|
| 154 |
+
### Quick Start
|
| 155 |
+
|
| 156 |
+
```bash
|
| 157 |
+
# Navigate to algorithms directory
|
| 158 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms
|
| 159 |
+
|
| 160 |
+
# Build and start services
|
| 161 |
+
make build
|
| 162 |
+
make up
|
| 163 |
+
|
| 164 |
+
# View logs
|
| 165 |
+
make logs
|
| 166 |
+
|
| 167 |
+
# Test services
|
| 168 |
+
make health
|
| 169 |
+
```
|
| 170 |
+
|
| 171 |
+
### Manual Testing
|
| 172 |
+
|
| 173 |
+
```bash
|
| 174 |
+
# Build backend
|
| 175 |
+
docker-compose build backend
|
| 176 |
+
|
| 177 |
+
# Start all services
|
| 178 |
+
docker-compose up -d
|
| 179 |
+
|
| 180 |
+
# Check status
|
| 181 |
+
docker-compose ps
|
| 182 |
+
|
| 183 |
+
# View logs
|
| 184 |
+
docker-compose logs -f
|
| 185 |
+
|
| 186 |
+
# Test backend
|
| 187 |
+
curl http://localhost:8000/health
|
| 188 |
+
|
| 189 |
+
# Access frontend
|
| 190 |
+
open http://localhost:8501
|
| 191 |
+
|
| 192 |
+
# Stop services
|
| 193 |
+
docker-compose down
|
| 194 |
+
```
|
| 195 |
+
|
| 196 |
+
### Testing the Backend Container Only
|
| 197 |
+
|
| 198 |
+
```bash
|
| 199 |
+
cd backend
|
| 200 |
+
|
| 201 |
+
# Build image
|
| 202 |
+
docker build -t land-redistribution-api .
|
| 203 |
+
|
| 204 |
+
# Run container
|
| 205 |
+
docker run -p 7860:7860 land-redistribution-api
|
| 206 |
+
|
| 207 |
+
# Test in another terminal
|
| 208 |
+
curl http://localhost:7860/health
|
| 209 |
+
```
|
| 210 |
+
|
| 211 |
+
## Environment Variables
|
| 212 |
+
|
| 213 |
+
### Backend (.env or Hugging Face Secrets)
|
| 214 |
+
|
| 215 |
+
```bash
|
| 216 |
+
API_HOST=0.0.0.0
|
| 217 |
+
API_PORT=7860
|
| 218 |
+
CORS_ORIGINS=*
|
| 219 |
+
LOG_LEVEL=INFO
|
| 220 |
+
```
|
| 221 |
+
|
| 222 |
+
### Frontend (.env or Streamlit Secrets)
|
| 223 |
+
|
| 224 |
+
```bash
|
| 225 |
+
# Development
|
| 226 |
+
API_URL=http://localhost:8000
|
| 227 |
+
|
| 228 |
+
# Production (use your actual Hugging Face Space URL)
|
| 229 |
+
API_URL=https://<YOUR_HF_USERNAME>-<SPACE_NAME>.hf.space
|
| 230 |
+
```
|
| 231 |
+
|
| 232 |
+
## Troubleshooting
|
| 233 |
+
|
| 234 |
+
### Backend Issues
|
| 235 |
+
|
| 236 |
+
#### Build Fails on Hugging Face
|
| 237 |
+
|
| 238 |
+
**Problem**: Docker build fails with dependency errors
|
| 239 |
+
|
| 240 |
+
**Solution**:
|
| 241 |
+
1. Check Dockerfile syntax
|
| 242 |
+
2. Verify requirements.txt has pinned versions
|
| 243 |
+
3. Check build logs in Hugging Face Space
|
| 244 |
+
4. Test locally first: `docker build -t test ./backend`
|
| 245 |
+
|
| 246 |
+
#### API Returns 500 Error
|
| 247 |
+
|
| 248 |
+
**Problem**: Backend starts but API endpoints fail
|
| 249 |
+
|
| 250 |
+
**Solution**:
|
| 251 |
+
1. Check logs in Hugging Face Space
|
| 252 |
+
2. Verify all imports work: Test locally with Docker
|
| 253 |
+
3. Check CORS settings in `main.py`
|
| 254 |
+
|
| 255 |
+
#### Slow Performance
|
| 256 |
+
|
| 257 |
+
**Problem**: API is slow or times out
|
| 258 |
+
|
| 259 |
+
**Solution**:
|
| 260 |
+
- Reduce optimization parameters (population_size, generations)
|
| 261 |
+
- Consider upgrading to Hugging Face paid tier for more resources
|
| 262 |
+
- Add caching for common requests
|
| 263 |
+
|
| 264 |
+
### Frontend Issues
|
| 265 |
+
|
| 266 |
+
#### Cannot Connect to Backend
|
| 267 |
+
|
| 268 |
+
**Problem**: Frontend shows "Cannot connect to API"
|
| 269 |
+
|
| 270 |
+
**Solution**:
|
| 271 |
+
1. Verify `API_URL` environment variable is set correctly in Streamlit Secrets
|
| 272 |
+
2. Check backend is running: Visit backend URL directly
|
| 273 |
+
3. Check CORS settings on backend
|
| 274 |
+
4. Verify no typos in API_URL (should include https://)
|
| 275 |
+
|
| 276 |
+
#### Streamlit Cloud Build Fails
|
| 277 |
+
|
| 278 |
+
**Problem**: Deployment fails on Streamlit Cloud
|
| 279 |
+
|
| 280 |
+
**Solution**:
|
| 281 |
+
1. Check `requirements.txt` for incompatible versions
|
| 282 |
+
2. Verify `app.py` has no syntax errors
|
| 283 |
+
3. Check Streamlit Cloud build logs
|
| 284 |
+
4. Test locally: `streamlit run app.py`
|
| 285 |
+
|
| 286 |
+
### Docker Compose Issues
|
| 287 |
+
|
| 288 |
+
#### Port Already in Use
|
| 289 |
+
|
| 290 |
+
**Problem**: `Error: port is already allocated`
|
| 291 |
+
|
| 292 |
+
**Solution**:
|
| 293 |
+
```bash
|
| 294 |
+
# Find process using port
|
| 295 |
+
lsof -i :8000
|
| 296 |
+
lsof -i :8501
|
| 297 |
+
|
| 298 |
+
# Kill process
|
| 299 |
+
kill -9 <PID>
|
| 300 |
+
|
| 301 |
+
# Or change ports in docker-compose.yml
|
| 302 |
+
```
|
| 303 |
+
|
| 304 |
+
#### Container Crashes on Startup
|
| 305 |
+
|
| 306 |
+
**Problem**: Service exits immediately
|
| 307 |
+
|
| 308 |
+
**Solution**:
|
| 309 |
+
```bash
|
| 310 |
+
# Check logs
|
| 311 |
+
docker-compose logs backend
|
| 312 |
+
docker-compose logs frontend
|
| 313 |
+
|
| 314 |
+
# Run container interactively
|
| 315 |
+
docker run -it land-redistribution-api /bin/bash
|
| 316 |
+
|
| 317 |
+
# Check health
|
| 318 |
+
docker-compose ps
|
| 319 |
+
```
|
| 320 |
+
|
| 321 |
+
## Performance Optimization
|
| 322 |
+
|
| 323 |
+
### Backend
|
| 324 |
+
|
| 325 |
+
1. **Reduce CPU-intensive operations**:
|
| 326 |
+
- Lower default `population_size` and `generations`
|
| 327 |
+
- Add request timeouts
|
| 328 |
+
- Implement result caching
|
| 329 |
+
|
| 330 |
+
2. **Optimize Docker image**:
|
| 331 |
+
- Use multi-stage builds (already implemented)
|
| 332 |
+
- Minimize layers
|
| 333 |
+
- Remove unnecessary dependencies
|
| 334 |
+
|
| 335 |
+
### Frontend
|
| 336 |
+
|
| 337 |
+
1. **Optimize Streamlit**:
|
| 338 |
+
- Use `@st.cache_data` for expensive computations
|
| 339 |
+
- Lazy load visualizations
|
| 340 |
+
- Reduce re-renders with `st.session_state`
|
| 341 |
+
|
| 342 |
+
2. **Reduce API calls**:
|
| 343 |
+
- Cache results in session state
|
| 344 |
+
- Batch multiple requests
|
| 345 |
+
|
| 346 |
+
## Monitoring
|
| 347 |
+
|
| 348 |
+
### Hugging Face Spaces
|
| 349 |
+
|
| 350 |
+
- View logs: Space → Logs tab
|
| 351 |
+
- Check metrics: Space → Settings → Usage
|
| 352 |
+
- Restart: Space → Settings → Factory reboot
|
| 353 |
+
|
| 354 |
+
### Streamlit Cloud
|
| 355 |
+
|
| 356 |
+
- View logs: App → Manage app → Logs
|
| 357 |
+
- Check analytics: App → Analytics
|
| 358 |
+
- Restart: App → Manage app → Reboot app
|
| 359 |
+
|
| 360 |
+
## Security Considerations
|
| 361 |
+
|
| 362 |
+
1. **Environment Variables**: Never commit `.env` files with secrets
|
| 363 |
+
2. **CORS**: In production, replace `CORS_ORIGINS=*` with specific domains
|
| 364 |
+
3. **Rate Limiting**: Consider adding rate limiting for public APIs
|
| 365 |
+
4. **Input Validation**: Backend validates all inputs (already implemented)
|
| 366 |
+
|
| 367 |
+
## Next Steps
|
| 368 |
+
|
| 369 |
+
1. ✅ Test locally with Docker Compose
|
| 370 |
+
2. ✅ Deploy backend to Hugging Face Spaces
|
| 371 |
+
3. ✅ Deploy frontend to Streamlit Cloud
|
| 372 |
+
4. ✅ Configure environment variables
|
| 373 |
+
5. ✅ Test end-to-end flow
|
| 374 |
+
6. 📝 Monitor performance and logs
|
| 375 |
+
7. 🚀 Share with users!
|
| 376 |
+
|
| 377 |
+
## Support
|
| 378 |
+
|
| 379 |
+
For issues or questions:
|
| 380 |
+
- Backend API: Check Hugging Face Space discussions
|
| 381 |
+
- Frontend: Check Streamlit Community forum
|
| 382 |
+
- General: Open an issue on GitHub
|
| 383 |
+
|
| 384 |
+
## License
|
| 385 |
+
|
| 386 |
+
MIT
|
algorithms/Makefile
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.PHONY: help build up down logs clean test-backend test-frontend restart
|
| 2 |
+
|
| 3 |
+
help: ## Show this help message
|
| 4 |
+
@echo 'Usage: make [target]'
|
| 5 |
+
@echo ''
|
| 6 |
+
@echo 'Available targets:'
|
| 7 |
+
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf " \033[36m%-15s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
|
| 8 |
+
|
| 9 |
+
build: ## Build Docker images
|
| 10 |
+
@echo "Building backend Docker image..."
|
| 11 |
+
docker-compose build backend
|
| 12 |
+
@echo "✅ Build complete!"
|
| 13 |
+
|
| 14 |
+
up: ## Start all services
|
| 15 |
+
@echo "Starting services..."
|
| 16 |
+
docker-compose up -d
|
| 17 |
+
@echo "✅ Services started!"
|
| 18 |
+
@echo "Backend API: http://localhost:8000"
|
| 19 |
+
@echo "Frontend UI: http://localhost:8501"
|
| 20 |
+
@echo "API Docs: http://localhost:8000/docs"
|
| 21 |
+
|
| 22 |
+
down: ## Stop all services
|
| 23 |
+
@echo "Stopping services..."
|
| 24 |
+
docker-compose down
|
| 25 |
+
@echo "✅ Services stopped!"
|
| 26 |
+
|
| 27 |
+
logs: ## View logs from all services
|
| 28 |
+
docker-compose logs -f
|
| 29 |
+
|
| 30 |
+
logs-backend: ## View backend logs only
|
| 31 |
+
docker-compose logs -f backend
|
| 32 |
+
|
| 33 |
+
logs-frontend: ## View frontend logs only
|
| 34 |
+
docker-compose logs -f frontend
|
| 35 |
+
|
| 36 |
+
restart: ## Restart all services
|
| 37 |
+
@echo "Restarting services..."
|
| 38 |
+
docker-compose restart
|
| 39 |
+
@echo "✅ Services restarted!"
|
| 40 |
+
|
| 41 |
+
clean: ## Stop services and remove volumes
|
| 42 |
+
@echo "Cleaning up..."
|
| 43 |
+
docker-compose down -v
|
| 44 |
+
@echo "✅ Cleanup complete!"
|
| 45 |
+
|
| 46 |
+
test-backend: ## Test backend container
|
| 47 |
+
@echo "Testing backend..."
|
| 48 |
+
@docker build -t land-redistribution-test ./backend
|
| 49 |
+
@docker run --rm -p 7860:7860 -d --name backend-test land-redistribution-test
|
| 50 |
+
@sleep 5
|
| 51 |
+
@echo "Testing health endpoint..."
|
| 52 |
+
@curl -f http://localhost:7860/health || (docker stop backend-test && exit 1)
|
| 53 |
+
@echo "\n✅ Backend test passed!"
|
| 54 |
+
@docker stop backend-test
|
| 55 |
+
|
| 56 |
+
dev-backend: ## Run backend in development mode (without Docker)
|
| 57 |
+
@echo "Starting backend in development mode..."
|
| 58 |
+
cd backend && uvicorn main:app --reload --port 8000
|
| 59 |
+
|
| 60 |
+
dev-frontend: ## Run frontend in development mode (without Docker)
|
| 61 |
+
@echo "Starting frontend in development mode..."
|
| 62 |
+
cd frontend && streamlit run app.py --server.port 8501
|
| 63 |
+
|
| 64 |
+
install-backend: ## Install backend dependencies
|
| 65 |
+
cd backend && pip install -r requirements.txt
|
| 66 |
+
|
| 67 |
+
install-frontend: ## Install frontend dependencies
|
| 68 |
+
cd frontend && pip install -r requirements.txt
|
| 69 |
+
|
| 70 |
+
install: install-backend install-frontend ## Install all dependencies
|
| 71 |
+
|
| 72 |
+
status: ## Show status of services
|
| 73 |
+
docker-compose ps
|
| 74 |
+
|
| 75 |
+
health: ## Check health of running services
|
| 76 |
+
@echo "Checking backend health..."
|
| 77 |
+
@curl -s http://localhost:8000/health | python -m json.tool || echo "❌ Backend not responding"
|
| 78 |
+
@echo "\nChecking frontend..."
|
| 79 |
+
@curl -s -o /dev/null -w "Status: %{http_code}\n" http://localhost:8501 || echo "❌ Frontend not responding"
|
algorithms/QUICK_START.md
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# 🚀 Quick Deployment Reference
|
| 2 |
+
|
| 3 |
+
## Local Testing
|
| 4 |
+
|
| 5 |
+
### Using Docker Compose (Recommended)
|
| 6 |
+
```bash
|
| 7 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms
|
| 8 |
+
|
| 9 |
+
# Build images
|
| 10 |
+
make build
|
| 11 |
+
|
| 12 |
+
# Start services
|
| 13 |
+
make up
|
| 14 |
+
|
| 15 |
+
# View logs
|
| 16 |
+
make logs
|
| 17 |
+
|
| 18 |
+
# Check health
|
| 19 |
+
make health
|
| 20 |
+
|
| 21 |
+
# Access services
|
| 22 |
+
# Backend: http://localhost:8000
|
| 23 |
+
# Frontend: http://localhost:8501
|
| 24 |
+
# API Docs: http://localhost:8000/docs
|
| 25 |
+
|
| 26 |
+
# Stop services
|
| 27 |
+
make down
|
| 28 |
+
```
|
| 29 |
+
|
| 30 |
+
### Manual (Without Docker)
|
| 31 |
+
```bash
|
| 32 |
+
# Terminal 1: Backend
|
| 33 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms/backend
|
| 34 |
+
pip install -r requirements.txt
|
| 35 |
+
uvicorn main:app --reload --port 8000
|
| 36 |
+
|
| 37 |
+
# Terminal 2: Frontend
|
| 38 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms/frontend
|
| 39 |
+
pip install -r requirements.txt
|
| 40 |
+
export API_URL=http://localhost:8000
|
| 41 |
+
streamlit run app.py --server.port 8501
|
| 42 |
+
```
|
| 43 |
+
|
| 44 |
+
---
|
| 45 |
+
|
| 46 |
+
## Production Deployment
|
| 47 |
+
|
| 48 |
+
### Backend → Hugging Face Spaces
|
| 49 |
+
|
| 50 |
+
```bash
|
| 51 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms/backend
|
| 52 |
+
|
| 53 |
+
# 1. Rename README for HF
|
| 54 |
+
cp README_HF.md README.md
|
| 55 |
+
|
| 56 |
+
# 2. Initialize git (if needed)
|
| 57 |
+
git init
|
| 58 |
+
|
| 59 |
+
# 3. Add HF remote (replace with your details)
|
| 60 |
+
git remote add hf https://huggingface.co/spaces/<USERNAME>/<SPACE_NAME>
|
| 61 |
+
|
| 62 |
+
# 4. Commit and push
|
| 63 |
+
git add .
|
| 64 |
+
git commit -m "Deploy to Hugging Face Spaces"
|
| 65 |
+
git push hf main
|
| 66 |
+
|
| 67 |
+
# 5. Monitor build at:
|
| 68 |
+
# https://huggingface.co/spaces/<USERNAME>/<SPACE_NAME>
|
| 69 |
+
```
|
| 70 |
+
|
| 71 |
+
**Your API will be at**: `https://<USERNAME>-<SPACE_NAME>.hf.space`
|
| 72 |
+
|
| 73 |
+
### Frontend → Streamlit Cloud
|
| 74 |
+
|
| 75 |
+
```bash
|
| 76 |
+
cd /Volumes/WorkSpace/Project/REMB/algorithms/frontend
|
| 77 |
+
|
| 78 |
+
# 1. Push to GitHub
|
| 79 |
+
git init
|
| 80 |
+
git remote add origin https://github.com/<USERNAME>/<REPO_NAME>.git
|
| 81 |
+
git add .
|
| 82 |
+
git commit -m "Initial commit"
|
| 83 |
+
git branch -M main
|
| 84 |
+
git push -u origin main
|
| 85 |
+
|
| 86 |
+
# 2. Go to Streamlit Cloud
|
| 87 |
+
# https://streamlit.io/cloud
|
| 88 |
+
|
| 89 |
+
# 3. Create new app:
|
| 90 |
+
# - Repository: <USERNAME>/<REPO_NAME>
|
| 91 |
+
# - Branch: main
|
| 92 |
+
# - Main file: app.py
|
| 93 |
+
|
| 94 |
+
# 4. Add secrets in Streamlit Cloud settings:
|
| 95 |
+
# API_URL = "https://<HF_USERNAME>-<SPACE_NAME>.hf.space"
|
| 96 |
+
|
| 97 |
+
# 5. Deploy!
|
| 98 |
+
```
|
| 99 |
+
|
| 100 |
+
**Your app will be at**: `https://<APP_NAME>.streamlit.app`
|
| 101 |
+
|
| 102 |
+
---
|
| 103 |
+
|
| 104 |
+
## Environment Variables
|
| 105 |
+
|
| 106 |
+
### Backend (.env or HF Secrets)
|
| 107 |
+
```bash
|
| 108 |
+
API_HOST=0.0.0.0
|
| 109 |
+
API_PORT=7860
|
| 110 |
+
CORS_ORIGINS=*
|
| 111 |
+
LOG_LEVEL=INFO
|
| 112 |
+
```
|
| 113 |
+
|
| 114 |
+
### Frontend (.env or Streamlit Secrets)
|
| 115 |
+
```bash
|
| 116 |
+
# Development
|
| 117 |
+
API_URL=http://localhost:8000
|
| 118 |
+
|
| 119 |
+
# Production
|
| 120 |
+
API_URL=https://<HF_USERNAME>-<SPACE_NAME>.hf.space
|
| 121 |
+
```
|
| 122 |
+
|
| 123 |
+
---
|
| 124 |
+
|
| 125 |
+
## Troubleshooting
|
| 126 |
+
|
| 127 |
+
### Docker Build Fails
|
| 128 |
+
```bash
|
| 129 |
+
# Test build locally
|
| 130 |
+
cd backend
|
| 131 |
+
docker build -t test .
|
| 132 |
+
|
| 133 |
+
# Check logs
|
| 134 |
+
docker logs <container_id>
|
| 135 |
+
|
| 136 |
+
# Run interactively
|
| 137 |
+
docker run -it test /bin/bash
|
| 138 |
+
```
|
| 139 |
+
|
| 140 |
+
### Frontend Can't Connect
|
| 141 |
+
1. Check `API_URL` environment variable
|
| 142 |
+
2. Verify backend is running
|
| 143 |
+
3. Check CORS settings in backend
|
| 144 |
+
4. Test backend directly: `curl <API_URL>/health`
|
| 145 |
+
|
| 146 |
+
### Port Already in Use
|
| 147 |
+
```bash
|
| 148 |
+
# Find process
|
| 149 |
+
lsof -i :8000
|
| 150 |
+
lsof -i :8501
|
| 151 |
+
|
| 152 |
+
# Kill process
|
| 153 |
+
kill -9 <PID>
|
| 154 |
+
```
|
| 155 |
+
|
| 156 |
+
---
|
| 157 |
+
|
| 158 |
+
## Testing Deployed Services
|
| 159 |
+
|
| 160 |
+
### Test Backend API
|
| 161 |
+
```bash
|
| 162 |
+
# Health check
|
| 163 |
+
curl https://<USERNAME>-<SPACE_NAME>.hf.space/health
|
| 164 |
+
|
| 165 |
+
# View API docs
|
| 166 |
+
open https://<USERNAME>-<SPACE_NAME>.hf.space/docs
|
| 167 |
+
|
| 168 |
+
# Test optimization endpoint
|
| 169 |
+
curl -X POST https://<USERNAME>-<SPACE_NAME>.hf.space/api/optimize \
|
| 170 |
+
-H "Content-Type: application/json" \
|
| 171 |
+
-d '{
|
| 172 |
+
"config": {
|
| 173 |
+
"spacing_min": 20,
|
| 174 |
+
"spacing_max": 30,
|
| 175 |
+
"population_size": 20,
|
| 176 |
+
"generations": 50
|
| 177 |
+
},
|
| 178 |
+
"land_plots": [{
|
| 179 |
+
"type": "Polygon",
|
| 180 |
+
"coordinates": [[[0,0],[100,0],[100,100],[0,100],[0,0]]]
|
| 181 |
+
}]
|
| 182 |
+
}'
|
| 183 |
+
```
|
| 184 |
+
|
| 185 |
+
### Test Frontend
|
| 186 |
+
1. Open `https://<APP_NAME>.streamlit.app`
|
| 187 |
+
2. Select "Sample" → "Rectangle 100x100"
|
| 188 |
+
3. Click "🚀 Run Optimization"
|
| 189 |
+
4. Wait for results
|
| 190 |
+
5. Download GeoJSON
|
| 191 |
+
|
| 192 |
+
---
|
| 193 |
+
|
| 194 |
+
## Useful Makefile Commands
|
| 195 |
+
|
| 196 |
+
```bash
|
| 197 |
+
make help # Show all available commands
|
| 198 |
+
make build # Build Docker images
|
| 199 |
+
make up # Start services
|
| 200 |
+
make down # Stop services
|
| 201 |
+
make logs # View all logs
|
| 202 |
+
make logs-backend # View backend logs only
|
| 203 |
+
make logs-frontend # View frontend logs only
|
| 204 |
+
make restart # Restart all services
|
| 205 |
+
make clean # Stop and remove volumes
|
| 206 |
+
make test-backend # Test backend container
|
| 207 |
+
make dev-backend # Run backend without Docker
|
| 208 |
+
make dev-frontend # Run frontend without Docker
|
| 209 |
+
make status # Show service status
|
| 210 |
+
make health # Check service health
|
| 211 |
+
```
|
| 212 |
+
|
| 213 |
+
---
|
| 214 |
+
|
| 215 |
+
## File Checklist
|
| 216 |
+
|
| 217 |
+
Before deploying, ensure these files exist:
|
| 218 |
+
|
| 219 |
+
**Backend**:
|
| 220 |
+
- [x] `Dockerfile`
|
| 221 |
+
- [x] `.dockerignore`
|
| 222 |
+
- [x] `requirements.txt` (with production deps)
|
| 223 |
+
- [x] `README_HF.md` (will become README.md)
|
| 224 |
+
|
| 225 |
+
**Frontend**:
|
| 226 |
+
- [x] `app.py` (with environment support)
|
| 227 |
+
- [x] `requirements.txt` (with python-dotenv)
|
| 228 |
+
- [x] `.streamlit/config.toml`
|
| 229 |
+
|
| 230 |
+
**Root**:
|
| 231 |
+
- [x] `docker-compose.yml`
|
| 232 |
+
- [x] `Makefile`
|
| 233 |
+
- [x] `.env.production` (template)
|
| 234 |
+
- [x] `DEPLOYMENT.md`
|
| 235 |
+
- [x] `README.md` (updated)
|
| 236 |
+
|
| 237 |
+
---
|
| 238 |
+
|
| 239 |
+
## Quick Links
|
| 240 |
+
|
| 241 |
+
- 📖 **Full Guide**: [DEPLOYMENT.md](DEPLOYMENT.md)
|
| 242 |
+
- 🏠 **Main README**: [README.md](README.md)
|
| 243 |
+
- 🐳 **Backend Dockerfile**: [Dockerfile](backend/Dockerfile)
|
| 244 |
+
- 🎨 **Frontend App**: [app.py](frontend/app.py)
|
| 245 |
+
- 🔧 **Docker Compose**: [docker-compose.yml](docker-compose.yml)
|
| 246 |
+
|
| 247 |
+
---
|
| 248 |
+
|
| 249 |
+
## Support
|
| 250 |
+
|
| 251 |
+
- **Hugging Face Spaces**: https://huggingface.co/docs/hub/spaces
|
| 252 |
+
- **Streamlit Cloud**: https://docs.streamlit.io/streamlit-community-cloud
|
| 253 |
+
- **Docker**: https://docs.docker.com/
|
| 254 |
+
|
| 255 |
+
**Next Step**: Follow the [DEPLOYMENT.md](DEPLOYMENT.md) guide!
|
algorithms/README.md
CHANGED
|
@@ -30,22 +30,73 @@ algorithms/
|
|
| 30 |
└── README.md
|
| 31 |
```
|
| 32 |
|
| 33 |
-
##
|
| 34 |
|
| 35 |
-
###
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
|
| 37 |
```bash
|
| 38 |
cd algorithms/backend
|
| 39 |
pip install -r requirements.txt
|
| 40 |
```
|
| 41 |
|
| 42 |
-
|
| 43 |
|
| 44 |
```bash
|
| 45 |
cd algorithms/frontend
|
| 46 |
pip install -r requirements.txt
|
| 47 |
```
|
| 48 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 49 |
## Running the Application
|
| 50 |
|
| 51 |
### Start Backend Server
|
|
|
|
| 30 |
└── README.md
|
| 31 |
```
|
| 32 |
|
| 33 |
+
## Quick Start
|
| 34 |
|
| 35 |
+
### Option 1: Docker Compose (Recommended)
|
| 36 |
+
|
| 37 |
+
Run both backend and frontend with a single command:
|
| 38 |
+
|
| 39 |
+
```bash
|
| 40 |
+
cd algorithms
|
| 41 |
+
make build # Build Docker images
|
| 42 |
+
make up # Start services
|
| 43 |
+
```
|
| 44 |
+
|
| 45 |
+
Services will be available at:
|
| 46 |
+
- **Backend API**: http://localhost:8000
|
| 47 |
+
- **Frontend UI**: http://localhost:8501
|
| 48 |
+
- **API Docs**: http://localhost:8000/docs
|
| 49 |
+
|
| 50 |
+
Stop services:
|
| 51 |
+
```bash
|
| 52 |
+
make down
|
| 53 |
+
```
|
| 54 |
+
|
| 55 |
+
### Option 2: Manual Installation
|
| 56 |
+
|
| 57 |
+
#### Backend
|
| 58 |
|
| 59 |
```bash
|
| 60 |
cd algorithms/backend
|
| 61 |
pip install -r requirements.txt
|
| 62 |
```
|
| 63 |
|
| 64 |
+
#### Frontend
|
| 65 |
|
| 66 |
```bash
|
| 67 |
cd algorithms/frontend
|
| 68 |
pip install -r requirements.txt
|
| 69 |
```
|
| 70 |
|
| 71 |
+
## Deployment
|
| 72 |
+
|
| 73 |
+
### Production Deployment
|
| 74 |
+
|
| 75 |
+
For deploying to production environments:
|
| 76 |
+
|
| 77 |
+
- **Backend**: Deploy to [Hugging Face Spaces](https://huggingface.co/spaces) using Docker
|
| 78 |
+
- **Frontend**: Deploy to [Streamlit Cloud](https://streamlit.io/cloud) or any hosting service
|
| 79 |
+
|
| 80 |
+
📖 **See [DEPLOYMENT.md](DEPLOYMENT.md) for detailed deployment instructions**
|
| 81 |
+
|
| 82 |
+
### Environment Variables
|
| 83 |
+
|
| 84 |
+
Create a `.env` file (copy from `.env.production`):
|
| 85 |
+
|
| 86 |
+
**Backend**:
|
| 87 |
+
```bash
|
| 88 |
+
API_HOST=0.0.0.0
|
| 89 |
+
API_PORT=7860
|
| 90 |
+
CORS_ORIGINS=*
|
| 91 |
+
LOG_LEVEL=INFO
|
| 92 |
+
```
|
| 93 |
+
|
| 94 |
+
**Frontend**:
|
| 95 |
+
```bash
|
| 96 |
+
API_URL=http://localhost:8000 # Development
|
| 97 |
+
# API_URL=https://your-space.hf.space # Production
|
| 98 |
+
```
|
| 99 |
+
|
| 100 |
## Running the Application
|
| 101 |
|
| 102 |
### Start Backend Server
|
algorithms/RUNNING.md
CHANGED
|
@@ -76,14 +76,16 @@ lsof -i :8502
|
|
| 76 |
kill -9 <PID>
|
| 77 |
```
|
| 78 |
|
| 79 |
-
|
| 80 |
|
| 81 |
-
```bash
|
| 82 |
# Terminal 1: Backend
|
|
|
|
|
|
|
| 83 |
cd /Volumes/WorkSpace/Project/REMB/algorithms/backend
|
| 84 |
../../venv/bin/uvicorn main:app --reload --port 8000
|
| 85 |
|
| 86 |
# Terminal 2: Frontend
|
|
|
|
|
|
|
| 87 |
cd /Volumes/WorkSpace/Project/REMB/algorithms/frontend
|
| 88 |
../../venv/bin/streamlit run app.py --server.port 8502
|
| 89 |
-
```
|
|
|
|
| 76 |
kill -9 <PID>
|
| 77 |
```
|
| 78 |
|
| 79 |
+
# Restart Services
|
| 80 |
|
|
|
|
| 81 |
# Terminal 1: Backend
|
| 82 |
+
# Kill existing backend process
|
| 83 |
+
lsof -i :8000 | awk 'NR!=1 {print $2}' | xargs kill -9
|
| 84 |
cd /Volumes/WorkSpace/Project/REMB/algorithms/backend
|
| 85 |
../../venv/bin/uvicorn main:app --reload --port 8000
|
| 86 |
|
| 87 |
# Terminal 2: Frontend
|
| 88 |
+
# Kill existing frontend process
|
| 89 |
+
lsof -i :8502 | awk 'NR!=1 {print $2}' | xargs kill -9
|
| 90 |
cd /Volumes/WorkSpace/Project/REMB/algorithms/frontend
|
| 91 |
../../venv/bin/streamlit run app.py --server.port 8502
|
|
|
algorithms/backend/.dockerignore
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Python
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
*.so
|
| 6 |
+
.Python
|
| 7 |
+
*.egg-info/
|
| 8 |
+
dist/
|
| 9 |
+
build/
|
| 10 |
+
|
| 11 |
+
# Virtual environments
|
| 12 |
+
venv/
|
| 13 |
+
env/
|
| 14 |
+
ENV/
|
| 15 |
+
|
| 16 |
+
# IDEs
|
| 17 |
+
.vscode/
|
| 18 |
+
.idea/
|
| 19 |
+
*.swp
|
| 20 |
+
*.swo
|
| 21 |
+
*~
|
| 22 |
+
|
| 23 |
+
# Testing
|
| 24 |
+
.pytest_cache/
|
| 25 |
+
.coverage
|
| 26 |
+
htmlcov/
|
| 27 |
+
*.cover
|
| 28 |
+
|
| 29 |
+
# Development
|
| 30 |
+
.env
|
| 31 |
+
.env.local
|
| 32 |
+
*.log
|
| 33 |
+
|
| 34 |
+
# Git
|
| 35 |
+
.git/
|
| 36 |
+
.gitignore
|
| 37 |
+
.gitattributes
|
| 38 |
+
|
| 39 |
+
# Documentation
|
| 40 |
+
*.md
|
| 41 |
+
!README_HF.md
|
| 42 |
+
docs/
|
| 43 |
+
|
| 44 |
+
# Test files
|
| 45 |
+
test_*.py
|
| 46 |
+
*_test.py
|
| 47 |
+
tests/
|
| 48 |
+
analyze_*.py
|
| 49 |
+
debug_*.py
|
| 50 |
+
compare_*.py
|
| 51 |
+
|
| 52 |
+
# Sample data
|
| 53 |
+
samples/
|
| 54 |
+
examples/
|
| 55 |
+
data/
|
| 56 |
+
|
| 57 |
+
# OS
|
| 58 |
+
.DS_Store
|
| 59 |
+
Thumbs.db
|
algorithms/backend/Dockerfile
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Multi-stage build for Land Redistribution API
#
# Stage 1 ("builder") compiles Python dependencies with the full toolchain;
# stage 2 copies only the installed packages, keeping the runtime image small.
FROM python:3.11-slim as builder

# Set working directory
WORKDIR /app

# Install system dependencies required for scientific libraries
# (numpy/scipy fall back to building against OpenBLAS/LAPACK when no wheel fits)
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    gcc \
    g++ \
    gfortran \
    libopenblas-dev \
    liblapack-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better layer caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Final stage
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Install runtime (non -dev) dependencies needed by the scientific stack
RUN apt-get update && apt-get install -y --no-install-recommends \
    libopenblas0 \
    libgomp1 \
    && rm -rf /var/lib/apt/lists/*

# Copy Python packages from builder
COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin

# Copy application code
COPY . .

# Create non-root user (required by Hugging Face Spaces, good practice anywhere)
RUN useradd -m -u 1000 appuser && \
    chown -R appuser:appuser /app

# Switch to non-root user
USER appuser

# Expose Hugging Face Spaces default port
EXPOSE 7860

# Health check.
# FIX: the original used `requests`, which is not listed in requirements.txt
# and therefore is not installed in this image — the check would always fail.
# Use the standard library instead; urlopen raises (non-zero exit) on errors.
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:7860/health')"

# Run the application
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]
|
algorithms/backend/README_HF.md
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: Land Redistribution Algorithm API
|
| 3 |
+
emoji: 🏘️
|
| 4 |
+
colorFrom: blue
|
| 5 |
+
colorTo: green
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 7860
|
| 8 |
+
pinned: false
|
| 9 |
+
---
|
| 10 |
+
|
| 11 |
+
# Land Redistribution Algorithm API
|
| 12 |
+
|
| 13 |
+
API for industrial park land subdivision and redistribution using advanced optimization algorithms.
|
| 14 |
+
|
| 15 |
+
## Features
|
| 16 |
+
|
| 17 |
+
- **Multi-stage Optimization**: NSGA-II genetic algorithm + OR-Tools constraint programming
|
| 18 |
+
- **DXF Import**: Upload site boundaries from CAD files
|
| 19 |
+
- **Automated Layout**: Grid optimization, block subdivision, and infrastructure planning
|
| 20 |
+
- **Export Results**: Download results as GeoJSON
|
| 21 |
+
|
| 22 |
+
## API Endpoints
|
| 23 |
+
|
| 24 |
+
### Health Check
|
| 25 |
+
```bash
|
| 26 |
+
GET /health
|
| 27 |
+
```
|
| 28 |
+
|
| 29 |
+
### Full Optimization Pipeline
|
| 30 |
+
```bash
|
| 31 |
+
POST /api/optimize
|
| 32 |
+
```
|
| 33 |
+
|
| 34 |
+
Runs the complete 3-stage optimization:
|
| 35 |
+
1. Grid Optimization (NSGA-II)
|
| 36 |
+
2. Block Subdivision (OR-Tools)
|
| 37 |
+
3. Infrastructure Planning
|
| 38 |
+
|
| 39 |
+
### DXF Upload
|
| 40 |
+
```bash
|
| 41 |
+
POST /api/upload-dxf
|
| 42 |
+
```
|
| 43 |
+
|
| 44 |
+
Upload DXF file and extract boundary polygon.
|
| 45 |
+
|
| 46 |
+
## Usage
|
| 47 |
+
|
| 48 |
+
Visit the [interactive API documentation](/docs) for detailed endpoint specifications and to test the API directly.
|
| 49 |
+
|
| 50 |
+
### Quick Example
|
| 51 |
+
|
| 52 |
+
```python
|
| 53 |
+
import requests
|
| 54 |
+
|
| 55 |
+
url = "https://your-space-name.hf.space/api/optimize"
|
| 56 |
+
payload = {
|
| 57 |
+
"config": {
|
| 58 |
+
"spacing_min": 20.0,
|
| 59 |
+
"spacing_max": 30.0,
|
| 60 |
+
"population_size": 50,
|
| 61 |
+
"generations": 100
|
| 62 |
+
},
|
| 63 |
+
"land_plots": [{
|
| 64 |
+
"type": "Polygon",
|
| 65 |
+
"coordinates": [[[0, 0], [100, 0], [100, 100], [0, 100], [0, 0]]]
|
| 66 |
+
}]
|
| 67 |
+
}
|
| 68 |
+
|
| 69 |
+
response = requests.post(url, json=payload)
|
| 70 |
+
result = response.json()
|
| 71 |
+
```
|
| 72 |
+
|
| 73 |
+
## Frontend
|
| 74 |
+
|
| 75 |
+
For a complete user interface, use the Streamlit frontend: [Link to your Streamlit app]
|
| 76 |
+
|
| 77 |
+
## Technology Stack
|
| 78 |
+
|
| 79 |
+
- **FastAPI**: High-performance Python web framework
|
| 80 |
+
- **DEAP**: Genetic algorithms (NSGA-II)
|
| 81 |
+
- **OR-Tools**: Constraint programming solver
|
| 82 |
+
- **Shapely**: Geometric operations
|
| 83 |
+
- **ezdxf**: DXF file parsing
|
| 84 |
+
|
| 85 |
+
## License
|
| 86 |
+
|
| 87 |
+
MIT
|
algorithms/backend/analyze_dxf_entities.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Investigate what entities are in these DXF files."""
|
| 2 |
+
|
| 3 |
+
import ezdxf
|
| 4 |
+
import tempfile
|
| 5 |
+
import os
|
| 6 |
+
|
| 7 |
+
files = [
|
| 8 |
+
"/Volumes/WorkSpace/Project/REMB/examples/663409.dxf",
|
| 9 |
+
"/Volumes/WorkSpace/Project/REMB/examples/930300.dxf"
|
| 10 |
+
]
|
| 11 |
+
|
| 12 |
+
for dxf_path in files:
|
| 13 |
+
print("=" * 70)
|
| 14 |
+
print(f"Analyzing: {dxf_path.split('/')[-1]}")
|
| 15 |
+
print("=" * 70)
|
| 16 |
+
|
| 17 |
+
try:
|
| 18 |
+
doc = ezdxf.readfile(dxf_path)
|
| 19 |
+
msp = doc.modelspace()
|
| 20 |
+
|
| 21 |
+
# Count all entity types
|
| 22 |
+
entity_types = {}
|
| 23 |
+
total = 0
|
| 24 |
+
|
| 25 |
+
for entity in msp:
|
| 26 |
+
etype = entity.dxftype()
|
| 27 |
+
entity_types[etype] = entity_types.get(etype, 0) + 1
|
| 28 |
+
total += 1
|
| 29 |
+
|
| 30 |
+
print(f"Total entities: {total}")
|
| 31 |
+
print(f"\nEntity breakdown:")
|
| 32 |
+
for etype, count in sorted(entity_types.items(), key=lambda x: -x[1]):
|
| 33 |
+
print(f" {etype}: {count}")
|
| 34 |
+
|
| 35 |
+
# Check for specific geometry types
|
| 36 |
+
print(f"\nGeometry analysis:")
|
| 37 |
+
|
| 38 |
+
# Check for POINT entities
|
| 39 |
+
points = list(msp.query('POINT'))
|
| 40 |
+
if points:
|
| 41 |
+
print(f" Found {len(points)} POINT entities")
|
| 42 |
+
print(f" Sample: {points[0].dxf.location if points else 'N/A'}")
|
| 43 |
+
|
| 44 |
+
# Check for CIRCLE entities
|
| 45 |
+
circles = list(msp.query('CIRCLE'))
|
| 46 |
+
if circles:
|
| 47 |
+
print(f" Found {len(circles)} CIRCLE entities")
|
| 48 |
+
if circles:
|
| 49 |
+
c = circles[0]
|
| 50 |
+
print(f" Sample: center={c.dxf.center}, radius={c.dxf.radius}")
|
| 51 |
+
|
| 52 |
+
# Check for ARC entities
|
| 53 |
+
arcs = list(msp.query('ARC'))
|
| 54 |
+
if arcs:
|
| 55 |
+
print(f" Found {len(arcs)} ARC entities")
|
| 56 |
+
|
| 57 |
+
# Check for SPLINE entities
|
| 58 |
+
splines = list(msp.query('SPLINE'))
|
| 59 |
+
if splines:
|
| 60 |
+
print(f" Found {len(splines)} SPLINE entities")
|
| 61 |
+
|
| 62 |
+
# Check for TEXT entities
|
| 63 |
+
texts = list(msp.query('TEXT'))
|
| 64 |
+
if texts:
|
| 65 |
+
print(f" Found {len(texts)} TEXT entities")
|
| 66 |
+
if texts:
|
| 67 |
+
print(f" Sample text: '{texts[0].dxf.text}'")
|
| 68 |
+
|
| 69 |
+
except Exception as e:
|
| 70 |
+
print(f"Error: {e}")
|
| 71 |
+
import traceback
|
| 72 |
+
traceback.print_exc()
|
| 73 |
+
|
| 74 |
+
print()
|
algorithms/backend/compare_methods.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Compare file read vs stream read"""
|
| 2 |
+
|
| 3 |
+
import ezdxf
|
| 4 |
+
|
| 5 |
+
dxf_path = "/Volumes/WorkSpace/Project/REMB/examples/663409.dxf"
|
| 6 |
+
|
| 7 |
+
# Method 1: Direct readfile
|
| 8 |
+
print("Method 1: Direct readfile()")
|
| 9 |
+
doc1 = ezdxf.readfile(dxf_path)
|
| 10 |
+
msp1 = doc1.modelspace()
|
| 11 |
+
lines1 = len(list(msp1.query('LINE')))
|
| 12 |
+
print(f" LINE entities: {lines1}")
|
| 13 |
+
|
| 14 |
+
# Method 2: Read bytes, decode, StringIO
|
| 15 |
+
import io
|
| 16 |
+
with open(dxf_path, 'rb') as f:
|
| 17 |
+
content = f.read()
|
| 18 |
+
|
| 19 |
+
print("\nMethod 2: bytes -> latin-1 -> StringIO")
|
| 20 |
+
text = content.decode('latin-1')
|
| 21 |
+
stream = io.StringIO(text)
|
| 22 |
+
doc2 = ezdxf.read(stream)
|
| 23 |
+
msp2 = doc2.modelspace()
|
| 24 |
+
lines2 = len(list(msp2.query('LINE')))
|
| 25 |
+
print(f" LINE entities: {lines2}")
|
| 26 |
+
|
| 27 |
+
# Method 3: tempfile
|
| 28 |
+
import tempfile
|
| 29 |
+
import os
|
| 30 |
+
|
| 31 |
+
print("\nMethod 3: bytes -> tempfile -> readfile()")
|
| 32 |
+
with tempfile.NamedTemporaryFile(mode='wb', suffix='.dxf', delete=False) as tmp:
|
| 33 |
+
tmp.write(content)
|
| 34 |
+
tmp_path = tmp.name
|
| 35 |
+
|
| 36 |
+
doc3 = ezdxf.readfile(tmp_path)
|
| 37 |
+
msp3 = doc3.modelspace()
|
| 38 |
+
lines3 = len(list(msp3.query('LINE')))
|
| 39 |
+
print(f" LINE entities: {lines3}")
|
| 40 |
+
os.unlink(tmp_path)
|
algorithms/backend/debug_ezdxf.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Debug ezdxf behavior with Bel air file"""
|
| 2 |
+
import ezdxf
|
| 3 |
+
import io
|
| 4 |
+
|
| 5 |
+
dxf_path = "/Volumes/WorkSpace/Project/REMB/examples/Lot Plan Bel air Technical Description.dxf"
|
| 6 |
+
|
| 7 |
+
# Test 1: Direct file read
|
| 8 |
+
print("Test 1: Direct file read")
|
| 9 |
+
try:
|
| 10 |
+
doc = ezdxf.readfile(dxf_path)
|
| 11 |
+
print(f"✅ Success with readfile()")
|
| 12 |
+
print(f" Entities: {len(list(doc.modelspace()))}")
|
| 13 |
+
except Exception as e:
|
| 14 |
+
print(f"❌ Failed: {e}")
|
| 15 |
+
|
| 16 |
+
# Test 2: Read through bytes
|
| 17 |
+
print("\nTest 2: Read through bytes")
|
| 18 |
+
with open(dxf_path, 'rb') as f:
|
| 19 |
+
content = f.read()
|
| 20 |
+
|
| 21 |
+
# Try decoding
|
| 22 |
+
for enc in ['utf-8', 'latin-1', 'cp1252']:
|
| 23 |
+
print(f"\n Trying {enc}:")
|
| 24 |
+
try:
|
| 25 |
+
text = content.decode(enc)
|
| 26 |
+
stream = io.StringIO(text)
|
| 27 |
+
doc = ezdxf.read(stream)
|
| 28 |
+
print(f" ✅ Success with {enc}")
|
| 29 |
+
break
|
| 30 |
+
except Exception as e:
|
| 31 |
+
print(f" ❌ {type(e).__name__}: {str(e)[:100]}")
|
| 32 |
+
|
| 33 |
+
# Test 3: Binary stream
|
| 34 |
+
print("\nTest 3: Binary stream")
|
| 35 |
+
try:
|
| 36 |
+
stream = io.BytesIO(content)
|
| 37 |
+
doc = ezdxf.read(stream)
|
| 38 |
+
print(f"✅ Success with binary stream")
|
| 39 |
+
except Exception as e:
|
| 40 |
+
print(f"❌ Failed: {e}")
|
algorithms/backend/debug_validation.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Debug validation process for 663409.dxf"""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
sys.path.insert(0, '/Volumes/WorkSpace/Project/REMB/algorithms/backend')
|
| 5 |
+
|
| 6 |
+
import ezdxf
|
| 7 |
+
import io
|
| 8 |
+
import tempfile
|
| 9 |
+
import os
|
| 10 |
+
|
| 11 |
+
dxf_path = "/Volumes/WorkSpace/Project/REMB/examples/663409.dxf"
|
| 12 |
+
|
| 13 |
+
with open(dxf_path, 'rb') as f:
|
| 14 |
+
dxf_content = f.read()
|
| 15 |
+
|
| 16 |
+
print(f"File size: {len(dxf_content)} bytes")
|
| 17 |
+
|
| 18 |
+
# Try to load with different methods
|
| 19 |
+
encodings = ['utf-8', 'latin-1', 'cp1252', 'utf-16']
|
| 20 |
+
doc = None
|
| 21 |
+
|
| 22 |
+
for encoding in encodings:
|
| 23 |
+
print(f"\nTrying {encoding}...")
|
| 24 |
+
try:
|
| 25 |
+
text_content = dxf_content.decode(encoding)
|
| 26 |
+
text_stream = io.StringIO(text_content)
|
| 27 |
+
doc = ezdxf.read(text_stream)
|
| 28 |
+
print(f" ✅ Success with {encoding}")
|
| 29 |
+
break
|
| 30 |
+
except Exception as e:
|
| 31 |
+
print(f" ❌ {type(e).__name__}: {str(e)[:80]}")
|
| 32 |
+
|
| 33 |
+
if doc is None:
|
| 34 |
+
print("\nTrying binary stream...")
|
| 35 |
+
try:
|
| 36 |
+
dxf_stream = io.BytesIO(dxf_content)
|
| 37 |
+
doc = ezdxf.read(dxf_stream)
|
| 38 |
+
print(" ✅ Success with binary stream")
|
| 39 |
+
except Exception as e:
|
| 40 |
+
print(f" ❌ {type(e).__name__}: {str(e)[:80]}")
|
| 41 |
+
|
| 42 |
+
if doc is None:
|
| 43 |
+
print("\nTrying tempfile...")
|
| 44 |
+
try:
|
| 45 |
+
with tempfile.NamedTemporaryFile(mode='wb', suffix='.dxf', delete=False) as tmp:
|
| 46 |
+
tmp.write(dxf_content)
|
| 47 |
+
tmp_path = tmp.name
|
| 48 |
+
|
| 49 |
+
doc = ezdxf.readfile(tmp_path)
|
| 50 |
+
print(f" ✅ Success with tempfile")
|
| 51 |
+
os.unlink(tmp_path)
|
| 52 |
+
except Exception as e:
|
| 53 |
+
print(f" ❌ {type(e).__name__}: {str(e)[:80]}")
|
| 54 |
+
try:
|
| 55 |
+
os.unlink(tmp_path)
|
| 56 |
+
except:
|
| 57 |
+
pass
|
| 58 |
+
|
| 59 |
+
if doc:
|
| 60 |
+
msp = doc.modelspace()
|
| 61 |
+
lwpolylines = sum(1 for e in msp if e.dxftype() == 'LWPOLYLINE')
|
| 62 |
+
polylines = len(list(msp.query('POLYLINE')))
|
| 63 |
+
lines = len(list(msp.query('LINE')))
|
| 64 |
+
|
| 65 |
+
print(f"\n✅ LOADED SUCCESSFULLY")
|
| 66 |
+
print(f" LWPOLYLINE: {lwpolylines}")
|
| 67 |
+
print(f" POLYLINE: {polylines}")
|
| 68 |
+
print(f" LINE: {lines}")
|
| 69 |
+
print(f" Total: {lwpolylines + polylines + lines}")
|
| 70 |
+
else:
|
| 71 |
+
print("\n❌ FAILED TO LOAD")
|
algorithms/backend/requirements.txt
CHANGED
|
@@ -1,5 +1,5 @@
|
|
| 1 |
fastapi==0.104.1
|
| 2 |
-
uvicorn==0.24.0
|
| 3 |
pydantic==2.5.0
|
| 4 |
numpy==1.26.2
|
| 5 |
shapely==2.0.2
|
|
@@ -11,3 +11,5 @@ ezdxf==1.1.3
|
|
| 11 |
scipy==1.11.4
|
| 12 |
networkx==3.2.1
|
| 13 |
scikit-learn==1.3.2
|
|
|
|
|
|
|
|
|
| 1 |
fastapi==0.104.1
|
| 2 |
+
uvicorn[standard]==0.24.0
|
| 3 |
pydantic==2.5.0
|
| 4 |
numpy==1.26.2
|
| 5 |
shapely==2.0.2
|
|
|
|
| 11 |
scipy==1.11.4
|
| 12 |
networkx==3.2.1
|
| 13 |
scikit-learn==1.3.2
|
| 14 |
+
python-dotenv==1.0.0
|
| 15 |
+
gunicorn==21.2.0
|
algorithms/backend/test_additional_dxf.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test additional DXF files."""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
sys.path.insert(0, '/Volumes/WorkSpace/Project/REMB/algorithms/backend')
|
| 5 |
+
|
| 6 |
+
from utils.dxf_utils import load_boundary_from_dxf, validate_dxf
|
| 7 |
+
|
| 8 |
+
# Test both files
|
| 9 |
+
files = [
|
| 10 |
+
"/Volumes/WorkSpace/Project/REMB/examples/663409.dxf",
|
| 11 |
+
"/Volumes/WorkSpace/Project/REMB/examples/930300.dxf"
|
| 12 |
+
]
|
| 13 |
+
|
| 14 |
+
for dxf_path in files:
|
| 15 |
+
print("=" * 70)
|
| 16 |
+
print(f"Testing: {dxf_path.split('/')[-1]}")
|
| 17 |
+
print("=" * 70)
|
| 18 |
+
|
| 19 |
+
try:
|
| 20 |
+
with open(dxf_path, 'rb') as f:
|
| 21 |
+
content = f.read()
|
| 22 |
+
|
| 23 |
+
print(f"File size: {len(content):,} bytes")
|
| 24 |
+
print(f"First 100 bytes: {content[:100]}")
|
| 25 |
+
|
| 26 |
+
# Test validation
|
| 27 |
+
is_valid, message = validate_dxf(content)
|
| 28 |
+
print(f"\nValidation: {'✅' if is_valid else '❌'} {message}")
|
| 29 |
+
|
| 30 |
+
# Test loading
|
| 31 |
+
if is_valid:
|
| 32 |
+
polygon = load_boundary_from_dxf(content)
|
| 33 |
+
if polygon:
|
| 34 |
+
print(f"\n✅ Polygon extracted successfully!")
|
| 35 |
+
print(f" Area: {polygon.area/10000:.2f} ha")
|
| 36 |
+
print(f" Bounds: {polygon.bounds}")
|
| 37 |
+
else:
|
| 38 |
+
print("\n❌ Failed to extract polygon")
|
| 39 |
+
|
| 40 |
+
except FileNotFoundError:
|
| 41 |
+
print(f"❌ File not found: {dxf_path}")
|
| 42 |
+
except Exception as e:
|
| 43 |
+
print(f"❌ Error: {e}")
|
| 44 |
+
import traceback
|
| 45 |
+
traceback.print_exc()
|
| 46 |
+
|
| 47 |
+
print()
|
algorithms/backend/test_belair_dxf.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test DXF loading with Bel air file."""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
sys.path.insert(0, '/Volumes/WorkSpace/Project/REMB/algorithms/backend')
|
| 5 |
+
|
| 6 |
+
from utils.dxf_utils import load_boundary_from_dxf, validate_dxf
|
| 7 |
+
|
| 8 |
+
# Test with Bel air DXF file
|
| 9 |
+
dxf_path = "/Volumes/WorkSpace/Project/REMB/examples/Lot Plan Bel air Technical Description.dxf"
|
| 10 |
+
|
| 11 |
+
print(f"Testing file: {dxf_path}")
|
| 12 |
+
|
| 13 |
+
with open(dxf_path, 'rb') as f:
|
| 14 |
+
content = f.read()
|
| 15 |
+
|
| 16 |
+
print(f"File size: {len(content)} bytes")
|
| 17 |
+
print(f"First 200 bytes: {content[:200]}")
|
| 18 |
+
print(f"\nIs binary? {content[:10]}")
|
| 19 |
+
|
| 20 |
+
# Test validation
|
| 21 |
+
try:
|
| 22 |
+
is_valid, message = validate_dxf(content)
|
| 23 |
+
print(f"\nValidation: {is_valid}")
|
| 24 |
+
print(f"Message: {message}")
|
| 25 |
+
|
| 26 |
+
# Test loading
|
| 27 |
+
if is_valid:
|
| 28 |
+
polygon = load_boundary_from_dxf(content)
|
| 29 |
+
if polygon:
|
| 30 |
+
print(f"\n✅ Success!")
|
| 31 |
+
print(f"Polygon area: {polygon.area/10000:.2f} ha")
|
| 32 |
+
print(f"Bounds: {polygon.bounds}")
|
| 33 |
+
else:
|
| 34 |
+
print("\n❌ Failed to extract polygon")
|
| 35 |
+
except Exception as e:
|
| 36 |
+
print(f"\n❌ Error: {e}")
|
| 37 |
+
import traceback
|
| 38 |
+
traceback.print_exc()
|
algorithms/backend/test_dxf.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Test DXF loading functionality."""
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
sys.path.insert(0, '/Volumes/WorkSpace/Project/REMB/algorithms/backend')
|
| 5 |
+
|
| 6 |
+
from utils.dxf_utils import load_boundary_from_dxf, validate_dxf
|
| 7 |
+
|
| 8 |
+
# Test with actual DXF file
|
| 9 |
+
dxf_path = "/Volumes/WorkSpace/Project/REMB/examples/api-cw750-details.dxf"
|
| 10 |
+
|
| 11 |
+
with open(dxf_path, 'rb') as f:
|
| 12 |
+
content = f.read()
|
| 13 |
+
|
| 14 |
+
print(f"File size: {len(content)} bytes")
|
| 15 |
+
print(f"First 100 bytes: {content[:100]}")
|
| 16 |
+
|
| 17 |
+
# Test validation
|
| 18 |
+
is_valid, message = validate_dxf(content)
|
| 19 |
+
print(f"\nValidation: {is_valid}")
|
| 20 |
+
print(f"Message: {message}")
|
| 21 |
+
|
| 22 |
+
# Test loading
|
| 23 |
+
if is_valid:
|
| 24 |
+
polygon = load_boundary_from_dxf(content)
|
| 25 |
+
if polygon:
|
| 26 |
+
print(f"\n✅ Success!")
|
| 27 |
+
print(f"Polygon area: {polygon.area/10000:.2f} ha")
|
| 28 |
+
print(f"Bounds: {polygon.bounds}")
|
| 29 |
+
else:
|
| 30 |
+
print("\n❌ Failed to extract polygon")
|
algorithms/backend/utils/dxf_utils.py
CHANGED
|
@@ -1,16 +1,26 @@
|
|
| 1 |
-
"""DXF file handling utilities for importing and exporting geometry.
|
| 2 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
import ezdxf
|
| 4 |
-
from shapely.geometry import Polygon, mapping
|
| 5 |
-
from shapely.ops import unary_union
|
| 6 |
from typing import Optional, List, Tuple
|
| 7 |
import io
|
| 8 |
|
|
|
|
|
|
|
| 9 |
|
| 10 |
def load_boundary_from_dxf(dxf_content: bytes) -> Optional[Polygon]:
|
| 11 |
"""
|
| 12 |
Load site boundary from DXF file content.
|
| 13 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
Args:
|
| 15 |
dxf_content: Bytes content of DXF file
|
| 16 |
|
|
@@ -19,54 +29,149 @@ def load_boundary_from_dxf(dxf_content: bytes) -> Optional[Polygon]:
|
|
| 19 |
"""
|
| 20 |
try:
|
| 21 |
# Load DXF from bytes
|
| 22 |
-
|
| 23 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
msp = doc.modelspace()
|
| 25 |
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 37 |
try:
|
| 38 |
-
|
| 39 |
-
if
|
| 40 |
-
|
| 41 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 42 |
continue
|
| 43 |
|
| 44 |
-
#
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 65 |
|
|
|
|
| 66 |
return None
|
| 67 |
|
| 68 |
except Exception as e:
|
| 69 |
-
|
| 70 |
return None
|
| 71 |
|
| 72 |
|
|
@@ -87,10 +192,12 @@ def export_to_dxf(geometries: List[dict], output_type: str = 'final') -> bytes:
|
|
| 87 |
msp = doc.modelspace()
|
| 88 |
|
| 89 |
# Create layers
|
| 90 |
-
doc.layers.add('BLOCKS', color=5)
|
| 91 |
-
doc.layers.add('LOTS', color=3)
|
| 92 |
-
doc.layers.add('PARKS', color=2)
|
| 93 |
-
doc.layers.add('
|
|
|
|
|
|
|
| 94 |
|
| 95 |
# Add geometries
|
| 96 |
for item in geometries:
|
|
@@ -99,12 +206,19 @@ def export_to_dxf(geometries: List[dict], output_type: str = 'final') -> bytes:
|
|
| 99 |
geom_type = props.get('type', 'lot')
|
| 100 |
|
| 101 |
# Determine layer
|
| 102 |
-
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 108 |
|
| 109 |
# Get coordinates
|
| 110 |
if geom and 'coordinates' in geom:
|
|
@@ -120,6 +234,7 @@ def export_to_dxf(geometries: List[dict], output_type: str = 'final') -> bytes:
|
|
| 120 |
radius=2.0,
|
| 121 |
dxfattribs={'layer': layer}
|
| 122 |
)
|
|
|
|
| 123 |
elif geom_geom_type == 'LineString':
|
| 124 |
# LineString: [[x1, y1], [x2, y2], ...]
|
| 125 |
if isinstance(coords, list) and len(coords) > 0:
|
|
@@ -130,6 +245,7 @@ def export_to_dxf(geometries: List[dict], output_type: str = 'final') -> bytes:
|
|
| 130 |
points_2d,
|
| 131 |
dxfattribs={'layer': layer, 'closed': False}
|
| 132 |
)
|
|
|
|
| 133 |
elif geom_geom_type == 'Polygon':
|
| 134 |
# Polygon: [[[x1, y1], [x2, y2], ...]] (exterior ring)
|
| 135 |
if isinstance(coords, list) and len(coords) > 0:
|
|
@@ -155,7 +271,7 @@ def export_to_dxf(geometries: List[dict], output_type: str = 'final') -> bytes:
|
|
| 155 |
return stream.getvalue().encode('utf-8')
|
| 156 |
|
| 157 |
except Exception as e:
|
| 158 |
-
|
| 159 |
return b''
|
| 160 |
|
| 161 |
|
|
@@ -170,18 +286,63 @@ def validate_dxf(dxf_content: bytes) -> Tuple[bool, str]:
|
|
| 170 |
(is_valid, message)
|
| 171 |
"""
|
| 172 |
try:
|
| 173 |
-
|
| 174 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
msp = doc.modelspace()
|
| 176 |
|
| 177 |
# Count entities
|
| 178 |
-
|
|
|
|
| 179 |
lines = len(list(msp.query('LINE')))
|
| 180 |
|
| 181 |
-
|
|
|
|
|
|
|
| 182 |
return False, "No polylines or lines found in DXF"
|
| 183 |
|
| 184 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 185 |
|
| 186 |
except Exception as e:
|
| 187 |
return False, f"Invalid DXF: {str(e)}"
|
|
|
|
| 1 |
+
"""DXF file handling utilities for importing and exporting geometry.
|
| 2 |
|
| 3 |
+
Based on REMB_Production_Final_Reliable.ipynb converter logic.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import logging
|
| 7 |
import ezdxf
|
| 8 |
+
from shapely.geometry import Polygon, mapping, LineString
|
| 9 |
+
from shapely.ops import unary_union, polygonize
|
| 10 |
from typing import Optional, List, Tuple
|
| 11 |
import io
|
| 12 |
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
|
| 16 |
def load_boundary_from_dxf(dxf_content: bytes) -> Optional[Polygon]:
    """
    Load site boundary from DXF file content.

    Loading strategy (most to least reliable):
      1. Write the bytes to a temp file and use ezdxf.readfile() — preserves
         all entity data, including old DXF formats (R11/R12).
      2. Decode with several text encodings and parse from a StringIO stream.
      3. Parse from a raw BytesIO stream.

    Boundary extraction (first non-empty result wins):
      1. Largest valid closed LWPOLYLINE (matches the notebook logic).
      2. Largest valid closed POLYLINE.
      3. Largest polygon assembled from loose LINE segments via polygonize().

    Args:
        dxf_content: Bytes content of DXF file

    Returns:
        The extracted boundary Polygon (scaled 1.5x about its centroid),
        or None if the file could not be parsed or no valid closed
        boundary was found.
    """
    try:
        import tempfile
        import os

        doc = None
        tmp_path = None

        try:
            # Strategy 1: temp file + readfile() — the most compatible path.
            with tempfile.NamedTemporaryFile(mode='wb', suffix='.dxf', delete=False) as tmp:
                tmp.write(dxf_content)
                tmp_path = tmp.name

            doc = ezdxf.readfile(tmp_path)
            logger.info("Successfully loaded DXF using tempfile method")

        except Exception as e:
            logger.warning(f"Tempfile method failed: {e}, trying stream methods")

            # Strategy 2: decoded text streams.
            for encoding in ('utf-8', 'latin-1', 'cp1252', 'utf-16'):
                try:
                    text_stream = io.StringIO(dxf_content.decode(encoding))
                    doc = ezdxf.read(text_stream)
                    logger.info(f"Successfully loaded DXF with {encoding} encoding")
                    break
                except Exception:
                    # Covers decode errors and ezdxf parse errors alike.
                    continue

            # Strategy 3: raw binary stream as a last resort.
            if doc is None:
                try:
                    doc = ezdxf.read(io.BytesIO(dxf_content))
                    logger.info("Successfully loaded DXF in binary format")
                except Exception as final_error:
                    logger.error(f"Failed to load DXF in any format: {final_error}")
                    return None
        finally:
            # Best-effort cleanup of the temp file.
            if tmp_path and os.path.exists(tmp_path):
                try:
                    os.unlink(tmp_path)
                except OSError:  # was a bare `except:` — narrowed to FS errors
                    pass

        if doc is None:
            return None

        msp = doc.modelspace()

        largest = None
        max_area = 0.0

        # 1) Closed LWPOLYLINE entities (matching notebook logic).
        for entity in msp:
            if entity.dxftype() == 'LWPOLYLINE' and entity.is_closed:
                try:
                    # Get points in xy format (matching notebook).
                    pts = list(entity.get_points(format='xy'))

                    if len(pts) >= 3:
                        poly = Polygon(pts)

                        if poly.is_valid and poly.area > max_area:
                            max_area = poly.area
                            largest = poly

                except Exception as e:
                    logger.warning(f"Failed to process LWPOLYLINE: {e}")
                    continue

        # 2) Fallback: closed heavyweight POLYLINE entities.
        if not largest:
            for entity in msp.query('POLYLINE'):
                if entity.is_closed:
                    try:
                        points = list(entity.get_points())
                        if len(points) >= 3:
                            coords = [(p[0], p[1]) for p in points]
                            poly = Polygon(coords)

                            if poly.is_valid and poly.area > max_area:
                                max_area = poly.area
                                largest = poly

                    except Exception as e:
                        logger.warning(f"Failed to process POLYLINE: {e}")
                        continue

        # 3) Fallback: stitch loose LINE segments into closed rings — common
        #    in CAD exports that draw the boundary edge by edge.
        #    (polygonize/LineString come from the module-level imports; the
        #    original re-imported them here redundantly.)
        if not largest:
            try:
                lines = list(msp.query('LINE'))
                if lines:
                    logger.info(f"Attempting to build polygon from {len(lines)} LINE entities")

                    line_segments = [
                        LineString([(line.dxf.start.x, line.dxf.start.y),
                                    (line.dxf.end.x, line.dxf.end.y)])
                        for line in lines
                    ]

                    # polygonize() finds all closed rings in the line network.
                    polygons = list(polygonize(line_segments))

                    if polygons:
                        logger.info(f"Found {len(polygons)} polygons from LINE entities")

                        # Keep the largest valid ring as the boundary.
                        for poly in polygons:
                            if poly.is_valid and poly.area > max_area:
                                max_area = poly.area
                                largest = poly
                    else:
                        logger.warning("Could not create polygons from LINE entities")

            except Exception as e:
                logger.warning(f"Failed to process LINE entities: {e}")

        if largest:
            # NOTE(review): scaling the boundary 1.5x distorts real-world
            # coordinates and areas (area grows 2.25x). Kept for backward
            # compatibility with the existing "better visualization"
            # behavior — confirm this is intentional for production use.
            from shapely import affinity
            largest = affinity.scale(largest, xfact=1.5, yfact=1.5, origin='centroid')
            logger.info(f"Boundary loaded and scaled 1.5x: {largest.area/10000:.2f} ha")
            return largest

        logger.warning("No valid closed polylines found in DXF")
        return None

    except Exception as e:
        logger.error(f"Error loading DXF: {e}")
        return None
|
| 176 |
|
| 177 |
|
|
|
|
| 192 |
msp = doc.modelspace()
|
| 193 |
|
| 194 |
# Create layers
|
| 195 |
+
doc.layers.add('BLOCKS', color=5) # Blue for blocks
|
| 196 |
+
doc.layers.add('LOTS', color=3) # Green for lots
|
| 197 |
+
doc.layers.add('PARKS', color=2) # Yellow for parks
|
| 198 |
+
doc.layers.add('SERVICE', color=4) # Cyan for service
|
| 199 |
+
doc.layers.add('ROADS', color=8) # Gray for roads
|
| 200 |
+
doc.layers.add('INFRASTRUCTURE', color=1) # Red for infrastructure
|
| 201 |
|
| 202 |
# Add geometries
|
| 203 |
for item in geometries:
|
|
|
|
| 206 |
geom_type = props.get('type', 'lot')
|
| 207 |
|
| 208 |
# Determine layer
|
| 209 |
+
layer_map = {
|
| 210 |
+
'block': 'BLOCKS',
|
| 211 |
+
'park': 'PARKS',
|
| 212 |
+
'service': 'SERVICE',
|
| 213 |
+
'xlnt': 'SERVICE',
|
| 214 |
+
'road_network': 'ROADS',
|
| 215 |
+
'connection': 'INFRASTRUCTURE',
|
| 216 |
+
'transformer': 'INFRASTRUCTURE',
|
| 217 |
+
'drainage': 'INFRASTRUCTURE',
|
| 218 |
+
'lot': 'LOTS',
|
| 219 |
+
'setback': 'LOTS'
|
| 220 |
+
}
|
| 221 |
+
layer = layer_map.get(geom_type, 'LOTS')
|
| 222 |
|
| 223 |
# Get coordinates
|
| 224 |
if geom and 'coordinates' in geom:
|
|
|
|
| 234 |
radius=2.0,
|
| 235 |
dxfattribs={'layer': layer}
|
| 236 |
)
|
| 237 |
+
|
| 238 |
elif geom_geom_type == 'LineString':
|
| 239 |
# LineString: [[x1, y1], [x2, y2], ...]
|
| 240 |
if isinstance(coords, list) and len(coords) > 0:
|
|
|
|
| 245 |
points_2d,
|
| 246 |
dxfattribs={'layer': layer, 'closed': False}
|
| 247 |
)
|
| 248 |
+
|
| 249 |
elif geom_geom_type == 'Polygon':
|
| 250 |
# Polygon: [[[x1, y1], [x2, y2], ...]] (exterior ring)
|
| 251 |
if isinstance(coords, list) and len(coords) > 0:
|
|
|
|
| 271 |
return stream.getvalue().encode('utf-8')
|
| 272 |
|
| 273 |
except Exception as e:
|
| 274 |
+
logger.error(f"Error exporting DXF: {e}")
|
| 275 |
return b''
|
| 276 |
|
| 277 |
|
|
|
|
| 286 |
(is_valid, message)
|
| 287 |
"""
|
| 288 |
try:
|
| 289 |
+
# Load DXF for validation
|
| 290 |
+
# Use tempfile method for maximum compatibility
|
| 291 |
+
import tempfile
|
| 292 |
+
import os
|
| 293 |
+
|
| 294 |
+
doc = None
|
| 295 |
+
tmp_path = None
|
| 296 |
+
|
| 297 |
+
try:
|
| 298 |
+
with tempfile.NamedTemporaryFile(mode='wb', suffix='.dxf', delete=False) as tmp:
|
| 299 |
+
tmp.write(dxf_content)
|
| 300 |
+
tmp_path = tmp.name
|
| 301 |
+
|
| 302 |
+
doc = ezdxf.readfile(tmp_path)
|
| 303 |
+
|
| 304 |
+
except Exception:
|
| 305 |
+
# Fallback to stream methods
|
| 306 |
+
encodings = ['utf-8', 'latin-1', 'cp1252', 'utf-16']
|
| 307 |
+
|
| 308 |
+
for encoding in encodings:
|
| 309 |
+
try:
|
| 310 |
+
text_content = dxf_content.decode(encoding)
|
| 311 |
+
text_stream = io.StringIO(text_content)
|
| 312 |
+
doc = ezdxf.read(text_stream)
|
| 313 |
+
break
|
| 314 |
+
except (UnicodeDecodeError, AttributeError, Exception):
|
| 315 |
+
continue
|
| 316 |
+
|
| 317 |
+
if doc is None:
|
| 318 |
+
try:
|
| 319 |
+
dxf_stream = io.BytesIO(dxf_content)
|
| 320 |
+
doc = ezdxf.read(dxf_stream)
|
| 321 |
+
except Exception as e:
|
| 322 |
+
return False, f"Failed to parse DXF: {str(e)}"
|
| 323 |
+
finally:
|
| 324 |
+
if tmp_path and os.path.exists(tmp_path):
|
| 325 |
+
try:
|
| 326 |
+
os.unlink(tmp_path)
|
| 327 |
+
except:
|
| 328 |
+
pass
|
| 329 |
msp = doc.modelspace()
|
| 330 |
|
| 331 |
# Count entities
|
| 332 |
+
lwpolylines = sum(1 for e in msp if e.dxftype() == 'LWPOLYLINE')
|
| 333 |
+
polylines = len(list(msp.query('POLYLINE')))
|
| 334 |
lines = len(list(msp.query('LINE')))
|
| 335 |
|
| 336 |
+
total_entities = lwpolylines + polylines + lines
|
| 337 |
+
|
| 338 |
+
if total_entities == 0:
|
| 339 |
return False, "No polylines or lines found in DXF"
|
| 340 |
|
| 341 |
+
# Check for closed polylines
|
| 342 |
+
closed_count = sum(1 for e in msp if e.dxftype() == 'LWPOLYLINE' and e.is_closed)
|
| 343 |
+
|
| 344 |
+
msg = f"Valid DXF: {lwpolylines} LWPOLYLINE ({closed_count} closed), {polylines} POLYLINE, {lines} LINE"
|
| 345 |
+
return True, msg
|
| 346 |
|
| 347 |
except Exception as e:
|
| 348 |
return False, f"Invalid DXF: {str(e)}"
|
algorithms/docker-compose.yml
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version: '3.8'
|
| 2 |
+
|
| 3 |
+
services:
|
| 4 |
+
backend:
|
| 5 |
+
build:
|
| 6 |
+
context: ./backend
|
| 7 |
+
dockerfile: Dockerfile
|
| 8 |
+
container_name: land-redistribution-api
|
| 9 |
+
ports:
|
| 10 |
+
- "8000:7860"
|
| 11 |
+
environment:
|
| 12 |
+
- API_HOST=0.0.0.0
|
| 13 |
+
- API_PORT=7860
|
| 14 |
+
- CORS_ORIGINS=*
|
| 15 |
+
- LOG_LEVEL=INFO
|
| 16 |
+
healthcheck:
|
| 17 |
+
test: [ "CMD", "python", "-c", "import requests; requests.get('http://localhost:7860/health')" ]
|
| 18 |
+
interval: 30s
|
| 19 |
+
timeout: 10s
|
| 20 |
+
retries: 3
|
| 21 |
+
start_period: 10s
|
| 22 |
+
restart: unless-stopped
|
| 23 |
+
networks:
|
| 24 |
+
- app-network
|
| 25 |
+
|
| 26 |
+
frontend:
|
| 27 |
+
image: python:3.11-slim
|
| 28 |
+
container_name: land-redistribution-ui
|
| 29 |
+
working_dir: /app
|
| 30 |
+
ports:
|
| 31 |
+
- "8501:8501"
|
| 32 |
+
environment:
|
| 33 |
+
- API_URL=http://backend:7860
|
| 34 |
+
volumes:
|
| 35 |
+
- ./frontend:/app
|
| 36 |
+
command: >
|
| 37 |
+
sh -c "pip install --no-cache-dir -r requirements.txt &&
|
| 38 |
+
streamlit run app.py --server.port 8501 --server.address 0.0.0.0"
|
| 39 |
+
depends_on:
|
| 40 |
+
backend:
|
| 41 |
+
condition: service_healthy
|
| 42 |
+
restart: unless-stopped
|
| 43 |
+
networks:
|
| 44 |
+
- app-network
|
| 45 |
+
|
| 46 |
+
networks:
|
| 47 |
+
app-network:
|
| 48 |
+
driver: bridge
|
algorithms/frontend/.streamlit/config.toml
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[server]
|
| 2 |
+
headless = true
|
| 3 |
+
port = 8501
|
| 4 |
+
enableCORS = false
|
| 5 |
+
enableXsrfProtection = true
|
| 6 |
+
|
| 7 |
+
[browser]
|
| 8 |
+
gatherUsageStats = false
|
| 9 |
+
serverAddress = "0.0.0.0"
|
| 10 |
+
|
| 11 |
+
[theme]
|
| 12 |
+
primaryColor = "#2E86AB"
|
| 13 |
+
backgroundColor = "#FFFFFF"
|
| 14 |
+
secondaryBackgroundColor = "#F0F2F6"
|
| 15 |
+
textColor = "#262730"
|
| 16 |
+
font = "sans serif"
|
| 17 |
+
|
| 18 |
+
[client]
|
| 19 |
+
showErrorDetails = true
|
| 20 |
+
toolbarMode = "minimal"
|
algorithms/frontend/app.py
CHANGED
|
@@ -19,9 +19,15 @@ import numpy as np
|
|
| 19 |
from plotly.subplots import make_subplots
|
| 20 |
import pandas as pd
|
| 21 |
from typing import Dict, Any
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
|
| 23 |
-
# Configuration
|
| 24 |
-
API_URL = "http://localhost:8000"
|
| 25 |
|
| 26 |
# Page config - Wide layout for one-page design
|
| 27 |
st.set_page_config(
|
|
@@ -539,6 +545,26 @@ with col_result:
|
|
| 539 |
Includes: Roads, Setbacks, Zoning, Loop Network, Transformers, Drainage.
|
| 540 |
"""
|
| 541 |
try:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 542 |
# Setup figure
|
| 543 |
fig, ax = plt.subplots(figsize=(12, 12))
|
| 544 |
ax.set_aspect('equal')
|
|
@@ -548,19 +574,11 @@ with col_result:
|
|
| 548 |
features = result_data.get('final_layout', {}).get('features', [])
|
| 549 |
|
| 550 |
# 1. Draw Roads & Sidewalks (Layer 0)
|
| 551 |
-
# We specifically look for type='road_network' or we draw the inverse using plot background if needed
|
| 552 |
-
# But our backend now sends 'road_network' feature
|
| 553 |
for f in features:
|
| 554 |
if f['properties'].get('type') == 'road_network':
|
| 555 |
geom = shape(f['geometry'])
|
| 556 |
-
if geom.is_empty:
|
| 557 |
-
|
| 558 |
-
xs, ys = geom.exterior.xy
|
| 559 |
-
ax.fill(xs, ys, color='#607d8b', alpha=0.3, label='Hạ tầng giao thông')
|
| 560 |
-
elif geom.geom_type == 'MultiPolygon':
|
| 561 |
-
for poly in geom.geoms:
|
| 562 |
-
xs, ys = poly.exterior.xy
|
| 563 |
-
ax.fill(xs, ys, color='#607d8b', alpha=0.3)
|
| 564 |
|
| 565 |
# 2. Draw Commercial Lots & Setbacks (Layer 1)
|
| 566 |
for f in features:
|
|
@@ -568,34 +586,29 @@ with col_result:
|
|
| 568 |
ftype = props.get('type')
|
| 569 |
|
| 570 |
if ftype == 'lot':
|
| 571 |
-
|
| 572 |
-
|
| 573 |
-
|
| 574 |
-
ax.fill(xs, ys, color='#fff9c4', alpha=0.5) # Yellow
|
| 575 |
|
| 576 |
elif ftype == 'setback':
|
| 577 |
-
|
| 578 |
-
|
| 579 |
-
ax.plot(xs, ys, color='red', linestyle='--', linewidth=0.8, alpha=0.7)
|
| 580 |
|
| 581 |
# 3. Draw Service / Technical Areas (Layer 2)
|
| 582 |
for f in features:
|
| 583 |
props = f['properties']
|
| 584 |
ftype = props.get('type')
|
| 585 |
-
|
| 586 |
|
| 587 |
if ftype == 'xlnt':
|
| 588 |
-
|
| 589 |
-
ax.
|
| 590 |
-
ax.text(poly.centroid.x, poly.centroid.y, "XLNT", ha='center', fontsize=8, color='black', weight='bold')
|
| 591 |
elif ftype == 'service':
|
| 592 |
-
|
| 593 |
-
ax.
|
| 594 |
-
ax.text(poly.centroid.x, poly.centroid.y, "Điều hành", ha='center', fontsize=8, color='black', weight='bold')
|
| 595 |
elif ftype == 'park':
|
| 596 |
-
|
| 597 |
-
|
| 598 |
-
ax.plot(xs, ys, color='green', linewidth=0.5, linestyle=':')
|
| 599 |
|
| 600 |
# 4. Draw Electrical Infrastructure (Loop)
|
| 601 |
for f in features:
|
|
|
|
| 19 |
from plotly.subplots import make_subplots
|
| 20 |
import pandas as pd
|
| 21 |
from typing import Dict, Any
|
| 22 |
+
import os
|
| 23 |
+
from dotenv import load_dotenv
|
| 24 |
+
|
| 25 |
+
# Load environment variables
|
| 26 |
+
load_dotenv()
|
| 27 |
+
|
| 28 |
+
# Configuration - Support both local and production deployment
|
| 29 |
+
API_URL = os.getenv("API_URL", "http://localhost:8000")
|
| 30 |
|
|
|
|
|
|
|
| 31 |
|
| 32 |
# Page config - Wide layout for one-page design
|
| 33 |
st.set_page_config(
|
|
|
|
| 545 |
Includes: Roads, Setbacks, Zoning, Loop Network, Transformers, Drainage.
|
| 546 |
"""
|
| 547 |
try:
|
| 548 |
+
def plot_geometry(geom, **kwargs):
|
| 549 |
+
"""Helper to plot Polygon or MultiPolygon."""
|
| 550 |
+
if geom.geom_type == 'Polygon':
|
| 551 |
+
xs, ys = geom.exterior.xy
|
| 552 |
+
ax.fill(xs, ys, **kwargs)
|
| 553 |
+
elif geom.geom_type == 'MultiPolygon':
|
| 554 |
+
for poly in geom.geoms:
|
| 555 |
+
xs, ys = poly.exterior.xy
|
| 556 |
+
ax.fill(xs, ys, **kwargs)
|
| 557 |
+
|
| 558 |
+
def plot_outline(geom, **kwargs):
|
| 559 |
+
"""Helper to plot outline of Polygon or MultiPolygon."""
|
| 560 |
+
if geom.geom_type == 'Polygon':
|
| 561 |
+
xs, ys = geom.exterior.xy
|
| 562 |
+
ax.plot(xs, ys, **kwargs)
|
| 563 |
+
elif geom.geom_type == 'MultiPolygon':
|
| 564 |
+
for poly in geom.geoms:
|
| 565 |
+
xs, ys = poly.exterior.xy
|
| 566 |
+
ax.plot(xs, ys, **kwargs)
|
| 567 |
+
|
| 568 |
# Setup figure
|
| 569 |
fig, ax = plt.subplots(figsize=(12, 12))
|
| 570 |
ax.set_aspect('equal')
|
|
|
|
| 574 |
features = result_data.get('final_layout', {}).get('features', [])
|
| 575 |
|
| 576 |
# 1. Draw Roads & Sidewalks (Layer 0)
|
|
|
|
|
|
|
| 577 |
for f in features:
|
| 578 |
if f['properties'].get('type') == 'road_network':
|
| 579 |
geom = shape(f['geometry'])
|
| 580 |
+
if not geom.is_empty:
|
| 581 |
+
plot_geometry(geom, color='#607d8b', alpha=0.3, label='Hạ tầng giao thông')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 582 |
|
| 583 |
# 2. Draw Commercial Lots & Setbacks (Layer 1)
|
| 584 |
for f in features:
|
|
|
|
| 586 |
ftype = props.get('type')
|
| 587 |
|
| 588 |
if ftype == 'lot':
|
| 589 |
+
geom = shape(f['geometry'])
|
| 590 |
+
plot_outline(geom, color='black', linewidth=0.5)
|
| 591 |
+
plot_geometry(geom, color='#fff9c4', alpha=0.5)
|
|
|
|
| 592 |
|
| 593 |
elif ftype == 'setback':
|
| 594 |
+
geom = shape(f['geometry'])
|
| 595 |
+
plot_outline(geom, color='red', linestyle='--', linewidth=0.8, alpha=0.7)
|
|
|
|
| 596 |
|
| 597 |
# 3. Draw Service / Technical Areas (Layer 2)
|
| 598 |
for f in features:
|
| 599 |
props = f['properties']
|
| 600 |
ftype = props.get('type')
|
| 601 |
+
geom = shape(f['geometry'])
|
| 602 |
|
| 603 |
if ftype == 'xlnt':
|
| 604 |
+
plot_geometry(geom, color='#b2dfdb', alpha=0.9)
|
| 605 |
+
ax.text(geom.centroid.x, geom.centroid.y, "XLNT", ha='center', fontsize=8, color='black', weight='bold')
|
|
|
|
| 606 |
elif ftype == 'service':
|
| 607 |
+
plot_geometry(geom, color='#d1c4e9', alpha=0.9)
|
| 608 |
+
ax.text(geom.centroid.x, geom.centroid.y, "Điều hành", ha='center', fontsize=8, color='black', weight='bold')
|
|
|
|
| 609 |
elif ftype == 'park':
|
| 610 |
+
plot_geometry(geom, color='#f6ffed', alpha=0.5)
|
| 611 |
+
plot_outline(geom, color='green', linewidth=0.5, linestyle=':')
|
|
|
|
| 612 |
|
| 613 |
# 4. Draw Electrical Infrastructure (Loop)
|
| 614 |
for f in features:
|
algorithms/frontend/requirements.txt
CHANGED
|
@@ -7,3 +7,4 @@ pandas==2.1.4
|
|
| 7 |
streamlit-drawable-canvas==0.9.3
|
| 8 |
matplotlib==3.8.2
|
| 9 |
shapely==2.0.2
|
|
|
|
|
|
| 7 |
streamlit-drawable-canvas==0.9.3
|
| 8 |
matplotlib==3.8.2
|
| 9 |
shapely==2.0.2
|
| 10 |
+
python-dotenv==1.0.0
|
examples/663409.dxf
ADDED
|
|
examples/930300.dxf
ADDED
|
|
examples/Lot Plan Bel air Technical Description.dxf
ADDED
|
|