diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..926ad9150d21d447579f276e3c14edae230cc4cd --- /dev/null +++ b/.dockerignore @@ -0,0 +1,32 @@ +# Ignore all node_modules everywhere +**/node_modules +**/.turbo +**/.next +**/dist +**/build +**/.bun +**/__pycache__ + +# Ignore VCS + metadata +.git +.gitignore +.vscode +.DS_Store + + + +# Docker-specific junk +docker-compose*.yml +Dockerfile +**/Dockerfile + +# Ignore environment files +.env +.env.* +!.env.example + +# Logs + temp +*.log +tmp +coverage +data \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..db007173dc725271251d0dab2850dc1a0034b0c5 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +*.{icns,png,jpg,zip,bin,model,pt} filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..beec0e1c71340d50e423170a2f5bf855018fa3c6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,64 @@ +# dependencies (bun install) +node_modules + +# output +out +dist +*.tgz + +# code coverage +coverage +*.lcov + +# logs +logs +_.log +report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# caches +.eslintcache +.cache +*.tsbuildinfo + +# IntelliJ based IDEs +.idea + +# Finder (MacOS) folder config +.DS_Store + +# Claude +CLAUDE.md +.claude/ + +*.pdf + + +.nx/cache +.nx/workspace-data +.cursor/rules/nx-rules.mdc +.github/instructions/nx.instructions.md + +**/*.log* +**/files/* + +/scripts/* +/docs/* +!/docs/od-architecture +/docs/od-architecture/implementation +!/docs/chaos +morpheus-data +/packages/controlmart/ui/docs +packages/controlmart/test_ods.sh +bun.lock +packages/controlmart/ui/bun.lock +.postman/ +postman/ +packages/controlmart/scripts/test-index-sync.sh +/packages/controlmart/src/docs/plans diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000000000000000000000000000000000000..6266224a284968f7ff3f542789400a89014d2b10 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,13 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Start ControlMart Dev Server", + "type": "shell", + "command": "bun run dev", + "group": "build", + "isBackground": true, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 567e307da884fed9df41fc0be3ea6d201b268e15..6d6856c0f5c50e9dbda4e4943928804262e592ca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1 +1,43 @@ -FROM public.ecr.aws/w3z5w3s0/skyfall/morpheus:latest +FROM oven/bun:latest AS builder + +WORKDIR /app + +COPY package.json bun.lock ./ +COPY packages/controlmart/package.json ./packages/controlmart/ +COPY packages/controlmart/ui/package.json ./packages/controlmart/ui/ + + +RUN bun install + +COPY . . 
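+# Build the UI bundle, then compile the server (build:binary produces morpheus-server)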
+ +WORKDIR /app/packages/controlmart/ui +RUN bun install +RUN bun run build + +WORKDIR /app/packages/controlmart + +RUN bun run build:binary + +FROM mongo:7 + +RUN apt-get update && apt-get install -y \ + curl \ + ca-certificates \ + netcat-openbsd \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +COPY --from=builder /app/packages/controlmart/morpheus-server ./morpheus-server +COPY --from=builder /app/packages/controlmart/dist/ui ./ui +COPY --from=builder /app/start-hf.sh ./start-hf.sh +RUN chmod +x start-hf.sh + +ENV NODE_ENV=production +ENV PORT=7860 +ENV HOST=0.0.0.0 +ENV MONGO_URI=mongodb://localhost:27017 +ENV DB_NAME=morpheus +EXPOSE 7860 +CMD ["./start-hf.sh"] diff --git a/deploy-public-hf.sh b/deploy-public-hf.sh new file mode 100755 index 0000000000000000000000000000000000000000..4e5ee64cd140d43e568bcae2f21d5984b3f0b768 --- /dev/null +++ b/deploy-public-hf.sh @@ -0,0 +1,60 @@ +#!/bin/bash +set -e + +ECR_REGISTRY="public.ecr.aws/w3z5w3s0/skyfall" +IMAGE_NAME="morpheus" +IMAGE_TAG="latest" +FULL_IMAGE_URI="$ECR_REGISTRY/$IMAGE_NAME:$IMAGE_TAG" + + +PUBLIC_REPO_DIR="../morpheus_public" +SOURCE_README="README.md" +DEST_README="$PUBLIC_REPO_DIR/README.md" + +echo "===============================================" +echo " Morpheus Public Deployment Script" +echo "===============================================" + +echo "[1/3] Building and Publishing to ECR..." +echo "Target: $FULL_IMAGE_URI" + +echo "Note: Ensure you are logged into ECR Public." + +echo "Building image..." +docker build --platform linux/amd64 -f Dockerfile.hf -t "$FULL_IMAGE_URI" . + +echo "Pushing image..." +docker push "$FULL_IMAGE_URI" +echo "ECR Publish Complete." + +echo "[2/3] Syncing Documentation..." +if [ -f "$SOURCE_README" ]; then + cp "$SOURCE_README" "$DEST_README" + echo "Copied $SOURCE_README to $DEST_README" +else + echo "Warning: $SOURCE_README not found in current directory." +fi + +echo "[3/3] Updating Public Repository..." + +if [ ! -d "$PUBLIC_REPO_DIR" ]; then + echo "Error: Directory $PUBLIC_REPO_DIR does not exist." + echo "Please clone the Hugging Face repository to $PUBLIC_REPO_DIR first." + exit 1 +fi + +cd "$PUBLIC_REPO_DIR" || exit + +if [[ -n $(git status -s) ]]; then + echo "Changes detected. Committing and pushing..." + git add . + git commit -m "Deploy update: $(date)" + git push + echo "Public Repo Updated." +else + echo "No changes to public repo files." +fi + +echo "===============================================" +echo " Deployment Complete!" +echo "===============================================" diff --git a/docs/chaos/README.md b/docs/chaos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..478c9e475819c5c405b570fe3afedc6e6598bf0b --- /dev/null +++ b/docs/chaos/README.md @@ -0,0 +1,104 @@ +# Chaos Engineering & Management + +## Overview + +This directory contains documentation for Morpheus's chaos engineering system - a comprehensive framework for injecting realistic failures into workflows to test AI agent resilience. 
+ +## The Problem + +Currently, chaos configuration is scattered across 14+ files with: +- Hardcoded probabilities in builders (0.0 to 0.8) +- Step-level inline configs with inconsistent values +- No central configuration or master kill-switch +- No environment-specific profiles +- Same workflow having different chaos in different places + +## The Solution + +A comprehensive chaos management system with: +- **Centralized Configuration**: Preset library with reusable chaos configurations +- **Environment Control**: Master kill-switch and environment-specific profiles +- **Multi-Level Configuration**: World → Capability → OD → Step cascade +- **Reproducibility**: Seeded randomness for deterministic chaos +- **Integration**: Seamlessly integrated with OD architecture + +## Documentation + +### [Chaos Management](./chaos-management.md) +Complete chaos management system design: +- Current problems and gaps +- Proposed architecture with priority cascade +- Centralized preset library (light, moderate, aggressive, realistic) +- Environment variables and master kill-switch +- World, capability, and OD-level configuration +- API endpoints for chaos control +- Migration strategy from scattered configs +- Best practices for researchers + +## Current Chaos Capabilities + +### 11 Chaos Scenario Types + +1. **data_corruption** - Corrupt field values (null, wrong type, invalid format, random value) +2. **missing_data** - Remove fields or records +3. **stale_data** - Age timestamps (simulate eventual consistency) +4. **format_change** - Schema evolution (rename fields, change types) +5. **permission_denied** - Authorization errors +6. **rate_limit** - API throttling with delays +7. **partial_data** - Return subset of results (pagination issues) +8. **duplicate_data** - Inject duplicate records +9. **invalid_state** - Set records to invalid states (deleted, suspended) +10. **dependency_failure** - Downstream service unavailability +11. **timing_issue** - Future timestamps (clock skew) + +## Quick Start + +### Using Presets + +```yaml +world: + chaos: + preset: "moderate" # Options: light, moderate, aggressive, realistic + seed: "reproducible-123" +``` + +### Custom Configuration + +```yaml +world: + chaos: + globalPolicy: + enabled: true + probability: 0.15 + scenarios: + - type: stale_data + weight: 10 + - type: rate_limit + weight: 5 +``` + +### Master Kill-Switch + +```bash +# Disable all chaos globally +export CHAOS_ENABLED=false +``` + +## Related Documentation + +- [Current Chaos Implementation](../03-chaos-engineering.md) - Existing chaos engine details +- [OD Architecture](../od-architecture/) - How chaos integrates with OD system +- [World Configuration](../od-architecture/05-sampling-world-config.md) - Configuring chaos per world + +## Status + +**Phase**: Design & Planning +**Last Updated**: 2025-11-14 + +## Next Steps + +1. Review chaos management design +2. Implement centralized registry and presets +3. Add environment variable controls +4. Migrate scattered chaos configs +5. Build chaos configuration API diff --git a/docs/chaos/chaos-management.md b/docs/chaos/chaos-management.md new file mode 100644 index 0000000000000000000000000000000000000000..5e735e03e290155d00a8b6fe8b168e8c55fdedd3 --- /dev/null +++ b/docs/chaos/chaos-management.md @@ -0,0 +1,808 @@ +# 07. Chaos Management + +## Overview + +Chaos engineering in Morpheus allows researchers to inject realistic failures into workflows to test AI agent resilience. 
However, the current chaos configuration is scattered across 14+ files, making it unmanageable. This document proposes a comprehensive chaos management system integrated with the broader OD architecture. + +## Current Problems + +### 1. Scattered Configuration + +**Hardcoded Probabilities**: +```typescript +// In different files: +GenericODBuilder: chaosProbability: 0.0 +EDI builder: chaosProbability: 0.1 +ERP builder: chaosProbability: 0.05 +CRM builder: chaosProbability: 0.05 +WMS builder: chaosProbability: 0.05 +``` + +**Step-Level Inline Configs**: +```typescript +// od-builders.edi.util.ts +EDI 850 generation: probability: 0.0 +Invoice generation: probability: 0.05 +Advanced ship notice: probability: 0.1 + +// od-builders-refactored.edi.util.ts +Refactored EDI 850: probability: 0.2 // Different value! +Invoice step: probability: 0.1 +ASN step: probability: 0.1 +``` + +**Demo/Test Specific**: +```typescript +// chaos-edi-demo.ts +probability: 0.8 // Very aggressive for demo +scenarios: 7 different chaos types + +// simple-edi-demo.ts +chaosProbability: 0.0 // Disabled +``` + +### 2. No Central Configuration + +- No single source of truth +- Same workflow has different chaos in different files +- No reusable scenario library +- Must edit code to change chaos + +### 3. No Environment Awareness + +- Same chaos runs in all environments +- No way to disable in production +- No environment-specific profiles (dev/staging/prod) + +### 4. No Master Kill-Switch + +- Can't globally disable chaos +- Must manually set probability to 0 everywhere +- Risk of leaving chaos enabled accidentally + +### 5. Inconsistent Behavior + +- Duplication: Similar scenarios defined multiple times +- Variation: Same operation has different chaos values +- Maintenance: Changing chaos requires editing multiple files + +## Chaos in the OD Architecture + +### Chaos as a Cross-Cutting Concern + +Chaos configuration exists at multiple levels: + +``` +┌─────────────────────────────────┐ +│ WORLD │ Global chaos policy +│ ├─ Global Chaos Policy │ (affects all capabilities in world) +│ └─ Chaos Presets │ +└──────────────┬──────────────────┘ + │ + ↓ +┌─────────────────────────────────┐ +│ CAPABILITY │ Capability-level overrides +│ └─ Chaos Override (optional) │ (e.g., Order Fulfillment always has 0.3 chaos) +└──────────────┬──────────────────┘ + │ + ↓ +┌─────────────────────────────────┐ +│ OD (Workflow) │ OD-level chaos config +│ ├─ Global OD Chaos Policy │ (applies to all steps in OD) +│ └─ Step-Level Overrides │ +└──────────────┬──────────────────┘ + │ + ↓ +┌─────────────────────────────────┐ +│ STEP │ Step-level chaos override +│ └─ Step Chaos Override │ (most specific, highest priority) +└─────────────────────────────────┘ +``` + +**Priority (highest to lowest)**: +1. Step-level override +2. OD-level policy +3. Capability-level override +4. World-level global policy +5. System defaults + +**Master Kill-Switch**: Environment variable overrides everything + +## Proposed Architecture + +### 1. 
Environment Variables (Master Control) + +```bash +# Master kill-switch +CHAOS_ENABLED=true|false # Override all chaos config + +# Global settings +CHAOS_ENV=development|staging|production +CHAOS_GLOBAL_PROBABILITY=0.1 # Default probability if not specified +CHAOS_GLOBAL_SEED=seed-123 # For reproducibility + +# Preset selection +CHAOS_PRESET=light|moderate|aggressive|custom +CHAOS_PRESET_FILE=/path/to/custom-preset.json + +# Telemetry +CHAOS_TELEMETRY_LEVEL=basic|detailed|verbose +``` + +**Priority**: Environment variables override all file-based configuration. + +### 2. Centralized Preset Library + +#### File Structure +``` +/config/ + /chaos-presets/ + default.json # System default + light.json # Low probability, common scenarios + moderate.json # Medium probability, diverse scenarios + aggressive.json # High probability, all scenarios + realistic.json # Real-world distribution + /domain/ + fulfillment.json # Fulfillment-specific chaos + inventory.json # Inventory-specific chaos + transportation.json # Transportation-specific chaos +``` + +#### Preset Format + +**Example: `light.json`** +```json +{ + "id": "light", + "name": "Light Chaos", + "description": "Low probability chaos for basic resilience testing", + "globalProbability": 0.05, + "scenarios": [ + { + "type": "stale_data", + "weight": 10, + "description": "Simulate eventual consistency delays", + "config": { + "staleDataAge": 30 + } + }, + { + "type": "rate_limit", + "weight": 5, + "description": "API throttling", + "config": { + "rateLimitDelay": 1000, + "rateLimitMessage": "Rate limit exceeded" + } + }, + { + "type": "missing_data", + "weight": 3, + "description": "Occasional missing records", + "config": { + "missingRecords": true + } + } + ] +} +``` + +**Example: `aggressive.json`** +```json +{ + "id": "aggressive", + "name": "Aggressive Chaos", + "description": "High probability chaos with all scenario types", + "globalProbability": 0.3, + "scenarios": [ + { + "type": "data_corruption", + "weight": 8, + "config": { + "corruptFields": ["*"], + "corruptionType": "random" + } + }, + { + "type": "missing_data", + "weight": 7, + "config": { + "missingFields": ["*"], + "missingRecords": true + } + }, + { + "type": "stale_data", + "weight": 6, + "config": { + "staleDataAge": 120 + } + }, + { + "type": "format_change", + "weight": 5, + "config": { + "changeType": "all" + } + }, + { + "type": "permission_denied", + "weight": 4, + "config": {} + }, + { + "type": "rate_limit", + "weight": 7, + "config": { + "rateLimitDelay": 3000 + } + }, + { + "type": "partial_data", + "weight": 6, + "config": { + "returnCount": 5 + } + }, + { + "type": "duplicate_data", + "weight": 5, + "config": { + "duplicateCount": 3 + } + }, + { + "type": "invalid_state", + "weight": 4, + "config": { + "invalidStates": ["deleted", "suspended"] + } + }, + { + "type": "dependency_failure", + "weight": 3, + "config": {} + }, + { + "type": "timing_issue", + "weight": 2, + "config": { + "timeSkewMinutes": 30 + } + } + ] +} +``` + +**Example: `realistic.json`** (based on real-world failure rates) +```json +{ + "id": "realistic", + "name": "Realistic Chaos", + "description": "Chaos distribution matching real-world failure rates", + "globalProbability": 0.08, + "scenarios": [ + { + "type": "stale_data", + "weight": 40, + "description": "Most common: eventual consistency" + }, + { + "type": "rate_limit", + "weight": 20, + "description": "Common: API throttling" + }, + { + "type": "timeout", + "weight": 15, + "description": "Common: network delays" + }, + { + "type": 
"partial_data", + "weight": 10, + "description": "Occasional: pagination issues" + }, + { + "type": "data_corruption", + "weight": 7, + "description": "Rare: data quality issues" + }, + { + "type": "missing_data", + "weight": 5, + "description": "Rare: data loss" + }, + { + "type": "permission_denied", + "weight": 2, + "description": "Very rare: auth failures" + }, + { + "type": "dependency_failure", + "weight": 1, + "description": "Very rare: service outages" + } + ] +} +``` + +### 3. Chaos Configuration Registry + +**TypeScript Service**: +```typescript +// src/config/chaos-config.registry.ts + +interface ChaosConfigRegistry { + // Presets + loadPreset(presetId: string): ChaosPolicy; + listPresets(): PresetMetadata[]; + createPreset(preset: ChaosPreset): void; + + // World-level + getWorldChaosPolicy(worldId: string): ChaosPolicy; + setWorldChaosPolicy(worldId: string, policy: ChaosPolicy): void; + + // Capability-level + getCapabilityChaos(capabilityId: string): ChaosPolicy | null; + setCapabilityChaos(capabilityId: string, policy: ChaosPolicy): void; + + // OD-level (runtime overrides) + getODChaos(odId: string): ChaosPolicy | null; + setODChaos(odId: string, policy: ChaosPolicy): void; + + // Resolution (apply priority rules) + resolveChaosPolicy(context: ChaosContext): ChaosPolicy; + + // Master switch + isChaosEnabled(): boolean; +} + +interface ChaosContext { + worldId: string; + capabilityId?: string; + odId: string; + stepId: string; + service?: string; + tool?: string; +} + +class ChaosConfigRegistryImpl implements ChaosConfigRegistry { + private presets: Map = new Map(); + private worldPolicies: Map = new Map(); + private capabilityOverrides: Map = new Map(); + private odOverrides: Map = new Map(); + + constructor() { + this.loadPresetsFromDisk(); + } + + resolveChaosPolicy(context: ChaosContext): ChaosPolicy { + // 0. Check master kill-switch + if (!this.isChaosEnabled()) { + return { enabled: false, probability: 0, scenarios: [] }; + } + + // Priority cascade (highest to lowest) + const policies = [ + // 1. Step-level (from OD definition) + this.getStepChaos(context.odId, context.stepId), + + // 2. OD-level + this.odOverrides.get(context.odId), + + // 3. Capability-level + context.capabilityId ? this.capabilityOverrides.get(context.capabilityId) : null, + + // 4. World-level + this.worldPolicies.get(context.worldId), + + // 5. Global preset (from env or default) + this.getGlobalPreset(), + ]; + + // Return first non-null policy + return policies.find(p => p !== null) || this.getDefaultPolicy(); + } + + isChaosEnabled(): boolean { + const envFlag = process.env.CHAOS_ENABLED; + if (envFlag !== undefined) { + return envFlag.toLowerCase() === 'true'; + } + return true; // Default: enabled + } +} +``` + +### 4. World Configuration Integration + +**World Chaos Config**: +```yaml +world: + id: "warehouse-research-001" + name: "Warehouse Automation Research" + + chaos: + # Option 1: Use preset + preset: "moderate" + + # Option 2: Custom global policy + globalPolicy: + enabled: true + probability: 0.15 + seed: "reproducible-seed-123" + scenarios: + - type: stale_data + weight: 10 + - type: rate_limit + weight: 5 + + # Option 3: Per-capability overrides + capabilityOverrides: + order-fulfillment: + probability: 0.3 # Higher chaos for this capability + scenarios: + - type: missing_data + weight: 10 + + inventory-check: + probability: 0.0 # No chaos for this capability + + # Option 4: Per-OD overrides (runtime) + odOverrides: + order-fulfillment-v1: + probability: 0.25 +``` + +### 5. 
API Endpoints + +```typescript +// Presets +GET /api/chaos/presets # List all presets +GET /api/chaos/presets/:presetId # Get preset details +POST /api/chaos/presets # Create custom preset +PUT /api/chaos/presets/:presetId # Update preset +DELETE /api/chaos/presets/:presetId # Delete preset + +// World-level chaos +GET /api/worlds/:worldId/chaos # Get world chaos config +PUT /api/worlds/:worldId/chaos # Update world chaos +POST /api/worlds/:worldId/chaos/preset/:presetId # Apply preset to world + +// Capability-level chaos +GET /api/capabilities/:capId/chaos # Get capability chaos override +PUT /api/capabilities/:capId/chaos # Set capability chaos override +DELETE /api/capabilities/:capId/chaos # Remove override + +// Runtime OD chaos +GET /api/ods/:odId/chaos # Get OD chaos config +PUT /api/ods/:odId/chaos # Set OD chaos override (runtime) +DELETE /api/ods/:odId/chaos # Remove override + +// Chaos status and testing +GET /api/chaos/status # Is chaos enabled? Global settings +POST /api/chaos/test # Test chaos injection (dry run) +GET /api/chaos/scenarios # List all scenario types +``` + +### 6. Migration Strategy + +#### Phase 1: Add Central Registry (No Breaking Changes) +1. Create `ChaosConfigRegistry` service +2. Load presets from JSON files +3. Add environment variable support +4. Keep existing inline configs working (backward compatible) + +#### Phase 2: Update Chaos Engine +1. Modify `chaos-engine.od.ts` to use registry +2. Implement priority cascade logic +3. Add master kill-switch check +4. Log which chaos policy was applied + +#### Phase 3: Migrate Existing Configs +1. Extract inline chaos configs to presets +2. Remove hardcoded probabilities from builders +3. Update tests to use registry +4. Add deprecation warnings for old patterns + +#### Phase 4: Cleanup +1. Remove old builder chaos parameters +2. Delete duplicate scenario definitions +3. Document new patterns +4. Provide migration guide + +## Example Usage Patterns + +### Pattern 1: Default Chaos (Preset) + +**Environment**: +```bash +CHAOS_ENABLED=true +CHAOS_PRESET=moderate +``` + +**Result**: All worlds use "moderate" preset unless overridden. + +--- + +### Pattern 2: Per-World Customization + +**World Config**: +```yaml +world: + name: "High Chaos Test" + chaos: + preset: "aggressive" +``` + +**Result**: This world has aggressive chaos, others use default. + +--- + +### Pattern 3: Capability-Specific Chaos + +**Capability Override**: +```typescript +chaosRegistry.setCapabilityChaos('order-fulfillment', { + enabled: true, + probability: 0.3, + scenarios: [ + { type: 'missing_data', weight: 10, config: { missingRecords: true } }, + { type: 'stale_data', weight: 8, config: { staleDataAge: 60 } }, + ] +}); +``` + +**Result**: Order fulfillment always has 30% chaos, other capabilities use world default. + +--- + +### Pattern 4: Environment-Based Profiles + +**Environment Variables**: +```bash +# Development +CHAOS_ENABLED=true +CHAOS_PRESET=aggressive +CHAOS_GLOBAL_PROBABILITY=0.3 + +# Staging +CHAOS_ENABLED=true +CHAOS_PRESET=moderate +CHAOS_GLOBAL_PROBABILITY=0.1 + +# Production +CHAOS_ENABLED=false +``` + +**Result**: Chaos automatically adjusts based on environment. + +--- + +### Pattern 5: Reproducible Experiments + +**Configuration**: +```yaml +world: + chaos: + preset: "moderate" + seed: "experiment-001-seed" +``` + +**Result**: Same seed produces identical chaos injections across runs. 
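+
+Conceptually, the seed drives every random draw the chaos engine makes, so replaying an OD with the same seed yields the same injection decisions. The sketch below illustrates one way this could work; the hash-based PRNG and helper names (`seededRandom`, `decideChaos`) are illustrative assumptions, not the actual `chaos-engine.od.ts` API.
+
+```typescript
+// Illustrative sketch only: not the real chaos-engine implementation.
+type WeightedScenario = { type: string; weight: number };
+
+// Derive a deterministic PRNG from the seed string (FNV-1a hash + mulberry32).
+function seededRandom(seed: string): () => number {
+  let h = 2166136261;
+  for (let i = 0; i < seed.length; i++) {
+    h ^= seed.charCodeAt(i);
+    h = Math.imul(h, 16777619);
+  }
+  let a = h >>> 0;
+  return () => {
+    a = (a + 0x6d2b79f5) >>> 0;
+    let t = a;
+    t = Math.imul(t ^ (t >>> 15), t | 1);
+    t ^= t + Math.imul(t ^ (t >>> 7), t | 61);
+    return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
+  };
+}
+
+// Decide deterministically whether a step gets chaos, and which weighted scenario.
+function decideChaos(
+  rand: () => number,
+  probability: number,
+  scenarios: WeightedScenario[]
+): WeightedScenario | null {
+  if (rand() >= probability) return null;
+  const total = scenarios.reduce((sum, s) => sum + s.weight, 0);
+  let roll = rand() * total;
+  for (const s of scenarios) {
+    roll -= s.weight;
+    if (roll <= 0) return s;
+  }
+  return scenarios[scenarios.length - 1] ?? null;
+}
+
+// Same seed + same step order => identical chaos decisions on every run.
+const rand = seededRandom("experiment-001-seed");
+decideChaos(rand, 0.15, [
+  { type: "stale_data", weight: 10 },
+  { type: "rate_limit", weight: 5 },
+]);
+```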
+ +--- + +### Pattern 6: Emergency Disable + +**Command**: +```bash +# Set environment variable and restart +export CHAOS_ENABLED=false + +# Or via API (if running) +curl -X PUT /api/chaos/status -d '{"enabled": false}' +``` + +**Result**: All chaos immediately disabled without code changes. + +## Chaos Telemetry & Observability + +### Chaos Injection Logging + +**Enhanced Logging**: +```typescript +{ + timestamp: "2025-11-14T10:30:45Z", + level: "info", + service: "ODFlow", + msg: "Chaos injected", + chaos: { + worldId: "world-123", + capabilityId: "order-fulfillment", + odId: "order-fulfillment-v1", + stepId: "step-2", + scenarioType: "stale_data", + configSource: "capability-override", // where policy came from + probability: 0.3, + seed: "experiment-001-seed", + modifications: { + staleDataAge: 60, + fieldsAffected: ["timestamp", "lastUpdated"] + } + } +} +``` + +### Chaos Metrics + +**Track**: +- Total chaos injections per world/capability/OD +- Injection rate over time +- Scenario type distribution +- Success/failure correlation with chaos +- Mean time between chaos events + +**API**: +```typescript +GET /api/chaos/metrics?worldId=world-123 +{ + totalInjections: 150, + injectionRate: 0.12, // actual vs configured + scenarioDistribution: { + stale_data: 60, + missing_data: 45, + rate_limit: 30, + ... + }, + impactAnalysis: { + odSuccessRate: 0.75, // with chaos enabled + odSuccessRateWithoutChaos: 0.95, // estimated baseline + meanRecoveryTimeMs: 1500 + } +} +``` + +## Best Practices for Researchers + +### 1. Start with Presets +```yaml +# Begin with pre-defined presets +chaos: + preset: "light" # Start simple + +# Progress to more chaos +chaos: + preset: "moderate" + +# Advanced testing +chaos: + preset: "aggressive" +``` + +### 2. Use Reproducible Seeds +```yaml +# Always specify seed for reproducibility +chaos: + preset: "moderate" + seed: "experiment-20251114-001" +``` + +### 3. Override Selectively +```yaml +# Use presets as baseline, override specific capabilities +chaos: + preset: "moderate" + capabilityOverrides: + critical-capability: + probability: 0.0 # No chaos for critical path +``` + +### 4. Document Chaos Configuration +```yaml +world: + name: "Experiment: Agent Resilience Test" + description: "Testing AI agent with realistic failure rates" + chaos: + preset: "realistic" + seed: "resilience-test-001" + # Document why this chaos config + chaosRationale: "Using realistic preset to match production failure rates" +``` + +### 5. 
Compare With and Without Chaos +```typescript +// Run baseline (no chaos) +const baselineWorld = { + chaos: { enabled: false } +}; + +// Run with chaos +const chaosWorld = { + chaos: { preset: "moderate", seed: "compare-001" } +}; + +// Compare results +const impact = compareChaosImpact(baselineResults, chaosResults); +``` + +## Implementation Checklist + +### Phase 1: Foundation (Week 1-2) +- [ ] Create `ChaosConfigRegistry` service +- [ ] Define preset JSON schema +- [ ] Create 5 standard presets (light, moderate, aggressive, realistic, custom) +- [ ] Add environment variable support +- [ ] Implement master kill-switch + +### Phase 2: Integration (Week 3-4) +- [ ] Update `chaos-engine.od.ts` to use registry +- [ ] Implement priority cascade logic +- [ ] Add world-level chaos configuration +- [ ] Add capability-level overrides +- [ ] Enhance chaos telemetry logging + +### Phase 3: API (Week 5) +- [ ] Build chaos configuration API endpoints +- [ ] Add preset management endpoints +- [ ] Add chaos status/testing endpoints +- [ ] Create metrics aggregation endpoint + +### Phase 4: Migration (Week 6-7) +- [ ] Extract inline chaos to presets +- [ ] Update all builders to use registry +- [ ] Migrate test files +- [ ] Add backward compatibility layer +- [ ] Add deprecation warnings + +### Phase 5: Documentation (Week 8) +- [ ] Write chaos configuration guide +- [ ] Create preset cookbook +- [ ] Document migration path +- [ ] Add API documentation +- [ ] Create tutorial videos/examples + +## Open Questions + +### Q1: Preset Versioning +**Question**: Should presets be versioned? + +**Options**: +- A. No versioning (mutable presets) +- B. Semantic versioning (e.g., `moderate-v1.2.0`) +- C. Immutable presets (new ID for changes) + +**Impact**: Reproducibility, backward compatibility + +--- + +### Q2: Custom Scenario Creation +**Question**: Can researchers create custom chaos scenarios (new types)? + +**Options**: +- A. No - limited to 11 existing types +- B. Yes - via preprocessInput/postprocessOutput hooks +- C. Yes - full extension API for new scenario types + +**Impact**: Flexibility vs complexity + +--- + +### Q3: Chaos Scheduling +**Question**: Should chaos be time-based or event-based? + +**Options**: +- A. Probabilistic only (current approach) +- B. Add temporal patterns (increase chaos over time, specific time windows) +- C. Event-driven (inject chaos after N successful operations) + +**Impact**: Research sophistication, implementation complexity + +--- + +## Related Documents + +- [02. Conceptual Model](./02-conceptual-model.md) - OD architecture +- [05. Sampling & World Config](./05-sampling-world-config.md) - World configuration +- [06. Open Questions](./06-open-questions.md) - Unresolved decisions +- Main docs: [03-chaos-engineering.md](../03-chaos-engineering.md) - Current chaos implementation diff --git a/docs/od-architecture/01-current-state.md b/docs/od-architecture/01-current-state.md new file mode 100644 index 0000000000000000000000000000000000000000..e8afc66e7d090238902bc5052b1d1650930498ef --- /dev/null +++ b/docs/od-architecture/01-current-state.md @@ -0,0 +1,516 @@ +# 01. 
Current State Analysis + +## System Inventory + +### Services & Tools + +Morpheus currently simulates 4 main enterprise services with approximately **162 tools/APIs**: + +| Service | Tool Count | Purpose | +|---------|-----------|---------| +| **WMS** (Warehouse Management) | 64 | Inventory, receiving, picking, packing, putaway, cycle counting | +| **ERP** (Enterprise Resource Planning) | 34 | Orders, products, customers, pricing, purchase orders | +| **TMS** (Transportation Management) | 33 | Shipments, carriers, routes, tracking | +| **EDI** (Electronic Data Interchange) | 15 | Document generation (850, 810, 856), parsing, acknowledgments | +| **Logs** | 5 | Log querying and analysis | +| **OD Management** | 4 | OD execution and status | +| **World/Registry** | 7 | World management, service registration | + +**Service Registration**: `/packages/controlmart/src/routes/registry.route.ts` +- Routes mounted by domain: `/:worldId/wms`, `/:worldId/tms`, `/:worldId/edi` +- Services exposed as REST endpoints (not pure MCP servers yet) + +### Example Service Tools + +**WMS Sample Tools:** +``` +- inventory/get-all +- inventory/get-by-id +- inventory/update-quantity +- receiving/create-appointment +- receiving/process-receipt +- picking/create-wave +- picking/assign-picker +- putaway/create-task +- cycle-count/initiate +``` + +**ERP Sample Tools:** +``` +- companies/create-company +- companies/get-by-id +- companies/get-mpc-company +- companies/bulk-upsert +- products/create-product +- products/get-by-sku +- products/get-random +- products/bulk-upsert +- orders/create-order +- orders/get-by-po-number +- orders/update-status +- orders/delete-order +- invoices/create-invoice +- invoices/get-by-number +- invoices/update-status +- shipments/create-shipment +- shipments/update-tracking +- shipments/add-event +- shipments/add-document +- payments/create-payment +- payments/apply-allocations +- payments/update-status +``` + +**TMS Sample Tools:** +``` +- shipments/create +- shipments/track +- carriers/assign +- routes/optimize +- deliveries/schedule +``` + +## Current OD Patterns + +### Existing Operational Descriptors + +ODs are currently defined programmatically using builder patterns. Here are representative examples: + +#### 1. Inbound Receiving Workflow +**Location**: `/packages/controlmart/src/examples/simple-wms-od.ts` + +**Purpose**: Complete inbound receiving process from shipment to putaway + +**Flow**: +``` +TMS: Shipment Tender + ↓ +TMS: In-Transit Tracking + ↓ +WMS: Dock Appointment + ↓ +WMS: Receiving + ↓ +WMS: Putaway +``` + +**Characteristics**: +- Multi-service orchestration (TMS → WMS) +- Sequential steps with dependencies +- Data flows between services +- ~5-7 steps total + +#### 2. Purchase Order EDI Processing +**Location**: `/packages/controlmart/src/utils/edi/od-builders-refactored.edi.util.ts` + +**Purpose**: Process EDI 850 purchase order documents + +**Flow**: +``` +Generate PO Data + ↓ +Generate EDI 850 Document + ↓ +Send EDI Acknowledgment (997) + ↓ +Generate Invoice (EDI 810) +``` + +**Characteristics**: +- Data transformation heavy +- Multi-format handling (JSON → EDI) +- Document validation +- ~4-6 steps + +#### 3. 
Generic WMS Workflows +**Location**: `/packages/controlmart/src/utils/wms/od-builder.wms.util.ts` + +**Purpose**: Template-based WMS operations + +**Workflow Types**: +- `inbound`: Receiving and putaway +- `outbound`: Picking and shipping +- `cycle_count`: Inventory auditing +- `replenishment`: Stock transfer + +**Characteristics**: +- Builder pattern with fluent API +- Configurable chaos injection +- Service-specific (WMS only) + +#### 4. Business Rules Triggered OD +**Location**: `/packages/controlmart/src/business-rules/actions/trigger-od.action.ts` + +**Purpose**: Execute OD when business rule fires + +**Example**: Auto-allocate inventory when new order created + +**Characteristics**: +- Event-driven +- Connects business rules → OD execution +- Async execution + +### OD Structure & Components + +**Core Files**: +- `/packages/controlmart/src/operational-descriptor/executor.od.ts` - Main executor +- `/packages/controlmart/src/operational-descriptor/run-step.od.ts` - Step execution logic +- `/packages/controlmart/src/operational-descriptor/generic-builder.od.ts` - Builder pattern +- `/packages/controlmart/src/operational-descriptor/schema.od.ts` - JSON schema validation +- `/packages/controlmart/src/operational-descriptor/chaos-engine.od.ts` - Chaos injection + +**Step Types**: +1. **MCP Step**: Call service tool (most common) +2. **Map Step**: Parallel iteration over arrays +3. **Script Step**: Execute JavaScript code +4. **Noop Step**: No-op placeholder + +**OD Properties**: +```typescript +{ + id: string, + name: string, + version: string, + description?: string, + + // Global chaos config + chaos?: ChaosPolicy, + + // Workflow steps + steps: Step[], + + // Success criteria + assertions?: Assertion[], + + // Metadata + tags?: string[], + metadata?: Record +} +``` + +**Step Properties**: +```typescript +{ + id: string, + type: "mcp" | "map" | "script" | "noop", + + // For MCP steps + service?: string, + tool?: string, + input?: InputBinding, + output?: OutputBinding, + + // Resilience + retry?: RetryPolicy, + chaos?: ChaosPolicy, // Step-level override + + // Conditional execution + condition?: string, + + // Validation + assertions?: Assertion[] +} +``` + +## Current Organization Patterns + +### 1. By Service (Primary) + +ODs and tools are primarily organized by which service they interact with: + +``` +/packages/controlmart/src/ + /routes/ + wms.route.ts # All WMS endpoints + erp.routes.ts # All ERP endpoints + tms.route.ts # All TMS endpoints + edi.route.ts # All EDI endpoints + /utils/ + /wms/ + service-tools.wms.util.ts + od-builder.wms.util.ts + /edi/ + service-tools.edi.util.ts + od-builders.edi.util.ts + od-builders-refactored.edi.util.ts + /tms/ + service-tools.tms.util.ts +``` + +**Pros**: +- Clear ownership +- Easy to find service-specific logic +- Modular structure + +**Cons**: +- Cross-service workflows scattered +- No semantic grouping +- Hard to discover "what can I do?" + +### 2. 
By Workflow Type (Secondary) + +Some OD builders organize by workflow category: + +**WmsODBuilderFactory** workflows: +- `inbound`: Receiving operations +- `outbound`: Fulfillment operations +- `cycle_count`: Audit operations +- `replenishment`: Transfer operations + +**GenericODBuilderFactory** services: +- `edi`: EDI document processing +- `erp`: Order management +- `wms`: Warehouse operations +- `tms`: Transportation operations +- `multi-service`: Cross-service workflows + +**Pros**: +- Some semantic meaning +- Groups related operations + +**Cons**: +- Limited vocabulary +- Inconsistent across services +- Not extensible + +### 3. By Business Domain (Emerging) + +Business rules show domain-based organization: + +**Domains**: `WMS`, `ERP`, `TMS`, `EDI` + +**Target Collections**: +- WMS: `Inventory`, `Order`, `Receiving`, `Picking` +- ERP: `Order`, `Product`, `Customer`, `PurchaseOrder` +- TMS: `Shipment`, `Carrier`, `Route` + +**Pros**: +- Aligns with business concepts +- Clear data ownership + +**Cons**: +- Only used in business rules +- Not extended to ODs +- No cross-domain taxonomy + +## Critical Gaps + +### 1. No Persona Model + +**Current State**: +- No formal persona or role system +- Some role attributes scattered in models: + - `personality` field in customer/employee types + - `role` field in WMS labor user model + - Hardcoded "Skyfall Automation Bot" reference + +**Missing**: +- Persona definitions (Store Manager, Warehouse Worker, etc.) +- Capability matrix (who can do what) +- Access control model +- Persona-based OD filtering + +**Impact**: +- Can't answer: "What can a store manager do?" +- No way to configure worlds by persona +- Unclear authorization model + +### 2. No OD Registry/Catalog + +**Current State**: +- ODs scattered across builder files +- No central inventory +- Must read code to discover ODs +- No metadata or discoverability + +**Missing**: +- Centralized OD registry +- Searchable catalog +- Metadata (tags, description, inputs, outputs) +- Version management + +**Impact**: +- Can't answer: "What ODs exist?" +- Researchers must know code structure +- No programmatic discovery +- Hard to share/reuse ODs + +### 3. No Capability Mapping + +**Current State**: +- Tools exist but not semantically organized +- ODs exist but not categorized by function +- No mapping of tool → capability → persona + +**Missing**: +- Capability taxonomy +- Tool-to-capability mapping +- Capability-to-persona mapping +- Dependency analysis + +**Impact**: +- Can't answer: "What capabilities exist?" +- Can't suggest ODs based on available tools +- No validation of OD feasibility +- Manual OD creation only + +### 4. No Semantic Organization + +**Current State**: +- Organization by service/workflow type only +- No domain taxonomy (procurement, fulfillment, etc.) +- No complexity classification +- No tags or metadata + +**Missing**: +- Multi-dimensional taxonomy +- Tag system +- Filtering/search capabilities +- Hierarchical organization + +**Impact**: +- Hard to browse/discover +- No researcher-friendly navigation +- Can't filter by domain, complexity, etc. +- Overwhelming for newcomers + +### 5. 
Chaos Configuration Scattered + +**Current State**: +- Hardcoded probabilities in 14+ files +- Different values for similar workflows +- No central configuration +- No master kill-switch + +**Specific Issues**: +- EDI builder: `chaosProbability: 0.1` +- ERP builder: `chaosProbability: 0.05` +- EDI 850 step: `probability: 0.0` +- Refactored EDI 850: `probability: 0.2` +- Chaos demo: `probability: 0.8` with 7 scenarios + +**Missing**: +- Centralized chaos presets +- Environment-based configuration +- Global master switch +- Reusable scenario library + +**Impact**: +- Chaos management unmanageable (original problem!) +- Inconsistent behavior +- Hard to reproduce experiments +- Must edit code to change chaos + +### 6. No World Configuration System + +**Current State**: +- Worlds have all services/tools available +- No way to limit scope +- No sampling mechanism +- All-or-nothing approach + +**Missing**: +- Capability filtering +- OD sampling strategies +- Domain-specific worlds +- Complexity-based filtering + +**Impact**: +- Can't create specialized worlds (warehouse-only, retail-only) +- Can't simplify for experiments +- No way to control scope +- Overwhelming for simple tests + +### 7. No Experiment Tracking + +**Current State**: +- OD execution results returned via API +- No persistent run history +- Manual metrics tracking in demo scripts +- No configuration snapshots + +**Missing**: +- Run history database +- Configuration versioning +- Result comparison tools +- Reproducibility support + +**Impact**: +- Can't compare experiments +- Hard to reproduce issues +- No learning from past runs +- Manual data collection required + +## Technology Stack + +**Language**: TypeScript/Node.js + +**Framework**: Express.js + +**Database**: MongoDB (Mongoose ODM) + +**Validation**: JSON Schema, Zod (partial), JMESPath, JSONata, CEL + +**Logging**: Pino (structured logging) + +**Architecture**: Monorepo (packages/controlmart) + +## Key Codebase Locations + +### Type Definitions +- `/packages/controlmart/src/types/od.type.ts` - OD types, chaos types, step types +- `/packages/controlmart/src/types/service-tools.type.ts` - Service tool definitions + +### Core OD System +- `/packages/controlmart/src/operational-descriptor/executor.od.ts` - Main executor +- `/packages/controlmart/src/operational-descriptor/run-step.od.ts` - Step runner +- `/packages/controlmart/src/operational-descriptor/chaos-engine.od.ts` - Chaos injection +- `/packages/controlmart/src/operational-descriptor/generic-builder.od.ts` - Builder API +- `/packages/controlmart/src/operational-descriptor/schema.od.ts` - Schema validation + +### Service Implementations +- `/packages/controlmart/src/routes/wms.route.ts` - WMS REST API +- `/packages/controlmart/src/routes/erp.routes.ts` - ERP REST API +- `/packages/controlmart/src/routes/tms.route.ts` - TMS REST API +- `/packages/controlmart/src/routes/edi.route.ts` - EDI REST API + +### Service Tools +- `/packages/controlmart/src/utils/wms/service-tools.wms.util.ts` - WMS operations +- `/packages/controlmart/src/utils/edi/service-tools.edi.util.ts` - EDI operations +- `/packages/controlmart/src/utils/tms/service-tools.tms.util.ts` - TMS operations + +### OD Builders +- `/packages/controlmart/src/utils/wms/od-builder.wms.util.ts` - WMS OD builder +- `/packages/controlmart/src/utils/edi/od-builders.edi.util.ts` - EDI OD builders +- `/packages/controlmart/src/utils/edi/od-builders-refactored.edi.util.ts` - Refactored EDI builders + +### Business Rules +- 
`/packages/controlmart/src/business-rules/rule-engine.ts` - Rule execution engine +- `/packages/controlmart/src/business-rules/rules/wms-rules.ts` - WMS-specific rules +- `/packages/controlmart/src/business-rules/actions/trigger-od.action.ts` - OD triggering + +### Examples & Tests +- `/packages/controlmart/src/examples/simple-wms-od.ts` - Simple WMS example +- `/packages/controlmart/src/examples/generic-builder-examples.ts` - Generic builder examples +- `/packages/controlmart/src/edi-demos/chaos-edi-demo.ts` - Chaos demo +- `/packages/controlmart/tests/generic-builder.test.ts` - Builder tests +- `/packages/controlmart/tests/chaos.od.test.ts` - Chaos tests + +## Summary + +**Strengths**: +- Robust OD execution engine +- Sophisticated chaos injection framework +- Multiple services with realistic operations +- Business rules integration +- Comprehensive logging + +**Weaknesses**: +- No persona model or capability mapping +- ODs scattered with no central registry +- No semantic organization or taxonomy +- Chaos configuration unmanageable +- No world configuration/sampling +- No experiment tracking + +**Next Steps**: Define conceptual model to address these gaps (see [02-conceptual-model.md](./02-conceptual-model.md)) diff --git a/docs/od-architecture/02-conceptual-model.md b/docs/od-architecture/02-conceptual-model.md new file mode 100644 index 0000000000000000000000000000000000000000..b46ad15a5247ad61c2d5e12c307d5da94962e058 --- /dev/null +++ b/docs/od-architecture/02-conceptual-model.md @@ -0,0 +1,553 @@ +# 02. Conceptual Model + +## Overview + +This document proposes a conceptual model for organizing Operational Descriptors (ODs) around **capabilities** and **personas**, rather than just services and workflows. + +## Proposed Architecture Layers + +``` +┌─────────────────────────────────────────┐ +│ PERSONA │ Who performs actions +│ (Store Manager, Warehouse Worker) │ +└──────────────────┬──────────────────────┘ + │ has access to + ↓ +┌─────────────────────────────────────────┐ +│ CAPABILITY │ What can be done +│ (Order Fulfillment, Inventory Mgmt) │ +└──────────────────┬──────────────────────┘ + │ implemented by + ↓ +┌─────────────────────────────────────────┐ +│ OPERATIONAL DESCRIPTOR (OD) │ How it's done +│ (Workflow definition) │ +└──────────────────┬──────────────────────┘ + │ composed of + ↓ +┌─────────────────────────────────────────┐ +│ TOOL / API │ Atomic operations +│ (getOrder, updateInventory) │ +└──────────────────┬──────────────────────┘ + │ exposed by + ↓ +┌─────────────────────────────────────────┐ +│ SERVICE │ System boundaries +│ (ERP, WMS, TMS, EDI) │ +└─────────────────────────────────────────┘ +``` + +## Layer Definitions + +### 1. Service Layer + +**Definition**: A simulated enterprise system that provides tools/APIs. + +**Examples**: +- ERP (Enterprise Resource Planning) +- WMS (Warehouse Management System) +- TMS (Transportation Management System) +- EDI (Electronic Data Interchange) + +**Responsibilities**: +- Data storage (MongoDB collections) +- Business logic enforcement +- API exposure +- State management + +**Current Implementation**: ✅ Well-defined +- Each service has its own routes, models, and utilities +- ~162 tools across 4 main services + +### 2. Tool Layer + +**Definition**: An atomic API operation that reads or modifies data in a single service. 
+ +**Examples**: +- WMS: `inventory/update-quantity` +- ERP: `orders/create` +- TMS: `shipments/track` +- EDI: `generate-850-document` + +**Characteristics**: +- Single responsibility +- Service-scoped +- Stateless operation +- Input/output contract + +**Current Implementation**: ✅ Well-defined +- Exposed as REST endpoints +- Clear input/output schemas +- Comprehensive coverage + +### 3. Operational Descriptor (OD) Layer + +**Definition**: A declarative workflow that orchestrates multiple tools to accomplish an end-to-end business process. + +**Examples**: +- "Inbound Receiving Workflow" (TMS shipment → WMS receiving → WMS putaway) +- "Order Fulfillment" (ERP order → WMS picking → WMS packing → TMS shipping) +- "EDI 850 Processing" (Generate PO → Create EDI doc → Send ACK → Create invoice) + +**Characteristics**: +- Multi-tool orchestration +- Can span multiple services +- Contains steps, assertions, retry logic, chaos config +- Declarative (can be serialized to JSON/YAML) + +**Current Implementation**: ⚠️ Partially defined +- Strong execution engine +- Builder patterns for construction +- **Missing**: Central registry, versioning, metadata + +### 4. Capability Layer + +**Definition**: A semantic business function or process that has meaning to domain experts and end users. + +**Examples**: +- "Order Fulfillment" +- "Inventory Management" +- "Shipment Tracking" +- "Purchase Order Processing" +- "Cycle Counting" + +**Characteristics**: +- Business-oriented naming +- Domain-specific +- May have multiple implementations (simple vs complex) +- Discoverable and browsable + +**Relationship to ODs**: +- **Option A (1:1)**: Each capability has exactly one canonical OD +- **Option B (1:N)**: One capability can have multiple OD variants + - Example: "Order Fulfillment" could have: + - `order-fulfillment-standard` (5 steps, 2 services) + - `order-fulfillment-express` (3 steps, aggressive SLAs) + - `order-fulfillment-with-validation` (8 steps, extensive checks) +- **Option C (N:M)**: Capabilities can share ODs or be composed + +**Current Implementation**: ❌ Not defined +- Capabilities are implicit in OD names +- No formal capability model +- No metadata or taxonomy + +### 5. Persona Layer + +**Definition**: A role or actor in the system that has access to specific capabilities. 
+ +**Examples**: +- **Store Manager**: Order management, inventory oversight, staff coordination +- **Warehouse Worker**: Receiving, picking, packing, putaway +- **Logistics Coordinator**: Shipment planning, carrier management, route optimization +- **Purchasing Agent**: Purchase order creation, supplier management +- **Inventory Auditor**: Cycle counting, variance resolution + +**Characteristics**: +- Represents real-world roles +- Has capability permissions +- Can be used for access control +- Enables persona-based world configuration + +**Relationship to Capabilities**: +- Personas have many capabilities +- Capabilities can be shared across personas +- Permission matrix: `Persona × Capability → Boolean` + +**Current Implementation**: ❌ Not defined +- No persona model +- Some role attributes scattered in data models +- No permission system + +## Key Relationships + +### Persona ↔ Capability + +**Relationship Type**: Many-to-Many + +**Examples**: +``` +Store Manager has: + - Order Fulfillment ✓ + - Inventory Management ✓ + - Staff Coordination ✓ + - Warehouse Receiving ✗ + +Warehouse Worker has: + - Warehouse Receiving ✓ + - Picking & Packing ✓ + - Putaway ✓ + - Order Fulfillment ✗ (limited scope) +``` + +**Implementation Options**: +- **Static**: Hardcoded in configuration files +- **Dynamic**: Stored in database, configurable via API +- **Hybrid**: Default mappings with override capability + +### Capability ↔ OD + +**Relationship Type**: TBD (see options above) + +**Option A: 1:1 Mapping** +``` +Capability "Order Fulfillment" → OD "order-fulfillment-v1" +``` + +**Pros**: +- Simple +- Clear ownership +- Easy to reason about + +**Cons**: +- Inflexible +- Can't have variants +- Complexity hidden inside OD + +**Option B: 1:N Mapping (Variants)** +``` +Capability "Order Fulfillment" → [ + OD "order-fulfillment-standard", + OD "order-fulfillment-express", + OD "order-fulfillment-international" +] +``` + +**Pros**: +- Flexible +- Can optimize for different scenarios +- Clear trade-offs between variants + +**Cons**: +- More complex +- Need selection logic +- Versioning challenges + +**Option C: N:M Mapping (Composition)** +``` +Capability "Order Fulfillment" → OD "order-fulfillment" +Capability "Inventory Check" → OD "inventory-check" + +OD "order-fulfillment" uses OD "inventory-check" as sub-workflow +``` + +**Pros**: +- Maximum reusability +- Modular design +- Hierarchical composition + +**Cons**: +- Most complex +- Circular dependency risk +- Harder to reason about + +### OD ↔ Tool + +**Relationship Type**: Many-to-Many + +**Characteristics**: +- ODs reference tools in steps +- Same tool can be used in multiple ODs +- Tools can appear multiple times in same OD + +**Current Implementation**: ✅ Well-defined +```typescript +{ + step: { + type: "mcp", + service: "wms", + tool: "inventory/update-quantity", + input: { ... 
} + } +} +``` + +### Tool ↔ Service + +**Relationship Type**: Many-to-One + +**Characteristics**: +- Each tool belongs to exactly one service +- Services expose many tools +- Clear boundary + +**Current Implementation**: ✅ Well-defined + +## Data Entities & Dependencies + +### Data Flow + +Tools produce and consume **data entities**: + +**Common Entities**: +- Order +- Product +- Inventory +- Shipment +- Customer +- Location +- Document (EDI) + +**Dependencies**: +``` +Tool "orders/create" produces: Order +Tool "inventory/allocate" requires: Order, Product +Tool "picking/create-wave" requires: Order, Inventory +Tool "shipments/create" requires: Order, Location +``` + +**Use Case**: Knowledge graph can validate OD feasibility +- Can we execute this OD with available tools? +- What data is needed to start this workflow? +- What data will be produced? + +## Metadata & Attributes + +### Capability Metadata + +**Proposed Schema**: +```typescript +{ + id: string, // "order-fulfillment" + name: string, // "Order Fulfillment" + description: string, // "Process customer orders..." + domain: string, // "fulfillment" + complexity: "simple" | "medium" | "complex", + tags: string[], // ["retail", "b2c", "warehouse"] + personas: string[], // ["store-manager", "fulfillment-specialist"] + ods: string[], // ["order-fulfillment-v1", ...] + requiredServices: string[], // ["erp", "wms", "tms"] + estimatedDuration: number, // milliseconds + version: string // "1.0.0" +} +``` + +### OD Metadata + +**Current Schema** (from od.type.ts): +```typescript +{ + id: string, + name: string, + version: string, + description?: string, + steps: Step[], + chaos?: ChaosPolicy, + assertions?: Assertion[], + metadata?: Record +} +``` + +**Proposed Additions**: +```typescript +{ + // ... existing fields ... + + // New metadata + capability?: string, // "order-fulfillment" + domain?: string, // "fulfillment" + complexity?: "simple" | "medium" | "complex", + tags?: string[], // ["retail", "standard-shipping"] + requiredServices?: string[], // ["erp", "wms"] + personas?: string[], // ["store-manager"] + estimatedDuration?: number, // milliseconds + author?: string, // "system" or researcher name + createdAt?: Date, + updatedAt?: Date +} +``` + +### Persona Metadata + +**Proposed Schema**: +```typescript +{ + id: string, // "store-manager" + name: string, // "Store Manager" + description: string, // "Manages store operations..." + department: string, // "retail" + capabilities: string[], // ["order-fulfillment", ...] + accessLevel: "basic" | "advanced" | "admin", + tags: string[] // ["management", "retail"] +} +``` + +## Design Questions + +### 1. Capability Definition + +**Question**: What exactly is a capability? + +**Options**: +- **A. Business Function**: High-level processes ("Order Management") +- **B. User Story**: Goal-oriented tasks ("Fulfill a customer order") +- **C. Domain Process**: Technical workflows ("Inbound Receiving Flow") + +**Trade-offs**: +- A: Too broad, may contain multiple workflows +- B: Very specific, may explode in count +- C: Technical, less accessible to non-engineers + +**Recommendation Needed**: Which resonates with your target users (AI researchers)? + +### 2. Capability ↔ OD Mapping + +**Question**: Can one capability have multiple OD implementations? + +**Scenarios**: +- Simple vs complex variants +- Different optimization targets (speed vs accuracy) +- Evolution over time (v1, v2, v3) + +**Options**: +- **A. 1:1**: One capability = one canonical OD +- **B. 
1:N**: One capability = multiple OD variants (with selection logic) +- **C. N:M**: Capabilities compose and share ODs + +**Recommendation Needed**: Which provides the right flexibility vs complexity? + +### 3. Persona Granularity + +**Question**: How detailed should personas be? + +**Options**: +- **A. Broad** (5-10 personas): Manager, Worker, Coordinator, Analyst +- **B. Detailed** (20-50 personas): Store Manager, DC Manager, Warehouse Manager, etc. +- **C. Functional** (50+ personas): Receiving Clerk, Picking Specialist, QA Inspector + +**Trade-offs**: +- A: Simple, easy to configure, coarse-grained permissions +- B: Balance of specificity and manageability +- C: Highly realistic, but complex to manage + +**Recommendation Needed**: What level of detail is useful for research? + +### 4. Static vs Dynamic Configuration + +**Question**: Should persona ↔ capability mappings be configurable? + +**Options**: +- **A. Static**: Hardcoded in config files, version controlled +- **B. Dynamic**: Stored in database, editable via API +- **C. Hybrid**: Defaults in config, overridable per world + +**Use Cases**: +- Researcher wants custom persona for experiment +- Need to restrict capabilities for specific test +- Want to evolve personas without code changes + +**Recommendation Needed**: How much runtime configurability is needed? + +### 5. Hierarchy & Composition + +**Question**: Should capabilities have hierarchies or compositions? + +**Examples**: +``` +Parent: "Order Management" + Children: + - "Create Order" + - "Fulfill Order" + - "Cancel Order" + - "Track Order" +``` + +or + +``` +Capability: "Fulfill Order" + Requires: + - "Check Inventory" (sub-capability) + - "Create Shipment" (sub-capability) +``` + +**Trade-offs**: +- Hierarchies: Better organization, but more complex +- Flat: Simpler, but harder to browse +- Composition: Enables reuse, but adds dependencies + +**Recommendation Needed**: Is flat structure sufficient, or do we need hierarchies? + +## Example: Order Fulfillment + +Let's walk through a concrete example: + +### Personas +``` +Store Manager: + - Can execute order fulfillment + - Can view inventory + - Can manage staff + +Warehouse Worker: + - Can pick orders + - Can pack shipments + - Cannot create orders +``` + +### Capability +``` +ID: order-fulfillment +Name: Order Fulfillment +Description: Process a customer order from creation to shipment +Domain: fulfillment +Complexity: medium +Tags: [retail, standard-shipping] +Personas: [store-manager] +``` + +### ODs (Variant Approach) +``` +OD: order-fulfillment-standard + Steps: + 1. ERP: Create order + 2. WMS: Allocate inventory + 3. WMS: Create pick wave + 4. WMS: Assign picker + 5. WMS: Create shipment + 6. TMS: Assign carrier + 7. TMS: Generate shipping label + +OD: order-fulfillment-express + Steps: + 1. ERP: Create order (skip validation) + 2. WMS: Auto-allocate + 3. WMS: Create priority shipment + 4. TMS: Assign premium carrier +``` + +### Tools Used +``` +- erp/companies/create-company +- erp/orders/create-order +- erp/invoices/create-invoice +- erp/shipments/create-shipment +- erp/payments/create-payment +- wms/inventory/allocate +- wms/picking/create-wave +- wms/picking/assign-picker +- wms/shipments/create +- tms/carriers/assign +- tms/labels/generate +``` + +### Services Required +``` +- ERP +- WMS +- TMS +``` + +## Next Steps + +1. **Answer design questions** (see above) +2. **Define capability taxonomy** (see [04-taxonomy-organization.md](./04-taxonomy-organization.md)) +3. 
**Design knowledge graph** (see [03-knowledge-graph.md](./03-knowledge-graph.md)) +4. **Create persona catalog** (define 10-20 personas) +5. **Map existing ODs** to capabilities + +## Related Documents + +- [01. Current State](./01-current-state.md) - What exists today +- [03. Knowledge Graph](./03-knowledge-graph.md) - Relationship modeling +- [04. Taxonomy & Organization](./04-taxonomy-organization.md) - Categorization strategy +- [06. Open Questions](./06-open-questions.md) - Unresolved decisions diff --git a/docs/od-architecture/03-knowledge-graph.md b/docs/od-architecture/03-knowledge-graph.md new file mode 100644 index 0000000000000000000000000000000000000000..fe0fdcd2c58ce048afedfc5a1a87ee50af59f56e --- /dev/null +++ b/docs/od-architecture/03-knowledge-graph.md @@ -0,0 +1,689 @@ +# 03. Knowledge Graph + +## Overview + +A **knowledge graph** can model relationships between services, tools, data entities, ODs, capabilities, and personas. This enables intelligent features like: +- Auto-discovery of valid OD compositions +- Validation of OD feasibility +- Suggestion of capabilities based on available tools +- Dependency analysis + +## Graph Structure + +### Node Types + +``` +┌─────────────┐ +│ PERSONA │ Role/actor (e.g., Store Manager) +└─────────────┘ + │ + │ can_perform + ↓ +┌─────────────┐ +│ CAPABILITY │ Business function (e.g., Order Fulfillment) +└─────────────┘ + │ + │ implemented_by + ↓ +┌─────────────┐ +│ OD │ Workflow definition +└─────────────┘ + │ + │ uses + ↓ +┌─────────────┐ +│ TOOL │ API operation (e.g., createOrder) +└─────────────┘ + │ + │ exposed_by + ↓ +┌─────────────┐ +│ SERVICE │ System boundary (e.g., ERP, WMS) +└─────────────┘ + │ + │ manages + ↓ +┌─────────────┐ +│ ENTITY │ Data object (e.g., Order, Product) +└─────────────┘ +``` + +### Node Definitions + +#### 1. Persona Node +```typescript +{ + type: "persona", + id: "store-manager", + name: "Store Manager", + description: "Manages store operations", + department: "retail", + accessLevel: "advanced" +} +``` + +#### 2. Capability Node +```typescript +{ + type: "capability", + id: "order-fulfillment", + name: "Order Fulfillment", + description: "Process customer orders end-to-end", + domain: "fulfillment", + complexity: "medium" +} +``` + +#### 3. OD Node +```typescript +{ + type: "od", + id: "order-fulfillment-standard-v1", + name: "Standard Order Fulfillment", + version: "1.0.0", + complexity: "medium", + estimatedDuration: 5000 // ms +} +``` + +#### 4. Tool Node +```typescript +{ + type: "tool", + id: "wms:inventory:allocate", + name: "Allocate Inventory", + service: "wms", + endpoint: "/inventory/allocate", + inputSchema: { ... }, + outputSchema: { ... } +} +``` + +#### 5. Service Node +```typescript +{ + type: "service", + id: "wms", + name: "Warehouse Management System", + baseUrl: "/:worldId/wms" +} +``` + +#### 6. Entity Node +```typescript +{ + type: "entity", + id: "order", + name: "Order", + collection: "orders", + schema: { ... 
} +} +``` + +### Edge Types + +#### Persona → Capability +```typescript +{ + source: "store-manager", + target: "order-fulfillment", + type: "can_perform", + permission: "execute" // or "read", "write" +} +``` + +#### Capability → OD +```typescript +{ + source: "order-fulfillment", + target: "order-fulfillment-standard-v1", + type: "implemented_by", + variant: "standard" // or "express", "international" +} +``` + +#### OD → Tool +```typescript +{ + source: "order-fulfillment-standard-v1", + target: "wms:inventory:allocate", + type: "uses", + stepIndex: 2, + required: true +} +``` + +#### Tool → Service +```typescript +{ + source: "wms:inventory:allocate", + target: "wms", + type: "exposed_by" +} +``` + +#### Service → Entity +```typescript +{ + source: "wms", + target: "inventory", + type: "manages" +} +``` + +#### Tool → Entity (Data Flow) + +**Produces**: +```typescript +{ + source: "erp:orders:create", + target: "order", + type: "produces" +} +``` + +**Requires**: +```typescript +{ + source: "wms:inventory:allocate", + target: "order", + type: "requires" +} +``` + +**Modifies**: +```typescript +{ + source: "wms:inventory:update-quantity", + target: "inventory", + type: "modifies" +} +``` + +#### Tool → Tool (Sequencing) + +**Prerequisite**: +```typescript +{ + source: "erp:orders:create", + target: "wms:inventory:allocate", + type: "prerequisite", + reason: "Order must exist before allocation" +} +``` + +**Conflicts With**: +```typescript +{ + source: "wms:inventory:allocate", + target: "wms:inventory:deallocate", + type: "conflicts_with", + reason: "Cannot allocate and deallocate simultaneously" +} +``` + +#### Entity → Entity (Data Relationships) + +**Contains**: +```typescript +{ + source: "order", + target: "order-line", + type: "contains", + cardinality: "one-to-many" +} +``` + +**References**: +```typescript +{ + source: "order", + target: "customer", + type: "references", + cardinality: "many-to-one" +} +``` + +## Use Cases + +### 1. OD Discovery: "What ODs can I create?" + +**Scenario**: Given available tools, suggest possible ODs. + +**Query**: +``` +Given tools: [erp:orders:create, wms:inventory:allocate, tms:shipments:create] +Find: Valid OD sequences +``` + +**Graph Traversal**: +1. Start with tools +2. Find entities they produce/require +3. Identify valid tool chains (where outputs match inputs) +4. Suggest OD templates + +**Example Result**: +``` +Suggested OD: "Simple Order Fulfillment" + Steps: + 1. erp:orders:create (produces: Order) + 2. wms:inventory:allocate (requires: Order, produces: Allocation) + 3. tms:shipments:create (requires: Order, Allocation) +``` + +### 2. OD Validation: "Is this OD valid?" + +**Scenario**: Validate that an OD can actually execute. + +**Checks**: +1. **Tool Availability**: Do all referenced tools exist? +2. **Service Dependencies**: Are required services available? +3. **Data Flow**: Does each step have required input data? +4. **Sequencing**: Are there any conflicting operations? + +**Example Validation**: +``` +OD: "order-fulfillment-v1" + Step 1: erp:orders:create ✓ + - Produces: Order + Step 2: wms:inventory:allocate ✓ + - Requires: Order ✓ (produced by step 1) + - Produces: Allocation + Step 3: wms:inventory:deallocate ✗ + - Conflicts with: wms:inventory:allocate (step 2) + +Result: INVALID - Conflicting operations +``` + +### 3. Capability Suggestion: "What capabilities are possible?" + +**Scenario**: Given available services, suggest capabilities. 
+ +**Query**: +``` +Given services: [wms, tms] +Find: Capabilities that only need these services +``` + +**Graph Traversal**: +1. Find all tools from these services +2. Find all ODs that only use these tools +3. Find all capabilities implemented by these ODs + +**Example Result**: +``` +Possible Capabilities: + - Inbound Receiving (WMS only) + - Warehouse Transfer (WMS only) + - Shipment Tracking (TMS only) + - Outbound Shipping (WMS + TMS) + +Not Possible: + - Order Fulfillment (requires ERP) + - EDI Processing (requires EDI service) +``` + +### 4. Dependency Analysis: "What does this OD need?" + +**Scenario**: Understand prerequisites for an OD. + +**Query**: +``` +For OD: "order-fulfillment-standard-v1" +Find: All dependencies +``` + +**Graph Traversal**: +1. Find all tools used by OD +2. Find all services hosting those tools +3. Find all entities required by tools +4. Find all prerequisite data + +**Example Result**: +``` +OD: "order-fulfillment-standard-v1" + +Required Services: + - ERP + - WMS + - TMS + +Required Entities (Input): + - Customer (must pre-exist) + - Product (must pre-exist) + - Inventory (must have stock) + +Produced Entities (Output): + - Order + - Allocation + - Shipment +``` + +### 5. Impact Analysis: "What breaks if I change this?" + +**Scenario**: Understand impact of removing/changing a tool or service. + +**Query**: +``` +If we remove tool: "wms:inventory:allocate" +What is affected? +``` + +**Graph Traversal**: +1. Find all ODs using this tool +2. Find all capabilities implemented by those ODs +3. Find all personas with access to those capabilities + +**Example Result**: +``` +Removing "wms:inventory:allocate" affects: + +ODs (3): + - order-fulfillment-standard-v1 (step 2) + - order-fulfillment-express-v1 (step 2) + - inventory-reservation-v1 (step 1) + +Capabilities (2): + - Order Fulfillment + - Inventory Reservation + +Personas (3): + - Store Manager + - Fulfillment Specialist + - Inventory Manager + +Recommendation: High impact - find alternative or create substitute +``` + +### 6. Path Finding: "How do I get from A to B?" + +**Scenario**: Find tool sequences to transform one entity into another. + +**Query**: +``` +Start: Customer (exists) +Goal: Shipment (with tracking number) +Find: Shortest tool path +``` + +**Graph Traversal**: +1. Start with Customer entity +2. Find tools that require Customer (produces Order) +3. Continue until Shipment is produced +4. Return shortest path + +**Example Result**: +``` +Path: + Customer → [erp:orders:create] → Order + Order → [wms:inventory:allocate] → Allocation + Order + Allocation → [wms:shipments:create] → Shipment + Shipment → [tms:carriers:assign] → Shipment (with carrier) + Shipment → [tms:labels:generate] → Shipment (with tracking) + +Suggested OD: 5 steps, 3 services (ERP, WMS, TMS) +``` + +## Implementation Approaches + +### Option A: Static Analysis (Build Time) + +**How**: Analyze code/config files to build graph. + +**Process**: +1. Parse TypeScript types and service tool definitions +2. Extract input/output schemas from tools +3. Build graph from static information +4. Generate graph file (JSON/GraphML) + +**Pros**: +- No runtime overhead +- Version controlled +- Can be part of CI/CD + +**Cons**: +- Doesn't capture runtime behavior +- Misses dynamic relationships +- Requires manual annotation for data flow + +**Tools**: +- TypeScript compiler API +- JSON schema analysis +- Custom AST traversal + +### Option B: Dynamic Learning (Runtime) + +**How**: Learn relationships from actual OD executions. 
+ +**Process**: +1. Start with basic graph (services, tools) +2. Monitor OD executions +3. Record which tools are called together +4. Infer data flow from step outputs → inputs +5. Update graph weights based on frequency + +**Pros**: +- Discovers actual usage patterns +- Adapts over time +- Captures implicit dependencies + +**Cons**: +- Requires execution history +- Slow to bootstrap +- May learn anti-patterns + +**Data Sources**: +- OD execution logs +- Step input/output traces +- Success/failure rates + +### Option C: Hybrid (Static + Runtime) + +**How**: Start with static analysis, refine with runtime data. + +**Process**: +1. Build initial graph from code (static) +2. Annotate tools with produces/requires/modifies (manual or inferred) +3. Validate and refine during execution (runtime) +4. Update edge weights based on usage + +**Pros**: +- Best of both worlds +- Quick bootstrap +- Improves over time + +**Cons**: +- More complex +- Need to reconcile conflicts +- Schema evolution challenges + +**Recommendation**: Start with static, add runtime as phase 2. + +## Graph Technology Options + +### Option 1: In-Memory Graph (JavaScript) + +**Libraries**: +- `graphlib` (lightweight, simple) +- `cytoscape.js` (visualization support) +- Custom adjacency list + +**Pros**: +- Simple +- Fast for small graphs +- No external dependencies + +**Cons**: +- Not persistent +- Limited query capabilities +- Rebuild on every startup + +### Option 2: Graph Database (Neo4j, ArangoDB) + +**Pros**: +- Purpose-built for graphs +- Powerful query language (Cypher, AQL) +- Scales to large graphs +- Persistent + +**Cons**: +- Additional infrastructure +- Complexity +- Overkill for initial version + +### Option 3: MongoDB (Document + Relationships) + +**Pros**: +- Already using MongoDB +- Can store nodes and edges as documents +- Familiar query language +- Good for hybrid approach + +**Cons**: +- Not optimized for graph traversal +- Complex queries for deep traversals +- Manual relationship management + +**Recommendation**: Start with in-memory (Option 1), migrate to MongoDB (Option 3) when persistence needed. 
+ +## Practical Example: Order Fulfillment Graph + +### Nodes +```javascript +// Persona +{ type: "persona", id: "store-manager", name: "Store Manager" } + +// Capability +{ type: "capability", id: "order-fulfillment", name: "Order Fulfillment" } + +// OD +{ type: "od", id: "order-fulfillment-v1", name: "Standard Order Fulfillment" } + +// Tools +{ type: "tool", id: "erp:orders:create", service: "erp" } +{ type: "tool", id: "wms:inventory:allocate", service: "wms" } +{ type: "tool", id: "tms:shipments:create", service: "tms" } + +// Services +{ type: "service", id: "erp", name: "ERP" } +{ type: "service", id: "wms", name: "WMS" } +{ type: "service", id: "tms", name: "TMS" } + +// Entities +{ type: "entity", id: "order", collection: "orders" } +{ type: "entity", id: "inventory", collection: "inventory" } +{ type: "entity", id: "shipment", collection: "shipments" } +``` + +### Edges +```javascript +// Persona → Capability +{ from: "store-manager", to: "order-fulfillment", type: "can_perform" } + +// Capability → OD +{ from: "order-fulfillment", to: "order-fulfillment-v1", type: "implemented_by" } + +// OD → Tools +{ from: "order-fulfillment-v1", to: "erp:orders:create", type: "uses", step: 1 } +{ from: "order-fulfillment-v1", to: "wms:inventory:allocate", type: "uses", step: 2 } +{ from: "order-fulfillment-v1", to: "tms:shipments:create", type: "uses", step: 3 } + +// Tools → Services +{ from: "erp:orders:create", to: "erp", type: "exposed_by" } +{ from: "wms:inventory:allocate", to: "wms", type: "exposed_by" } +{ from: "tms:shipments:create", to: "tms", type: "exposed_by" } + +// Tools → Entities (Data Flow) +{ from: "erp:orders:create", to: "order", type: "produces" } +{ from: "wms:inventory:allocate", to: "order", type: "requires" } +{ from: "wms:inventory:allocate", to: "inventory", type: "modifies" } +{ from: "tms:shipments:create", to: "order", type: "requires" } +{ from: "tms:shipments:create", to: "shipment", type: "produces" } +``` + +### Queries + +**Query 1: What can a store manager do?** +```javascript +// Traverse: Persona → Capability +getOutgoingEdges("store-manager", "can_perform") +// Result: ["order-fulfillment", "inventory-management", ...] +``` + +**Query 2: How is order fulfillment implemented?** +```javascript +// Traverse: Capability → OD → Tool +getOutgoingEdges("order-fulfillment", "implemented_by") // ODs + .flatMap(od => getOutgoingEdges(od, "uses")) // Tools +// Result: ["erp:orders:create", "wms:inventory:allocate", "tms:shipments:create"] +``` + +**Query 3: What entities does order fulfillment require?** +```javascript +// Get OD's tools, then find entities they require +const tools = getOutgoingEdges("order-fulfillment-v1", "uses") +const requiredEntities = tools + .flatMap(tool => getOutgoingEdges(tool, "requires")) + .filter(node => node.type === "entity") +// Result: ["order", "inventory"] +``` + +## Open Questions + +### 1. Graph Granularity + +**Question**: How detailed should the graph be? + +**Options**: +- **Coarse**: Just services, capabilities, personas +- **Medium**: + tools, entities +- **Fine**: + tool parameters, entity fields, step-level details + +**Trade-off**: Detail vs maintainability + +### 2. Data Flow Inference + +**Question**: How do we know which entities a tool produces/requires? 
+ +**Options**: +- **Manual Annotation**: Developers specify in code comments or metadata +- **Schema Analysis**: Infer from input/output TypeScript types +- **Runtime Learning**: Monitor actual executions + +**Recommendation Needed**: Feasibility of each approach? + +### 3. Graph Updates + +**Question**: When does the graph get updated? + +**Options**: +- **Build Time**: Regenerated on every deployment +- **Startup**: Built when service starts +- **Runtime**: Updated as ODs execute + +**Recommendation Needed**: What's the update frequency requirement? + +### 4. Query Performance + +**Question**: Do we need to optimize for specific query patterns? + +**Common Queries**: +- "What can persona X do?" (1-hop traversal) +- "What ODs use tool Y?" (reverse lookup) +- "Find path from A to B" (shortest path, can be expensive) + +**Recommendation Needed**: Which queries are most critical? + +## Next Steps + +1. **Choose implementation approach** (static/dynamic/hybrid) +2. **Select graph technology** (in-memory/Neo4j/MongoDB) +3. **Define annotation format** for manual metadata +4. **Build proof-of-concept** for one use case (e.g., OD validation) +5. **Evaluate query performance** on realistic graph size + +## Related Documents + +- [02. Conceptual Model](./02-conceptual-model.md) - Defines nodes (Persona, Capability, OD) +- [04. Taxonomy & Organization](./04-taxonomy-organization.md) - How to categorize nodes +- [06. Open Questions](./06-open-questions.md) - Unresolved decisions diff --git a/docs/od-architecture/04-taxonomy-organization.md b/docs/od-architecture/04-taxonomy-organization.md new file mode 100644 index 0000000000000000000000000000000000000000..9205c1c4e0fede1c34650b61283825b1bebe8135 --- /dev/null +++ b/docs/od-architecture/04-taxonomy-organization.md @@ -0,0 +1,619 @@ +# 04. Taxonomy & Organization + +## Overview + +This document proposes taxonomies for organizing capabilities and ODs so that researchers can easily browse, filter, and discover what they need. + +## Taxonomy Dimensions + +### 1. Domain-Based Organization + +Organize by **business domain** or functional area. + +#### Proposed Domains + +``` +Supply Chain Management +├── Procurement +│ ├── Supplier Management +│ ├── Purchase Order Creation +│ ├── RFQ Processing +│ └── Vendor Evaluation +│ +├── Inventory Management +│ ├── Stock Control +│ ├── Cycle Counting +│ ├── Replenishment +│ └── Allocation +│ +├── Fulfillment +│ ├── Order Processing +│ ├── Picking & Packing +│ ├── Wave Management +│ └── Returns Processing +│ +├── Warehousing +│ ├── Inbound Receiving +│ ├── Putaway +│ ├── Transfers +│ └── Labor Management +│ +├── Transportation +│ ├── Shipment Planning +│ ├── Carrier Management +│ ├── Route Optimization +│ └── Delivery Tracking +│ +├── Data Exchange +│ ├── EDI Document Processing +│ ├── API Integration +│ ├── File Import/Export +│ └── Format Transformation +│ +└── Analytics & Reporting + ├── KPI Dashboards + ├── Exception Monitoring + ├── Audit Trails + └── Performance Analysis +``` + +**Pros**: +- Aligned with business language +- Easy for domain experts to navigate +- Clear ownership + +**Cons**: +- Cross-domain capabilities hard to categorize +- May not match researcher mental models +- Requires domain knowledge + +#### Example Mapping +``` +Domain: Fulfillment + Capabilities: + - Order Fulfillment + - Express Order Fulfillment + - Drop Ship Fulfillment + - Pick & Pack + - Order Cancellation +``` + +### 2. Persona-Based Organization + +Organize by **who** performs the capability. 
+ +#### Proposed Personas + +``` +Management Roles +├── Store Manager +│ └── Capabilities: Order oversight, inventory review, staff coordination +├── Warehouse Manager +│ └── Capabilities: Resource planning, performance monitoring +├── Logistics Manager +│ └── Capabilities: Route planning, carrier negotiation +└── Operations Director + └── Capabilities: Multi-site coordination, strategic planning + +Operational Roles +├── Warehouse Worker +│ └── Capabilities: Receiving, picking, packing, putaway +├── Inventory Specialist +│ └── Capabilities: Cycle counting, adjustments, audits +├── Shipping Clerk +│ └── Capabilities: Label generation, manifest creation +└── Receiving Clerk + └── Capabilities: Appointment scheduling, unloading, inspection + +Analytical Roles +├── Demand Planner +│ └── Capabilities: Forecast analysis, replenishment planning +├── Business Analyst +│ └── Capabilities: Report generation, exception analysis +└── Data Engineer + └── Capabilities: Data integration, pipeline management + +System Roles +├── EDI Operator +│ └── Capabilities: EDI mapping, document transmission +├── Integration Specialist +│ └── Capabilities: API configuration, webhook setup +└── Automation Bot + └── Capabilities: Scheduled workflows, event-driven processes +``` + +**Pros**: +- Intuitive for role-based access +- Clear permission model +- Enables persona-specific worlds + +**Cons**: +- Capabilities may span multiple personas +- Persona definitions may vary by organization +- Maintenance overhead + +#### Example Mapping +``` +Persona: Store Manager + Capabilities: + - Order Fulfillment + - Inventory Management + - Staff Scheduling + - Exception Handling + + Cannot Access: + - Warehouse Physical Operations (worker-level tasks) + - System Configuration (admin tasks) +``` + +### 3. Complexity-Based Organization + +Organize by **complexity** level. + +#### Complexity Metrics + +**Quantitative Factors**: +- Number of steps (1-3: simple, 4-7: medium, 8+: complex) +- Number of services involved (1: simple, 2-3: medium, 4+: complex) +- Number of decision points (conditionals, branches) +- Average execution time +- Error rate / retry frequency + +**Qualitative Factors**: +- Requires domain expertise? +- Has edge cases? +- Needs manual intervention? +- High business impact? 
+ +#### Complexity Tiers + +``` +SIMPLE (Tier 1) +├── Definition: 1-3 steps, single service, no branching +├── Examples: +│ ├── Check Inventory Level +│ ├── Create Purchase Order +│ ├── Update Product Price +│ └── Generate Report +└── Use Case: Learning, testing, debugging + +MEDIUM (Tier 2) +├── Definition: 4-7 steps, 2-3 services, some branching +├── Examples: +│ ├── Process Customer Order +│ ├── Receive Shipment +│ ├── Allocate Inventory +│ └── Generate EDI 850 +└── Use Case: Standard operations, automation + +COMPLEX (Tier 3) +├── Definition: 8+ steps, multi-service, extensive branching +├── Examples: +│ ├── End-to-End Order Fulfillment +│ ├── Cross-Dock Transfer +│ ├── Returns Processing with Restocking +│ └── Multi-Leg Shipment Orchestration +└── Use Case: Advanced scenarios, research experiments +``` + +**Pros**: +- Easy to assess difficulty +- Good for progressive learning +- Helps with sampling strategies + +**Cons**: +- Subjective boundaries +- May not reflect actual difficulty +- Changes as system evolves + +#### Example Mapping +``` +Capability: Order Fulfillment + +Simple Variant: + - Steps: 3 (Create order, allocate inventory, create shipment) + - Services: 2 (ERP, WMS) + - Estimated Duration: 2 seconds + +Medium Variant: + - Steps: 7 (Add validation, picking, packing, carrier assignment) + - Services: 3 (ERP, WMS, TMS) + - Estimated Duration: 5 seconds + +Complex Variant: + - Steps: 12 (Add fraud check, inventory reservation, multi-location, split shipments) + - Services: 4 (ERP, WMS, TMS, External Payment Gateway) + - Estimated Duration: 10 seconds +``` + +### 4. Service-Based Organization (Current) + +Organize by **which service(s)** are involved. + +``` +Single Service +├── ERP-Only +│ ├── Customer Management +│ ├── Product Catalog +│ └── Order Entry +├── WMS-Only +│ ├── Cycle Counting +│ ├── Putaway +│ └── Inventory Adjustment +└── TMS-Only + ├── Carrier Rate Lookup + ├── Shipment Tracking + └── Route Planning + +Multi-Service +├── ERP + WMS +│ ├── Order Fulfillment (partial) +│ └── Inventory Synchronization +├── WMS + TMS +│ ├── Outbound Shipping +│ └── Inbound Receiving +├── ERP + EDI +│ ├── EDI 850 Processing +│ └── Invoice Generation +└── ERP + WMS + TMS + ├── End-to-End Order Fulfillment + └── Drop Ship Workflow +``` + +**Pros**: +- Matches current architecture +- Clear technical dependencies +- Easy to implement + +**Cons**: +- Not user-friendly +- Technical rather than semantic +- Doesn't help discovery + +### 5. Workflow Pattern Organization + +Organize by **common workflow patterns**. 
+ +``` +Sequential Workflows +├── Linear Pipeline (A → B → C) +├── Example: Inbound Receiving (Appointment → Unload → Inspect → Putaway) + +Parallel Workflows +├── Fork-Join (A → [B, C, D] → E) +├── Example: Multi-Location Picking (Split order → Pick at each DC → Consolidate) + +Conditional Workflows +├── If-Then-Else (A → Decision → B or C) +├── Example: Order Routing (Check inventory → Ship from DC or Store) + +Event-Driven Workflows +├── Trigger-Action (Event → OD) +├── Example: Low Stock Alert → Auto-Replenishment + +Iterative Workflows +├── Loop Until Condition (Repeat A until B) +├── Example: Cycle Count (Check location → Adjust → Next location) + +Compensating Workflows +├── Try-Catch-Rollback (A → B fails → Undo A) +├── Example: Order Cancellation (Release inventory, refund payment, notify customer) +``` + +**Pros**: +- Educational for learning workflow patterns +- Useful for OD design +- Technical but accessible + +**Cons**: +- Multiple patterns may apply +- Orthogonal to business meaning +- Complex to categorize + +## Multi-Dimensional Tagging + +Rather than forcing a single taxonomy, use **tags** to support multiple views. + +### Tag Schema + +```typescript +{ + // Core Tags + domain: string[], // ["fulfillment", "inventory"] + persona: string[], // ["store-manager", "warehouse-worker"] + complexity: "simple" | "medium" | "complex", + + // Service Tags + services: string[], // ["erp", "wms", "tms"] + serviceCount: number, // 3 + + // Pattern Tags + pattern: string[], // ["sequential", "conditional"] + + // Functional Tags + category: string[], // ["order-processing", "shipping"] + + // Technical Tags + stepCount: number, // 7 + estimatedDuration: number, // 5000 (ms) + hasExternalDeps: boolean, // false + + // Business Tags + businessImpact: "low" | "medium" | "high", + frequency: "rare" | "occasional" | "frequent", + + // Meta Tags + version: string, // "1.0.0" + author: string, // "system" | researcher name + status: "draft" | "stable" | "deprecated" +} +``` + +### Tag-Based Filtering + +Researchers can filter by any combination: + +**Example Queries**: +``` +# Simple warehouse operations +domain: "warehousing" +complexity: "simple" +services: ["wms"] + +# Store manager capabilities (medium complexity) +persona: "store-manager" +complexity: ["medium", "complex"] + +# High-frequency fulfillment workflows +domain: "fulfillment" +frequency: "frequent" +businessImpact: ["medium", "high"] + +# Cross-service workflows +serviceCount: >= 2 +pattern: "sequential" +``` + +## Browsing & Discovery UI Concepts + +### Concept 1: Hierarchical Tree View + +``` +📁 Supply Chain Management + 📁 Fulfillment + 📄 Order Fulfillment (medium, store-manager) + 📄 Express Fulfillment (simple, store-manager) + 📄 Drop Ship (complex, logistics-manager) + 📁 Inventory Management + 📄 Cycle Count (simple, inventory-specialist) + 📄 Replenishment (medium, warehouse-manager) +``` + +### Concept 2: Persona-Centric View + +``` +👤 Store Manager + 📋 My Capabilities (12) + ✓ Order Fulfillment + ✓ Inventory Management + ✓ Exception Handling + 📊 By Complexity + Simple: 4 capabilities + Medium: 6 capabilities + Complex: 2 capabilities +``` + +### Concept 3: Tag Cloud / Faceted Search + +``` +🏷️ Tags: + Domain: [Fulfillment (8)] [Inventory (12)] [Transportation (6)] + Complexity: [Simple (15)] [Medium (20)] [Complex (8)] + Persona: [Store Manager (10)] [Warehouse Worker (18)] + +🔍 Search: "order" + Results (3): + - Order Fulfillment (fulfillment, medium, store-manager) + - Order Cancellation (fulfillment, simple, 
store-manager) + - Purchase Order Creation (procurement, simple, purchasing-agent) +``` + +### Concept 4: Capability Matrix + +``` + │ Simple │ Medium │ Complex │ +───────────────┼────────┼────────┼─────────┤ +Fulfillment │ 4 │ 8 │ 3 │ +Inventory │ 6 │ 5 │ 1 │ +Transportation │ 3 │ 4 │ 2 │ +Warehousing │ 5 │ 6 │ 4 │ +``` + +## Practical Examples + +### Example 1: Order Fulfillment Taxonomy + +```yaml +capability: + id: order-fulfillment + name: Order Fulfillment + + tags: + domain: [fulfillment, order-processing] + persona: [store-manager, fulfillment-specialist] + complexity: medium + services: [erp, wms, tms] + serviceCount: 3 + pattern: [sequential, conditional] + category: [order-processing, shipping] + businessImpact: high + frequency: frequent + + variants: + - id: order-fulfillment-standard + complexity: medium + stepCount: 7 + estimatedDuration: 5000 + + - id: order-fulfillment-express + complexity: simple + stepCount: 4 + estimatedDuration: 3000 + tags: [expedited] + + - id: order-fulfillment-international + complexity: complex + stepCount: 12 + estimatedDuration: 10000 + tags: [customs, international] +``` + +### Example 2: Warehouse Operations Taxonomy + +```yaml +domain: + id: warehousing + name: Warehouse Operations + + capabilities: + - name: Inbound Receiving + complexity: medium + personas: [receiving-clerk, warehouse-worker] + + - name: Putaway + complexity: simple + personas: [warehouse-worker] + + - name: Cycle Counting + complexity: simple + personas: [inventory-specialist] + + - name: Wave Picking + complexity: medium + personas: [warehouse-worker, picking-specialist] + + - name: Cross-Dock Transfer + complexity: complex + personas: [warehouse-manager] +``` + +## Recommended Approach + +### Phase 1: Multi-Dimensional Tagging (Immediate) + +Implement comprehensive tagging on all capabilities and ODs: +- Domain, persona, complexity (required) +- Service, pattern, category (optional) +- Business metadata (impact, frequency) + +**Benefits**: +- Maximum flexibility +- Supports all browsing patterns +- Easy to extend + +### Phase 2: Default Views (Short-term) + +Create 3 primary views: +1. **Domain View** (default for business users) +2. **Persona View** (for role-based access) +3. **Complexity View** (for learning/sampling) + +**Benefits**: +- Guided discovery +- Reduces cognitive load +- Meets different user needs + +### Phase 3: Smart Search (Medium-term) + +Add search with: +- Full-text search on names/descriptions +- Tag-based filtering +- Similarity search ("find capabilities like this one") + +**Benefits**: +- Powerful for expert users +- Handles edge cases +- Scales to large catalogs + +### Phase 4: Personalized Recommendations (Long-term) + +Use knowledge graph + usage data to suggest: +- "Researchers working on X also used Y" +- "Based on your world config, you might need Z" +- "This capability requires these prerequisites" + +**Benefits**: +- Contextual +- Reduces trial-and-error +- Learns from community + +## Open Questions + +### 1. Primary Organization + +**Question**: What should be the default/primary taxonomy? + +**Options**: +- Domain-based (business-oriented) +- Persona-based (role-oriented) +- Complexity-based (learning-oriented) +- Multi-dimensional tags (no primary) + +**Recommendation Needed**: What's most intuitive for AI researchers? + +### 2. Tag Vocabulary + +**Question**: Should tag values be freeform or controlled? 
+ +**Options**: +- **Freeform**: Authors can add any tags +- **Controlled**: Pre-defined tag vocabulary +- **Hybrid**: Core tags controlled, custom tags allowed + +**Trade-offs**: +- Freeform: Flexible but inconsistent +- Controlled: Consistent but rigid + +**Recommendation Needed**: How important is consistency? + +### 3. Maintenance Strategy + +**Question**: Who maintains the taxonomy? + +**Options**: +- **System**: Auto-generated from code +- **Manual**: Curated by team +- **Community**: Researchers contribute tags +- **Hybrid**: System baseline + manual refinement + +**Recommendation Needed**: What's sustainable long-term? + +### 4. Granularity + +**Question**: How detailed should categories be? + +**Example**: +- Coarse: "Fulfillment" (20 capabilities) +- Medium: "Order Processing", "Picking & Packing", "Returns" (5-8 each) +- Fine: "Standard Picking", "Batch Picking", "Zone Picking" (1-3 each) + +**Trade-offs**: +- Coarse: Simple but less precise +- Fine: Precise but overwhelming + +**Recommendation Needed**: What level of detail is useful? + +## Next Steps + +1. **Choose primary taxonomy** (domain/persona/complexity/tags) +2. **Define tag schema** and controlled vocabulary +3. **Tag existing ODs** in codebase +4. **Build browsing UI** (or API endpoints for CLI) +5. **Test with users** and iterate + +## Related Documents + +- [02. Conceptual Model](./02-conceptual-model.md) - Capability and persona definitions +- [03. Knowledge Graph](./03-knowledge-graph.md) - Relationship modeling +- [05. Sampling & World Config](./05-sampling-world-config.md) - Using taxonomy for filtering +- [06. Open Questions](./06-open-questions.md) - Unresolved decisions diff --git a/docs/od-architecture/05-sampling-world-config.md b/docs/od-architecture/05-sampling-world-config.md new file mode 100644 index 0000000000000000000000000000000000000000..ab7c04fc71cf3d313e28a7149352a4e1d29f9ae1 --- /dev/null +++ b/docs/od-architecture/05-sampling-world-config.md @@ -0,0 +1,754 @@ +# 05. Sampling & World Configuration + +## Overview + +Researchers need the ability to configure "worlds" with specific subsets of capabilities, rather than always having all 100+ capabilities available. This document explores sampling strategies and world configuration patterns. + +## Problem Statement + +**Current State**: +- Every world has all services and tools available +- All ODs are accessible to all users +- No way to create focused, specialized environments + +**User Needs**: +- "Give me only warehouse operations" (domain filtering) +- "Sample 10 random capabilities for testing" (random sampling) +- "Progressive complexity: start simple, add complexity" (staged learning) +- "Retail-only world for store management research" (persona filtering) + +## Use Cases + +### Use Case 1: Domain-Specific Research + +**Scenario**: Researcher studying warehouse automation + +**Need**: World with only warehouse-related capabilities + +**Configuration**: +```yaml +world: + name: "Warehouse Automation Study" + filter: + domains: [warehousing, inventory] + services: [wms] + +result: + capabilities: 25 (out of 100) + - Inbound Receiving + - Putaway + - Cycle Counting + - Picking & Packing + - Replenishment + - Warehouse Transfers + - Labor Management + ... 
+ + excluded: + - Order Fulfillment (requires ERP) + - Shipment Tracking (requires TMS) + - EDI Processing (requires EDI) +``` + +### Use Case 2: Persona-Based World + +**Scenario**: Training AI agent as "Store Manager" + +**Need**: World with only store manager capabilities + +**Configuration**: +```yaml +world: + name: "Store Manager Training" + filter: + personas: [store-manager] + +result: + capabilities: 15 + - Order Fulfillment + - Inventory Management + - Exception Handling + - Staff Coordination + - Customer Service Escalation + ... + + excluded: + - Warehouse Physical Tasks (worker-level) + - System Administration (admin-level) + - Data Engineering (technical roles) +``` + +### Use Case 3: Progressive Complexity + +**Scenario**: Learning path from simple to complex + +**Need**: Start with simple capabilities, gradually add complexity + +**Configuration**: +```yaml +world: + name: "Progressive Learning Path" + stages: + - stage: 1 + complexity: [simple] + count: 10 + + - stage: 2 + complexity: [simple, medium] + count: 20 + + - stage: 3 + complexity: [simple, medium, complex] + count: all + +result: + stage1: 10 simple capabilities + stage2: + 10 medium capabilities + stage3: + all remaining capabilities +``` + +### Use Case 4: Random Sampling for Generalization + +**Scenario**: Testing AI agent on diverse, random tasks + +**Need**: Randomly sample N capabilities + +**Configuration**: +```yaml +world: + name: "Random Capability Test" + sampling: + strategy: random + count: 20 + seed: "reproducible-123" + +result: + capabilities: 20 randomly selected + - Mix of domains, complexities, services + - Reproducible with same seed +``` + +### Use Case 5: Weighted Sampling by Frequency + +**Scenario**: Realistic distribution of common vs rare operations + +**Need**: Sample based on real-world frequency + +**Configuration**: +```yaml +world: + name: "Realistic Operations Mix" + sampling: + strategy: weighted + weights: + frequency: 0.7 # 70% weight on frequency + complexity: 0.2 # 20% weight on complexity + businessImpact: 0.1 # 10% weight on impact + count: 30 + +result: + capabilities: 30 selected + - 60% frequent operations (order processing, inventory checks) + - 30% occasional operations (cycle counts, transfers) + - 10% rare operations (exceptions, reversals) +``` + +### Use Case 6: Capability Prerequisites + +**Scenario**: Ensure dependent capabilities are included + +**Need**: Auto-include prerequisites when selecting capabilities + +**Configuration**: +```yaml +world: + name: "Fulfillment with Dependencies" + capabilities: + - order-fulfillment # explicitly selected + + autoIncludeDependencies: true + +result: + included: + - order-fulfillment (explicit) + - inventory-check (prerequisite) + - create-shipment (prerequisite) + - carrier-assignment (prerequisite) +``` + +## Sampling Strategies + +### 1. 
Filter-Based Selection + +**Method**: Boolean filtering on tags + +**Criteria**: +- Domain(s) +- Persona(s) +- Complexity level(s) +- Service(s) +- Pattern(s) +- Business impact +- Frequency + +**Algorithm**: +```typescript +function filterCapabilities( + allCapabilities: Capability[], + filters: Filters +): Capability[] { + return allCapabilities.filter(cap => { + if (filters.domains && !filters.domains.includes(cap.domain)) return false; + if (filters.personas && !cap.personas.some(p => filters.personas.includes(p))) return false; + if (filters.complexity && cap.complexity !== filters.complexity) return false; + if (filters.services && !cap.services.every(s => filters.services.includes(s))) return false; + // ... more filters + return true; + }); +} +``` + +**Pros**: +- Deterministic +- Intuitive +- Easy to explain + +**Cons**: +- May return too many or too few results +- No control over count + +### 2. Random Sampling + +**Method**: Randomly select N capabilities + +**Variants**: +- **Uniform Random**: All capabilities equally likely +- **Stratified Random**: Sample from each category proportionally + +**Algorithm**: +```typescript +function randomSample( + capabilities: Capability[], + count: number, + seed?: string +): Capability[] { + const rng = seed ? seededRandom(seed) : Math.random; + const shuffled = shuffle(capabilities, rng); + return shuffled.slice(0, count); +} + +function stratifiedSample( + capabilities: Capability[], + count: number, + stratifyBy: 'domain' | 'complexity' | 'persona' +): Capability[] { + const groups = groupBy(capabilities, stratifyBy); + const perGroup = Math.ceil(count / groups.length); + return groups.flatMap(group => randomSample(group, perGroup)).slice(0, count); +} +``` + +**Pros**: +- Good for generalization testing +- Reproducible with seed +- Unbiased + +**Cons**: +- May not match researcher intent +- May include unrelated capabilities +- No semantic coherence + +### 3. Weighted Sampling + +**Method**: Sample based on attribute weights + +**Weights**: +- **Frequency**: How often used in real world +- **Business Impact**: High-impact operations more likely +- **Complexity**: Prefer simpler or more complex +- **Recency**: Recently added capabilities +- **Popularity**: Most-used by other researchers + +**Algorithm**: +```typescript +function weightedSample( + capabilities: Capability[], + count: number, + weights: WeightConfig +): Capability[] { + // Calculate composite score for each capability + const scored = capabilities.map(cap => ({ + capability: cap, + score: + cap.frequency * weights.frequency + + cap.businessImpact * weights.businessImpact + + (1 / cap.complexity) * weights.simplicity + })); + + // Sort by score and take top N + return scored + .sort((a, b) => b.score - a.score) + .slice(0, count) + .map(s => s.capability); +} +``` + +**Pros**: +- Realistic distributions +- Tunable via weights +- Can match real-world scenarios + +**Cons**: +- Requires metadata (frequency, impact) +- More complex to configure +- Less predictable + +### 4. 
Hierarchical Sampling + +**Method**: Sample from capability hierarchy + +**Approach**: +- Start with high-level domains +- Drill down to sub-capabilities +- Ensure coverage across hierarchy + +**Algorithm**: +```typescript +function hierarchicalSample( + capabilityTree: CapabilityTree, + countPerLevel: number[] +): Capability[] { + const selected: Capability[] = []; + + // Level 0: Domains + const domains = randomSample(capabilityTree.domains, countPerLevel[0]); + + domains.forEach(domain => { + // Level 1: Categories within domain + const categories = randomSample(domain.categories, countPerLevel[1]); + + categories.forEach(category => { + // Level 2: Capabilities within category + selected.push(...randomSample(category.capabilities, countPerLevel[2])); + }); + }); + + return selected; +} +``` + +**Pros**: +- Ensures diversity +- Covers different areas +- Good for broad testing + +**Cons**: +- Requires hierarchical structure +- May not match real workflows +- Complex configuration + +### 5. Graph-Based Sampling + +**Method**: Use knowledge graph to ensure coherence + +**Approach**: +- Select seed capability +- Include connected capabilities (prerequisites, dependents) +- Expand by graph distance + +**Algorithm**: +```typescript +function graphSample( + graph: KnowledgeGraph, + seedCapability: string, + maxDistance: number, + maxCount: number +): Capability[] { + const visited = new Set(); + const queue: [string, number][] = [[seedCapability, 0]]; + const selected: Capability[] = []; + + while (queue.length > 0 && selected.length < maxCount) { + const [capId, distance] = queue.shift()!; + + if (visited.has(capId) || distance > maxDistance) continue; + visited.add(capId); + + const capability = graph.getNode(capId); + selected.push(capability); + + // Add neighbors + const neighbors = graph.getNeighbors(capId, ['prerequisite', 'related_to']); + neighbors.forEach(neighbor => { + queue.push([neighbor, distance + 1]); + }); + } + + return selected; +} +``` + +**Pros**: +- Semantically coherent +- Includes dependencies +- Useful for focused research + +**Cons**: +- Requires knowledge graph +- May create echo chambers +- Complex to reason about + +## World Configuration Schema + +### Proposed Configuration Format + +```yaml +world: + # Metadata + id: string + name: string + description: string + author: string + tags: string[] + + # Capability Selection + capabilities: + # Option 1: Explicit list + explicit: + - order-fulfillment + - inventory-management + - cycle-counting + + # Option 2: Filter-based + filters: + domains: [fulfillment, warehousing] + personas: [store-manager] + complexity: [simple, medium] + services: [erp, wms] + + # Option 3: Sampling + sampling: + strategy: random | weighted | stratified | hierarchical | graph + count: 20 + seed: "reproducible-123" + weights: + frequency: 0.5 + complexity: 0.3 + businessImpact: 0.2 + + # Dependency handling + autoIncludeDependencies: true + autoExcludeBlocked: true + + # Chaos Configuration (per-world) + chaos: + enabled: true + globalProbability: 0.1 + presets: [light-chaos] + overrides: + # Per-capability overrides + order-fulfillment: + probability: 0.3 + + # Resource Limits + limits: + maxConcurrentODs: 10 + maxStepsPerOD: 50 + timeoutMs: 30000 + + # Data Seeding + seed: + companies: 10 + products: 100 + initialOrders: 50 + customSeed: "data-seed-123" +``` + +### Example Configurations + +#### Example 1: Warehouse-Only World +```yaml +world: + name: "Warehouse Automation Research" + capabilities: + filters: + domains: [warehousing, 
inventory] + services: [wms] + autoIncludeDependencies: false + + chaos: + enabled: true + globalProbability: 0.2 + + seed: + companies: 5 + products: 50 +``` + +#### Example 2: Progressive Learning World +```yaml +world: + name: "AI Agent Training - Progressive" + capabilities: + filters: + complexity: [simple] # Start with simple only + personas: [warehouse-worker] + # Later stages can be added dynamically + + chaos: + enabled: false # No chaos during learning + + seed: + companies: 3 + products: 20 +``` + +#### Example 3: Realistic Mix World +```yaml +world: + name: "Realistic Operations Simulation" + capabilities: + sampling: + strategy: weighted + count: 30 + weights: + frequency: 0.7 + businessImpact: 0.2 + complexity: 0.1 + + chaos: + enabled: true + globalProbability: 0.05 # Light chaos + presets: [realistic-failures] + + seed: + companies: 20 + products: 200 + initialOrders: 100 +``` + +#### Example 4: Domain Exploration World +```yaml +world: + name: "Cross-Domain Integration Test" + capabilities: + sampling: + strategy: stratified + count: 25 + stratifyBy: domain # Equal representation from each domain + + chaos: + enabled: true + globalProbability: 0.15 + + seed: + companies: 10 + products: 100 +``` + +## Implementation Considerations + +### 1. Capability Registry + +Need a central registry that supports: +- Querying by tags/filters +- Counting capabilities matching criteria +- Sampling with various strategies +- Dependency resolution + +**API Example**: +```typescript +interface CapabilityRegistry { + // Query + find(filters: Filters): Capability[]; + count(filters: Filters): number; + + // Sampling + sample(strategy: SamplingStrategy, config: SamplingConfig): Capability[]; + + // Dependencies + resolveDependencies(capabilities: Capability[]): Capability[]; + validateDependencies(capabilities: Capability[]): ValidationResult; +} +``` + +### 2. World Lifecycle + +**Creation**: +``` +1. Parse world configuration +2. Resolve capability selection (filters/sampling) +3. Resolve dependencies +4. Validate configuration +5. Create world in database +6. Seed initial data +7. Return world ID +``` + +**Updates**: +``` +1. Add/remove capabilities dynamically +2. Update chaos configuration +3. Adjust limits +4. Cannot change after ODs have executed (immutability) +``` + +**Deletion**: +``` +1. Archive logs and results +2. Delete world data +3. Update registry +``` + +### 3. Configuration Validation + +**Checks**: +- At least 1 capability selected +- All referenced capabilities exist +- Dependencies are satisfiable +- No circular dependencies +- Services required by capabilities are available +- Sampling count ≤ total available capabilities + +**Validation API**: +```typescript +interface ValidationResult { + valid: boolean; + errors: string[]; + warnings: string[]; + resolvedCapabilities: Capability[]; + dependencyGraph: DependencyGraph; +} +``` + +### 4. 
Presets & Templates + +Provide pre-configured world templates: + +```yaml +presets: + - id: warehouse-basic + name: "Basic Warehouse Operations" + capabilities: + filters: + domains: [warehousing] + complexity: [simple] + + - id: full-supply-chain + name: "End-to-End Supply Chain" + capabilities: + filters: + domains: [procurement, inventory, fulfillment, transportation] + + - id: ai-training-starter + name: "AI Agent Training Starter Pack" + capabilities: + explicit: + - order-fulfillment-simple + - inventory-check + - shipment-tracking +``` + +## API Endpoints + +### World Configuration API + +``` +POST /api/worlds # Create world with config +GET /api/worlds/:worldId # Get world details +PUT /api/worlds/:worldId # Update world config +DELETE /api/worlds/:worldId # Delete world + +GET /api/worlds/:worldId/capabilities # List capabilities in this world +POST /api/worlds/:worldId/capabilities # Add capability to world +DELETE /api/worlds/:worldId/capabilities/:capId # Remove capability + +POST /api/worlds/:worldId/sample # Resample capabilities +POST /api/worlds/:worldId/validate # Validate configuration +``` + +### Sampling API + +``` +POST /api/capabilities/sample # Sample capabilities (without creating world) +POST /api/capabilities/filter # Filter capabilities +GET /api/capabilities/count # Count capabilities matching criteria +``` + +### Preset API + +``` +GET /api/world-presets # List presets +GET /api/world-presets/:presetId # Get preset config +POST /api/world-presets/:presetId/instantiate # Create world from preset +``` + +## Open Questions + +### 1. Static vs Dynamic Configuration + +**Question**: Can world capabilities change after creation? + +**Options**: +- **Static**: Configuration locked at creation (immutable) +- **Dynamic**: Capabilities can be added/removed during experiments +- **Staged**: Pre-defined stages that unlock over time + +**Trade-offs**: +- Static: Reproducible, simple, but inflexible +- Dynamic: Flexible, but hard to reproduce +- Staged: Good for learning paths, but complex + +**Recommendation Needed**: What's the priority—reproducibility or flexibility? + +### 2. Default Behavior + +**Question**: If no capabilities specified, what happens? + +**Options**: +- **All**: Include all capabilities (current behavior) +- **Error**: Require explicit configuration +- **Smart Default**: Sample 20 common capabilities + +**Recommendation Needed**: What's the safest default? + +### 3. Dependency Auto-Resolution + +**Question**: Should dependencies be automatically included? + +**Scenario**: Researcher selects "Order Fulfillment" but not "Inventory Check" (prerequisite) + +**Options**: +- **Auto-Include**: Silently add dependencies (convenient but surprising) +- **Warn**: Show warning but allow (flexible but risky) +- **Error**: Reject invalid configuration (strict but safe) + +**Recommendation Needed**: What behavior is least surprising? + +### 4. Sampling Determinism + +**Question**: Should sampling be deterministic? + +**Options**: +- **Always Seeded**: Require seed for reproducibility +- **Optional Seed**: Allow non-deterministic sampling +- **Hybrid**: Default seed + override option + +**Recommendation Needed**: How important is experiment reproducibility? + +## Next Steps + +1. **Choose default sampling strategy** for most common use cases +2. **Define world configuration schema** formally (JSON Schema/Zod) +3. **Build capability registry** with filtering and sampling +4. **Implement dependency resolution** using knowledge graph +5. 
**Create 5-10 world presets** for common scenarios +6. **Build world configuration API** endpoints +7. **Test with realistic scenarios** and iterate + +## Related Documents + +- [02. Conceptual Model](./02-conceptual-model.md) - Capability definitions +- [03. Knowledge Graph](./03-knowledge-graph.md) - Dependency modeling +- [04. Taxonomy & Organization](./04-taxonomy-organization.md) - Filtering strategies +- [06. Open Questions](./06-open-questions.md) - Unresolved decisions diff --git a/docs/od-architecture/06-open-questions.md b/docs/od-architecture/06-open-questions.md new file mode 100644 index 0000000000000000000000000000000000000000..594426493a8c0662df0e02bae151f85f07928d65 --- /dev/null +++ b/docs/od-architecture/06-open-questions.md @@ -0,0 +1,539 @@ +# 06. Open Questions & Decisions + +## Overview + +This document consolidates all design questions from the OD architecture discussion, tracking both decisions made and questions still pending. + +**Last Updated**: 2025-11-14 + +## Decision Summary + +| ID | Question | Status | Decision | +|----|----------|--------|----------| +| **Priority 1: Critical** | +| Q1.1 | Capability Definition | ✅ **DECIDED** | Semantic Grouping / Domain Process (~50 capabilities) | +| Q1.2 | Capability ↔ OD Relationship | ✅ **DECIDED** | 1:N Variants | +| Q1.3 | Knowledge Graph Approach | ✅ **DECIDED** | Hybrid (Manual → Static → Runtime, phased) | +| Q1.4 | Primary Taxonomy | ✅ **DECIDED** | Multi-dimensional tags, domain-based default | +| **Priority 2: Important** | +| Q2.1 | Persona Granularity | ✅ **DECIDED** | Detailed (20-50 personas) | +| Q2.2 | Persona-Capability Config | ✅ **DECIDED** | Hybrid (defaults + world overrides) | +| Q2.3 | Hierarchy vs Flat | ✅ **DECIDED** | Flat with tags (add hierarchy later if needed) | +| Q2.4 | Data Flow Inference | ✅ **DECIDED** | Manual annotations + runtime validation | +| Q2.5 | Graph Technology | ✅ **DECIDED** | In-memory (graphlib) + MongoDB persistence | +| **Priority 3: Nice-to-Have** | +| Q3.1 | Tag Vocabulary | 💡 RECOMMENDED | Hybrid (core controlled + custom allowed) | +| Q3.2 | Taxonomy Maintenance | 💡 RECOMMENDED | Hybrid (system baseline + manual curation) | +| Q3.3 | Graph Update Frequency | 💡 RECOMMENDED | Startup + manual refresh | +| Q3.4 | World Mutability | 💡 RECOMMENDED | Static (locked at creation for reproducibility) | +| Q3.5 | Default World Behavior | 💡 RECOMMENDED | Error (require explicit configuration) | +| Q3.6 | Dependency Auto-Resolution | 💡 RECOMMENDED | Warn (show warning but allow) | +| Q3.7 | Sampling Determinism | 💡 RECOMMENDED | Hybrid (default seed + override) | + +## Priority 1: Critical Decisions (Block Implementation) + +These questions must be answered first, as they fundamentally shape the architecture. + +### Q1.1: Capability Definition + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Option D + C**: Semantic Grouping with Domain Process characteristics (~50 capabilities) +> **Rationale**: Capabilities should be actionable business processes that AI agents can perform end-to-end. Not too broad ("Order Management" contains too many distinct workflows), not too granular ("Click button to create order" is too atomic). Examples: "Fulfill Customer Order", "Receive Inbound Shipment", "Cycle Count Inventory". +> **Implementation**: Start with ~30-40 capabilities, grow to 50-60 as needed. Each capability maps to 1-N OD variants. + +**Question**: What exactly is a capability? + +**Context**: Need clear definition to design the entire system. + +**Options**: +- **A. 
Business Function**: High-level processes ("Order Management", "Inventory Control") +- **B. User Story**: Goal-oriented tasks ("As a store manager, I want to fulfill an order") +- **C. Domain Process**: Technical workflows ("Inbound Receiving Flow", "Pick-Pack-Ship Process") +- **D. Semantic Grouping**: Logical grouping of related ODs + +**Trade-offs**: +| Option | Granularity | User-Friendly | Technical Clarity | Count | +|--------|-------------|---------------|-------------------|-------| +| A | Coarse | High | Low | ~20 | +| B | Fine | Very High | Medium | ~100+ | +| C | Medium | Medium | High | ~50 | +| D | Variable | Medium | Medium | ~30-60 | + +**Impact**: Affects taxonomy, knowledge graph, UI design, and tagging strategy. + +**Recommendation Needed**: Which best serves AI researchers? + +--- + +### Q1.2: Capability ↔ OD Relationship + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Option B**: 1:N Mapping (Variants) +> **Rationale**: One capability can have multiple OD implementations (variants) for different scenarios. Examples: "Order Fulfillment - Standard", "Order Fulfillment - Express", "Order Fulfillment - International". This provides flexibility for researchers to choose complexity level and optimization targets (speed vs accuracy). Also enables low-chaos vs high-chaos variants of the same capability. Can add N:M composition in future if needed. +> **Implementation**: Capability → OD[] mapping in registry. Variant selection via tags (complexity, speed, chaos-level). + +**Question**: Can one capability have multiple OD implementations? + +**Context**: Determines flexibility vs simplicity trade-off. + +**Options**: +- **A. 1:1 Mapping**: One capability = exactly one OD +- **B. 1:N Mapping (Variants)**: One capability = multiple OD variants (standard, express, complex) +- **C. N:M Mapping (Composition)**: Capabilities can share ODs, ODs can compose + +**Examples**: +``` +Option A (1:1): + Capability "Order Fulfillment" → OD "order-fulfillment-v1" + +Option B (1:N): + Capability "Order Fulfillment" → [ + OD "order-fulfillment-standard", + OD "order-fulfillment-express", + OD "order-fulfillment-international" + ] + +Option C (N:M): + Capability "Order Fulfillment" → OD "order-fulfillment" + Capability "Inventory Check" → OD "inventory-check" + OD "order-fulfillment" uses OD "inventory-check" (sub-workflow) +``` + +**Trade-offs**: +- A: Simplest, but inflexible +- B: Good balance, clear variants +- C: Most flexible, but most complex + +**Impact**: Registry design, variant selection logic, OD composition patterns. + +**Recommendation Needed**: What level of flexibility is required? + +--- + +### Q1.3: Knowledge Graph Implementation Approach + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Hybrid - Phased Approach**: Start with Option D (Manual), evolve to Option C (Hybrid) +> **Rationale**: +> - **Phase 1 (MVP)**: Manual annotations in tool definitions (`@produces Order`, `@requires Customer, Product`) - Fast to implement, high quality +> - **Phase 2**: Static analysis to extract and validate annotations using TypeScript compiler API +> - **Phase 3**: Runtime learning to refine edge weights and discover implicit patterns +> **Implementation**: Build graph structure now, populate incrementally. Start with manual annotations for critical tools, expand coverage over time. + +**Question**: How should the knowledge graph be built? + +**Context**: Affects development timeline and capabilities. + +**Options**: +- **A. 
Static Analysis** (Build time): Parse code, generate graph from types/schemas +- **B. Dynamic Learning** (Runtime): Learn from OD executions, infer relationships +- **C. Hybrid**: Static baseline + runtime refinement +- **D. Manual Annotation**: Developers explicitly specify relationships + +**Trade-offs**: +| Option | Effort | Accuracy | Maintenance | Bootstrap Time | +|--------|--------|----------|-------------|----------------| +| A | Medium | Medium | Low | Fast | +| B | High | High | Low | Slow | +| C | High | Very High | Medium | Medium | +| D | Low | High | High | Fast | + +**Impact**: Development time, graph quality, maintenance burden. + +**Recommendation Needed**: What's feasible for initial version? + +--- + +### Q1.4: Primary Taxonomy + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Option D**: Multi-Dimensional Tags with **Domain-Based Default View** +> **Rationale**: Different researchers have different mental models. Tags support all browsing patterns without forcing a single hierarchy. Default to domain-based view (Fulfillment, Inventory, Transportation) as it matches business language, but also offer persona view and complexity view. +> **Implementation**: Every capability gets comprehensive tags: `{domain: [], persona: [], complexity: "", services: [], pattern: []}`. API supports filtering by any tag combination. UI offers 3 primary views: Domain (default), Persona, Complexity. +> **Example**: `/api/capabilities?domain=fulfillment`, `/api/capabilities?persona=store-manager`, `/api/capabilities?complexity=simple` + +**Question**: What should be the default organization scheme? + +**Context**: Determines how researchers browse and discover capabilities. + +**Options**: +- **A. Domain-Based**: Organize by business domain (Fulfillment, Inventory, Transportation) +- **B. Persona-Based**: Organize by role (Store Manager, Warehouse Worker) +- **C. Complexity-Based**: Organize by difficulty (Simple, Medium, Complex) +- **D. Multi-Dimensional Tags**: No primary taxonomy, support all views + +**Trade-offs**: +| Option | Intuitive | Flexible | Maintenance | Use Case | +|--------|-----------|----------|-------------|----------| +| A | High | Low | Low | Business-oriented research | +| B | Medium | Medium | Medium | Role-based training | +| C | Medium | Low | Low | Progressive learning | +| D | Low | Very High | High | Expert users | + +**Impact**: UI design, browsing patterns, filtering logic. + +**Recommendation Needed**: What's most intuitive for AI researchers? + +--- + +## Priority 2: Important Design Choices + +These affect implementation details but can be iterated on. + +### Q2.1: Persona Granularity + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Option B**: Detailed (20-50 personas) +> **Rationale**: Broad personas (5-10) are too coarse - "Manager" doesn't convey specific responsibilities. Functional personas (50+) are overkill and hard to maintain. Sweet spot: Role-specific but not task-specific. Examples: Store Manager, Warehouse Manager, DC Manager (different management roles); Receiving Clerk, Shipping Clerk, Inventory Specialist (different operational functions). +> **Implementation**: Start with 15-20 core personas, expand to 30-40 as use cases emerge. Not: "Morning Shift Receiving Clerk" (too granular). + +**Question**: How detailed should personas be? + +**Options**: +- **A. Broad** (5-10 personas): Manager, Worker, Coordinator, Analyst, Bot +- **B. Detailed** (20-50 personas): Store Manager, DC Manager, Warehouse Manager, Receiving Clerk, etc. +- **C. 
Functional** (50+ personas): Very specific roles for each function + +**Trade-offs**: +- A: Simple to manage, coarse permissions +- B: Balance of specificity and manageability +- C: Highly realistic, complex to maintain + +**Impact**: Permission model, world configuration complexity. + +**Recommendation Needed**: What level matches research needs? + +--- + +### Q2.2: Persona ↔ Capability Configuration + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Option C**: Hybrid (Defaults in config, overridable per world) +> **Rationale**: Standard persona-capability mappings should be in version-controlled config (stable, predictable). But researchers need ability to create custom personas or restrict capabilities for experiments. Solution: Default mappings work out-of-box, but worlds can override with custom personas. +> **Implementation**: Default mappings in `config/personas.json`. World config can add `personaOverrides: { "custom-agent": { capabilities: [...] } }`. + +**Question**: Should persona-capability mappings be configurable? + +**Options**: +- **A. Static**: Hardcoded in config files, version controlled +- **B. Dynamic**: Stored in database, editable via API +- **C. Hybrid**: Defaults in config, overridable per world + +**Use Cases**: +- Custom personas for specific experiments +- Restrict capabilities for testing +- Evolve personas without code changes + +**Impact**: Flexibility, complexity, reproducibility. + +**Recommendation Needed**: How important is runtime configurability? + +--- + +### Q2.3: Hierarchy vs Flat Capabilities + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Flat with tags** (add hierarchy later if needed) +> **Rationale**: With ~50 capabilities, flat structure is manageable. Tags provide "virtual hierarchy": filtering by `domain:fulfillment` shows all fulfillment capabilities. Simpler to implement and reason about. Can add optional parent/child relationships later if catalog grows to 100+ capabilities. +> **Implementation**: No parent/child fields. Use tags for grouping: `tags: { domain: ["fulfillment"], category: ["order-processing"] }`. + +**Question**: Should capabilities have hierarchical structure? + +**Examples**: +``` +Hierarchical: + Order Management (parent) + ├── Create Order + ├── Fulfill Order + ├── Cancel Order + └── Track Order + +Flat: + - Create Order + - Fulfill Order + - Cancel Order + - Track Order +``` + +**Trade-offs**: +- Hierarchical: Better organization, more complex +- Flat: Simpler, but harder to navigate large catalogs + +**Impact**: UI complexity, filtering logic, tagging strategy. + +**Recommendation Needed**: Is flat sufficient or do we need hierarchy? + +--- + +### Q2.4: Data Flow Inference Method + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Option D**: Hybrid (Manual annotations + runtime validation) +> **Rationale**: Phase 1: Add manual annotations to tool definitions (`produces: ["Order"], requires: ["Customer", "Product"]`). Fast to implement, high quality. Phase 2: Runtime tracking validates annotations and discovers edge cases. Best of both worlds. +> **Implementation**: Add metadata fields to service tool definitions. Start with critical tools, expand coverage incrementally. Runtime logs can validate/suggest annotations. + +**Question**: How do we determine which entities a tool produces/requires? + +**Context**: Needed for knowledge graph and OD validation. + +**Options**: +- **A. Manual Annotation**: Developers add metadata (e.g., `@produces Order`) +- **B. 
Schema Analysis**: Infer from TypeScript types and input/output schemas +- **C. Runtime Learning**: Monitor executions, track data flow +- **D. Hybrid**: Start with annotations, refine with runtime data + +**Feasibility**: +- A: Immediate, but manual effort +- B: Possible with TypeScript compiler API, may be incomplete +- C: Accurate but slow to bootstrap +- D: Best of both worlds, more complex + +**Impact**: Graph quality, validation accuracy, developer burden. + +**Recommendation Needed**: What's most practical? + +--- + +### Q2.5: Graph Technology Choice + +> **✅ DECISION** (2025-11-14) +> **Chosen**: **Option D**: In-memory (graphlib) + MongoDB persistence +> **Rationale**: Use in-memory graph library (graphlib) for fast queries. Persist graph structure to MongoDB (already in stack). Load graph on startup. No new infrastructure (Neo4j). Can migrate to dedicated graph DB later if query complexity demands it. +> **Implementation**: `Graph` class using graphlib. Store nodes/edges in MongoDB collections. Load on service startup: `graph.loadFromMongoDB()`. Update and persist as needed. + +**Question**: What technology should power the knowledge graph? + +**Options**: +- **A. In-Memory (graphlib, cytoscape.js)**: Simple, fast, not persistent +- **B. Graph Database (Neo4j, ArangoDB)**: Purpose-built, powerful queries, infrastructure +- **C. MongoDB**: Already using it, not optimized for graphs +- **D. Hybrid**: In-memory + MongoDB persistence + +**Trade-offs**: +| Option | Setup | Performance | Persistence | Query Power | +|--------|-------|-------------|-------------|-------------| +| A | Easy | Fast | No | Basic | +| B | Hard | Fast | Yes | Excellent | +| C | Easy | Slow | Yes | Limited | +| D | Medium | Fast | Yes | Medium | + +**Impact**: Infrastructure complexity, query capabilities, scalability. + +**Recommendation Needed**: What fits the architecture best? + +--- + +## Priority 3: Nice-to-Have Features + +These can be deferred to later phases. **RECOMMENDED** options provided below can be finalized during implementation. + +### Q3.1: Tag Vocabulary Control + +> **💡 RECOMMENDED**: **Option C**: Hybrid (Core tags controlled, custom tags allowed) +> Core tags (domain, persona, complexity, services) use controlled vocabulary. Custom tags allowed for experimental/research-specific categorization. + +**Question**: Should tag values be freeform or controlled? + +**Options**: +- **A. Freeform**: Authors can add any tags (flexible, inconsistent) +- **B. Controlled**: Pre-defined tag vocabulary (consistent, rigid) +- **C. Hybrid**: Core tags controlled, custom tags allowed + +**Impact**: Tag consistency, search quality, maintenance. + +--- + +### Q3.2: Taxonomy Maintenance + +> **💡 RECOMMENDED**: **Option D**: Hybrid (System baseline + manual curation) +> System generates initial tags from code structure. Team manually curates and refines. Sustainable long-term. + +**Question**: Who maintains the taxonomy and tags? + +**Options**: +- **A. System**: Auto-generated from code +- **B. Manual**: Curated by team +- **C. Community**: Researchers contribute +- **D. Hybrid**: System baseline + manual curation + +**Impact**: Accuracy, freshness, maintenance burden. + +--- + +### Q3.3: Graph Update Frequency + +> **💡 RECOMMENDED**: **Option B**: Startup (+ manual refresh capability) +> Load graph on service startup. Provide manual refresh endpoint for updates without restart. + +**Question**: When does the knowledge graph get updated? + +**Options**: +- **A. Build Time**: Regenerated on deployment +- **B. 
Startup**: Built when service starts +- **C. Runtime**: Updated as ODs execute +- **D. Manual**: Explicit refresh command + +**Impact**: Graph freshness, performance, complexity. + +--- + +### Q3.4: World Configuration Mutability + +> **💡 RECOMMENDED**: **Option A**: Static (locked at creation) +> World configuration locked at creation for reproducibility. Create new world for different configuration. + +**Question**: Can world capabilities change after creation? + +**Options**: +- **A. Static**: Configuration locked at creation (reproducible, inflexible) +- **B. Dynamic**: Capabilities can be added/removed (flexible, hard to reproduce) +- **C. Staged**: Pre-defined stages that unlock over time (good for learning, complex) + +**Impact**: Reproducibility, flexibility, complexity. + +--- + +### Q3.5: Default World Behavior + +> **💡 RECOMMENDED**: **Option B**: Error (require explicit configuration) +> Fail fast if no capabilities specified. Forces researchers to think about world configuration. + +**Question**: If no capabilities specified, what happens? + +**Options**: +- **A. All**: Include all capabilities (current behavior) +- **B. Error**: Require explicit configuration (safe, strict) +- **C. Smart Default**: Sample 20 common capabilities (convenient, opinionated) + +**Impact**: User experience, safety, defaults. + +--- + +### Q3.6: Dependency Auto-Resolution + +> **💡 RECOMMENDED**: **Option B**: Warn (show warning but allow) +> Show clear warning message listing missing dependencies, but allow configuration. Researcher makes informed choice. + +**Question**: Should dependencies be automatically included? + +**Scenario**: Researcher selects "Order Fulfillment" but not "Inventory Check" (prerequisite) + +**Options**: +- **A. Auto-Include**: Silently add dependencies (convenient, surprising) +- **B. Warn**: Show warning but allow (flexible, risky) +- **C. Error**: Reject invalid configuration (strict, safe) + +**Impact**: User experience, safety, complexity. + +--- + +### Q3.7: Sampling Determinism + +> **💡 RECOMMENDED**: **Option C**: Hybrid (default seed + override) +> Auto-generate seed (timestamp-based) for reproducibility. Allow explicit seed override for exact replication. + +**Question**: Should sampling always be deterministic? + +**Options**: +- **A. Always Seeded**: Require seed for reproducibility (reproducible, less convenient) +- **B. Optional Seed**: Allow non-deterministic sampling (flexible, hard to reproduce) +- **C. Hybrid**: Default seed + override option (balanced) + +**Impact**: Experiment reproducibility, user experience. + +--- + +## Decision-Making Framework + +### How to Prioritize + +**Criteria**: +1. **Blocking**: Does this block other work? +2. **Impact**: How many components are affected? +3. **Reversibility**: Can we change this decision later? +4. **Effort**: How much work to implement each option? + +**Suggested Process**: +1. Answer all Priority 1 questions first +2. Build proof-of-concept based on Priority 1 answers +3. Test POC with realistic scenarios +4. Answer Priority 2 questions based on learnings +5. 
Defer Priority 3 questions to later phases + +### Decision Log Template + +For each question, document: +```markdown +## Decision: [Question Number] + +**Date**: YYYY-MM-DD +**Decided By**: [Name/Team] +**Chosen Option**: [A/B/C/D] + +**Rationale**: +- Why this option was chosen +- Key trade-offs considered +- Alternative options rejected and why + +**Impact**: +- What changes in implementation +- Dependencies on other decisions +- Timeline impact + +**Revisit Date**: YYYY-MM-DD (if needed) +``` + +## Next Steps + +✅ **Completed**: +1. All Priority 1 (critical) questions decided +2. All Priority 2 (important) questions decided +3. Priority 3 (nice-to-have) recommendations provided + +**Now Ready For**: +1. **Create implementation roadmap** based on decisions +2. **Build proof-of-concept** for: + - Capability registry with multi-dimensional tags + - Knowledge graph (in-memory + MongoDB) + - OD variant system (1:N mapping) + - Manual tool annotations (produces/requires) +3. **Define initial capability catalog** (~30-40 capabilities) +4. **Define persona catalog** (~15-20 personas) +5. **Start Phase 1 implementation** (see implementation plan TBD) + +## Related Documents + +- [02. Conceptual Model](./02-conceptual-model.md) - Questions about capability/persona/OD definitions +- [03. Knowledge Graph](./03-knowledge-graph.md) - Questions about graph implementation +- [04. Taxonomy & Organization](./04-taxonomy-organization.md) - Questions about categorization +- [05. Sampling & World Config](./05-sampling-world-config.md) - Questions about world configuration + +--- + +## Summary + +**Status**: ✅ All critical and important questions decided (2025-11-14) + +**Priority 1 (Critical)**: 4/4 **DECIDED** ✅ +- Capability definition → Semantic Grouping / Domain Process (~50 capabilities) +- Capability ↔ OD relationship → 1:N Variants +- Knowledge graph approach → Hybrid (Manual → Static → Runtime phased) +- Primary taxonomy → Multi-dimensional tags, domain-based default + +**Priority 2 (Important)**: 5/5 **DECIDED** ✅ +- Persona granularity → Detailed (20-50 personas) +- Persona-capability configuration → Hybrid (defaults + world overrides) +- Hierarchy vs flat → Flat with tags +- Data flow inference → Manual annotations + runtime validation +- Graph technology → In-memory (graphlib) + MongoDB + +**Priority 3 (Nice-to-Have)**: 7/7 **RECOMMENDED** 💡 +- Tag vocabulary → Hybrid (core controlled + custom allowed) +- Taxonomy maintenance → Hybrid (system baseline + manual curation) +- Graph update frequency → Startup + manual refresh +- World configuration mutability → Static (locked at creation) +- Default world behavior → Error (require explicit config) +- Dependency auto-resolution → Warn (show warning but allow) +- Sampling determinism → Hybrid (default seed + override) + +**Next Step**: Create implementation roadmap and build proof-of-concept. diff --git a/docs/od-architecture/07-chaos-integration.md b/docs/od-architecture/07-chaos-integration.md new file mode 100644 index 0000000000000000000000000000000000000000..e0696b4ada11e1d697b8d18e4a5daf5c3c611356 --- /dev/null +++ b/docs/od-architecture/07-chaos-integration.md @@ -0,0 +1,535 @@ +# 07. Chaos Integration + +## Overview + +This document explains how the chaos management system integrates with the OD architecture. Chaos is a **cross-cutting concern** that affects capability execution at multiple levels. + +For complete chaos management details, see [Chaos Management Documentation](../chaos/). 
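+Before diving into the details, the configuration cascade described below can be pictured as a single resolution function. The sketch that follows is illustrative only — simplified types, not the actual `ChaosConfigRegistry` implementation planned in the roadmap.
+
+```typescript
+// Illustrative sketch of the chaos priority cascade (not the real registry code).
+interface ChaosPolicy { enabled: boolean; probability: number; }
+
+interface ChaosContext {
+  stepPolicy?: ChaosPolicy;        // from the OD step definition
+  odPolicy?: ChaosPolicy;          // from the OD-level chaos config
+  capabilityPolicy?: ChaosPolicy;  // from capability-level overrides
+  worldPolicy?: ChaosPolicy;       // from the world preset
+}
+
+const SYSTEM_DEFAULT: ChaosPolicy = { enabled: true, probability: 0.05 };
+
+// Highest-priority source wins; the master kill-switch short-circuits everything.
+function resolveChaosPolicy(ctx: ChaosContext): ChaosPolicy {
+  if (process.env.CHAOS_ENABLED === "false") {
+    return { enabled: false, probability: 0 };
+  }
+  return (
+    ctx.stepPolicy ??
+    ctx.odPolicy ??
+    ctx.capabilityPolicy ??
+    ctx.worldPolicy ??
+    SYSTEM_DEFAULT
+  );
+}
+```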
+ +**Last Updated**: 2025-11-14 + +## Chaos in the Architecture + +### Architectural Position + +``` +┌─────────────────────────────────────┐ +│ PERSONA │ +│ (Store Manager) │ +└──────────────┬──────────────────────┘ + │ + ↓ +┌─────────────────────────────────────┐ +│ CAPABILITY │ ← Chaos override possible +│ (Order Fulfillment) │ +└──────────────┬──────────────────────┘ + │ + ↓ +┌─────────────────────────────────────┐ +│ OPERATIONAL DESCRIPTOR (OD) │ ← Chaos policy defined +│ (Workflow with chaos config) │ +└──────────────┬──────────────────────┘ + │ + ↓ +┌─────────────────────────────────────┐ +│ STEP │ ← Chaos injected here +│ (Tool execution + chaos) │ +└──────────────┬──────────────────────┘ + │ + ↓ +┌─────────────────────────────────────┐ +│ TOOL │ +│ (Actual service call) │ +└─────────────────────────────────────┘ +``` + +**Chaos is injected**: After tool execution, before storing output (in the Step layer) + +**Chaos is configured**: At World, Capability, OD, and Step levels + +## Configuration Cascade + +Chaos configuration follows a **priority cascade** (highest to lowest): + +``` +1. MASTER KILL-SWITCH (environment variable) + ↓ if enabled +2. Step-level chaos override (in OD definition) + ↓ if not specified +3. OD-level chaos policy (global OD chaos) + ↓ if not specified +4. Capability-level override (per-capability config) + ↓ if not specified +5. World-level chaos policy (world config) + ↓ if not specified +6. System default chaos preset +``` + +### Example Cascade + +**Environment**: `CHAOS_ENABLED=true` + +**World Config**: +```yaml +world: + id: "research-001" + chaos: + preset: "moderate" # Global: 0.15 probability +``` + +**Capability Override**: +```yaml +capability: + id: "order-fulfillment" + chaosOverride: + probability: 0.3 # Higher for this capability +``` + +**OD Definition**: +```yaml +od: + id: "order-fulfillment-standard" + chaos: + probability: 0.25 # OD-level setting + steps: + - id: "check-inventory" + chaos: + probability: 0.0 # This step: no chaos! +``` + +**Result**: +- `check-inventory` step: **0.0** (step override) +- Other steps in OD: **0.25** (OD-level) +- Other capabilities in world: **0.15** (world preset) + +## Integration Points + +### 1. World Configuration + +Chaos is configured when creating a world: + +```yaml +POST /api/worlds +{ + "name": "Chaos Research World", + "capabilities": { + "filters": { "domain": ["fulfillment"] } + }, + "chaos": { + "preset": "aggressive", + "seed": "repro-123" + } +} +``` + +**What Happens**: +- World uses "aggressive" preset (0.3 probability) +- All capabilities in this world inherit this chaos +- Seed ensures reproducibility + +### 2. Capability-Level Overrides + +Fine-grained control per capability: + +```yaml +world: + chaos: + preset: "moderate" + capabilityOverrides: + order-fulfillment: + probability: 0.5 # Critical capability: more chaos + inventory-check: + probability: 0.0 # Critical path: no chaos +``` + +**Use Case**: Test resilience of critical capabilities with higher chaos. + +### 3. OD Execution + +Chaos is injected during OD execution: + +``` +1. Researcher executes capability +2. Capability maps to OD +3. OD executor runs steps +4. For each step: + a. Execute tool (service call) + b. Resolve chaos policy (use cascade) + c. Maybe inject chaos (based on probability) + d. Store result (potentially modified by chaos) +5. 
Return execution result +``` + +**Chaos Transparency**: +- Chaos injections are logged +- Chaos modifications tracked in telemetry +- Researchers can see what chaos was applied + +### 4. OD Variants with Chaos + +Capabilities can have chaos-specific variants: + +```yaml +capability: + id: "order-fulfillment" + variants: + - id: "order-fulfillment-no-chaos" + chaos: { enabled: false } + odId: "order-fulfillment-v1" + + - id: "order-fulfillment-light-chaos" + chaos: { preset: "light" } + odId: "order-fulfillment-v1" + + - id: "order-fulfillment-aggressive-chaos" + chaos: { preset: "aggressive" } + odId: "order-fulfillment-v1" +``` + +**Benefit**: Same workflow, different chaos levels for A/B testing. + +## Chaos Presets + +### Standard Presets + +Defined in `config/chaos-presets/`: + +**light.json** (0.05 probability): +- Stale data (eventual consistency) +- Rate limits (throttling) +- Missing data (occasional) + +**moderate.json** (0.15 probability): +- All light scenarios +- Data corruption +- Partial data +- Permission denied + +**aggressive.json** (0.3 probability): +- All scenarios +- Higher weights +- More severe configurations + +**realistic.json** (0.08 probability): +- Real-world failure distribution +- Stale data: 40% weight +- Rate limits: 20% weight +- Rare failures: < 5% weight + +### Custom Presets + +Researchers can create custom presets: + +```json +// config/chaos-presets/custom-fulfillment.json +{ + "id": "custom-fulfillment", + "name": "Order Fulfillment Focused Chaos", + "globalProbability": 0.2, + "scenarios": [ + { + "type": "missing_data", + "weight": 10, + "description": "Missing inventory records", + "config": { "missingRecords": true } + }, + { + "type": "stale_data", + "weight": 8, + "description": "Stale order status", + "config": { "staleDataAge": 120 } + } + ] +} +``` + +## Master Kill-Switch + +### Environment Variables + +```bash +# Disable all chaos globally +CHAOS_ENABLED=false + +# Use specific preset +CHAOS_PRESET=moderate + +# Override global probability +CHAOS_GLOBAL_PROBABILITY=0.2 + +# Set global seed +CHAOS_GLOBAL_SEED=experiment-001 +``` + +**Priority**: Environment variables override ALL file-based configuration. + +### Runtime Control + +```bash +# Via API (if implemented) +PUT /api/chaos/status +{ + "enabled": false +} +``` + +**Use Case**: Emergency disable without restarting service. + +## Chaos Telemetry + +### What's Logged + +Every chaos injection logs: + +```json +{ + "timestamp": "2025-11-14T10:30:45Z", + "level": "info", + "msg": "Chaos injected", + "chaos": { + "worldId": "research-001", + "capabilityId": "order-fulfillment", + "odId": "order-fulfillment-standard-v1", + "stepId": "check-inventory", + "scenarioType": "stale_data", + "configSource": "world-preset", + "probability": 0.15, + "seed": "repro-123", + "modifications": { + "staleDataAge": 60, + "fieldsAffected": ["timestamp", "lastUpdated"] + } + } +} +``` + +### Chaos Metrics + +Track chaos impact: + +``` +GET /api/chaos/metrics?worldId=research-001 +{ + "totalInjections": 150, + "injectionRate": 0.14, + "scenarioDistribution": { + "stale_data": 60, + "missing_data": 45, + "rate_limit": 30 + }, + "impactAnalysis": { + "odSuccessRate": 0.75, + "odSuccessRateWithoutChaos": 0.95, + "chaosImpact": -20% + } +} +``` + +## Integration with Knowledge Graph + +### Chaos-Aware Validation + +Knowledge graph can validate chaos feasibility: + +```typescript +// Can this OD handle this chaos scenario? 
+graph.validateChaos(odId, chaosScenario); + +// Which scenarios are safe for this capability? +graph.suggestSafeChaos(capabilityId); +``` + +### Chaos Dependencies + +Some chaos scenarios require specific data: + +``` +Scenario: "data_corruption" + Requires: Fields to corrupt + Validates: OD produces structured data (not just primitives) + +Scenario: "missing_data" + Requires: Optional fields in schema + Validates: OD can handle missing data gracefully +``` + +**Future**: Graph validates chaos applicability before injection. + +## Researcher Workflows + +### Workflow 1: Standard Chaos Testing + +```yaml +# Create world with moderate chaos +world: + name: "Resilience Test" + chaos: + preset: "moderate" + seed: "test-001" + +# Execute capabilities +# Observe failures +# Analyze chaos impact +``` + +### Workflow 2: Progressive Chaos + +```yaml +# Phase 1: No chaos +world1: + chaos: { enabled: false } + +# Phase 2: Light chaos +world2: + chaos: { preset: "light" } + +# Phase 3: Aggressive chaos +world3: + chaos: { preset: "aggressive" } + +# Compare results across phases +``` + +### Workflow 3: Targeted Chaos + +```yaml +# Most capabilities: no chaos +world: + chaos: { enabled: false } + + # Except one critical capability + capabilityOverrides: + order-fulfillment: + chaos: + preset: "aggressive" + scenarios: [ + { type: "missing_data", weight: 10 } + ] +``` + +### Workflow 4: A/B Testing + +```yaml +# Control group: no chaos variant +capability: "order-fulfillment-no-chaos" + +# Treatment group: chaos variant +capability: "order-fulfillment-light-chaos" + +# Compare AI agent performance +``` + +## Best Practices + +### 1. Start with Presets + +Don't configure chaos manually from scratch: + +```yaml +# ✅ Good: Use preset +chaos: + preset: "moderate" + +# ❌ Avoid: Manual config (error-prone) +chaos: + probability: 0.15 + scenarios: [ ... 20 lines of config ... ] +``` + +### 2. Always Use Seeds for Reproducibility + +```yaml +# ✅ Good: Reproducible +chaos: + preset: "moderate" + seed: "experiment-20251114-001" + +# ❌ Avoid: Non-deterministic +chaos: + preset: "moderate" + # no seed = different chaos each run +``` + +### 3. Override Selectively + +```yaml +# ✅ Good: Preset + targeted override +chaos: + preset: "moderate" + capabilityOverrides: + critical-capability: + probability: 0.0 # No chaos for critical path + +# ❌ Avoid: Everything manual +capabilityOverrides: + cap1: { ... } + cap2: { ... } + cap3: { ... } + # Too much manual config +``` + +### 4. 
Document Chaos Rationale + +```yaml +world: + name: "Agent Resilience Test" + description: "Testing AI agent with realistic failure rates" + chaos: + preset: "realistic" + seed: "resilience-001" + chaosRationale: "Using realistic preset to match production failure distribution" +``` + +## Implementation Phases + +Chaos management is built incrementally alongside OD architecture: + +### Phase 1 (Core Capability System) +- **Deliverable**: Chaos preset files +- **Deliverable**: Environment variable support (CHAOS_ENABLED) +- **Deliverable**: ChaosConfigRegistry service +- Researchers can use presets + +### Phase 2 (World Configuration) +- **Deliverable**: World-level chaos config +- **Deliverable**: Apply presets to worlds +- **Deliverable**: Seed support for reproducibility +- Researchers configure chaos per world + +### Phase 4 (Advanced Features) +- **Deliverable**: Capability-level overrides +- **Deliverable**: OD-level runtime config +- **Deliverable**: Chaos telemetry and metrics +- Researchers have fine-grained control + +### Phase 5 (Polish & Scale) +- **Deliverable**: Chaos configuration API +- **Deliverable**: Impact analysis tools +- **Deliverable**: Migration from scattered configs +- Production-ready chaos management + +## Related Documents + +- **[Chaos Management](../chaos/chaos-management.md)** - Complete chaos system design +- **[Chaos Presets](../chaos/)** - Preset library and details +- **[02. Conceptual Model](./02-conceptual-model.md)** - Where chaos fits in architecture +- **[08. Implementation Roadmap](./08-implementation-roadmap.md)** - Chaos implementation timeline +- **[05. World Configuration](./05-sampling-world-config.md)** - World-level chaos config + +## Summary + +**Key Points**: +1. Chaos is a **cross-cutting concern** affecting OD execution +2. Configuration **cascades** from World → Capability → OD → Step +3. **Master kill-switch** via environment variables +4. **Presets** provide standardized chaos configurations +5. **Telemetry** tracks all chaos injections +6. **Reproducible** via seeded randomness +7. Integrated with **OD architecture** from Phase 1 + +**For Researchers**: +- Start with presets (light, moderate, aggressive, realistic) +- Configure at world level +- Override for specific capabilities as needed +- Always use seeds for reproducibility +- Monitor chaos impact via telemetry diff --git a/docs/od-architecture/08-implementation-roadmap.md b/docs/od-architecture/08-implementation-roadmap.md new file mode 100644 index 0000000000000000000000000000000000000000..e1701d668483a4adf050d5d8cfe7076887ae35a0 --- /dev/null +++ b/docs/od-architecture/08-implementation-roadmap.md @@ -0,0 +1,819 @@ +# 08. Implementation Roadmap + +## Overview + +This roadmap outlines a **value-driven, incremental approach** to implementing the OD management system. Each phase delivers usable features to researchers, not just infrastructure. + +**Last Updated**: 2025-11-14 + +## Why NOT Bottom-Up? 
+ +**Bottom-up approach (❌ Don't do this)**: +``` +Phase 1: Build complete database schema +Phase 2: Build knowledge graph infrastructure +Phase 3: Build capability registry +Phase 4: Build persona system +Phase 5: Finally, let researchers use it +``` + +**Problems**: +- No researcher value until Phase 5 +- High risk if requirements change +- Over-engineering for features we might not need +- Long feedback loop - don't learn if design works until the end + +## Our Approach: Value-Driven Vertical Slices + +**Incremental approach (✅ We do this)**: +``` +Phase 0: Walking skeleton - 5 hardcoded capabilities researchers can execute +Phase 1: Browse and filter ~30 capabilities +Phase 2: Create custom worlds with capability sampling +Phase 3: Validate dependencies (basic knowledge graph) +Phase 4: Add personas and chaos integration +Phase 5: Polish and scale +``` + +**Benefits**: +- Researcher value from Phase 0 +- Fast feedback - learn if approach works early +- Can pivot based on learnings +- Lower risk - small increments +- Infrastructure built when needed, not speculatively + +## Guiding Principles + +1. **Deliver Value Every Phase**: Each phase = something researchers can use +2. **Walking Skeleton First**: Get minimal end-to-end working, then expand +3. **Defer Decisions**: Don't implement everything designed; implement what's needed +4. **Evolutionary Architecture**: Allow for changes based on learnings +5. **Vertical Slices**: Build features end-to-end, not layers horizontally + +--- + +## Phase 0: Walking Skeleton (2-3 weeks) + +### Goal +Prove the concept works end-to-end with minimal implementation. + +### What Researchers Can Do +- List available capabilities (5 hardcoded examples) +- View capability details (name, description, tags) +- Execute a capability (maps to existing OD) + +### Deliverables + +**1. Hardcoded Capability Catalog** (~5 capabilities) +```typescript +// src/capabilities/catalog.ts +export const CAPABILITIES = [ + { + id: "order-fulfillment-simple", + name: "Order Fulfillment (Simple)", + description: "Process customer order with basic workflow", + tags: { + domain: ["fulfillment"], + complexity: "simple", + services: ["erp", "wms"] + }, + odId: "order-fulfillment-standard-v1" // Links to existing OD + }, + // ... 4 more +]; +``` + +**2. Simple REST API** +``` +GET /api/capabilities # List all capabilities +GET /api/capabilities/:id # Get capability details +POST /api/capabilities/:id/execute # Execute (runs linked OD) +``` + +**3. Execution Logic** +- Map capability ID → existing OD +- Execute OD using current executor +- Return results + +### What We're NOT Building Yet +- ❌ Database persistence +- ❌ Knowledge graph +- ❌ Complex filtering +- ❌ World configuration +- ❌ Persona system +- ❌ Dynamic capability creation + +### Success Criteria +- ✅ Researcher can GET /api/capabilities and see 5 options +- ✅ Researcher can execute a capability and get results +- ✅ End-to-end flow works without errors +- ✅ Team agrees this approach is promising + +### Decision Point +**After Phase 0**: Does this approach provide value? Should we continue? + +**Effort**: 2-3 weeks (1 developer) + +--- + +## Phase 1: Core Capability System (3-4 weeks) + +### Goal +Expand to ~30 capabilities with browsing, filtering, and tagging. + +### What Researchers Can Do +- Browse ~30 capabilities across domains +- Filter by domain, complexity, services +- Search by name/description +- Execute any capability +- **Use chaos presets** (light, moderate, aggressive) + +### Deliverables + +**1. 
Capability Registry Service** +```typescript +// src/services/capability-registry.service.ts +class CapabilityRegistry { + private capabilities: Map; + + find(filters: CapabilityFilters): Capability[]; + get(capabilityId: string): Capability | null; + search(query: string): Capability[]; +} +``` + +**2. Capability Definitions** (~30 capabilities) +- File-based storage: `config/capabilities/` +- YAML or JSON format +- Comprehensive tags for each + +**3. Enhanced API** +``` +GET /api/capabilities?domain=fulfillment +GET /api/capabilities?complexity=simple +GET /api/capabilities?services=wms,erp +GET /api/capabilities/search?q=order +``` + +**4. Map Capabilities to Existing ODs** +- Use current OD builders +- No need to refactor existing ODs yet + +**5. Chaos Management Foundation** +```typescript +// src/config/chaos-config.registry.ts +class ChaosConfigRegistry { + loadPreset(presetId: string): ChaosPolicy; + listPresets(): PresetMetadata[]; + isChaosEnabled(): boolean; // Check CHAOS_ENABLED env var +} +``` + +**6. Chaos Preset Files** +``` +config/chaos-presets/ +├── light.json # 0.05 probability +├── moderate.json # 0.15 probability +├── aggressive.json # 0.3 probability +└── realistic.json # 0.08 probability (real-world distribution) +``` + +**7. Environment Variable Support** +```bash +CHAOS_ENABLED=true|false # Master kill-switch +CHAOS_PRESET=moderate # Default preset +CHAOS_GLOBAL_PROBABILITY=0.1 # Override probability +``` + +### What We're NOT Building Yet +- ❌ MongoDB persistence (file-based is fine) +- ❌ Knowledge graph +- ❌ World configuration +- ❌ Persona mapping +- ❌ OD variants (just 1:1 mapping for now) + +### Success Criteria +- ✅ 30 capabilities defined and discoverable +- ✅ Filtering works correctly +- ✅ All capabilities executable +- ✅ Researchers can find what they need +- ✅ **Chaos presets work with env var control** +- ✅ **CHAOS_ENABLED=false disables all chaos** + +### Decision Point +**After Phase 1**: Are 30 capabilities enough? Is tagging working? Do we need hierarchy? **Are chaos presets sufficient?** + +**Effort**: 3-4 weeks (1-2 developers) +- Define 30 capabilities: 1 week +- Build registry service: 1 week +- API endpoints: 1 week +- **Chaos presets & registry: 3 days** +- Testing & docs: 1 week + +--- + +## Phase 2: World Configuration (2-3 weeks) + +### Goal +Enable researchers to create custom worlds with capability sampling. + +### What Researchers Can Do +- Create world with specific capabilities (filter-based selection) +- Sample capabilities randomly or by criteria +- Execute capabilities within configured world +- World configuration is immutable after creation +- **Configure chaos at world level** (preset-based) +- **Reproducible chaos** with seeds + +### Deliverables + +**1. World Configuration Schema** +```yaml +world: + id: "research-001" + name: "Warehouse Operations Study" + capabilities: + filters: + domains: [warehousing, inventory] + complexity: [simple, medium] + chaos: + preset: "moderate" # Use preset + seed: "repro-123" # Reproducible chaos +``` + +**2. World Configuration API** +``` +POST /api/worlds # Create world with config +GET /api/worlds/:worldId # Get world details +GET /api/worlds/:worldId/capabilities # List capabilities in world +POST /api/worlds/:worldId/capabilities/:capId/execute # Execute in world context +``` + +**3. Sampling Strategies** (start simple) +- Filter-based selection (domain, complexity, etc.) +- Random sampling with count +- Seed support for reproducibility + +**4. 
World-Scoped Execution** +- Execute ODs in world context +- Isolated data per world (already supported) + +**5. World-Level Chaos Configuration** +```typescript +// Update ChaosConfigRegistry +class ChaosConfigRegistry { + // Add world-level chaos resolution + getWorldChaosPolicy(worldId: string): ChaosPolicy; + setWorldChaosPolicy(worldId: string, policy: ChaosPolicy): void; + + // Resolve chaos with world context + resolveChaosPolicy(context: ChaosContext): ChaosPolicy; +} +``` +- Apply preset to world +- Support chaos seed for reproducibility +- Chaos inherits to all capabilities in world + +### What We're NOT Building Yet +- ❌ MongoDB persistence (in-memory registry is fine) +- ❌ Dependency validation +- ❌ Persona-based filtering +- ❌ Complex sampling (weighted, hierarchical, graph-based) +- ❌ World mutation after creation +- ❌ **Capability-level chaos overrides** (comes in Phase 4) + +### Success Criteria +- ✅ Researcher can create world with filtered capabilities +- ✅ World shows only selected capabilities +- ✅ Capabilities execute correctly in world context +- ✅ Sampling is reproducible with seed +- ✅ **Chaos configured at world level** +- ✅ **Same seed produces identical chaos across runs** + +### Decision Point +**After Phase 2**: Are filter-based and random sampling sufficient? Do we need weighted/hierarchical? **Is world-level chaos enough or need capability-level?** + +**Effort**: 2-3 weeks (1 developer) +- World config schema: 3 days +- Sampling logic: 1 week +- API endpoints: 3 days +- **World chaos integration: 2 days** +- Testing: 3 days + +--- + +## Phase 3: Knowledge Graph Basics (3-4 weeks) + +### Goal +Add dependency validation and basic capability suggestions using a simple knowledge graph. + +### What Researchers Can Do +- Validate that a capability is executable (has all dependencies) +- Get warned about missing prerequisites +- See "related capabilities" suggestions +- Validate OD feasibility before execution + +### Deliverables + +**1. Manual Tool Annotations** +```typescript +// Add to existing service tool definitions +{ + tool: "createOrder", + produces: ["Order"], + requires: ["Customer", "Product"], + service: "erp" +} +``` + +**2. In-Memory Knowledge Graph** +```typescript +// src/services/knowledge-graph.service.ts +class KnowledgeGraph { + // Nodes: Services, Tools, Entities, Capabilities + // Edges: uses, produces, requires, exposed_by + + validateOD(odId: string): ValidationResult; + findCapabilities(filters: any): Capability[]; + suggestRelated(capabilityId: string): Capability[]; +} +``` + +**3. Dependency Validation** +- Check if OD has all required tools +- Check if tools have required data +- Warn if dependencies missing + +**4. Enhanced API** +``` +GET /api/capabilities/:id/validate # Validate capability is executable +GET /api/capabilities/:id/dependencies # List dependencies +GET /api/capabilities/:id/related # Suggest related capabilities +``` + +### What We're NOT Building Yet +- ❌ MongoDB persistence for graph (rebuild on startup is fine) +- ❌ Runtime learning +- ❌ Static analysis of code +- ❌ Complex graph queries +- ❌ OD discovery/suggestion (just validation) + +### Success Criteria +- ✅ Can validate if capability is executable +- ✅ Warnings shown for missing dependencies +- ✅ Related capabilities suggested accurately +- ✅ Graph loads quickly on startup + +### Decision Point +**After Phase 3**: Is manual annotation sustainable? Do we need static analysis? Is validation useful? 
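+To make these questions concrete, the sketch below shows the kind of check Phase 3 targets: walking an OD's steps against manual `produces`/`requires` annotations. Shapes and helper names here are assumptions for illustration, not the final service design.
+
+```typescript
+// Illustrative only: annotation-driven data-flow validation for an OD.
+interface ToolAnnotation {
+  tool: string;
+  service: string;
+  produces: string[];   // entity types the tool creates
+  requires: string[];   // entity types the tool needs as input
+}
+
+interface ODStep { id: string; tool: string; }
+interface ValidationResult { valid: boolean; warnings: string[]; }
+
+// Hypothetical annotations for two tools (see "Manual Tool Annotations" above).
+const TOOL_ANNOTATIONS: ToolAnnotation[] = [
+  { tool: "checkInventory", service: "wms", produces: ["InventorySnapshot"], requires: ["Product"] },
+  { tool: "createOrder", service: "erp", produces: ["Order"], requires: ["Customer", "Product"] },
+];
+
+// Warn when a step requires an entity that neither the initial inputs
+// nor any earlier step produces.
+function validateOD(steps: ODStep[], initialEntities: string[]): ValidationResult {
+  const available = new Set(initialEntities);
+  const warnings: string[] = [];
+
+  for (const step of steps) {
+    const annotation = TOOL_ANNOTATIONS.find((a) => a.tool === step.tool);
+    if (!annotation) {
+      warnings.push(`Step ${step.id}: tool "${step.tool}" has no annotation; cannot validate`);
+      continue;
+    }
+    for (const entity of annotation.requires) {
+      if (!available.has(entity)) {
+        warnings.push(`Step ${step.id}: requires "${entity}" but no earlier step produces it`);
+      }
+    }
+    annotation.produces.forEach((e) => available.add(e));
+  }
+
+  return { valid: warnings.length === 0, warnings };
+}
+
+// With only Customer provided, both steps warn about the missing Product entity.
+console.log(validateOD(
+  [{ id: "check-inventory", tool: "checkInventory" }, { id: "create-order", tool: "createOrder" }],
+  ["Customer"],
+));
+```
+
+One way to read the decision point: if annotating the 15-20 critical tools already catches most broken configurations with a pass like this, manual annotation is sustainable for now and static analysis can wait.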
+ +**Effort**: 3-4 weeks (1-2 developers) +- Annotate tools: 1 week (15-20 critical tools) +- Build graph structure: 1 week +- Validation logic: 1 week +- API & testing: 1 week + +--- + +## Phase 4: Advanced Features (3-4 weeks) + +### Goal +Add persona system and advanced chaos integration. + +### What Researchers Can Do +- Filter capabilities by persona +- Create worlds with persona-based access +- Configure chaos at world/capability level with full priority cascade +- Analyze chaos impact through detailed telemetry + +### Deliverables + +**1. Persona System** (~15-20 personas) +```yaml +# config/personas.yaml +personas: + - id: store-manager + name: Store Manager + description: Manages store operations + capabilities: + - order-fulfillment + - inventory-management + - exception-handling +``` + +**2. Persona-Based Filtering** +``` +GET /api/capabilities?persona=store-manager +GET /api/personas # List all personas +GET /api/personas/:id/capabilities # Capabilities for persona +``` + +**3. Advanced Chaos Features** +```yaml +world: + chaos: + preset: "moderate" + # Capability-level overrides + capabilityOverrides: + order-fulfillment: + probability: 0.3 + scenarios: + - type: missing_data + weight: 10 + inventory-check: + probability: 0.0 # No chaos for critical path +``` + +**Chaos Configuration Priority Cascade**: +```typescript +// Implement full priority cascade +class ChaosConfigRegistry { + resolveChaosPolicy(context: ChaosContext): ChaosPolicy { + // 1. Check master kill-switch (CHAOS_ENABLED) + // 2. Step-level override (from OD) + // 3. OD-level policy + // 4. Capability-level override + // 5. World-level policy + // 6. System default + } +} +``` + +**Chaos Telemetry**: +```typescript +// Enhanced logging for chaos injections +{ + chaosInjected: true, + scenarioType: "stale_data", + configSource: "capability-override", + probability: 0.3, + seed: "repro-123" +} +``` + +**4. World Persona Overrides** +```yaml +world: + personaOverrides: + custom-agent: + capabilities: [order-fulfillment, inventory-check] +``` + +### What We're NOT Building Yet +- ❌ MongoDB persistence (still file-based) +- ❌ Complex persona hierarchies +- ❌ Dynamic persona creation UI +- ❌ OD Variants (deferred to Phase 6) + +### Success Criteria +- ✅ Personas defined and queryable +- ✅ Can filter capabilities by persona +- ✅ **Capability-level chaos overrides functional** +- ✅ **Chaos priority cascade implemented correctly** +- ✅ **Chaos telemetry logs all injections** +- ✅ Custom personas can be created per world + +### Decision Point +**After Phase 4**: Are personas granular enough? **Is chaos telemetry providing useful insights?** + +**Effort**: 3-4 weeks (2 developers) +- Define personas: 1 week +- Persona filtering: 1 week +- **Advanced chaos features**: 1 week + - Capability-level overrides + - Priority cascade implementation + - Enhanced telemetry +- Testing & docs: 1 week + +--- + +## Phase 5: Polish & Scale (Ongoing) + +### Goal +Production-ready system with performance, persistence, and documentation. + +### What Researchers Can Do +- Use system at scale (100+ capabilities, 50+ personas) +- Advanced sampling strategies +- Comprehensive documentation +- Performance monitoring + +### Deliverables + +**1. MongoDB Persistence** +- Capabilities collection +- Personas collection +- Worlds collection +- Knowledge graph nodes/edges + +**2. Performance Optimization** +- Caching for capability queries +- Efficient graph queries +- Pagination for large result sets + +**3. 
Advanced Sampling** +- Weighted sampling +- Hierarchical sampling +- Graph-based sampling + +**4. Comprehensive Documentation** +- API reference +- Researcher guide +- Capability cookbook +- Migration guide from old system + +**5. Monitoring & Telemetry** +- Usage metrics +- Performance metrics +- Error tracking + +**6. Chaos Management API & Tools** +``` +# Chaos configuration endpoints +GET /api/chaos/presets # List presets +GET /api/chaos/presets/:id # Get preset +POST /api/chaos/presets # Create custom preset +PUT /api/worlds/:worldId/chaos # Update world chaos +GET /api/chaos/metrics?worldId=... # Chaos impact metrics +POST /api/chaos/test # Test chaos (dry run) +``` + +**7. Chaos Impact Analysis** +- Chaos metrics aggregation (injection counts, scenario distribution) +- Success rate correlation (with vs without chaos) +- Chaos impact reports +- A/B testing tools (compare chaos variants) + +**8. Migration from Scattered Chaos Configs** +- Extract chaos from 14+ existing files +- Convert to centralized presets +- Update builders to use ChaosConfigRegistry +- Deprecation warnings for inline chaos + +### Success Criteria +- ✅ System handles 100+ capabilities +- ✅ Query performance < 100ms +- ✅ Complete documentation +- ✅ Migration plan for existing ODs +- ✅ **Chaos configuration API fully functional** +- ✅ **All scattered chaos configs migrated to presets** +- ✅ **Chaos impact analysis provides actionable insights** + +**Effort**: Ongoing (2+ developers) + +--- + +## Migration Strategy + +### Backward Compatibility + +**Phase 0-2**: New system runs alongside old +- Existing OD builders continue working +- New capability API available but optional +- No breaking changes + +**Phase 3-4**: Encourage migration +- Document migration path +- Create capabilities for common ODs +- Deprecation warnings for old patterns + +**Phase 5**: Complete migration +- All ODs accessible via capabilities +- Old builders deprecated but still functional +- Migration tooling to convert old → new + +### Coexistence Pattern + +```typescript +// Old way (still works) +const od = new GenericODBuilder() + .addStep(...) + .build(); + +// New way (recommended) +const capability = await capabilityRegistry.get("order-fulfillment"); +await capability.execute(worldId, inputs); +``` + +--- + +## Phase 6: OD Variants (Future) + +### Goal +Support multiple implementation approaches for the same capability through OD variants. + +### What Researchers Can Do +- Choose OD variants (simple vs complex implementations) +- Select complexity-based variants for capabilities +- Use chaos-focused variants with different chaos presets +- Execute capabilities with variant-specific configurations + +### Deliverables + +**1. OD Variant Type System** +```typescript +// Capability can have multiple OD implementation variants +capability: + id: order-fulfillment + variants: + # Complexity variants + - id: order-fulfillment-simple + complexity: simple + odId: "order-fulfillment-simple-v1" + - id: order-fulfillment-standard + complexity: medium + odId: "order-fulfillment-standard-v1" + - id: order-fulfillment-complex + complexity: complex + odId: "order-fulfillment-complex-v1" +``` + +**2. Complexity Variants** +- Convert 3-5 high-value capabilities to variant model +- Each capability has 2-3 variants (simple/medium/complex) +- Different OD implementations for different complexity levels + +**3. 
Chaos Variants** +```yaml +# Same workflow, different chaos configurations +capability: + variants: + - id: inventory-check-no-chaos + chaos: { enabled: false } + odId: "inventory-check-standard-v1" + - id: inventory-check-light + chaos: { preset: "light" } + odId: "inventory-check-standard-v1" + - id: inventory-check-aggressive + chaos: { preset: "aggressive" } + odId: "inventory-check-standard-v1" +``` + +**4. Variant Selection API** +``` +GET /capabilities/:id/variants # List all variants +POST /capabilities/:id/execute +{ + "worldId": "...", + "inputs": {...}, + "options": { + "variantId": "order-fulfillment-complex" // or + "complexity": "medium" + } +} +``` + +### What We're NOT Building Yet +- ❌ N:M composition (multiple ODs combined) +- ❌ Dynamic variant generation +- ❌ Variant recommendations + +### Success Criteria +- ✅ 3-5 capabilities with complexity variants +- ✅ Variant selection working (by ID and complexity) +- ✅ Chaos variants functional +- ✅ All variant tests passing +- ✅ API documentation complete + +### Decision Point +**After Phase 6**: Is 1:N variant mapping sufficient? Do we need N:M composition? + +**Effort**: 1-2 weeks (1 developer) +- Variant type system: 1 day +- Complexity variants (3-5 capabilities): 2-3 days +- Capability executor updates: 1 day +- Variant selection API: 1 day +- Chaos variants: 1 day +- Testing & docs: 1-2 days + +**Note**: This phase is deprioritized and will be scheduled based on research needs after Phase 5 is complete. + +--- + +## Risk Mitigation + +### Major Risks + +**1. Scope Creep** +- **Mitigation**: Each phase has clear "NOT building" list. Defer aggressively. + +**2. Over-Engineering** +- **Mitigation**: Start simple, add complexity only when needed. + +**3. Changing Requirements** +- **Mitigation**: Decision points after each phase. Can pivot based on learnings. + +**4. Adoption** +- **Mitigation**: Backward compatibility. Researchers can adopt incrementally. + +**5. Performance** +- **Mitigation**: Deferred to Phase 5. Measure early, optimize when needed. + +### Flexibility Points + +**Can Easily Change**: +- Number of capabilities (30 → 50 → 100) +- Sampling strategies +- Tag vocabulary +- Persona definitions + +**Hard to Change Later**: +- Capability ↔ OD relationship (1:1 vs 1:N vs N:M) +- Knowledge graph structure +- API design + +**Strategy**: Lock hard-to-change early, keep easy-to-change flexible. + +--- + +## Decision Points & Metrics + +### After Each Phase + +**Questions to Answer**: +1. Did we deliver value to researchers? +2. Were our assumptions correct? +3. What surprised us? +4. What should we adjust? + +**Metrics to Track**: +- API usage (which endpoints are popular?) +- Capability execution counts (which are used most?) +- Error rates (where do things fail?) 
+- Researcher feedback (qualitative) + +### Go/No-Go Criteria + +**Continue to next phase if**: +- ✅ Current phase delivered promised value +- ✅ Researchers are using the features +- ✅ No major architectural issues discovered + +**Pivot or adjust if**: +- ❌ Low adoption +- ❌ Fundamental design flaw found +- ❌ Requirements changed significantly + +--- + +## Effort Summary + +| Phase | Duration | Team Size | Focus | +|-------|----------|-----------|-------| +| Phase 0: Walking Skeleton | 2-3 weeks | 1 dev | Proof of concept | +| Phase 1: Core System | 3-4 weeks | 1-2 devs | Capability browsing | +| Phase 2: World Config | 2-3 weeks | 1 dev | Customization | +| Phase 3: Knowledge Graph | 3-4 weeks | 1-2 devs | Validation | +| Phase 4: Advanced Features | 3-4 weeks | 2 devs | Personas & chaos | +| Phase 5: Polish & Scale | Ongoing | 2+ devs | Production-ready | +| Phase 6: OD Variants | 1-2 weeks | 1 dev | Variant system (Future) | + +**Total to MVP (Phase 0-2)**: 7-10 weeks +**Total to Full Feature Set (Phase 0-4)**: 13-17 weeks (~3-4 months) +**Total with OD Variants (Phase 0-6)**: 14-19 weeks (~3.5-5 months) + +--- + +## Success Metrics + +### Phase 0 Success +- Concept validated +- Team aligned on approach + +### Phase 1-2 Success +- 10+ researchers using capability API +- 50+ capability executions per week + +### Phase 3-4 Success +- Dependency validation prevents 80% of execution errors +- Personas used in 50% of world configurations + +### Phase 5 Success +- 100+ capabilities defined +- < 100ms query performance +- Migration complete for 80% of existing ODs + +--- + +## Related Documents + +- [02. Conceptual Model](./02-conceptual-model.md) - Architecture we're implementing +- [06. Open Questions & Decisions](./06-open-questions.md) - Decisions guiding this roadmap +- [07. Chaos Integration](./07-chaos-integration.md) - How chaos integrates with OD architecture +- [04. Taxonomy & Organization](./04-taxonomy-organization.md) - How capabilities are organized +- [05. Sampling & World Config](./05-sampling-world-config.md) - World configuration details +- [Chaos Management](../chaos/chaos-management.md) - Complete chaos system design + +--- + +## Next Steps + +1. **Review roadmap** with team and stakeholders +2. **Get approval** for Phase 0 start +3. **Create Phase 0 task breakdown** (detailed stories) +4. **Assign developer(s)** to Phase 0 +5. **Set up** project tracking (GitHub issues, board, etc.) +6. **Start Phase 0 implementation** +7. **Review progress weekly**, adjust as needed + +**First Milestone**: Complete Phase 0 in 2-3 weeks, validate approach. diff --git a/docs/od-architecture/09-implementation-tasks.md b/docs/od-architecture/09-implementation-tasks.md new file mode 100644 index 0000000000000000000000000000000000000000..bc8426c48c2386aa2e0f3da9f01a1a1ab2b85d95 --- /dev/null +++ b/docs/od-architecture/09-implementation-tasks.md @@ -0,0 +1,1548 @@ +# 09. Implementation Tasks Tracker + +## Overview + +This document serves as a master index for all implementation tasks across phases. Detailed task specifications are maintained in phase-specific files within the `implementation/` directory. + +**Last Updated**: 2025-11-21 + +**How to Use**: +1. Navigate to the appropriate phase folder: `implementation/phaseN/` +2. Open `tasks.md` for detailed tickets +3. Pick a ticket with status 📝 TODO +4. Update status to 🚧 IN PROGRESS +5. Complete the work according to acceptance criteria +6. 
Update status to ✅ DONE + +**Status Key**: +- 📝 **TODO**: Not started +- 🚧 **IN PROGRESS**: Currently being worked on +- ✅ **DONE**: Completed and tested +- ❌ **BLOCKED**: Waiting on dependencies +- ⏸️ **ON HOLD**: Paused, will resume later + +--- + +## Phase Index + +### Phase 0: Walking Skeleton (2-3 weeks) + +**Epic**: MORPH-100 - Walking Skeleton + +**Goal**: Prove the concept works end-to-end with minimal implementation + +**Status**: ✅ **COMPLETED** + +**Documents**: +- [Tasks](./implementation/phase0/tasks.md) - Detailed 12 tickets +- [Demo Script](./implementation/phase0/demo.md) - Walkthrough +- [Test Results](./implementation/phase0/test-results.md) - Test execution results + +**Summary**: +- 12 tickets, ~21 story points +- 5 capabilities implemented +- API endpoints functional +- End-to-end flow validated + +--- + +### Phase 1: Core Capability System (3-4 weeks) + +**Epic**: MORPH-200 - Core Capability System + +**Goal**: Expand to 4 working capabilities with real OD execution, filtering, and chaos support + +**Status**: ✅ **COMPLETED** + +**Documents**: +- [Tasks](./implementation/phase1/tasks.md) - Detailed 20 tickets +- [Demo Script](./implementation/phase1/demo-script.md) - Walkthrough +- [Test Results](./implementation/phase1/test-results.md) - Test execution results +- [Retrospective](./implementation/phase1/retrospective.md) - Lessons learned + +**Summary**: +- 20 tickets, ~57 story points +- 4 working capabilities: inventory-check, shipment-tracking, equipment-availability-check, dock-appointment-scheduling +- Real OD execution with chaos engineering +- Search/filtering APIs functional +- Performance baselines established +- **Decision**: ✅ Proceed to Phase 2 (World Configuration) + +--- + +### Phase 2: World Configuration (2.5-3 weeks) + +**Epic**: MORPH-300 - World Configuration + +**Goal**: Enable researchers to create custom worlds with capability sampling and chaos configuration + +**Status**: 📝 **TODO** (Ready to Start) + +**Documents**: +- [Tasks](./implementation/phase2/tasks.md) - Detailed 10 tickets (REVISED) + +**Summary**: +- 10 tickets, 22 story points (revised from 30 - **27% effort reduction**) +- Extends existing World model (leverages 40% existing infrastructure) +- Sampling strategies: filter, random, seeded +- World-level chaos configuration with API endpoints +- Capability-level chaos overrides +- **Critical bug fix**: Capability executor chaos integration +- Reproducibility via seeds for sampling and chaos + +--- + +### Phase 3-5: Future Phases + +**Status**: 🔒 **LOCKED** (will be created after Phase 2 completion) + +See [08. Implementation Roadmap](./08-implementation-roadmap.md) for high-level phase definitions. + +--- + +## Quick Navigation + +| Phase | Status | Tasks | Demo | Tests | Retro | +|-------|--------|-------|------|-------|-------| +| Phase 0 | ✅ DONE | [tasks](./implementation/phase0/tasks.md) | [demo](./implementation/phase0/demo.md) | [tests](./implementation/phase0/test-results.md) | - | +| Phase 1 | ✅ DONE | [tasks](./implementation/phase1/tasks.md) | [demo](./implementation/phase1/demo-script.md) | [tests](./implementation/phase1/test-results.md) | [retro](./implementation/phase1/retrospective.md) | +| Phase 2 | 📝 TODO | [tasks](./implementation/phase2/tasks.md) | - | - | - | +| Phase 3+ | 🔒 LOCKED | - | - | - | - | + +--- + +## Historical Detail (Archived) + +The content below has been moved to phase-specific `tasks.md` files. This section is kept for reference only. + +
+Phase 0 Tickets (ARCHIVED - See implementation/phase0/tasks.md) + +### MORPH-101: Project Setup & Type Definitions + +**Type**: Task +**Priority**: High +**Estimate**: 2 points (1 day) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Set up project structure and create TypeScript type definitions for capabilities. + +**Acceptance Criteria**: +- [ ] Folder structure created: `src/capabilities/` +- [ ] Type definitions file created: `src/types/capability.type.ts` +- [ ] Core types defined: `Capability`, `CapabilityMetadata`, `CapabilityTags` +- [ ] Types compile without errors +- [ ] Types exported properly + +**Technical Details**: +```typescript +// src/types/capability.type.ts +export interface Capability { + id: string; + name: string; + description: string; + tags: CapabilityTags; + odId: string; // Maps to existing OD + version: string; + metadata?: CapabilityMetadata; +} + +export interface CapabilityTags { + domain: string[]; + complexity: 'simple' | 'medium' | 'complex'; + services: string[]; + personas?: string[]; +} + +export interface CapabilityMetadata { + author?: string; + createdAt?: Date; + estimatedDuration?: number; +} +``` + +**Dependencies**: None + +**Testing**: +- Types compile successfully +- Can import types in other files + +--- + +### MORPH-102: Create 5 Hardcoded Capabilities + +**Type**: Task +**Priority**: High +**Estimate**: 3 points (1 day) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Create a hardcoded catalog of 5 example capabilities spanning different domains and complexity levels. + +**Acceptance Criteria**: +- [ ] File created: `src/capabilities/catalog.ts` +- [ ] 5 capabilities defined with complete metadata +- [ ] Covers at least 3 different domains +- [ ] Mix of simple, medium, complex +- [ ] Each capability maps to an existing OD +- [ ] Data exported as constant array + +**Technical Details**: +```typescript +// src/capabilities/catalog.ts +import { Capability } from '../types/capability.type'; + +export const INITIAL_CAPABILITIES: Capability[] = [ + { + id: 'order-fulfillment-simple', + name: 'Order Fulfillment (Simple)', + description: 'Process customer order with basic workflow', + tags: { + domain: ['fulfillment', 'order-processing'], + complexity: 'simple', + services: ['erp', 'wms'] + }, + odId: 'order-fulfillment-standard-v1', // Existing OD + version: '1.0.0' + }, + // ... 4 more +]; +``` + +**Capabilities to Create**: +1. **order-fulfillment-simple** (Fulfillment, Simple, ERP+WMS) +2. **inventory-check** (Inventory, Simple, WMS) +3. **inbound-receiving** (Warehousing, Medium, TMS+WMS) +4. **edi-850-generation** (EDI, Medium, EDI+ERP) +5. **shipment-tracking** (Transportation, Simple, TMS) + +**Dependencies**: MORPH-101 + +**Testing**: +- Catalog imports successfully +- All 5 capabilities have required fields +- OD IDs reference existing ODs + +--- + +### MORPH-103: Build Capability Catalog Service + +**Type**: Story +**Priority**: High +**Estimate**: 3 points (1.5 days) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Create an in-memory capability catalog service with basic query methods. 
+ +**Acceptance Criteria**: +- [ ] File created: `src/services/capability-catalog.service.ts` +- [ ] `CapabilityCatalog` class implemented +- [ ] Method: `getAll()` returns all capabilities +- [ ] Method: `getById(id)` returns single capability or null +- [ ] Method: `filter(tags)` returns filtered capabilities +- [ ] Service is a singleton +- [ ] Loads from hardcoded catalog on initialization + +**Technical Details**: +```typescript +// src/services/capability-catalog.service.ts +import { Capability, CapabilityTags } from '../types/capability.type'; +import { INITIAL_CAPABILITIES } from '../capabilities/catalog'; + +class CapabilityCatalog { + private capabilities: Map; + + constructor() { + this.capabilities = new Map(); + this.loadCapabilities(); + } + + private loadCapabilities(): void { + INITIAL_CAPABILITIES.forEach(cap => { + this.capabilities.set(cap.id, cap); + }); + } + + getAll(): Capability[] { + return Array.from(this.capabilities.values()); + } + + getById(id: string): Capability | null { + return this.capabilities.get(id) || null; + } + + filter(filters: Partial): Capability[] { + return this.getAll().filter(cap => { + if (filters.domain && !filters.domain.some(d => cap.tags.domain.includes(d))) { + return false; + } + if (filters.complexity && cap.tags.complexity !== filters.complexity) { + return false; + } + if (filters.services && !filters.services.every(s => cap.tags.services.includes(s))) { + return false; + } + return true; + }); + } +} + +// Singleton +export const capabilityCatalog = new CapabilityCatalog(); +``` + +**Dependencies**: MORPH-101, MORPH-102 + +**Testing**: +- `getAll()` returns 5 capabilities +- `getById('order-fulfillment-simple')` returns correct capability +- `filter({ domain: ['fulfillment'] })` returns matching capabilities +- `filter({ complexity: 'simple' })` returns 3 simple capabilities + +--- + +### MORPH-104: Create GET /api/capabilities Endpoint + +**Type**: Story +**Priority**: High +**Estimate**: 2 points (1 day) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Create REST API endpoint to list all capabilities with optional filtering. + +**Acceptance Criteria**: +- [ ] Route created: `src/routes/capabilities.route.ts` +- [ ] `GET /api/capabilities` returns all capabilities +- [ ] Query params supported: `domain`, `complexity`, `services` +- [ ] Returns JSON array of capabilities +- [ ] Proper HTTP status codes (200, 400) +- [ ] Route registered in main app + +**Technical Details**: +```typescript +// src/routes/capabilities.route.ts +import { Router } from 'express'; +import { capabilityCatalog } from '../services/capability-catalog.service'; + +const router = Router(); + +router.get('/', (req, res) => { + try { + const { domain, complexity, services } = req.query; + + const filters: any = {}; + if (domain) filters.domain = Array.isArray(domain) ? domain : [domain]; + if (complexity) filters.complexity = complexity; + if (services) filters.services = Array.isArray(services) ? services : [services]; + + const capabilities = Object.keys(filters).length > 0 + ? 
capabilityCatalog.filter(filters) + : capabilityCatalog.getAll(); + + res.json(capabilities); + } catch (error) { + res.status(400).json({ error: error.message }); + } +}); + +export default router; +``` + +**API Examples**: +```bash +# Get all capabilities +GET /api/capabilities +→ Returns: [5 capabilities] + +# Filter by domain +GET /api/capabilities?domain=fulfillment +→ Returns: [1-2 capabilities] + +# Filter by complexity +GET /api/capabilities?complexity=simple +→ Returns: [3 capabilities] + +# Multiple filters +GET /api/capabilities?domain=fulfillment&complexity=simple +→ Returns: [1 capability] +``` + +**Dependencies**: MORPH-103 + +**Testing**: +- Manual curl/Postman tests +- Returns correct number of capabilities +- Filtering works correctly +- Invalid filters return 400 + +--- + +### MORPH-105: Create GET /api/capabilities/:id Endpoint + +**Type**: Task +**Priority**: High +**Estimate**: 1 point (0.5 days) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Create REST API endpoint to get details of a single capability. + +**Acceptance Criteria**: +- [ ] `GET /api/capabilities/:id` returns capability details +- [ ] Returns 404 if capability not found +- [ ] Returns 200 with capability JSON if found +- [ ] Includes all capability metadata + +**Technical Details**: +```typescript +// Add to src/routes/capabilities.route.ts +router.get('/:id', (req, res) => { + const { id } = req.params; + const capability = capabilityCatalog.getById(id); + + if (!capability) { + return res.status(404).json({ error: 'Capability not found' }); + } + + res.json(capability); +}); +``` + +**API Examples**: +```bash +# Get capability details +GET /api/capabilities/order-fulfillment-simple +→ Returns: { id: "order-fulfillment-simple", ... } + +# Not found +GET /api/capabilities/non-existent +→ Returns 404: { error: "Capability not found" } +``` + +**Dependencies**: MORPH-104 + +**Testing**: +- Valid ID returns capability +- Invalid ID returns 404 +- Response includes all fields + +--- + +### MORPH-106: Build Capability → OD Mapper Service + +**Type**: Story +**Priority**: High +**Estimate**: 2 points (1 day) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Create service that maps capability IDs to existing OD builders and executes them. + +**Acceptance Criteria**: +- [ ] File created: `src/services/capability-executor.service.ts` +- [ ] `CapabilityExecutor` class implemented +- [ ] Method: `execute(capabilityId, inputs)` runs OD +- [ ] Uses existing OD builders (no refactoring) +- [ ] Returns OD execution results +- [ ] Handles errors gracefully + +**Technical Details**: +```typescript +// src/services/capability-executor.service.ts +import { capabilityCatalog } from './capability-catalog.service'; +import { executeOD } from '../operational-descriptor/executor.od'; + +export class CapabilityExecutor { + async execute( + capabilityId: string, + worldId: string, + inputs: any + ): Promise { + // 1. Get capability + const capability = capabilityCatalog.getById(capabilityId); + if (!capability) { + throw new Error(`Capability not found: ${capabilityId}`); + } + + // 2. Get OD (for now, assume OD exists in registry) + const odId = capability.odId; + + // 3. 
Execute OD using existing executor + const result = await executeOD(odId, worldId, inputs); + + return { + capabilityId, + odId, + worldId, + result, + executedAt: new Date() + }; + } +} + +export const capabilityExecutor = new CapabilityExecutor(); +``` + +**Dependencies**: MORPH-103 + +**Testing**: +- Can execute order-fulfillment-simple +- Returns proper result structure +- Throws error for non-existent capability +- OD execution works correctly + +--- + +### MORPH-107: Create POST /api/capabilities/:id/execute Endpoint + +**Type**: Story +**Priority**: High +**Estimate**: 2 points (1 day) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Create REST API endpoint to execute a capability. + +**Acceptance Criteria**: +- [ ] `POST /api/capabilities/:id/execute` endpoint created +- [ ] Accepts `worldId` and `inputs` in request body +- [ ] Executes capability via CapabilityExecutor +- [ ] Returns execution results +- [ ] Proper error handling (404, 400, 500) +- [ ] Request validation + +**Technical Details**: +```typescript +// Add to src/routes/capabilities.route.ts +router.post('/:id/execute', async (req, res) => { + try { + const { id } = req.params; + const { worldId, inputs } = req.body; + + // Validation + if (!worldId) { + return res.status(400).json({ error: 'worldId is required' }); + } + + // Execute + const result = await capabilityExecutor.execute(id, worldId, inputs); + + res.json(result); + } catch (error) { + if (error.message.includes('not found')) { + return res.status(404).json({ error: error.message }); + } + res.status(500).json({ error: error.message }); + } +}); +``` + +**API Examples**: +```bash +# Execute capability +POST /api/capabilities/order-fulfillment-simple/execute +Body: { + "worldId": "world-123", + "inputs": { + "orderId": "ORD-001" + } +} +→ Returns: { + "capabilityId": "order-fulfillment-simple", + "odId": "order-fulfillment-standard-v1", + "result": { ... }, + "executedAt": "2025-11-14T10:30:00Z" +} +``` + +**Dependencies**: MORPH-106 + +**Testing**: +- Successful execution returns results +- Missing worldId returns 400 +- Invalid capability ID returns 404 +- OD execution errors return 500 + +--- + +### MORPH-108: Register Capability Routes in App + +**Type**: Task +**Priority**: High +**Estimate**: 1 point (0.5 days) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Register the new capability routes in the main Express app. + +**Acceptance Criteria**: +- [ ] Capability routes mounted in main app +- [ ] Routes accessible via `/api/capabilities` +- [ ] Routes work in development environment +- [ ] No breaking changes to existing routes + +**Technical Details**: +```typescript +// In main app file (e.g., src/app.ts or src/index.ts) +import capabilitiesRouter from './routes/capabilities.route'; + +// Register routes +app.use('/api/capabilities', capabilitiesRouter); +``` + +**Dependencies**: MORPH-107 + +**Testing**: +- All capability endpoints accessible +- Existing routes still work +- Server starts without errors + +--- + +### MORPH-109: Integration Testing + +**Type**: Task +**Priority**: Medium +**Estimate**: 2 points (1 day) +**Status**: ⏸️ ON HOLD +**Note**: Requires MongoDB setup - will complete after environment setup + +**Description**: +Create integration tests for the end-to-end capability flow. 
+ +**Acceptance Criteria**: +- [ ] Test file created: `tests/capabilities.integration.test.ts` +- [ ] Test: List all capabilities +- [ ] Test: Get capability by ID +- [ ] Test: Filter capabilities +- [ ] Test: Execute capability end-to-end +- [ ] All tests pass + +**Technical Details**: +```typescript +// tests/capabilities.integration.test.ts +describe('Capabilities API', () => { + describe('GET /api/capabilities', () => { + it('should return all 5 capabilities', async () => { + const res = await request(app).get('/api/capabilities'); + expect(res.status).toBe(200); + expect(res.body).toHaveLength(5); + }); + + it('should filter by domain', async () => { + const res = await request(app) + .get('/api/capabilities?domain=fulfillment'); + expect(res.status).toBe(200); + expect(res.body.every(c => c.tags.domain.includes('fulfillment'))).toBe(true); + }); + }); + + describe('GET /api/capabilities/:id', () => { + it('should return capability details', async () => { + const res = await request(app) + .get('/api/capabilities/order-fulfillment-simple'); + expect(res.status).toBe(200); + expect(res.body.id).toBe('order-fulfillment-simple'); + }); + + it('should return 404 for invalid ID', async () => { + const res = await request(app) + .get('/api/capabilities/invalid'); + expect(res.status).toBe(404); + }); + }); + + describe('POST /api/capabilities/:id/execute', () => { + it('should execute capability', async () => { + const res = await request(app) + .post('/api/capabilities/order-fulfillment-simple/execute') + .send({ + worldId: 'test-world', + inputs: { orderId: 'TEST-001' } + }); + expect(res.status).toBe(200); + expect(res.body.capabilityId).toBe('order-fulfillment-simple'); + expect(res.body.result).toBeDefined(); + }); + }); +}); +``` + +**Dependencies**: MORPH-108 + +**Testing**: +- Run tests: `npm test` +- All tests pass +- Coverage > 80% + +--- + +### MORPH-110: API Documentation + +**Type**: Task +**Priority**: Medium +**Estimate**: 2 points (1 day) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Document the new capability API endpoints. + +**Acceptance Criteria**: +- [ ] Document created: `docs/api/capabilities.md` +- [ ] All 3 endpoints documented +- [ ] Request/response examples provided +- [ ] Error codes documented +- [ ] Example curl commands included + +**Technical Details**: +```markdown +# Capabilities API + +## Endpoints + +### GET /api/capabilities +List all capabilities with optional filtering. + +**Query Parameters**: +- `domain` (string[]): Filter by domain +- `complexity` (string): Filter by complexity +- `services` (string[]): Filter by services + +**Example**: +curl http://localhost:3000/api/capabilities?domain=fulfillment + +### GET /api/capabilities/:id +Get details of a single capability. + +**Example**: +curl http://localhost:3000/api/capabilities/order-fulfillment-simple + +### POST /api/capabilities/:id/execute +Execute a capability. 
+ +**Body**: +{ + "worldId": "string", + "inputs": object +} + +**Example**: +curl -X POST http://localhost:3000/api/capabilities/order-fulfillment-simple/execute \ + -H "Content-Type: application/json" \ + -d '{"worldId":"world-123","inputs":{"orderId":"ORD-001"}}' +``` + +**Dependencies**: MORPH-109 + +**Testing**: +- Documentation is clear +- Examples work +- Covers all endpoints + +--- + +### MORPH-111: Phase 0 Demo Preparation + +**Type**: Task +**Priority**: Medium +**Estimate**: 1 point (0.5 days) +**Status**: ✅ DONE +**Completed**: 2025-11-19 + +**Description**: +Prepare demo script and materials for Phase 0 review. + +**Acceptance Criteria**: +- [ ] Demo script created +- [ ] Example requests prepared (Postman/curl) +- [ ] Can demonstrate end-to-end flow +- [ ] Demo shows all 3 endpoints working +- [ ] Demo execution is successful + +**Demo Script**: +```bash +# 1. List all capabilities +curl http://localhost:3000/api/capabilities + +# 2. Filter by domain +curl http://localhost:3000/api/capabilities?domain=fulfillment + +# 3. Get capability details +curl http://localhost:3000/api/capabilities/order-fulfillment-simple + +# 4. Execute capability +curl -X POST http://localhost:3000/api/capabilities/order-fulfillment-simple/execute \ + -H "Content-Type: application/json" \ + -d '{"worldId":"demo-world","inputs":{"orderId":"DEMO-001"}}' + +# 5. Show execution result +``` + +**Dependencies**: MORPH-110 + +**Testing**: +- Demo runs successfully +- All endpoints work +- Results are as expected + +--- + +### MORPH-112: Phase 0 Retrospective & Decision + +**Type**: Task +**Priority**: High +**Estimate**: 1 point (0.5 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Conduct Phase 0 retrospective and make go/no-go decision for Phase 1. + +**Acceptance Criteria**: +- [ ] Team demo completed +- [ ] Feedback collected +- [ ] Decision documented: Continue to Phase 1 or Pivot +- [ ] Learnings documented for Phase 1 planning + +**Discussion Points**: +1. Does this approach provide value to researchers? +2. Is the API intuitive? +3. Are we on the right track? +4. What should we adjust for Phase 1? + +**Deliverables**: +- Meeting notes +- Decision: GO / NO-GO / PIVOT +- Feedback incorporated into Phase 1 planning + +**Dependencies**: MORPH-111 + +**Testing**: +- Decision is clear +- Feedback is actionable + +--- + +## Phase 0 Summary + +**Total Tickets**: 12 +**Total Story Points**: 21 points (~2-3 weeks with 1 developer) + +**Ticket Breakdown**: +- Setup & Foundation: 3 tickets (8 points) +- Capability System: 4 tickets (8 points) +- API Endpoints: 3 tickets (3 points) +- Testing & Documentation: 2 tickets (2 points) + +**Critical Path**: +MORPH-101 → MORPH-102 → MORPH-103 → MORPH-104 → MORPH-106 → MORPH-107 → MORPH-108 → MORPH-109 + +**Parallelizable**: +- MORPH-105 can be done alongside MORPH-106 +- MORPH-110 can be done alongside MORPH-109 + +--- + +
+ +
+Phase 1 Tickets (ARCHIVED - See implementation/phase1/tasks.md) + +### MORPH-201: Create OD Registry Service + +**Type**: Story +**Priority**: High +**Estimate**: 5 points (2-3 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Create a centralized OD Registry that maps capability IDs to actual OD builder functions. This registry will be used by the CapabilityExecutor to instantiate and build ODs for execution. + +**Acceptance Criteria**: +- [ ] File created: `src/services/od-registry.service.ts` +- [ ] `ODRegistry` class implemented with builder registration +- [ ] Method: `registerBuilder(odId, builderFactory)` to register OD builders +- [ ] Method: `getBuilder(odId)` returns builder factory or null +- [ ] Method: `buildOD(odId, config)` builds and returns OD instance +- [ ] Registry supports all existing OD builders (WMS, EDI, TMS) +- [ ] Singleton pattern for global access +- [ ] Initial registration for 5 Phase 0 ODs + +**Dependencies**: None (foundational) + +**Testing**: +- Can register builder without error +- Can retrieve registered builder +- `buildOD()` returns valid OperationalDescriptor +- Throws error for non-existent OD ID + +--- + +### MORPH-202: Integrate Real OD Execution in CapabilityExecutor + +**Type**: Story +**Priority**: High +**Estimate**: 5 points (2-3 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Replace stub implementation in CapabilityExecutor with real OD execution using the OD Registry, world initialization, and existing OD executor. + +**Acceptance Criteria**: +- [ ] Update `capability-executor.service.ts` to use real execution +- [ ] Initialize world context using `initOperationalDescriptor()` +- [ ] Build OD from registry using capability's `odId` +- [ ] Execute OD using `executeOperationalDescriptor()` +- [ ] Return properly formatted execution results +- [ ] Handle errors gracefully with proper status codes +- [ ] Include logger from capability execution context +- [ ] Pass chaos policy from capability to OD + +**Dependencies**: MORPH-201 + +**Testing**: +- Execute order-fulfillment-simple returns real results +- World context initialized correctly +- OD executes with proper steps +- Errors handled and returned with 'failed' status + +--- + +### MORPH-203: Build 15 Additional OD Builders + +**Type**: Task +**Priority**: High +**Estimate**: 8 points (4-5 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Create 15 new OD builders to expand from 5 to 20 total capabilities. Focus on common supply chain workflows across WMS, EDI, TMS, and ERP domains. + +**Acceptance Criteria**: +- [ ] 15 new OD builder functions created +- [ ] Each builder registered in OD Registry +- [ ] Mix of simple (5), medium (7), complex (3) workflows +- [ ] Cover all major domains: fulfillment, inventory, warehousing, EDI, transportation +- [ ] Use GenericODBuilder for consistent structure +- [ ] Include appropriate chaos scenarios for each +- [ ] All builders tested and working + +**OD Builders to Create**: +1. inventory-adjustment-simple-v1 (Simple, WMS) +2. shipment-status-check-v1 (Simple, TMS) +3. edi-856-asn-generation-v1 (Simple, EDI) +4. order-cancellation-simple-v1 (Simple, ERP) +5. dock-schedule-query-v1 (Simple, WMS) +6. cycle-count-workflow-v1 (Medium, WMS) +7. outbound-picking-v1 (Medium, WMS) +8. edi-810-invoice-v1 (Medium, EDI+ERP) +9. cross-dock-workflow-v1 (Medium, WMS+TMS) +10. replenishment-workflow-v1 (Medium, WMS) +11. edi-855-po-ack-v1 (Medium, EDI) +12. 
load-planning-v1 (Medium, TMS) +13. returns-processing-v1 (Complex, ERP+WMS+TMS) +14. wave-picking-v1 (Complex, WMS) +15. multi-location-transfer-v1 (Complex, WMS+TMS) + +**Dependencies**: MORPH-201 + +**Testing**: +- Each OD builds successfully +- ODs execute without errors (with mock data) +- Step structure is valid + +--- + +### MORPH-204: Expand Capability Catalog to 20 Capabilities + +**Type**: Task +**Priority**: High +**Estimate**: 3 points (1.5-2 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Add 15 new capability definitions to the catalog, mapping to the new OD builders. Ensure comprehensive metadata, tags, and descriptions. + +**Acceptance Criteria**: +- [ ] Add 15 new capabilities to `catalog.ts` +- [ ] Each capability maps to an OD from MORPH-203 +- [ ] Complete metadata: description, tags, personas, patterns +- [ ] Estimated durations realistic +- [ ] Exported as `EXPANDED_CAPABILITIES` array +- [ ] Update CapabilityCatalog to load expanded set + +**Dependencies**: MORPH-203 + +**Testing**: +- Catalog loads 20 capabilities +- All capabilities have valid `odId` references +- Filtering by tags works correctly +- No duplicate capability IDs + +--- + +### MORPH-205: Create Chaos Preset Configuration Files + +**Type**: Task +**Priority**: Medium +**Estimate**: 2 points (1 day) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Create JSON configuration files for chaos presets (light, moderate, aggressive, realistic) that can be applied to capabilities and worlds. + +**Acceptance Criteria**: +- [ ] Directory created: `config/chaos-presets/` +- [ ] 4 preset files created: `light.json`, `moderate.json`, `aggressive.json`, `realistic.json` +- [ ] Each preset defines probability and scenario weights +- [ ] Presets cover all chaos scenario types +- [ ] README documenting preset usage + +**Dependencies**: None + +**Testing**: +- All JSON files valid +- Probabilities sum correctly +- Scenario weights reasonable + +--- + +### MORPH-206: Build Chaos Config Registry Service + +**Type**: Story +**Priority**: Medium +**Estimate**: 4 points (2 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Create a Chaos Config Registry that loads presets, manages chaos configuration, and provides chaos resolution based on environment variables and context. + +**Acceptance Criteria**: +- [ ] File created: `src/services/chaos-config.registry.ts` +- [ ] Loads presets from JSON files on initialization +- [ ] Method: `loadPreset(presetId)` returns ChaosPolicy +- [ ] Method: `listPresets()` returns available presets +- [ ] Method: `isChaosEnabled()` checks CHAOS_ENABLED env var +- [ ] Method: `resolveChaosPolicy(context)` applies priority cascade +- [ ] Respects environment variable overrides +- [ ] Singleton pattern + +**Dependencies**: MORPH-205 + +**Testing**: +- Loads all 4 presets on initialization +- `isChaosEnabled()` respects env var +- `resolveChaosPolicy()` follows priority cascade +- CHAOS_ENABLED=false disables all chaos + +--- + +### MORPH-207: Integrate Chaos Registry with Capability Executor + +**Type**: Story +**Priority**: Medium +**Estimate**: 3 points (1.5 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Integrate ChaosConfigRegistry into CapabilityExecutor so chaos policies are resolved and applied during OD execution. 
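+
+**Illustrative Sketch** (non-normative): a minimal sketch of the intended wiring, assuming the registry APIs proposed in MORPH-201 and MORPH-206; function and option names here are placeholders, not the final implementation:
+
+```typescript
+// Sketch only; assumes chaosConfigRegistry (MORPH-206) and odRegistry (MORPH-201)
+// exist with the methods described in those tickets.
+import { capabilityCatalog } from './capability-catalog.service';
+import { chaosConfigRegistry } from './chaos-config.registry';
+import { odRegistry } from './od-registry.service';
+
+export function executeCapabilityWithChaos(capabilityId: string, worldId: string, inputs: unknown) {
+  const capability = capabilityCatalog.getById(capabilityId);
+  if (!capability) throw new Error(`Capability not found: ${capabilityId}`);
+
+  // Priority cascade: environment override, then capability-level chaos, then default preset.
+  const chaosPolicy = chaosConfigRegistry.isChaosEnabled()
+    ? chaosConfigRegistry.resolveChaosPolicy({ capability, worldId })
+    : undefined;
+
+  // Pass the resolved policy into the OD build so chaos is injected at execution time.
+  const od = odRegistry.buildOD(capability.odId, { chaos: chaosPolicy });
+
+  // Execution itself (world init, OD executor, chaos telemetry) follows MORPH-202.
+  return { capabilityId, worldId, inputs, od, chaosPolicy };
+}
+```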
+ +**Acceptance Criteria**: +- [ ] CapabilityExecutor uses ChaosConfigRegistry +- [ ] Resolves chaos policy before building OD +- [ ] Passes resolved chaos to OD builder +- [ ] Capability-level chaos in catalog respected +- [ ] Environment variables control chaos behavior +- [ ] Chaos telemetry logged in execution results + +**Dependencies**: MORPH-206, MORPH-202 + +**Testing**: +- Chaos disabled when CHAOS_ENABLED=false +- Capability-level chaos overrides default +- Resolved chaos passed to OD builder +- Chaos telemetry in execution results + +--- + +### MORPH-208: Add Chaos Support to Capability Type + +**Type**: Task +**Priority**: Medium +**Estimate**: 1 point (0.5 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Extend Capability type definition to support chaos configuration at capability level. + +**Acceptance Criteria**: +- [ ] Add optional `chaos` field to Capability type +- [ ] Update catalog with chaos configs for select capabilities +- [ ] Type supports both preset reference and inline policy +- [ ] Backward compatible (chaos optional) + +**Dependencies**: MORPH-204 + +**Testing**: +- Types compile correctly +- Catalog capabilities with chaos load properly +- Preset references resolve correctly + +--- + +### MORPH-209: Enhanced Filtering with Search + +**Type**: Story +**Priority**: Medium +**Estimate**: 3 points (1.5 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Add full-text search and enhanced filtering capabilities to CapabilityCatalog for better discoverability. + +**Acceptance Criteria**: +- [ ] Method: `search(query)` performs full-text search +- [ ] Search across name, description, tags +- [ ] Filter by multiple tags simultaneously +- [ ] Filter by persona +- [ ] Filter by pattern +- [ ] Case-insensitive search +- [ ] Returns ranked results (most relevant first) + +**Dependencies**: MORPH-204 + +**Testing**: +- Search for 'inventory' returns relevant capabilities +- Multiple filters work together (AND logic) +- Empty query returns all + +--- + +### MORPH-210: Update GET /api/capabilities with Search + +**Type**: Task +**Priority**: Medium +**Estimate**: 2 points (1 day) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Update the capabilities API endpoint to support full-text search and enhanced filtering. + +**Acceptance Criteria**: +- [ ] Add `q` query parameter for search +- [ ] Add `persona` query parameter +- [ ] Add `pattern` query parameter +- [ ] Update to use `filterEnhanced()` method +- [ ] Return count metadata +- [ ] Backward compatible with existing filters + +**Dependencies**: MORPH-209 + +**Testing**: +- Search queries return relevant results +- Multiple filters work correctly +- Response includes count metadata + +--- + +### MORPH-211: Add GET /api/chaos/presets Endpoint + +**Type**: Task +**Priority**: Low +**Estimate**: 2 points (1 day) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Create API endpoints for chaos preset management and inspection. 
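+
+**Illustrative Sketch** (non-normative): a possible route shape, following the Express router pattern used in Phase 0 and assuming the ChaosConfigRegistry methods from MORPH-206; the null-return behaviour of `loadPreset` is an assumption:
+
+```typescript
+// src/routes/chaos.route.ts (sketch only, not the final implementation)
+import { Router } from 'express';
+import { chaosConfigRegistry } from '../services/chaos-config.registry';
+
+const router = Router();
+
+// List all available presets (light, moderate, aggressive, realistic, ...)
+router.get('/presets', (_req, res) => {
+  res.json(chaosConfigRegistry.listPresets());
+});
+
+// Preset details; 404 for unknown preset IDs
+router.get('/presets/:id', (req, res) => {
+  const preset = chaosConfigRegistry.loadPreset(req.params.id);
+  if (!preset) return res.status(404).json({ error: 'Preset not found' });
+  res.json(preset);
+});
+
+// Current chaos configuration status (driven by environment variables)
+router.get('/status', (_req, res) => {
+  res.json({
+    enabled: chaosConfigRegistry.isChaosEnabled(),
+    preset: process.env.CHAOS_PRESET ?? null,
+  });
+});
+
+export default router;
+```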
+ +**Acceptance Criteria**: +- [ ] `GET /api/chaos/presets` lists all presets +- [ ] `GET /api/chaos/presets/:id` returns preset details +- [ ] `GET /api/chaos/status` returns chaos configuration status +- [ ] Returns preset metadata and configuration +- [ ] Documents chaos environment variables + +**Dependencies**: MORPH-206 + +**Testing**: +- Lists all 4 presets +- Returns 404 for invalid preset ID +- Status endpoint shows env vars + +--- + +### MORPH-212: Service Tools Enhancement for New ODs + +**Type**: Task +**Priority**: Medium +**Estimate**: 5 points (2-3 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Add new service tool methods to support the 15 new OD builders. Enhance existing WMS, EDI, and TMS service tools. + +**Acceptance Criteria**: +- [ ] Add 10-15 new tool methods across WMS, EDI, TMS +- [ ] Tools support new capability workflows +- [ ] Tools follow existing patterns (repositories, logging) +- [ ] Mock implementations for missing backend +- [ ] Proper error handling + +**New Tools Needed**: +- WMS: selectCycleCountLocations, performCycleCount, reconcileInventory, performReplenishment, createPickTask +- EDI: generate856ASN, generate855POAck, generate810Invoice, validateEDIDocument +- TMS: planLoad, optimizeRoute, trackMultipleShipments + +**Dependencies**: MORPH-203 + +**Testing**: +- Each new tool callable without errors +- Returns expected data structure +- Handles invalid inputs gracefully + +--- + +### MORPH-213: Update API Documentation + +**Type**: Task +**Priority**: Medium +**Estimate**: 2 points (1 day) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Update API documentation to reflect Phase 1 enhancements: search, filtering, chaos endpoints. + +**Acceptance Criteria**: +- [ ] Update `docs/api/capabilities.md` with new query params +- [ ] Document search functionality with examples +- [ ] Document chaos endpoints +- [ ] Add filtering examples +- [ ] Update Swagger/OpenAPI spec if exists +- [ ] Include chaos configuration guide + +**Dependencies**: MORPH-210, MORPH-211 + +**Testing**: +- Documentation accurate +- Examples work as shown +- All new features documented + +--- + +### MORPH-214: Integration Testing for Phase 1 + +**Type**: Task +**Priority**: High +**Estimate**: 5 points (2-3 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Create comprehensive integration tests for Phase 1 features: real OD execution, filtering, search, chaos. + +**Acceptance Criteria**: +- [ ] Test file: `tests/phase1-integration.test.ts` +- [ ] Test: Execute all 20 capabilities successfully +- [ ] Test: Search and filtering combinations +- [ ] Test: Chaos disabled with CHAOS_ENABLED=false +- [ ] Test: Different chaos presets produce different results +- [ ] Test: OD execution returns proper RunResult +- [ ] All tests pass +- [ ] Coverage > 80% + +**Dependencies**: MORPH-202, MORPH-210, MORPH-211 + +**Testing**: +- All test suites pass +- Tests cover happy path and error cases +- Tests run in CI/CD pipeline + +--- + +### MORPH-215: Performance Baseline Metrics + +**Type**: Task +**Priority**: Low +**Estimate**: 2 points (1 day) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Establish performance baselines for Phase 1 to track improvements in later phases. 
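+
+**Illustrative Sketch** (non-normative): one way to capture these baselines is a small script that times representative requests directly; the base URL, port, and inputs below are assumptions for local development, not a prescribed tool:
+
+```typescript
+// Sketch only; times a few representative requests against a locally running server.
+const BASE = process.env.BASE_URL ?? 'http://localhost:3000';
+
+async function timeRequest(label: string, path: string, init?: RequestInit): Promise<number> {
+  const start = performance.now();
+  const res = await fetch(`${BASE}${path}`, init);
+  await res.json();
+  const ms = performance.now() - start;
+  console.log(`${label}: ${ms.toFixed(1)} ms (status ${res.status})`);
+  return ms;
+}
+
+await timeRequest('list capabilities', '/api/capabilities');
+await timeRequest('search capabilities', '/api/capabilities?q=inventory');
+await timeRequest('execute simple capability', '/api/capabilities/inventory-check/execute', {
+  method: 'POST',
+  headers: { 'Content-Type': 'application/json' },
+  body: JSON.stringify({ worldId: 'baseline-world', inputs: { sku: 'SKU-001' } }),
+});
+```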
+ +**Acceptance Criteria**: +- [ ] Measure capability list endpoint performance +- [ ] Measure capability execution time (per complexity) +- [ ] Measure search and filter performance +- [ ] Document baseline metrics +- [ ] Set up basic performance monitoring + +**Baseline Targets**: +- List 20 capabilities: < 50ms +- Search capabilities: < 100ms +- Execute simple capability: < 5s +- Execute medium capability: < 15s + +**Dependencies**: MORPH-214 + +**Testing**: +- Performance tests run successfully +- Baselines documented + +--- + +### MORPH-216: Add 10 More Capabilities (30 Total) + +**Type**: Task +**Priority**: Low +**Estimate**: 5 points (2-3 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Expand capability catalog from 20 to 30 capabilities for more comprehensive coverage. Optional based on Phase 1 decision point. + +**Acceptance Criteria**: +- [ ] 10 additional OD builders created +- [ ] 10 additional capabilities defined +- [ ] Mix of all complexity levels +- [ ] Cover edge cases and variants +- [ ] All tested and working + +**Additional Capabilities**: +1. multi-order-fulfillment-v1 +2. emergency-stock-transfer-v1 +3. quality-hold-workflow-v1 +4. edi-940-warehouse-order-v1 +5. carrier-appointment-v1 +6. labor-planning-v1 +7. exception-resolution-v1 +8. returns-authorization-v1 +9. kitting-assembly-v1 +10. bulk-location-update-v1 + +**Dependencies**: MORPH-203, MORPH-204 + +**Testing**: +- All 30 capabilities execute successfully +- Coverage across all domains + +--- + +### MORPH-217: Phase 1 Demo Preparation + +**Type**: Task +**Priority**: Medium +**Estimate**: 2 points (1 day) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Prepare demo materials and script for Phase 1 review and decision point. + +**Acceptance Criteria**: +- [ ] Demo script created showing all Phase 1 features +- [ ] Postman collection with example requests +- [ ] Demo data setup script +- [ ] Slides or presentation materials +- [ ] Comparison with Phase 0 stub + +**Dependencies**: All Phase 1 tickets + +**Testing**: +- Demo runs successfully +- All features demonstrated +- Clear value proposition shown + +--- + +### MORPH-218: Phase 1 Retrospective & Decision + +**Type**: Task +**Priority**: High +**Estimate**: 1 point (0.5 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Conduct Phase 1 retrospective and make go/no-go decision for Phase 2. + +**Acceptance Criteria**: +- [ ] Team demo completed +- [ ] Feedback collected from researchers +- [ ] Performance metrics reviewed +- [ ] Decision documented: Continue to Phase 2 or Pivot +- [ ] Learnings documented for Phase 2 planning + +**Discussion Points**: +1. Are 20-30 capabilities enough for meaningful research? +2. Is real OD execution meeting needs? +3. Are chaos presets providing value? +4. What should we add/change for Phase 2? + +**Dependencies**: MORPH-217 + +**Testing**: +- Decision is clear and documented +- Feedback is actionable + +--- + +### MORPH-219: Update README and Getting Started Guide + +**Type**: Task +**Priority**: Medium +**Estimate**: 2 points (1 day) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Update project README and getting started documentation to reflect Phase 1 capabilities. 
+ +**Acceptance Criteria**: +- [ ] README updated with Phase 1 features +- [ ] Getting started guide includes search examples +- [ ] Chaos configuration documented +- [ ] Environment variables documented +- [ ] Migration notes from Phase 0 + +**Dependencies**: MORPH-217 + +**Testing**: +- Documentation accurate +- Examples work +- Links valid + +--- + +### MORPH-220: Code Cleanup and Refactoring + +**Type**: Task +**Priority**: Low +**Estimate**: 3 points (1.5 days) +**Status**: 📝 TODO +**Assignee**: _Unassigned_ + +**Description**: +Clean up code, remove Phase 0 stubs, improve code quality and consistency. + +**Acceptance Criteria**: +- [ ] Remove stub implementation comments +- [ ] Consistent error handling across services +- [ ] Add JSDoc comments to all public methods +- [ ] Extract common patterns to utilities +- [ ] Lint and format all code +- [ ] Update TypeScript strict mode compliance + +**Dependencies**: All Phase 1 tickets + +**Testing**: +- All tests still pass +- No regressions +- Linting passes + +--- + +## Phase 1 Summary + +**Total Tickets**: 20 +**Total Story Points**: 57 points (~3-4 weeks with 2 developers) + +**Ticket Breakdown by Type**: +- Foundation (Registry & Execution): 2 tickets (10 points) +- Capability Expansion: 3 tickets (16 points) +- Chaos Management: 4 tickets (10 points) +- Search & Filtering: 2 tickets (5 points) +- API & Documentation: 3 tickets (6 points) +- Testing & Quality: 3 tickets (12 points) +- Cleanup & Polish: 3 tickets (8 points) + +**Critical Path**: +MORPH-201 → MORPH-202 → MORPH-203 → MORPH-204 → MORPH-214 → MORPH-217 → MORPH-218 + +**Parallelizable Work**: +- MORPH-205, MORPH-206, MORPH-207 (Chaos stream) +- MORPH-209, MORPH-210 (Search stream) +- MORPH-212 (Service tools - parallel to OD builders) +- MORPH-213, MORPH-219 (Documentation - parallel to development) + +**Priority Tiers**: +- **High Priority (Must Have)**: MORPH-201, 202, 203, 204, 214, 217, 218 +- **Medium Priority (Should Have)**: MORPH-205, 206, 207, 209, 210, 212, 213, 219 +- **Low Priority (Nice to Have)**: MORPH-208, 211, 215, 216, 220 + +**Estimated Velocity**: +- Week 1: 12-15 points (Foundation + Core ODs) +- Week 2: 15-18 points (Chaos + Filtering + Service Tools) +- Week 3: 12-15 points (Testing + Documentation) +- Week 4: 8-10 points (Polish + Demo + Retrospective) + +
+ +--- + +## Tracking Metrics + +### Overall Progress + +| Phase | Story Points | Status | Completion | +|-------|--------------|--------|------------| +| Phase 0 | 21 | ✅ DONE | 100% | +| Phase 1 | 57 | ✅ DONE | 100% | +| Phase 2 | 22 | 📝 TODO | 0% | +| Phase 3+ | TBD | 🔒 LOCKED | - | + +**Total Completed**: 78 story points +**Next Phase**: Phase 2 (22 points, 2.5-3 weeks) + +--- + +## Related Documents + +- [README](./README.md) - Architecture overview and navigation +- [08. Implementation Roadmap](./08-implementation-roadmap.md) - High-level phases +- [06. Open Questions & Decisions](./06-open-questions.md) - Architectural decisions +- [02. Conceptual Model](./02-conceptual-model.md) - System architecture + +--- + +## Definition of Done + +**For Each Ticket**: +- All acceptance criteria met +- Code reviewed +- Tests passing +- Documentation updated +- No regressions + +**For Each Phase**: +- All tickets completed +- Demo executed successfully +- Test results documented +- Retrospective conducted +- Go/no-go decision made diff --git a/docs/od-architecture/README.md b/docs/od-architecture/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e8a8da339045fedf92064b0a7dcfd2d585b8428e --- /dev/null +++ b/docs/od-architecture/README.md @@ -0,0 +1,181 @@ +# Operational Descriptor (OD) Architecture + +## Overview + +This directory contains architectural documentation for the OD Management System redesign. The goal is to transform Morpheus from a collection of scattered operational descriptors into a well-organized, capability-based system that AI researchers can easily configure and use. + +## The Problem + +Currently, Morpheus has: +- **162 tools** across 4 main services (WMS: 64, ERP: 34, TMS: 33, EDI: 15) +- **Scattered ODs** across multiple builder files with no central registry +- **No persona model** - unclear who can execute what +- **No capability mapping** - can't answer "what can a store manager do?" +- **Unmanageable chaos configuration** - hardcoded in 14+ files +- **No way to sample/configure** - researchers can't easily customize worlds + +## The Vision + +Transform the system to support: + +1. **Capability-Based Organization**: ODs organized by business capabilities, not just services +2. **Persona Model**: Clear mapping of which personas can perform which capabilities +3. **Knowledge Graph**: Intelligent discovery of valid OD compositions from available tools +4. **World Configuration**: Researchers can sample capabilities and configure worlds +5. **Centralized Management**: OD registry, versioning, and customization + +## Implementation Files + +### Phase-Specific Documentation + +All phase-specific implementation details are organized in the `implementation/` directory: + +``` +implementation/ +├── phase0/ # Walking Skeleton +│ ├── tasks.md # 12 tickets, 21 story points +│ ├── demo.md # Demo walkthrough +│ └── test-results.md +├── phase1/ # Core Capability System +│ ├── tasks.md # 20 tickets, 57 story points +│ ├── demo-script.md +│ ├── test-results.md +│ └── retrospective.md +└── phase2/ # World Configuration + └── tasks.md # 10 tickets, 22 story points (REVISED) +``` + +See [09. Implementation Tasks](./09-implementation-tasks.md) for quick navigation links. + +--- + +## Architecture Documents + +### [01. Current State](./01-current-state.md) +Complete inventory of the existing system: +- Services and tools catalog +- Existing OD patterns +- Current organization +- Critical gaps + +### [02. 
Conceptual Model](./02-conceptual-model.md) +Proposed architectural model: +- Persona → Capability → OD → Tool → Service layering +- Definitions and relationships +- Design questions to resolve + +### [03. Knowledge Graph](./03-knowledge-graph.md) +Graph-based capability discovery: +- Graph structure (nodes, edges) +- Use cases (OD discovery, validation, suggestions) +- Implementation approaches + +### [04. Taxonomy & Organization](./04-taxonomy-organization.md) +How to categorize and organize ODs: +- Taxonomy options (domain, persona, complexity) +- Tagging and filtering strategies +- Discovery patterns + +### [05. Sampling & World Configuration](./05-sampling-world-config.md) +Enabling researcher customization: +- World configuration scenarios +- Sampling strategies +- Use cases and workflows + +### [06. Open Questions & Decisions](./06-open-questions.md) +Architectural decisions made: +- ✅ All critical and important questions decided +- Decision rationale and implementation notes +- Priority 3 recommendations for later phases + +### [07. Chaos Integration](./07-chaos-integration.md) +How chaos management integrates with OD architecture: +- Chaos configuration cascade (World → Capability → OD → Step) +- Integration points and workflows +- Chaos presets and telemetry +- Best practices for researchers + +### [08. Implementation Roadmap](./08-implementation-roadmap.md) +Value-driven implementation plan: +- Incremental phases delivering researcher value +- Phase 0: Walking skeleton (2-3 weeks) +- Phase 1-4: Core features (14-20 weeks) +- Migration strategy and risk mitigation + +### [09. Implementation Tasks](./09-implementation-tasks.md) +Master index for all implementation tasks: +- Phase 0: ✅ COMPLETED (12 tickets, 21 story points) +- Phase 1: ✅ COMPLETED (20 tickets, 57 story points) +- Phase 2: 📝 TODO - Ready to Start (12 tickets, 30 story points) +- Detailed tasks in `implementation/phaseN/` folders +- Quick navigation table with links to all phase documents + +## Key Concepts + +### Service +A simulated enterprise system (ERP, WMS, TMS, EDI) that exposes tools/APIs. + +### Tool +An API endpoint that performs a specific action (e.g., `getOrder`, `updateInventory`, `scheduleShipment`). + +### Operational Descriptor (OD) +A declarative workflow that orchestrates multiple tools to accomplish an end-to-end business process. Contains steps, input bindings, assertions, retry policies, and chaos configuration. + +### Capability +A semantic business function or process (e.g., "Order Fulfillment", "Inventory Management", "Shipment Tracking"). Capabilities are implemented by one or more ODs. + +### Persona +A role or actor in the system (e.g., Store Manager, Warehouse Worker, Logistics Coordinator). Personas have access to specific capabilities. + +### Knowledge Graph +A graph representation of relationships between services, tools, data entities, ODs, capabilities, and personas. Used for discovery, validation, and intelligent suggestions. 
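+
+The relationships can be summarized with a small, illustrative type sketch. These shapes describe the conceptual layering only; field names are assumptions and do not match the implemented catalog schema:
+
+```typescript
+// Conceptual sketch of the Persona -> Capability -> OD -> Tool -> Service layering.
+// Illustrative only; not the catalog's actual types.
+type Service = 'erp' | 'wms' | 'tms' | 'edi';
+
+interface Tool {
+  name: string;     // e.g. "getOrder", "updateInventory", "scheduleShipment"
+  service: Service; // the simulated system that exposes the tool
+}
+
+interface OperationalDescriptor {
+  id: string;
+  steps: { toolName: string; inputs: Record<string, unknown> }[]; // orchestrated tool calls
+}
+
+interface ConceptualCapability {
+  id: string;      // e.g. "order-fulfillment"
+  odIds: string[]; // implemented by one or more ODs
+}
+
+interface Persona {
+  id: string;              // e.g. "store-manager"
+  capabilityIds: string[]; // capabilities this role is allowed to perform
+}
+```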
+ +## Status + +**Phase**: Phase 2 Implementation (World Configuration) +**Last Updated**: 2025-11-21 +**Contributors**: System Architects, AI Research Team + +**Completed**: +- ✅ Architecture designed (Persona → Capability → OD → Tool → Service) +- ✅ All critical decisions made (9/9 questions) +- ✅ Implementation roadmap created (value-driven, 6 phases) +- ✅ Phase 0: Walking Skeleton (12 tickets, 21 points) - DONE +- ✅ Phase 1: Core Capability System (20 tickets, 57 points) - DONE + - 4 working capabilities with real OD execution + - Chaos engineering integrated + - Search/filtering APIs functional + +**Current**: Phase 2 - World Configuration (10 tickets, 22 points) +- Extends existing World model (27% effort reduction) +- Capability sampling: filter, random, seeded +- World-level chaos configuration with CRUD API +- Capability-level chaos overrides +- Critical bug fix: capability executor chaos integration +- Reproducibility via seeds + +## Next Steps + +1. ✅ ~~Review and discuss each architecture document~~ **DONE** +2. ✅ ~~Answer open questions and make design decisions~~ **DONE** +3. ✅ ~~Create implementation plan~~ **DONE** +4. ✅ ~~Complete Phase 0 - Walking Skeleton~~ **DONE** +5. ✅ ~~Complete Phase 1 - Core Capability System~~ **DONE** +6. **NOW**: Start Phase 2 - World Configuration (3-4 weeks) + - Implement world configuration schema + - Build sampling strategies (filter, random, seeded) + - Add world-level chaos with seed support + - Create world management API endpoints +7. **NEXT**: Continue through remaining phases based on learnings + +## Related Documentation + +- [Main Architecture](../01-architecture.md) - Overall system architecture +- [Operational Descriptors](../02-operational-descriptors.md) - Current OD implementation +- [Chaos Engineering](../03-chaos-engineering.md) - Chaos injection framework +- [Chaos Management](../chaos/) - Chaos configuration and management system +- [Business Rules](../business-rules/) - Business rules system + +## Feedback + +This is a living document. If you have questions, suggestions, or concerns about the proposed architecture, please discuss in the team channels or create an issue. diff --git a/morpheus.local.pwd.yaml b/morpheus.local.pwd.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0133ce5399c3958fcabc08b8d2c58d79b8199af8 --- /dev/null +++ b/morpheus.local.pwd.yaml @@ -0,0 +1,26 @@ +services: + mongodb: + image: mongo:7 + ports: + - 27017:27017 + volumes: + - ./morpheus-data/mongodb:/data/db + restart: unless-stopped + + controlmart: + env_file: + - packages/controlmart/.env + build: + context: . 
+ dockerfile: packages/controlmart/Dockerfile + image: controlmart-local + environment: + MONGO_URI: mongodb://mongodb:27017 + ports: + - "8282:8282" + restart: unless-stopped + depends_on: + - mongodb + +volumes: + mongodb-data: diff --git a/morpheus.pwd.yaml b/morpheus.pwd.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d02a32d5cb1874b3a35e1b6dd429b26b5f13f45d --- /dev/null +++ b/morpheus.pwd.yaml @@ -0,0 +1,61 @@ +version: "3.8" + +services: + mongodb: + image: mongo:7 + ports: + - 27017:27017 + volumes: + - /mnt/morpheus-data:/data/db + restart: unless-stopped + labels: + - "com.centurylinklabs.watchtower.enable=false" + + controlmart: + image: 129875285541.dkr.ecr.us-east-1.amazonaws.com/skyfall/morpheus:latest + labels: + - "com.centurylinklabs.watchtower.enable=true" + environment: + NODE_ENV: production + PORT: 8282 + MONGO_URI: mongodb://mongodb:27017 + DB_NAME: controlmart + LOG_LEVEL: debug + ENABLE_CORS: true + ports: + - "8282:8282" + restart: unless-stopped + depends_on: + - mongodb + + watchtower: + image: containrrr/watchtower + labels: + - "com.centurylinklabs.watchtower.enable=false" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - /home/ubuntu/.docker:/config:ro + environment: + DOCKER_CONFIG: /config + WATCHTOWER_CLEANUP: "true" + WATCHTOWER_POLL_INTERVAL: 60 + WATCHTOWER_DEBUG: "true" + restart: unless-stopped + + nginx: + image: nginx:1.25 + container_name: nginx-proxy + labels: + - "com.centurylinklabs.watchtower.enable=false" + ports: + - "80:80" + - "443:443" + volumes: + - /mnt/morpheus-data/morpheus/nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro + - /mnt/morpheus-data/morpheus/nginx/certs:/etc/ssl:ro + depends_on: + - controlmart + restart: unless-stopped + +volumes: + mongodb-data: diff --git a/nginx/nginx.conf b/nginx/nginx.conf new file mode 100644 index 0000000000000000000000000000000000000000..a8747a80617017ccc2158cee630118049123e0e0 --- /dev/null +++ b/nginx/nginx.conf @@ -0,0 +1,25 @@ +server { + listen 80; + server_name _; + + proxy_buffering off; + proxy_request_buffering off; + proxy_http_version 1.1; + proxy_set_header Accept-Encoding ""; + + proxy_connect_timeout 60s; + proxy_send_timeout 60s; + proxy_read_timeout 60s; + + location / { + proxy_pass http://controlmart:8282; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_redirect off; + } + + access_log /var/log/nginx/access.log; + error_log /var/log/nginx/error.log; +} diff --git a/package.json b/package.json new file mode 100644 index 0000000000000000000000000000000000000000..cead531d04c491ef04b805afaafafa0b994033ef --- /dev/null +++ b/package.json @@ -0,0 +1,18 @@ +{ + "name": "morpheus", + "module": "index.ts", + "type": "module", + "private": true, + "devDependencies": { + "@types/bun": "latest" + }, + "peerDependencies": { + "typescript": "^5" + }, + "workspaces": [ + "packages/controlmart" + ], + "dependencies": { + "axios": "^1.12.2" + } +} diff --git a/packages/controlmart/.dockerignore b/packages/controlmart/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..c26d0ed9cd23db7f81e541127b8cec77aabc6979 --- /dev/null +++ b/packages/controlmart/.dockerignore @@ -0,0 +1,33 @@ +# Node / Bun deps +node_modules +bun.lockb.bak +*.log + +# Bun cache +.bun +.cache +.vscode + +# Git / version control +.git +.gitignore + +# Docker junk +Dockerfile* +docker-compose* +.dockerignore + +# OS + 
editor noise +.DS_Store +Thumbs.db + +# Build output +dist +build +tmp +coverage + +# Environment and secrets +.env +.env.* +!.env.example diff --git a/packages/controlmart/.gitignore b/packages/controlmart/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..b3ecf4068973db2ae69dbcfda9f8a51786187996 --- /dev/null +++ b/packages/controlmart/.gitignore @@ -0,0 +1,37 @@ +# dependencies (bun install) +node_modules + +# output +out +dist +build-dist +*.tgz + +# code coverage +coverage +*.lcov + +# logs +logs +_.log +report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# caches +.eslintcache +.cache +*.tsbuildinfo + +# IntelliJ based IDEs +.idea + +# Finder (MacOS) folder config +.DS_Store + +tests/implementation-tests \ No newline at end of file diff --git a/packages/controlmart/.prettierignore b/packages/controlmart/.prettierignore new file mode 100644 index 0000000000000000000000000000000000000000..7c6c8b20fb70dddb8aa9db59f27224012335e427 --- /dev/null +++ b/packages/controlmart/.prettierignore @@ -0,0 +1,6 @@ +node_modules +dist +build +bun.lockb +coverage +.env \ No newline at end of file diff --git a/packages/controlmart/.prettierrc b/packages/controlmart/.prettierrc new file mode 100644 index 0000000000000000000000000000000000000000..8254d49e74f3c9b6f51ffabc4bfba8f0e7c30277 --- /dev/null +++ b/packages/controlmart/.prettierrc @@ -0,0 +1,17 @@ +{ + "$schema": "https://json.schemastore.org/prettierrc", + "printWidth": 100, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": false, + "trailingComma": "all", + "bracketSpacing": true, + "arrowParens": "always", + "endOfLine": "lf", + "quoteProps": "as-needed", + "jsxSingleQuote": false, + "proseWrap": "preserve", + "embeddedLanguageFormatting": "auto", + "singleAttributePerLine": false +} diff --git a/packages/controlmart/Dockerfile b/packages/controlmart/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..c2f966feb01bd968df13a0c1537a47d1ba818a84 --- /dev/null +++ b/packages/controlmart/Dockerfile @@ -0,0 +1,23 @@ +FROM oven/bun:latest + +WORKDIR /app + +COPY bun.lock package.json ./ + +COPY packages/controlmart/package.json packages/controlmart/ + +RUN bun install + +COPY . . + +WORKDIR /app/packages/controlmart +RUN bun run build:ui + +WORKDIR /app/packages/controlmart + +ENV NODE_ENV=production +ENV PORT=8282 + +EXPOSE ${PORT} + +CMD ["bun", "run", "start"] diff --git a/packages/controlmart/README.md b/packages/controlmart/README.md new file mode 100644 index 0000000000000000000000000000000000000000..36efe34f8205b5e3a53d169354f88ff18388f162 --- /dev/null +++ b/packages/controlmart/README.md @@ -0,0 +1,148 @@ +# ControlMart - Capability Orchestration Engine + +ControlMart is Morpheus's capability orchestration engine that provides a semantic layer over Operational Descriptors (ODs) for executing business capabilities with chaos engineering support. 
+ +## Features + +- **4 Phase 1 Capabilities**: Inventory Check, Shipment Tracking, Equipment Availability, Dock Appointment Scheduling +- **Semantic Discovery**: Search and filter capabilities by domain, complexity, services, personas, and patterns +- **Chaos Engineering**: Built-in resilience testing with configurable chaos scenarios and presets +- **World Isolation**: Execute capabilities in isolated world contexts with independent data and business rules +- **RESTful API**: Simple HTTP endpoints for capability discovery and execution + +## Quick Start + +### Prerequisites + +- Bun v1.2.15+ +- MongoDB running on localhost:27017 + +### Installation + +```bash +bun install +``` + +### Running the Server + +```bash +# Development mode +bun run index.ts + +# With environment variables +MONGO_URI="mongodb://localhost:27017" DB_NAME="morpheus-test" PORT=4000 bun run index.ts + +# With chaos enabled +CHAOS_ENABLED=true CHAOS_PRESET=realistic bun run index.ts +``` + +### Basic Usage + +```bash +# List all capabilities +curl http://localhost:4000/capabilities + +# Search for capabilities +curl "http://localhost:4000/capabilities?q=inventory" + +# Get capability details +curl http://localhost:4000/capabilities/inventory-check + +# Execute a capability +curl -X POST http://localhost:4000/capabilities/inventory-check/execute \ + -H "Content-Type: application/json" \ + -d '{ + "worldId": "your-world-id", + "inputs": { + "sku": "SKU-001", + "locationId": "WH-01" + } + }' +``` + +## Environment Variables + +| Variable | Description | Default | +|----------|-------------|---------| +| `MONGO_URI` | MongoDB connection string | `mongodb://localhost:27017` | +| `DB_NAME` | Database name | `morpheus-test` | +| `PORT` | Server port | `4000` | +| `CHAOS_ENABLED` | Enable chaos injection | `false` | +| `CHAOS_PRESET` | Chaos preset (`light`/`moderate`/`realistic`/`aggressive`) | `realistic` | + +## Available Capabilities + +### 1. Inventory Check +Check current inventory levels for SKUs across warehouse locations. + +**Inputs**: `sku` (required), `locationId` (optional) + +### 2. Shipment Tracking +Track shipment status and location through the transportation network. + +**Inputs**: `shipmentId` (required) + +### 3. Equipment Availability Check +Check available warehouse equipment (forklifts, pallet jacks, etc.) by type and zone. + +**Inputs**: `equipmentType` (required), `zoneId` (optional) + +### 4. Dock Appointment Scheduling +Find available dock appointment time slots and view current schedule. + +**Inputs**: `date` (required), `dockDoorId` (required), `appointmentType` (optional) + +## Chaos Engineering + +ControlMart includes built-in chaos engineering for resilience testing: + +```bash +# Check chaos status +curl http://localhost:4000/chaos/status + +# List available presets +curl http://localhost:4000/chaos/presets + +# Run with aggressive chaos +CHAOS_PRESET=aggressive bun run index.ts +``` + +**Chaos Presets:** +- `light` (5%): Minimal chaos for production-like testing +- `moderate` (15%): Balanced chaos for resilience testing +- `realistic` (10%): Production-realistic failure rates +- `aggressive` (30%): High chaos for stress testing + +## Performance Measurement + +Measure baseline performance for all capabilities: + +```bash +bun run scripts/measure-performance.ts +``` + +Results are saved to `config/performance-baselines.json`. 
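+
+## Programmatic Usage
+
+The same endpoints shown in the curl examples above can be called from TypeScript. A minimal sketch (not an official client; assumes the server is running on the default port):
+
+```typescript
+// Mirrors the "Execute a capability" curl example above.
+const BASE = process.env.CONTROLMART_URL ?? 'http://localhost:4000';
+
+const res = await fetch(`${BASE}/capabilities/inventory-check/execute`, {
+  method: 'POST',
+  headers: { 'Content-Type': 'application/json' },
+  body: JSON.stringify({
+    worldId: 'your-world-id',
+    inputs: { sku: 'SKU-001', locationId: 'WH-01' },
+  }),
+});
+
+if (!res.ok) throw new Error(`Execution failed: ${res.status}`);
+console.log(await res.json());
+```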
+ +## Documentation + +- **API Reference**: [docs/api/capabilities-api.md](docs/api/capabilities-api.md) +- **Architecture**: [../../docs/od-architecture/](../../docs/od-architecture/) + +## Project Structure + +``` +src/ +├── capabilities/ # Capability catalog +├── ods/ # Operational Descriptor builders +├── routes/ # API routes (capabilities, chaos) +├── services/ # Core services (executor, catalog, chaos) +├── types/ # TypeScript type definitions +scripts/ +├── measure-performance.ts # Performance measurement tool +docs/ +└── api/ # API documentation +``` + +## Development + +Built with [Bun](https://bun.sh) - a fast all-in-one JavaScript runtime. diff --git a/packages/controlmart/bootstrap.ts b/packages/controlmart/bootstrap.ts new file mode 100644 index 0000000000000000000000000000000000000000..75925ad8782f2a7b98d4badea549f1f8e06c7bec --- /dev/null +++ b/packages/controlmart/bootstrap.ts @@ -0,0 +1,22 @@ +import { existsSync } from "fs"; +import path from "path"; + +// Bootstrap +(async () => { + // Check for environment file relative to executable + const execDir = path.dirname(process.execPath); + const localEnvPath = path.join(execDir, ".env"); + const devEnvPath = path.resolve(execDir, "../../..", ".env"); // For dev/source runs + + const envPath = existsSync(localEnvPath) ? localEnvPath : devEnvPath; + + if (!existsSync(envPath)) { + console.log(`[bootstrap] No .env file found at ${envPath}. Launching Setup Mode...`); + const { startSetup } = await import("./src/application/setup.app"); + await startSetup(); + } else { + // Launch Main Application + console.log("[bootstrap] Environment found. Starting Application..."); + await import("./main"); + } +})(); diff --git a/packages/controlmart/config/chaos-presets/aggressive.json b/packages/controlmart/config/chaos-presets/aggressive.json new file mode 100644 index 0000000000000000000000000000000000000000..729f3c37c15f9a973701d73a468b69ffb37dee14 --- /dev/null +++ b/packages/controlmart/config/chaos-presets/aggressive.json @@ -0,0 +1,109 @@ +{ + "id": "aggressive", + "name": "Aggressive Chaos", + "description": "High probability chaos with all scenario types. 
Tests extreme resilience under heavy failure conditions.", + "globalProbability": 0.3, + "scenarios": [ + { + "type": "data_corruption", + "weight": 10, + "description": "Severe data corruption", + "config": { + "corruptFields": ["*"], + "corruptionType": "random_value" + } + }, + { + "type": "missing_data", + "weight": 9, + "description": "Frequent missing records", + "config": { + "missingRecords": true, + "throwError": true + } + }, + { + "type": "stale_data", + "weight": 8, + "description": "Very stale data", + "config": { + "staleDataAge": 120 + } + }, + { + "type": "rate_limit", + "weight": 8, + "description": "Aggressive rate limiting", + "config": { + "rateLimitDelay": 3000, + "rateLimitMessage": "Rate limit exceeded - too many requests" + } + }, + { + "type": "format_change", + "weight": 7, + "description": "Breaking schema changes", + "config": { + "schemaChanges": [ + { + "field": "id", + "change": "rename", + "newName": "order_id" + }, + { + "field": "status", + "change": "change_type", + "newType": "number" + } + ] + } + }, + { + "type": "partial_data", + "weight": 7, + "description": "Heavily incomplete data", + "config": { + "partialResults": { + "percentage": 50, + "randomize": true + } + } + }, + { + "type": "permission_denied", + "weight": 6, + "description": "Frequent authorization failures", + "config": { + "permissionError": "Access denied - chaos injection" + } + }, + { + "type": "duplicate_data", + "weight": 6, + "description": "Heavy duplicate records", + "config": {} + }, + { + "type": "invalid_state", + "weight": 5, + "description": "Records in invalid states", + "config": { + "invalidStates": ["deleted", "suspended", "inactive", "pending_deletion"] + } + }, + { + "type": "dependency_failure", + "weight": 4, + "description": "Service dependency failures", + "config": { + "dependencyService": "downstream-service" + } + }, + { + "type": "timing_issue", + "weight": 3, + "description": "Timestamp inconsistencies", + "config": {} + } + ] +} \ No newline at end of file diff --git a/packages/controlmart/config/chaos-presets/infra.json b/packages/controlmart/config/chaos-presets/infra.json new file mode 100644 index 0000000000000000000000000000000000000000..5d6471196dcb8f888ceddf150039cbc35b04b0a7 --- /dev/null +++ b/packages/controlmart/config/chaos-presets/infra.json @@ -0,0 +1,33 @@ +{ + "id": "infra", + "name": "Infrastructure Chaos", + "description": "System-level faults: rate limits, timeouts, dependency failures. 
NO data corruption.", + "globalProbability": 0.1, + "scenarios": [ + { + "type": "rate_limit", + "weight": 50, + "description": "API throttling (429 Too Many Requests)", + "config": { + "rateLimitDelay": 1500, + "rateLimitMessage": "Rate limit exceeded" + } + }, + { + "type": "dependency_failure", + "weight": 30, + "description": "Dependency service outage (502/503)", + "config": { + "dependencyService": "database-shard-01" + } + }, + { + "type": "permission_denied", + "weight": 20, + "description": "Authorization failures (403 Forbidden)", + "config": { + "permissionError": "Insufficient permissions" + } + } + ] +} \ No newline at end of file diff --git a/packages/controlmart/config/chaos-presets/light.json b/packages/controlmart/config/chaos-presets/light.json new file mode 100644 index 0000000000000000000000000000000000000000..0399672b4604b68ee3b9d43ca42c1bf1144d3de2 --- /dev/null +++ b/packages/controlmart/config/chaos-presets/light.json @@ -0,0 +1,45 @@ +{ + "id": "light", + "name": "Light Chaos", + "description": "Low probability chaos for basic resilience testing. Simulates common, low-impact scenarios like eventual consistency and rate limiting.", + "globalProbability": 0.05, + "scenarios": [ + { + "type": "stale_data", + "weight": 10, + "description": "Simulate eventual consistency delays - most common in distributed systems", + "config": { + "staleDataAge": 30 + } + }, + { + "type": "rate_limit", + "weight": 5, + "description": "API throttling and rate limiting", + "config": { + "rateLimitDelay": 1000, + "rateLimitMessage": "Rate limit exceeded - please retry" + } + }, + { + "type": "missing_data", + "weight": 3, + "description": "Occasional missing records or null results", + "config": { + "missingRecords": true, + "throwError": false + } + }, + { + "type": "partial_data", + "weight": 2, + "description": "Incomplete data sets returned", + "config": { + "partialResults": { + "percentage": 80, + "randomize": false + } + } + } + ] +} \ No newline at end of file diff --git a/packages/controlmart/config/chaos-presets/moderate.json b/packages/controlmart/config/chaos-presets/moderate.json new file mode 100644 index 0000000000000000000000000000000000000000..3f109a52b4d47e85a1b31eab38e4a98a4962856a --- /dev/null +++ b/packages/controlmart/config/chaos-presets/moderate.json @@ -0,0 +1,68 @@ +{ + "id": "moderate", + "name": "Moderate Chaos", + "description": "Medium probability chaos with diverse scenario types. 
Good balance for resilience testing without overwhelming the system.", + "globalProbability": 0.15, + "scenarios": [ + { + "type": "stale_data", + "weight": 12, + "description": "Eventual consistency delays", + "config": { + "staleDataAge": 60 + } + }, + { + "type": "data_corruption", + "weight": 8, + "description": "Data quality issues and field corruption", + "config": { + "corruptFields": ["email", "status", "timestamp"], + "corruptionType": "wrong_type" + } + }, + { + "type": "rate_limit", + "weight": 7, + "description": "API throttling", + "config": { + "rateLimitDelay": 2000, + "rateLimitMessage": "Rate limit exceeded" + } + }, + { + "type": "partial_data", + "weight": 6, + "description": "Incomplete result sets", + "config": { + "partialResults": { + "percentage": 70, + "randomize": true + } + } + }, + { + "type": "missing_data", + "weight": 5, + "description": "Missing records and fields", + "config": { + "missingRecords": true, + "throwError": false + } + }, + { + "type": "permission_denied", + "weight": 3, + "description": "Authorization failures", + "config": { + "permissionError": "Access denied - insufficient permissions" + } + }, + { + "type": "duplicate_data", + "weight": 4, + "description": "Duplicate records in results", + "config": {} + } + ] +} diff --git a/packages/controlmart/config/chaos-presets/process.json b/packages/controlmart/config/chaos-presets/process.json new file mode 100644 index 0000000000000000000000000000000000000000..57b71aaf8554de9f908db46563827a3f73589fd6 --- /dev/null +++ b/packages/controlmart/config/chaos-presets/process.json @@ -0,0 +1,72 @@ +{ + "id": "process", + "name": "Process Chaos", + "description": "Business logic and data integrity failures only. No infrastructure or system faults.", + "globalProbability": 0.1, + "scenarios": [ + { + "type": "stale_data", + "weight": 30, + "description": "Eventual consistency delays (e.g. order not found immediately after creation)", + "config": { + "staleDataAge": 45 + } + }, + { + "type": "partial_data", + "weight": 20, + "description": "Pagination and partial results (e.g. search returns subset)", + "config": { + "partialResults": { + "percentage": 75, + "randomize": false + } + } + }, + { + "type": "data_corruption", + "weight": 20, + "description": "Data quality issues (invalid enums, wrong formats)", + "config": { + "corruptFields": [ + "orderStatus", + "orderPriority", + "warehouseId", + "customerId" + ], + "corruptionType": "invalid_format" + } + }, + { + "type": "missing_data", + "weight": 15, + "description": "Missing required or optional fields", + "config": { + "missingFields": [ + "customerName", + "shipToAddress", + "lines" + ], + "throwError": false + } + }, + { + "type": "duplicate_data", + "weight": 10, + "description": "Duplicate records in list responses", + "config": {} + }, + { + "type": "invalid_state", + "weight": 5, + "description": "Records in logic-breaking states", + "config": { + "invalidStates": [ + "SUSPENDED", + "ARCHIVED", + "UNKNOWN" + ] + } + } + ] +} \ No newline at end of file diff --git a/packages/controlmart/config/chaos-presets/realistic.json b/packages/controlmart/config/chaos-presets/realistic.json new file mode 100644 index 0000000000000000000000000000000000000000..ed81e0ca2b3207b590f1700136c1949a7afb1f93 --- /dev/null +++ b/packages/controlmart/config/chaos-presets/realistic.json @@ -0,0 +1,76 @@ +{ + "id": "realistic", + "name": "Realistic Chaos", + "description": "Chaos distribution matching real-world production failure rates. 
Based on observability data from distributed systems.", + "globalProbability": 0.08, + "scenarios": [ + { + "type": "stale_data", + "weight": 40, + "description": "Most common: eventual consistency delays", + "config": { + "staleDataAge": 45 + } + }, + { + "type": "rate_limit", + "weight": 20, + "description": "Common: API throttling", + "config": { + "rateLimitDelay": 1500, + "rateLimitMessage": "Rate limit exceeded" + } + }, + { + "type": "partial_data", + "weight": 15, + "description": "Common: pagination and partial results", + "config": { + "partialResults": { + "percentage": 75, + "randomize": false + } + } + }, + { + "type": "data_corruption", + "weight": 10, + "description": "Occasional: data quality issues", + "config": { + "corruptFields": ["email", "phoneNumber"], + "corruptionType": "invalid_format" + } + }, + { + "type": "missing_data", + "weight": 7, + "description": "Occasional: missing optional fields", + "config": { + "missingFields": ["metadata", "description"], + "throwError": false + } + }, + { + "type": "duplicate_data", + "weight": 4, + "description": "Rare: duplicate records", + "config": {} + }, + { + "type": "permission_denied", + "weight": 3, + "description": "Rare: authorization failures", + "config": { + "permissionError": "Insufficient permissions" + } + }, + { + "type": "dependency_failure", + "weight": 1, + "description": "Very rare: complete service outages", + "config": { + "dependencyService": "external-service" + } + } + ] +} \ No newline at end of file diff --git a/packages/controlmart/docs/api/capabilities-api.md b/packages/controlmart/docs/api/capabilities-api.md new file mode 100644 index 0000000000000000000000000000000000000000..a8e7a82dea30073259a7764e4333ab847180caa3 --- /dev/null +++ b/packages/controlmart/docs/api/capabilities-api.md @@ -0,0 +1,484 @@ +# Capabilities API Documentation + +## Overview + +The Capabilities API provides endpoints for discovering and executing business capabilities in the Morpheus platform. + +## Base URL + +``` +http://localhost:4000 +``` + +## Endpoints + +### 1. List All Capabilities + +**GET** `/capabilities` + +Returns all available capabilities with optional filtering and search. 
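+
+For programmatic discovery, a minimal TypeScript sketch is shown below. It only builds a query string from the parameters documented underneath; the base URL matches the one used throughout this document, and the helper name is illustrative rather than part of the platform.
+
+```typescript
+// Minimal sketch: search the capability catalog from TypeScript.
+// Assumes the server is reachable at the documented base URL.
+const BASE_URL = "http://localhost:4000";
+
+interface CapabilityFilter {
+  q?: string;
+  domain?: string[];
+  complexity?: "simple" | "medium" | "complex";
+}
+
+async function findCapabilities(filter: CapabilityFilter): Promise<any[]> {
+  const params = new URLSearchParams();
+  if (filter.q) params.set("q", filter.q);
+  for (const d of filter.domain ?? []) params.append("domain", d);
+  if (filter.complexity) params.set("complexity", filter.complexity);
+
+  const res = await fetch(`${BASE_URL}/capabilities?${params.toString()}`);
+  if (!res.ok) throw new Error(`Capability search failed: ${res.status}`);
+  return res.json();
+}
+
+// Example: simple, inventory-related capabilities
+const results = await findCapabilities({ q: "inventory", complexity: "simple" });
+console.log(results.map((c) => c.id));
+```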
+ +**Query Parameters:** +- `q` (string, optional): Full-text search across name, description, and tags +- `domain` (string[], optional): Filter by domain(s) +- `complexity` (string, optional): Filter by complexity level (`simple`, `medium`, `complex`) +- `services` (string[], optional): Filter by service(s) used +- `personas` (string[], optional): Filter by persona(s) +- `patterns` (string[], optional): Filter by workflow pattern(s) + +**Example Requests:** + +```bash +# Get all capabilities +curl http://localhost:4000/capabilities + +# Search for inventory-related capabilities +curl "http://localhost:4000/capabilities?q=inventory" + +# Filter by domain +curl "http://localhost:4000/capabilities?domain=inventory&domain=warehousing" + +# Filter by complexity +curl "http://localhost:4000/capabilities?complexity=simple" + +# Combined search and filter +curl "http://localhost:4000/capabilities?q=check&complexity=simple" +``` + +**Response:** + +```json +[ + { + "id": "inventory-check", + "name": "Inventory Check", + "description": "Check current inventory levels for one or more SKUs...", + "tags": { + "domain": ["inventory", "warehousing"], + "complexity": "simple", + "services": ["wms"], + "personas": ["warehouse-worker", "store-manager"], + "patterns": ["sequential"] + }, + "odId": "inventory-check-standard-v1", + "version": "1.0.0", + "metadata": { + "author": "morpheus-team", + "estimatedDuration": 2000 + }, + "chaos": { + "enabled": true, + "probability": 0.1, + "scenarios": [...] + } + } +] +``` + +### 2. Get Capability by ID + +**GET** `/capabilities/:id` + +Returns a single capability by its ID. + +**Example:** + +```bash +curl http://localhost:4000/capabilities/inventory-check +``` + +### 3. Execute Capability + +**POST** `/capabilities/:id/execute` + +Executes a capability within a world context. 
+ +**Request Body:** + +```json +{ + "worldId": "world-id-here", + "inputs": { + "sku": "SKU-001", + "locationId": "WH-01" + } +} +``` + +**Example:** + +```bash +curl -X POST http://localhost:4000/capabilities/inventory-check/execute \ + -H "Content-Type: application/json" \ + -d '{ + "worldId": "673d9a8f1234567890abcdef", + "inputs": { + "sku": "SKU-001", + "locationId": "WH-01" + } + }' +``` + +**Response:** + +```json +{ + "capabilityId": "inventory-check", + "odId": "inventory-check-standard-v1", + "worldId": "673d9a8f1234567890abcdef", + "result": { + "runId": "inventory-check-standard-v1", + "worldId": "673d9a8f1234567890abcdef", + "descriptorId": "inventory-check-standard-v1", + "descriptorVersion": "1.0.0", + "status": "success", + "startTime": "2025-11-20T13:00:00.000Z", + "endTime": "2025-11-20T13:00:01.245Z", + "durationMs": 1245, + "stepResults": [...], + "totalSteps": 3, + "successfulSteps": 3, + "failedSteps": 0, + "skippedSteps": 0 + }, + "executedAt": "2025-11-20T13:00:00.000Z", + "durationMs": 1245, + "status": "success", + "capabilityInWorld": true, + "chaosMetadata": { + "enabled": true, + "injectionCount": 1, + "injections": [ + { + "stepId": "fetch-inventory", + "stepName": "Fetch Inventory Records", + "scenarioType": "missing_data", + "scenarioDescription": "Records not found or empty results", + "configSource": "world", + "probability": 0.2, + "seed": "test-seed-123", + "timestamp": "2025-11-20T13:00:00.500Z", + "modifications": [ + "Returned empty result set", + "Original had 5 records" + ], + "config": { + "missingRecords": true, + "throwError": false + } + } + ], + "cascadeResolution": { + "finalSource": "world" + }, + "probability": 0.2, + "seed": "test-seed-123" + } +} +``` + +**Note:** The `chaosMetadata` field is only present when chaos engineering is enabled. See the [Chaos Telemetry](#chaos-telemetry) section below for details. + +## Chaos Telemetry + +When chaos engineering is enabled, capability execution responses include comprehensive telemetry about chaos injections that occurred during execution. This telemetry helps you understand exactly what chaos was injected, when, and from which configuration level. 
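+
+To make the telemetry concrete, here is a small TypeScript sketch that executes a capability and prints any reported injections. The request and response fields follow the schemas and examples in this document; the helper name, base URL, and placeholder IDs are assumptions for illustration.
+
+```typescript
+// Illustrative sketch: execute a capability and surface chaos injections.
+const BASE_URL = "http://localhost:4000";
+
+async function executeWithTelemetry(
+  capabilityId: string,
+  worldId: string,
+  inputs: Record<string, unknown>
+) {
+  const res = await fetch(`${BASE_URL}/capabilities/${capabilityId}/execute`, {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({ worldId, inputs }),
+  });
+  if (!res.ok) throw new Error(`Execution failed: ${res.status}`);
+
+  const result = await res.json();
+  const chaos = result.chaosMetadata;
+
+  if (chaos?.enabled && chaos.injectionCount > 0) {
+    for (const injection of chaos.injections) {
+      console.warn(
+        `[chaos] ${injection.scenarioType} at step "${injection.stepName}" ` +
+          `(source: ${injection.configSource}, seed: ${chaos.seed ?? "none"})`
+      );
+    }
+  }
+  return result;
+}
+
+// Example call with placeholder IDs
+await executeWithTelemetry("inventory-check", "673d9a8f1234567890abcdef", { sku: "SKU-001" });
+```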
+ +### ChaosTelemetry Schema + +```typescript +{ + enabled: boolean; // Whether chaos was enabled for this execution + injectionCount: number; // Total number of chaos injections that occurred + injections: ChaosInjectionMetadata[]; // Details of each injection + cascadeResolution: { + finalSource: string; // Which config level provided the chaos policy + // Values: "env" | "step" | "od" | "capability" | "world" | "global" + }; + probability: number; // Probability that was used (0.0 to 1.0) + seed?: string; // Seed used for reproducible chaos (if any) +} +``` + +### ChaosInjectionMetadata Schema + +Each injection in the `injections` array contains: + +```typescript +{ + stepId: string; // OD step ID where chaos was injected + stepName: string; // Human-readable step name + scenarioType: string; // Type of chaos scenario (e.g., "missing_data", "data_corruption") + scenarioDescription: string; // Human-readable description + configSource: string; // Config level that provided the chaos scenario + // Values: "step" | "od" | "capability" | "world" | "global" | "env" + probability: number; // Probability setting at time of injection + seed?: string; // Seed if used for this injection + timestamp: string; // ISO 8601 timestamp of injection + modifications: string[]; // List of modifications made to the data + config?: object; // Scenario-specific configuration used +} +``` + +### Example: No Chaos Injected + +When chaos is enabled but no injections occurred (due to probability): + +```json +{ + "capabilityId": "inventory-check", + "status": "success", + "chaosMetadata": { + "enabled": true, + "injectionCount": 0, + "injections": [], + "cascadeResolution": { + "finalSource": "world" + }, + "probability": 0.1, + "seed": null + } +} +``` + +### Example: Multiple Chaos Injections + +When multiple steps experience chaos: + +```json +{ + "capabilityId": "order-fulfillment", + "status": "partial", + "chaosMetadata": { + "enabled": true, + "injectionCount": 2, + "injections": [ + { + "stepId": "fetch-order", + "stepName": "Fetch Order Details", + "scenarioType": "stale_data", + "scenarioDescription": "Return outdated data", + "configSource": "world", + "probability": 0.25, + "timestamp": "2025-11-20T14:30:00.100Z", + "modifications": [ + "Made data appear 60 minutes old", + "Updated timestamp to 2025-11-20T13:30:00.000Z" + ], + "config": { + "staleDataAge": 60 + } + }, + { + "stepId": "check-inventory", + "stepName": "Check Inventory Availability", + "scenarioType": "partial_data", + "scenarioDescription": "Return incomplete results", + "configSource": "world", + "probability": 0.25, + "timestamp": "2025-11-20T14:30:00.500Z", + "modifications": [ + "Returned 3 out of 6 SKUs", + "Missing SKUs: SKU-004, SKU-005, SKU-006" + ], + "config": { + "partialResults": { + "percentage": 50, + "randomize": true + } + } + } + ], + "cascadeResolution": { + "finalSource": "world" + }, + "probability": 0.25, + "seed": "test-123" + } +} +``` + +### Using Chaos Telemetry + +**1. Debugging Test Failures** + +When a test fails unexpectedly, check if chaos was injected: + +```bash +curl -X POST http://localhost:4000/capabilities/inventory-check/execute \ + -H "Content-Type: application/json" \ + -d '{"worldId": "..."}' | jq '.chaosMetadata' +``` + +**2. 
Analyzing Chaos Impact** + +Track which scenarios are triggered most frequently: + +```bash +# Execute capability multiple times +for i in {1..100}; do + curl -X POST http://localhost:4000/capabilities/inventory-check/execute \ + -H "Content-Type: application/json" \ + -d '{"worldId": "..."}' >> chaos-results.json +done + +# Analyze results +cat chaos-results.json | jq '.chaosMetadata.injections[].scenarioType' | sort | uniq -c +``` + +**3. Reproducing Specific Failures** + +Use the seed from a failed execution to reproduce the exact same chaos: + +```bash +# First execution - note the seed from response +RESPONSE=$(curl -X POST http://localhost:4000/capabilities/inventory-check/execute \ + -H "Content-Type: application/json" \ + -d '{"worldId": "..."}') + +SEED=$(echo $RESPONSE | jq -r '.chaosMetadata.seed') + +# Configure world with the same seed to reproduce +curl -X PUT "http://localhost:4000/world/$WORLD_ID/chaos" \ + -H "Content-Type: application/json" \ + -d "{\"enabled\": true, \"probability\": 0.5, \"seed\": \"$SEED\", ...}" +``` + +**4. Understanding Config Source** + +The `configSource` field in each injection tells you which configuration level provided the chaos scenario: + +- `"env"` - From `CHAOS_PRESET` environment variable +- `"world"` - From world-specific chaos configuration +- `"capability"` - From capability-level override +- `"od"` - From operational descriptor definition +- `"step"` - From step-level chaos policy +- `"global"` - From global preset loaded at startup + +This helps you understand the chaos configuration hierarchy and debug unexpected behavior. + +For comprehensive chaos engineering documentation, including all chaos endpoints and configuration options, see the [Chaos Engineering API documentation](./chaos-api.md). + +## Chaos Engineering API + +### 1. List Chaos Presets + +**GET** `/chaos/presets` + +Returns all available chaos presets. + +**Example:** + +```bash +curl http://localhost:4000/chaos/presets +``` + +**Response:** + +```json +[ + { + "id": "light", + "name": "Light Chaos", + "description": "Minimal chaos for production-like testing", + "probability": 0.05, + "scenarios": 4 + }, + { + "id": "moderate", + "name": "Moderate Chaos", + "description": "Balanced chaos for resilience testing", + "probability": 0.15, + "scenarios": 7 + } +] +``` + +### 2. Get Chaos Preset Details + +**GET** `/chaos/presets/:id` + +Returns detailed configuration for a specific chaos preset. + +**Example:** + +```bash +curl http://localhost:4000/chaos/presets/aggressive +``` + +### 3. Get Chaos Status + +**GET** `/chaos/status` + +Returns the current chaos system status. 
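+
+Before relying on chaos behaviour in an automated test run, it can be useful to assert what the server is actually configured with. The TypeScript sketch below checks the status payload shown in the example response further down; the base URL and helper name are assumptions.
+
+```typescript
+// Illustrative sketch: verify the chaos configuration before a resilience test run.
+const BASE_URL = "http://localhost:4000";
+
+async function assertChaosPreset(expectedPreset: string): Promise<void> {
+  const res = await fetch(`${BASE_URL}/chaos/status`);
+  if (!res.ok) throw new Error(`Status check failed: ${res.status}`);
+
+  const status = await res.json();
+  if (!status.enabled) {
+    throw new Error("Chaos is disabled; set CHAOS_ENABLED=true before resilience tests");
+  }
+  if (status.globalPreset !== expectedPreset) {
+    throw new Error(
+      `Expected preset "${expectedPreset}" but server reports "${status.globalPreset}"`
+    );
+  }
+}
+
+// Example: require the "realistic" preset before running the suite
+await assertChaosPreset("realistic");
+```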
+ +**Example:** + +```bash +curl http://localhost:4000/chaos/status +``` + +**Response:** + +```json +{ + "enabled": true, + "globalPreset": "realistic", + "availablePresets": ["light", "moderate", "realistic", "aggressive"], + "env": { + "CHAOS_ENABLED": "true", + "CHAOS_PRESET": "realistic" + } +} +``` + +## Capability Input Reference + +### Inventory Check + +```json +{ + "sku": "SKU-001", + "locationId": "WH-01" // optional +} +``` + +### Shipment Tracking + +```json +{ + "shipmentId": "SHIP-001" +} +``` + +### Equipment Availability Check + +```json +{ + "equipmentType": "forklift", + "zoneId": "ZONE-A" // optional +} +``` + +### Dock Appointment Scheduling + +```json +{ + "date": "2025-11-21", + "dockDoorId": "DOCK-01", + "appointmentType": "inbound" // optional: "inbound" or "outbound" +} +``` + +## Environment Variables + +- `CHAOS_ENABLED`: Enable/disable chaos injection (`true`/`false`) +- `CHAOS_PRESET`: Global chaos preset (`light`/`moderate`/`realistic`/`aggressive`) +- `MONGO_URI`: MongoDB connection string +- `DB_NAME`: Database name +- `PORT`: Server port (default: 4000) + +## Related APIs + +- [Persona API](./persona-api.md) - Discover personas and their capabilities +- [Chaos Engineering API](./chaos-api.md) - Configure chaos injection policies +- [World API](./world-api.md) - Manage world contexts for capability execution diff --git a/packages/controlmart/docs/api/chaos-api.md b/packages/controlmart/docs/api/chaos-api.md new file mode 100644 index 0000000000000000000000000000000000000000..fa636bf53bc141498bdd486252fde6e0d67980fb --- /dev/null +++ b/packages/controlmart/docs/api/chaos-api.md @@ -0,0 +1,628 @@ +# Chaos Engineering API Documentation + +## Overview + +The Chaos Engineering API provides endpoints for managing chaos injection configuration in the Morpheus platform. Chaos engineering helps test system resilience by intentionally injecting failures, data corruption, and other anomalies during capability execution. + +## Base URL + +``` +http://localhost:4000 +``` + +## Chaos Priority Cascade + +The Morpheus platform uses a priority cascade for chaos configuration resolution: + +1. **ENV** - Environment variables (`CHAOS_ENABLED`, `CHAOS_PRESET`) +2. **Step** - Step-level chaos policy (in OD definition) +3. **OD** - Operational Descriptor-level chaos policy +4. **Capability** - Capability-level chaos override +5. **World** - World-level chaos policy +6. **Global** - Global preset loaded at startup + +Higher priority levels override lower levels. This allows fine-grained control over chaos injection at different granularities. + +## Global Chaos Endpoints + +### 1. List All Chaos Presets + +**GET** `/chaos/presets` + +Returns all available chaos presets with metadata. Presets are pre-configured chaos policies that can be applied at any level. 
+ +**Example:** + +```bash +curl http://localhost:4000/chaos/presets +``` + +**Response:** + +```json +{ + "success": true, + "count": 4, + "data": [ + { + "id": "light", + "name": "Light Chaos", + "description": "Minimal chaos for production-like testing", + "probability": 0.05, + "scenarioCount": 4 + }, + { + "id": "moderate", + "name": "Moderate Chaos", + "description": "Balanced chaos for resilience testing", + "probability": 0.15, + "scenarioCount": 7 + }, + { + "id": "realistic", + "name": "Realistic Chaos", + "description": "Real-world failure patterns for comprehensive testing", + "probability": 0.25, + "scenarioCount": 11 + }, + { + "id": "aggressive", + "name": "Aggressive Chaos", + "description": "High-frequency chaos for stress testing", + "probability": 0.40, + "scenarioCount": 11 + } + ] +} +``` + +### 2. Get Chaos Preset Details + +**GET** `/chaos/presets/:id` + +Returns the full chaos policy configuration for a specific preset, including all scenarios and their configurations. + +**Path Parameters:** +- `id` (string, required): Preset ID (e.g., "light", "moderate", "realistic", "aggressive") + +**Example:** + +```bash +curl http://localhost:4000/chaos/presets/realistic +``` + +**Response:** + +```json +{ + "success": true, + "data": { + "enabled": true, + "probability": 0.25, + "scenarios": [ + { + "type": "missing_data", + "weight": 20, + "description": "Records not found or empty results", + "config": { + "missingRecords": true, + "throwError": true + } + }, + { + "type": "data_corruption", + "weight": 15, + "description": "Corrupt critical fields in responses", + "config": { + "corruptFields": ["id", "status"], + "corruptionType": "null" + } + }, + { + "type": "stale_data", + "weight": 15, + "description": "Return outdated data", + "config": { + "staleDataAge": 60 + } + }, + { + "type": "partial_data", + "weight": 15, + "description": "Return incomplete results", + "config": { + "partialResults": { + "percentage": 50, + "randomize": true + } + } + } + ], + "seed": undefined + } +} +``` + +### 3. Get Chaos System Status + +**GET** `/chaos/status` + +Returns the current global chaos system configuration and statistics. + +**Example:** + +```bash +curl http://localhost:4000/chaos/status +``` + +**Response:** + +```json +{ + "success": true, + "data": { + "enabled": true, + "activePreset": "realistic", + "stats": { + "presetCount": 4, + "worldPolicyCount": 2, + "capabilityOverrideCount": 0, + "odOverrideCount": 0 + } + } +} +``` + +**Status Fields:** +- `enabled`: Whether chaos is globally enabled (from `CHAOS_ENABLED` env var) +- `activePreset`: Current global preset name (from `CHAOS_PRESET` env var) +- `stats.presetCount`: Number of available chaos presets +- `stats.worldPolicyCount`: Number of worlds with custom chaos policies +- `stats.capabilityOverrideCount`: Number of capabilities with chaos overrides +- `stats.odOverrideCount`: Number of ODs with chaos policies + +## World-Specific Chaos Endpoints + +Worlds can have their own chaos policies that override the global configuration for all capabilities executed in that world context. + +### 4. Get World Chaos Configuration + +**GET** `/world/:worldId/chaos` + +Returns the chaos policy configured for a specific world. 
+ +**Path Parameters:** +- `worldId` (string, required): World ID (MongoDB ObjectId) + +**Example:** + +```bash +curl http://localhost:4000/world/673d9a8f1234567890abcdef/chaos +``` + +**Response:** + +```json +{ + "worldId": "673d9a8f1234567890abcdef", + "worldName": "chaos-test-world", + "chaos": { + "enabled": true, + "probability": 0.3, + "scenarios": [ + { + "type": "missing_data", + "weight": 50, + "description": "Simulate missing inventory records", + "config": { + "missingRecords": true, + "throwError": false + } + }, + { + "type": "data_corruption", + "weight": 30, + "description": "Corrupt quantity fields", + "config": { + "corruptFields": ["quantity", "available"], + "corruptionType": "wrong_type" + } + }, + { + "type": "rate_limit", + "weight": 20, + "description": "Simulate API rate limiting", + "config": { + "rateLimitDelay": 2000, + "rateLimitMessage": "Rate limit exceeded" + } + } + ], + "seed": "test-seed-123" + } +} +``` + +**Default Response (No Chaos Configured):** + +```json +{ + "worldId": "673d9a8f1234567890abcdef", + "worldName": "normal-world", + "chaos": { + "enabled": false, + "probability": 0.0, + "scenarios": [] + } +} +``` + +### 5. Update World Chaos Configuration + +**PUT** `/world/:worldId/chaos` + +Sets or updates the chaos policy for a specific world. This configuration will apply to all capabilities executed in this world. + +**Path Parameters:** +- `worldId` (string, required): World ID (MongoDB ObjectId) + +**Request Body:** + +```json +{ + "enabled": true, + "probability": 0.2, + "scenarios": [ + { + "type": "missing_data", + "weight": 60, + "description": "Simulate missing records", + "config": { + "missingRecords": true, + "throwError": true + } + }, + { + "type": "stale_data", + "weight": 40, + "description": "Return outdated data", + "config": { + "staleDataAge": 30 + } + } + ], + "seed": "reproducible-chaos-seed" +} +``` + +**Example:** + +```bash +curl -X PUT http://localhost:4000/world/673d9a8f1234567890abcdef/chaos \ + -H "Content-Type: application/json" \ + -d '{ + "enabled": true, + "probability": 0.2, + "scenarios": [ + { + "type": "missing_data", + "weight": 60, + "description": "Simulate missing records", + "config": { + "missingRecords": true, + "throwError": true + } + } + ] + }' +``` + +**Response:** + +```json +{ + "worldId": "673d9a8f1234567890abcdef", + "chaos": { + "enabled": true, + "probability": 0.2, + "scenarios": [...] + }, + "message": "World chaos configuration updated successfully" +} +``` + +### 6. Delete World Chaos Configuration + +**DELETE** `/world/:worldId/chaos` + +Removes the custom chaos policy from a world, causing it to fall back to the global chaos configuration. 
+ +**Path Parameters:** +- `worldId` (string, required): World ID (MongoDB ObjectId) + +**Example:** + +```bash +curl -X DELETE http://localhost:4000/world/673d9a8f1234567890abcdef/chaos +``` + +**Response:** + +```json +{ + "worldId": "673d9a8f1234567890abcdef", + "message": "World chaos configuration removed successfully" +} +``` + +## Chaos Policy Schema + +### ChaosPolicy Object + +```typescript +{ + enabled: boolean; // Whether chaos injection is enabled + probability: number; // 0.0 to 1.0 - overall chance chaos occurs + scenarios: ChaosScenario[]; // Array of possible chaos scenarios + seed?: string; // Optional seed for reproducible chaos +} +``` + +### ChaosScenario Object + +```typescript +{ + type: string; // Scenario type (see Chaos Scenario Types below) + weight: number; // Relative probability weight (higher = more likely) + description: string; // Human-readable description + config: ChaosConfig; // Scenario-specific configuration +} +``` + +## Chaos Scenario Types + +### 1. missing_data +Simulates missing records or empty results from data sources. + +**Config:** +```json +{ + "missingRecords": true, // Return empty results + "missingFields": ["field1"], // Remove specific fields (optional) + "throwError": true // Throw error vs return empty (default: true) +} +``` + +### 2. data_corruption +Corrupts fields in response data with null, wrong types, or invalid values. + +**Config:** +```json +{ + "corruptFields": ["id", "status"], + "corruptionType": "null" | "wrong_type" | "invalid_format" | "random_value" +} +``` + +### 3. stale_data +Returns outdated data to simulate caching issues or sync delays. + +**Config:** +```json +{ + "staleDataAge": 60 // Age in minutes +} +``` + +### 4. format_change +Simulates breaking schema changes in data structures. + +**Config:** +```json +{ + "schemaChanges": [ + { + "field": "oldField", + "change": "rename", + "newName": "newField" + } + ] +} +``` + +### 5. permission_denied +Simulates access control failures. + +**Config:** +```json +{ + "permissionError": "Access denied: insufficient permissions" +} +``` + +### 6. rate_limit +Simulates API rate limiting with delays. + +**Config:** +```json +{ + "rateLimitDelay": 2000, // Delay in ms + "rateLimitMessage": "Rate limit exceeded, please retry" +} +``` + +### 7. partial_data +Returns incomplete data sets. + +**Config:** +```json +{ + "partialResults": { + "percentage": 50, // Percentage of data to return (0-100) + "randomize": true // Random subset vs first N items + } +} +``` + +### 8. duplicate_data +Injects duplicate records in results. + +**Config:** +```json +{ + "duplicateCount": 2 // Number of duplicates to create +} +``` + +### 9. invalid_state +Returns records in invalid or conflicting states. + +**Config:** +```json +{ + "invalidStates": ["CANCELLED_BUT_ACTIVE", "SHIPPED_NO_TRACKING"] +} +``` + +### 10. dependency_failure +Simulates downstream service failures. + +**Config:** +```json +{ + "dependencyService": "inventory-service", + "cascadeFailure": true // Propagate failure to other steps +} +``` + +### 11. timing_issue +Introduces timing-related problems like race conditions or delays. + +**Config:** +```json +{ + "delay": 5000, // Delay in ms + "timeout": true // Simulate timeout +} +``` + +## Chaos Telemetry + +When chaos is enabled, capability execution responses include detailed telemetry about which chaos scenarios were injected. See the [Capabilities API documentation](./capabilities-api.md#chaos-telemetry) for details. 
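+
+As a rough end-to-end sketch of the endpoints above, the TypeScript snippet below loads a preset and applies it as a world-level policy, then removes the override again. The response shapes mirror the examples in this document; the helper names, base URL, and world ID are assumptions for illustration.
+
+```typescript
+// Illustrative sketch: apply a chaos preset to a single world, then reset it.
+const BASE_URL = "http://localhost:4000";
+
+async function applyPresetToWorld(presetId: string, worldId: string): Promise<void> {
+  // 1. Load the full policy for the preset (GET /chaos/presets/:id)
+  const presetRes = await fetch(`${BASE_URL}/chaos/presets/${presetId}`);
+  if (!presetRes.ok) throw new Error(`Preset lookup failed: ${presetRes.status}`);
+  const { data: policy } = await presetRes.json();
+
+  // 2. Use it as the world-level chaos policy (PUT /world/:worldId/chaos)
+  const putRes = await fetch(`${BASE_URL}/world/${worldId}/chaos`, {
+    method: "PUT",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify(policy),
+  });
+  if (!putRes.ok) throw new Error(`World chaos update failed: ${putRes.status}`);
+}
+
+async function clearWorldChaos(worldId: string): Promise<void> {
+  // 3. Remove the override so the world falls back to the global configuration
+  await fetch(`${BASE_URL}/world/${worldId}/chaos`, { method: "DELETE" });
+}
+
+// Example with a placeholder world ID
+await applyPresetToWorld("realistic", "673d9a8f1234567890abcdef");
+// ... run resilience tests ...
+await clearWorldChaos("673d9a8f1234567890abcdef");
+```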
+ +## Environment Variables + +- `CHAOS_ENABLED`: Enable/disable chaos injection globally (`"true"`/`"false"`) +- `CHAOS_PRESET`: Global chaos preset to load at startup (`"light"`, `"moderate"`, `"realistic"`, `"aggressive"`) +- `CHAOS_SEED`: Global seed for reproducible chaos (optional) + +## Use Cases + +### 1. Testing System Resilience + +Enable chaos to test how your system handles various failure modes: + +```bash +# Set aggressive chaos for stress testing +export CHAOS_ENABLED=true +export CHAOS_PRESET=aggressive + +# Run tests and observe behavior +``` + +### 2. World-Specific Testing + +Create worlds with different chaos levels for controlled testing: + +```bash +# Create a chaos test world +WORLD_ID=$(curl -X POST http://localhost:4000/world \ + -H "Content-Type: application/json" \ + -d '{"name": "chaos-test"}' | jq -r '.worldId') + +# Configure aggressive chaos for this world only +curl -X PUT "http://localhost:4000/world/$WORLD_ID/chaos" \ + -H "Content-Type: application/json" \ + -d @realistic-chaos.json + +# Execute capabilities in this world - they will experience chaos +curl -X POST http://localhost:4000/capabilities/inventory-check/execute \ + -H "Content-Type: application/json" \ + -d "{\"worldId\": \"$WORLD_ID\", \"inputs\": {}}" +``` + +### 3. Reproducible Chaos Testing + +Use seeds for deterministic chaos injection: + +```bash +# Configure chaos with a seed +curl -X PUT "http://localhost:4000/world/$WORLD_ID/chaos" \ + -H "Content-Type: application/json" \ + -d '{ + "enabled": true, + "probability": 0.5, + "seed": "test-run-1", + "scenarios": [...] + }' + +# Run the same test multiple times - chaos will be identical each time +``` + +### 4. Gradual Chaos Introduction + +Start with light chaos and gradually increase: + +```bash +# Week 1: Light chaos +export CHAOS_PRESET=light + +# Week 2: Moderate chaos +export CHAOS_PRESET=moderate + +# Week 3: Realistic chaos +export CHAOS_PRESET=realistic + +# Observe application behavior at each level +``` + +## Error Responses + +### 400 Bad Request + +```json +{ + "success": false, + "error": "worldId is required" +} +``` + +### 404 Not Found + +```json +{ + "success": false, + "error": "Chaos preset not found: invalid-preset" +} +``` + +```json +{ + "success": false, + "error": "World not found" +} +``` + +### 500 Internal Server Error + +```json +{ + "success": false, + "error": "Failed to update world chaos" +} +``` + +## Related APIs + +- [Capabilities API](./capabilities-api.md) - Execute capabilities and view chaos telemetry +- [World API](./world-api.md) - Manage worlds and their configurations diff --git a/packages/controlmart/docs/api/persona-api.md b/packages/controlmart/docs/api/persona-api.md new file mode 100644 index 0000000000000000000000000000000000000000..92c9eb2f9a0328f5f12f4e993d43a41be15d4a5b --- /dev/null +++ b/packages/controlmart/docs/api/persona-api.md @@ -0,0 +1,265 @@ +# Persona API Documentation + +## Overview + +The Persona API provides endpoints for discovering personas (user roles) and their associated capabilities in the Morpheus platform. Personas represent different types of users in a supply chain environment (e.g., warehouse workers, store managers, logistics coordinators). + +## Base URL + +``` +http://localhost:4000 +``` + +## Endpoints + +### 1. List All Personas + +**GET** `/personas` + +Returns all available personas with optional filtering by role, department, access level, or tags. 
+ +**Query Parameters:** +- `role` (string, optional): Filter by role (e.g., "operations", "management", "specialist") +- `department` (string, optional): Filter by department (e.g., "warehouse", "store", "logistics") +- `accessLevel` (string, optional): Filter by access level (e.g., "operational", "supervisory", "executive") +- `tags` (string, optional): Comma-separated list of tags to filter by + +**Example Requests:** + +```bash +# Get all personas +curl http://localhost:4000/personas + +# Filter by role +curl "http://localhost:4000/personas?role=operations" + +# Filter by department +curl "http://localhost:4000/personas?department=warehouse" + +# Filter by access level +curl "http://localhost:4000/personas?accessLevel=operational" + +# Filter by multiple tags +curl "http://localhost:4000/personas?tags=frontline,operational" + +# Combined filters +curl "http://localhost:4000/personas?role=operations&department=warehouse" +``` + +**Response:** + +```json +{ + "count": 5, + "personas": [ + { + "id": "warehouse-worker", + "name": "Warehouse Worker", + "description": "Frontline warehouse operations staff", + "role": "operations", + "department": "warehouse", + "accessLevel": "operational", + "capabilityIds": [ + "inventory-check", + "shipment-tracking", + "equipment-availability" + ], + "tags": ["frontline", "operational", "warehouse"] + }, + { + "id": "store-manager", + "name": "Store Manager", + "description": "Retail store manager overseeing operations", + "role": "management", + "department": "store", + "accessLevel": "supervisory", + "capabilityIds": [ + "inventory-check", + "order-fulfillment-status" + ], + "tags": ["management", "retail", "supervisory"] + } + ] +} +``` + +### 2. Get Persona by ID + +**GET** `/personas/:personaId` + +Returns a single persona by its ID. + +**Path Parameters:** +- `personaId` (string, required): Unique identifier for the persona + +**Example:** + +```bash +curl http://localhost:4000/personas/warehouse-worker +``` + +**Response:** + +```json +{ + "id": "warehouse-worker", + "name": "Warehouse Worker", + "description": "Frontline warehouse operations staff performing daily tasks like receiving, putaway, picking, and shipping", + "role": "operations", + "department": "warehouse", + "accessLevel": "operational", + "capabilityIds": [ + "inventory-check", + "shipment-tracking", + "equipment-availability", + "dock-appointment-check" + ], + "tags": ["frontline", "operational", "warehouse", "physical-work"] +} +``` + +### 3. Get Capabilities for a Persona + +**GET** `/personas/:personaId/capabilities` + +Returns all capabilities associated with a specific persona, including full capability details. 
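+
+A small TypeScript sketch of consuming this endpoint, for example to drive a role-based menu, is shown below. The field names follow the response example underneath; the base URL, helper name, and typing are assumptions rather than a published client.
+
+```typescript
+// Illustrative sketch: list the capabilities available to a persona.
+const BASE_URL = "http://localhost:4000";
+
+interface PersonaCapabilitiesResponse {
+  personaId: string;
+  personaName: string;
+  capabilityCount: number;
+  capabilities: Array<{ id: string; name: string; description: string }>;
+}
+
+async function buildPersonaMenu(personaId: string): Promise<string[]> {
+  const res = await fetch(`${BASE_URL}/personas/${personaId}/capabilities`);
+  if (!res.ok) throw new Error(`Persona lookup failed: ${res.status}`);
+
+  const body = (await res.json()) as PersonaCapabilitiesResponse;
+  // One menu entry per capability the persona is allowed to execute
+  return body.capabilities.map((c) => `${c.name}: ${c.description}`);
+}
+
+console.log(await buildPersonaMenu("warehouse-worker"));
+```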
+ +**Path Parameters:** +- `personaId` (string, required): Unique identifier for the persona + +**Example:** + +```bash +curl http://localhost:4000/personas/warehouse-worker/capabilities +``` + +**Response:** + +```json +{ + "personaId": "warehouse-worker", + "personaName": "Warehouse Worker", + "capabilityCount": 4, + "capabilities": [ + { + "id": "inventory-check", + "name": "Inventory Check", + "description": "Check current inventory levels for one or more SKUs", + "tags": { + "domain": ["inventory", "warehousing"], + "complexity": "simple", + "services": ["wms"], + "personas": ["warehouse-worker", "store-manager"], + "patterns": ["sequential"] + }, + "odId": "inventory-check-standard-v1", + "version": "1.0.0", + "metadata": { + "author": "morpheus-team", + "estimatedDuration": 2000 + } + }, + { + "id": "shipment-tracking", + "name": "Shipment Tracking", + "description": "Track a shipment and retrieve its status and location", + "tags": { + "domain": ["transportation", "logistics"], + "complexity": "simple", + "services": ["tms"], + "personas": ["warehouse-worker", "logistics-coordinator"], + "patterns": ["sequential"] + }, + "odId": "shipment-tracking-standard-v1", + "version": "1.0.0" + } + ] +} +``` + +## Persona Schema + +### Persona Object + +```typescript +{ + id: string; // Unique identifier (e.g., "warehouse-worker") + name: string; // Display name (e.g., "Warehouse Worker") + description: string; // Detailed description of the persona + role: string; // Role category (operations, management, specialist) + department: string; // Department (warehouse, store, logistics, etc.) + accessLevel: string; // Access level (operational, supervisory, executive) + capabilityIds: string[]; // Array of capability IDs this persona can execute + tags: string[]; // Additional tags for filtering and categorization +} +``` + +## Use Cases + +### 1. Building Role-Based UIs + +Use the persona API to build role-specific interfaces that only show capabilities relevant to the current user's role: + +```bash +# Get all capabilities for warehouse workers +curl http://localhost:4000/personas/warehouse-worker/capabilities + +# Use the response to build a UI that shows only these capabilities +``` + +### 2. Capability Discovery + +Find which personas can perform a specific capability: + +```bash +# Get all personas +curl http://localhost:4000/personas + +# Filter the response to find which personas include a specific capabilityId +``` + +### 3. 
Access Control + +Validate whether a persona should have access to a capability: + +```bash +# Get persona details +curl http://localhost:4000/personas/warehouse-worker + +# Check if the desired capabilityId is in the persona's capabilityIds array +``` + +## Common Personas + +The Morpheus platform includes these standard personas: + +| Persona ID | Name | Department | Role | Typical Capabilities | +|------------|------|------------|------|---------------------| +| `warehouse-worker` | Warehouse Worker | Warehouse | Operations | Inventory check, shipment tracking, equipment availability | +| `store-manager` | Store Manager | Store | Management | Inventory check, order fulfillment status | +| `logistics-coordinator` | Logistics Coordinator | Logistics | Specialist | Shipment tracking, route optimization, carrier rate lookup | +| `supply-chain-manager` | Supply Chain Manager | Supply Chain | Management | Advanced analytics, network optimization | +| `inventory-analyst` | Inventory Analyst | Inventory | Specialist | Inventory analytics, forecasting, replenishment | + +## Error Responses + +### 404 Not Found + +```json +{ + "error": "Persona 'invalid-persona' not found" +} +``` + +### 500 Internal Server Error + +```json +{ + "error": "Error message describing what went wrong" +} +``` + +## Related APIs + +- [Capabilities API](./capabilities-api.md) - Execute and discover capabilities +- [World API](./world-api.md) - Manage world contexts for capability execution diff --git a/packages/controlmart/driver-service-mesh.ts b/packages/controlmart/driver-service-mesh.ts new file mode 100644 index 0000000000000000000000000000000000000000..185a81b8530b8219224185911db637e3ff465748 --- /dev/null +++ b/packages/controlmart/driver-service-mesh.ts @@ -0,0 +1,27 @@ + +import { ServiceMesh } from './src/utils/service-mesh.util'; + +console.log('--- Service Mesh Driver & Test ---'); + +// 1. Inspect Registry Structure +const registry = ServiceMesh.getRegistry(); +const registeredServices = Object.keys(registry); +console.log(`[INFO] Registered Services (${registeredServices.length}):`, registeredServices.join(', ')); + +// 2. Check for System Service Docs +console.log('\n--- Checking System Docs ---'); +const systemEndpoints = ServiceMesh.findEndpoints('system'); +console.log(`[INFO] Found ${systemEndpoints.length} System endpoints.`); + +if (systemEndpoints.length > 0) { + systemEndpoints.forEach(ep => console.log(` -> [${ep.method.toUpperCase()}] ${ep.path}`)); + console.log('\n--- Sample formatted doc for /docs/mesh ---'); + const meshEp = systemEndpoints.find(ep => ep.path === '/docs/mesh'); + if (meshEp) { + console.log(ServiceMesh.getFormattedEndpointDocs(meshEp)); + } +} else { + console.error("[FAIL] No System endpoints found. 
Check docs.app.ts parsing."); +} + +console.log('\n--- Driver Complete ---'); diff --git a/packages/controlmart/eslint.config.js b/packages/controlmart/eslint.config.js new file mode 100644 index 0000000000000000000000000000000000000000..e92a2f244c2628e73b9f32ae13267087ad0496ff --- /dev/null +++ b/packages/controlmart/eslint.config.js @@ -0,0 +1,54 @@ +import tsParser from "@typescript-eslint/parser"; +import tsPlugin from "@typescript-eslint/eslint-plugin"; +import importPlugin from "eslint-plugin-import"; +import unusedImports from "eslint-plugin-unused-imports"; +import prettierConfig from "eslint-config-prettier"; + +export default [ + prettierConfig, + + { + files: ["**/*.ts"], + ignores: ["dist", "build", "node_modules"], + + languageOptions: { + parser: tsParser, + parserOptions: { + project: "./tsconfig.json", + tsconfigRootDir: process.cwd(), + }, + sourceType: "module", + ecmaVersion: "latest", + }, + + plugins: { + "@typescript-eslint": tsPlugin, + import: importPlugin, + "unused-imports": unusedImports, + }, + + rules: { + "unused-imports/no-unused-imports": "error", + + "unused-imports/no-unused-vars": [ + "warn", + { + vars: "all", + varsIgnorePattern: "^_", + args: "after-used", + argsIgnorePattern: "^_", + }, + ], + + "@typescript-eslint/no-unused-vars": "off", + + "import/order": [ + "warn", + { + groups: ["builtin", "external", "internal", ["parent", "sibling"], "index"], + "newlines-between": "always", + }, + ], + }, + }, +]; diff --git a/packages/controlmart/index.ts b/packages/controlmart/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..0f3b35afef0664e656da8e4a4be1219c4c17bd56 --- /dev/null +++ b/packages/controlmart/index.ts @@ -0,0 +1,17 @@ +import { existsSync } from "fs"; +import path from "path"; + +// Development Entry Point +// This bypasses the Setup UI and directly launches the main application. +// It assumes the developer has a .env file or environment variables set. + +(async () => { + const envPath = path.join(process.cwd(), ".env"); + + if (!existsSync(envPath)) { + console.warn(`[dev] ⚠️ No .env file found at ${envPath}. 
Application may fail if variables are missing.`); + } + + // Import the main application logic (migrations, seeding, server start) + await import("./main"); +})(); diff --git a/packages/controlmart/main.ts b/packages/controlmart/main.ts new file mode 100644 index 0000000000000000000000000000000000000000..9dd77a9bd53165c494edb5660027299442555582 --- /dev/null +++ b/packages/controlmart/main.ts @@ -0,0 +1,242 @@ +import { createApplication } from "./src/application/application.app"; +import { World } from "./src/models/world.model"; +import { WorldLog } from "./src/models/logs.model"; +import { + createAppLogger, + createHttpLogger, +} from "./src/utils/logger.util"; +import { getErrorMessage } from "./src/utils/error.util"; +// Research branch imports +import { loadEnv } from "./src/utils/env.util"; +import { registerTicketingJob } from "./src/jobs/ticketing.job"; +import { registerDeleteLogQueueJob } from "./src/jobs/delete-logqueue.job"; +import { startScheduler, stopScheduler } from "./src/services/scheduler.service"; +import { initializeODScheduling } from "./src/operational-descriptor/schedule.od"; +import { createCollectionsIfNotExist, connectMongo, syncModelIndexes } from "./src/services/mongo.service"; +import { auditLogger } from "./src/services/audit-logger.service"; +// od-arch branch imports +// import { seedBusinessRules } from "./src/business-rules/seed-rules"; // Temporarily disabled +import { initializeODRegistry } from "./src/ods/index"; +import { WorldRepository } from "./src/repository"; +import { ChaosConfigRegistry } from "./src/services/chaos-config.registry"; +import { capabilityCatalog } from './src/services/capability-catalog.service'; +import { personaRegistry } from './src/services/persona-registry.service'; +import { autoSeedIfEmpty } from './src/services/auto-seed.service'; + +const envValues = loadEnv(); + +// Parse CLI arguments +const forceSeed = process.argv.includes('--force-seed'); + +export const logger = createAppLogger({}); + +// Timeout wrapper for startup operations to prevent indefinite hangs +const withTimeout = ( + promise: Promise, + ms: number, + operation: string +): Promise => { + return Promise.race([ + promise, + new Promise((_, reject) => + setTimeout( + () => reject(new Error(`${operation} timed out after ${ms}ms`)), + ms + ) + ), + ]); +}; + + +export const httpLogger = createHttpLogger(logger); +export const mongoLogger = auditLogger; + + +try { + // 1. Validate Critical Configuration + if (!envValues.MONGO_URI || !envValues.OPENAI_API_KEY) { + logger.warn("[startup] Missing critical configuration (MONGO_URI or OPENAI_API_KEY). Launching Setup Mode..."); + const { startSetup } = await import("./src/application/setup.app"); + await startSetup(); + // Keep process alive for setup server + await new Promise(() => { }); + } + + // 2. Run Boot Check (Database Connection) + try { + await withTimeout( + connectMongo({ + uri: envValues.MONGO_URI, + dbName: envValues.DB_NAME, + log: true, + }), + 30000, + 'connectMongo' + ); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "[startup] Database connection failed. 
Launching Setup Mode..."); + const { startSetup } = await import("./src/application/setup.app"); + await startSetup(); + // Keep process alive for setup server + await new Promise(() => { }); + } + await withTimeout( + createCollectionsIfNotExist({ + models: [World, WorldLog], + collectionNames: ["schedules"], + log: true, + }), + 30000, + 'createCollectionsIfNotExist' + ); + + // Seed business rules on startup + // await seedBusinessRules(); + + await withTimeout(startScheduler(), 30000, 'startScheduler'); + initializeODScheduling(); + + // Register Ticketing Job + await withTimeout(registerTicketingJob(), 30000, 'registerTicketingJob'); + + // Register Log Cleanup Job + await withTimeout(registerDeleteLogQueueJob(), 30000, 'registerDeleteLogQueueJob'); + + // Initialize OD Registry with all available OD builders + initializeODRegistry(); + logger.info('OD Registry initialized'); + + // Auto-seed database if collections are empty (or force mode) + if (forceSeed) { + logger.info('[startup] Force seed mode enabled via --force-seed'); + } + const seedResult = await withTimeout( + autoSeedIfEmpty(logger, { force: forceSeed }), + 120000, + 'autoSeedIfEmpty' + ); + if (seedResult.capabilities.seeded || seedResult.personas.seeded || seedResult.knowledgeGraph.seeded) { + logger.info({ seedResult }, 'Auto-seeding completed'); + } + + // Initialize capability and persona services from MongoDB + const initializedServices: string[] = []; + + // 1. Initialize Capability Catalog + try { + await withTimeout(capabilityCatalog.initialize(), 60000, 'capabilityCatalog.initialize'); + initializedServices.push(`CapabilityCatalog (${capabilityCatalog.count()} capabilities)`); + } catch (error) { + const errorMsg = getErrorMessage(error); + if (errorMsg.includes('CapabilityCatalog not initialized')) { + logger.warn('Capability catalog database is empty. Run migration script: bun run scripts/migrate-capabilities.ts'); + } else { + logger.error({ error: errorMsg }, 'Failed to initialize capability catalog'); + } + } + + // 2. Initialize Persona Registry + try { + await withTimeout(personaRegistry.initialize(), 60000, 'personaRegistry.initialize'); + initializedServices.push(`PersonaRegistry (${personaRegistry.getCount()} personas)`); + } catch (error) { + const errorMsg = getErrorMessage(error); + if (errorMsg.includes('PersonaRegistry not initialized')) { + logger.warn('Persona registry database is empty. Run migration script: bun run scripts/migrate-personas.ts'); + } else { + logger.error({ error: errorMsg }, 'Failed to initialize persona registry'); + } + } + + // 3. Initialize Knowledge Graph + try { + const { knowledgeGraph } = await import('./src/services/knowledge-graph.service'); + await withTimeout(knowledgeGraph.initialize(), 60000, 'knowledgeGraph.initialize'); + initializedServices.push('KnowledgeGraph'); + } catch (error) { + const errorMsg = getErrorMessage(error); + if (errorMsg.includes('Knowledge graph database is empty')) { + logger.warn('Knowledge graph database is empty. Run migration script: bun run scripts/migrate-knowledge-graph.ts'); + } else { + logger.error({ error: errorMsg }, 'Failed to initialize knowledge graph'); + } + } + + // Log summary of initialized services + if (initializedServices.length > 0) { + logger.info({ services: initializedServices }, 'Initialized services'); + } else { + logger.warn('No services initialized - database appears empty. 
Application will start but some features may be unavailable.'); + } + + // Load persisted world chaos configurations into registry + // Load persisted world chaos configurations into registry + try { + const worldsResult = await withTimeout( + WorldRepository.getAllWorlds(), + 60000, + 'WorldRepository.getAllWorlds' + ); + // Handle both cursor pagination (items) and offset pagination (data) return types + const worldsList = 'items' in worldsResult ? worldsResult.items : worldsResult.data; + let chaosCount = 0; + for (const world of worldsList) { + if (world.chaos) { + const worldId = (world as any)._id?.toString() || ''; + if (worldId) { + ChaosConfigRegistry.setWorldChaosConfiguration(worldId, world.chaos); + chaosCount++; + } + } + } + logger.info( + { count: chaosCount }, + 'Loaded world chaos configurations' + ); + } catch (error) { + logger.error({ error: getErrorMessage(error) }, 'Failed to load chaos configs'); + } + + createApplication({ + port: envValues.PORT, + host: process.env.HOST, + env: envValues.NODE_ENV, + logger, + httpLogger, + }); + + // Open Browser + // Defaults to opening unless disabled via flag or env var + const noBrowserFlag = process.argv.includes("--no-browser"); + const noBrowserEnv = process.env.NO_BROWSER === "true"; + + if (!noBrowserFlag && !noBrowserEnv) { + logger.info(`[app] Opening browser at http://localhost:${envValues.PORT}/admin`); + Bun.spawn(["open", `http://localhost:${envValues.PORT}/admin`]); + } + + + syncModelIndexes({ log: true }) + .then(() => logger.info('Index sync completed successfully')) + .catch((err) => logger.error({ error: getErrorMessage(err) }, 'Index sync failed')); + + process.on("SIGTERM", async () => { + logger.info("SIGTERM received, shutting down gracefully"); + await stopScheduler(); + process.exit(0); + }); + + process.on("SIGINT", async () => { + logger.info("SIGINT received, shutting down gracefully"); + await stopScheduler(); + process.exit(0); + }); +} catch (err) { + logger.error( + { + error: getErrorMessage(err), + }, + "[app] Application failed to start:", + ); + process.exit(1); +} diff --git a/packages/controlmart/package.json b/packages/controlmart/package.json new file mode 100644 index 0000000000000000000000000000000000000000..a13226dde0d118ec7596b6bcabefd3596cc09402 --- /dev/null +++ b/packages/controlmart/package.json @@ -0,0 +1,73 @@ +{ + "name": "controlmart", + "module": "index.ts", + "type": "module", + "private": true, + "scripts": { + "start": "bun run src/application/bootcheck.app.ts && bun run index.ts", + "run:local": "bun run build:ui && bun run dev", + "run:hf": "bun run start --no-browser", + "validate:annotations": "bun run src/scripts/validate-tool-annotations.ts", + "prebuild": "bun run validate:annotations", + "build": "tsc -p tsconfig.json ", + "dev": "bun run src/application/bootcheck.app.ts && bun run --watch index.ts", + "generate": "bun run index.ts", + "seed-dev": "bun run scripts/seed-dev-data.ts", + "lint": "bunx eslint . --ext .ts", + "lint:fix": "bunx eslint . 
--ext .ts --fix", + "fmt:fix": "bunx prettier --write .", + "fmt": "bunx prettier --check .", + "build:ui": "cd ui && bun install && bun run build", + "dev:ui": "cd ui && bun run dev", + "dev:full": "bun run build:ui && bun run dev --no-browser", + "build:binary": "bun build ./bootstrap.ts --compile --outfile morpheus-server", + "build:app": "bun run scripts/build-macos-app.ts" + }, + "devDependencies": { + "@types/bun": "latest", + "@types/compression": "^1.8.1", + "@types/cors": "^2.8.19", + "@types/graphlib": "^2.1.12", + "@types/morgan": "^1.9.10", + "@types/seedrandom": "^3.0.8", + "@typescript-eslint/eslint-plugin": "^8.48.0", + "@typescript-eslint/parser": "^8.48.0", + "eslint": "^9.39.1", + "eslint-config-prettier": "^10.1.8", + "eslint-plugin-import": "^2.32.0", + "eslint-plugin-unused-imports": "^4.3.0", + "prettier": "^3.6.2" + }, + "peerDependencies": { + "typescript": "^5" + }, + "dependencies": { + "@faker-js/faker": "^10.0.0", + "@hokify/agenda": "^6.3.0", + "@scalar/express-api-reference": "^0.8.22", + "@types/express": "^5.0.3", + "@types/jmespath": "^0.15.2", + "@types/swagger-jsdoc": "^6.0.4", + "@types/swagger-ui-express": "^4.1.8", + "ajv": "^8.17.1", + "compression": "^1.8.1", + "cors": "^2.8.5", + "dotenv": "^17.2.3", + "express": "^5.1.0", + "graphlib": "^2.1.8", + "helmet": "^8.1.0", + "jmespath": "^0.16.0", + "mongoose": "^8.19.0", + "morgan": "^1.10.1", + "openai": "^6.10.0", + "pino": "^10.0.0", + "pino-http": "^11.0.0", + "pino-pretty": "^13.1.1", + "seedrandom": "^3.0.5", + "slugify": "^1.6.6", + "swagger-jsdoc": "^6.2.8", + "swagger-ui-express": "^5.0.1", + "uuid": "^13.0.0", + "zod": "^4.1.11" + } +} \ No newline at end of file diff --git a/packages/controlmart/scripts/build-macos-app.ts b/packages/controlmart/scripts/build-macos-app.ts new file mode 100644 index 0000000000000000000000000000000000000000..9f92205dc3c6612816e76065d2423b06cc301998 --- /dev/null +++ b/packages/controlmart/scripts/build-macos-app.ts @@ -0,0 +1,176 @@ +import { mkdirSync, chmodSync } from "node:fs"; +import { $ } from "bun"; + +const APP_NAME = "Skyfall - Morpheus"; +const BUILD_DIR = "build-dist"; +const APP_BUNDLE = `${BUILD_DIR}/${APP_NAME}.app`; +const CONTENTS_DIR = `${APP_BUNDLE}/Contents`; +const MACOS_DIR = `${CONTENTS_DIR}/MacOS`; +const RESOURCES_DIR = `${CONTENTS_DIR}/Resources`; + +console.log("Cleaning build directory..."); +await $`rm -rf ${BUILD_DIR}`; + +console.log("Building binary..."); +try { + // Build the standalone executable from the bootstrap script + await $`bun build ./bootstrap.ts --compile --outfile morpheus-server`; +} catch (e) { + console.error("Build failed:", e); + process.exit(1); +} + +console.log("Creating App Bundle structure..."); +// Create directories +mkdirSync(MACOS_DIR, { recursive: true }); +mkdirSync(RESOURCES_DIR, { recursive: true }); + +console.log("Moving binary..."); +// Move the built binary to the App Bundle +// Move the built binary to the App Bundle +await $`mv morpheus-server ${MACOS_DIR}/`; + +console.log("Copying UI assets..."); +// Copy dist/ui to Contents/MacOS/ui so the binary encounters it at ./ui +try { + const uiSource = "dist/ui"; + if (Bun.file(uiSource).size > 0 || (await $`ls ${uiSource}`.quiet()).exitCode === 0) { + await $`cp -R ${uiSource} ${MACOS_DIR}/ui`; + } else { + console.warn("Warning: dist/ui not found. 
UI will be missing."); + } +} catch (e) { + console.warn("Failed to copy UI assets:", e); +} + +console.log("Copying .env file..."); +try { + const envSource = ".env"; + if (Bun.file(envSource).size > 0) { + await $`cp ${envSource} ${MACOS_DIR}/.env`; + } else { + console.warn("Warning: .env not found. App will launch in Setup Mode."); + } +} catch (e) { + console.warn("Failed to copy .env:", e); +} + +console.log("Copying App Icon..."); +try { + const iconSource = "assets/icon.icns"; + if (Bun.file(iconSource).size > 0) { + await $`cp ${iconSource} ${RESOURCES_DIR}/AppIcon.icns`; + } else { + console.warn("Warning: assets/icon.icns not found. App will have default icon."); + } +} catch (e) { + console.warn("Failed to copy icon:", e); +} + +console.log("Creating Info.plist..."); +const plist = ` + + + + CFBundleName + ${APP_NAME} + CFBundleDisplayName + Morpheus ControlMart + CFBundleIdentifier + com.talkshopclub.morpheus + CFBundleVersion + 1.0.0 + CFBundleShortVersionString + 1.0.0 + CFBundleIconFile + AppIcon + CFBundlePackageType + APPL + CFBundleExecutable + MorpheusLauncher + LSMinimumSystemVersion + 11.0 + LSUIElement + + +`; +await Bun.write(`${CONTENTS_DIR}/Info.plist`, plist); + +console.log("Creating Launcher script..."); +const launcher = `#!/bin/bash +DIR=$(cd "$(dirname "$0")"; pwd) +# Change CWD to the folder containing the .app bundle so .env is stored there +cd "$DIR/../../.." +LOG_FILE="/tmp/morpheus_app.log" + +# Kill any existing instances to prevent EADDRINUSE +pkill -f "morpheus-server" || true + +export MORPHEUS_LAUNCHER=true + +while true; do + echo "$(date): Starting Morpheus..." >> "$LOG_FILE" + + # Run the server in background to allow signal trapping + "$DIR/morpheus-server" >> "$LOG_FILE" 2>&1 & + PID=$! + + # helper to kill server on exit + cleanup() { + echo "Stopping Morpheus..." >> "$LOG_FILE" + kill $PID + exit 0 + } + trap cleanup SIGINT SIGTERM + + # Wait for the process to finish + wait $PID + EXIT_CODE=$? + + # Remove trap for normal exit handling + trap - SIGINT SIGTERM + + # Check for restart request (Exit Code 100) + if [ $EXIT_CODE -eq 100 ]; then + echo "Restart requested..." >> "$LOG_FILE" + sleep 1 + continue + fi + + if [ $EXIT_CODE -ne 0 ]; then + echo "Morpheus exited with code $EXIT_CODE" >> "$LOG_FILE" + + # Write error msg to temp file for safe reading + ERROR_FILE="/tmp/morpheus_error.txt" + tail -n 15 "$LOG_FILE" > "$ERROR_FILE" + + # Show native alert dialog reading from file + osascript -e "display dialog (read POSIX file \\"$ERROR_FILE\\") with title \\"Morpheus Error\\" buttons {\\"OK\\"} default button \\"OK\\" icon stop" + fi + + # Break loop for normal exit or error + break +done + +exit $EXIT_CODE +`; + +const launcherPath = `${MACOS_DIR}/MorpheusLauncher`; +await Bun.write(launcherPath, launcher); + +// Make executable +chmodSync(launcherPath, "755"); +chmodSync(`${MACOS_DIR}/morpheus-server`, "755"); + +console.log("Signing app bundle (ad-hoc)..."); +try { + await $`codesign --force --deep --sign - ${APP_BUNDLE}`; +} catch (e) { + console.warn("Warning: Ad-hoc signing failed. 
App strictly requires xattr -cr to run on other machines."); +} + +console.log("Creating distribution zip..."); +await $`cd ${BUILD_DIR} && zip -r "${APP_NAME}.zip" "${APP_NAME}.app"`; + +console.log(`Successfully created ${APP_BUNDLE}`); +console.log(`Distribution zip ready: ${BUILD_DIR}/${APP_NAME}.zip`); diff --git a/packages/controlmart/scripts/measure-performance.ts b/packages/controlmart/scripts/measure-performance.ts new file mode 100644 index 0000000000000000000000000000000000000000..83bda533c643ae4dda9188b1e73d307a470a99b1 --- /dev/null +++ b/packages/controlmart/scripts/measure-performance.ts @@ -0,0 +1,207 @@ +#!/usr/bin/env bun + +/** + * Performance Baseline Measurement Script + * + * Measures execution time for all Phase 1 capabilities to establish baseline performance metrics. + * Run with: bun run scripts/measure-performance.ts + */ + +import { connectMongo, disconnectMongo } from '../src/services/mongo.service'; +import { CapabilityExecutor } from '../src/services/capability-executor.service'; +import { WorldRepository } from '../src/repository/world.repository'; +import { capabilityCatalog } from '../src/services/capability-catalog.service'; +import { initializeODRegistry } from '../src/ods'; + +interface PerformanceResult { + capabilityId: string; + capabilityName: string; + runs: number; + avgDurationMs: number; + minDurationMs: number; + maxDurationMs: number; + stdDeviation: number; + successRate: number; +} + +async function measureCapabilityPerformance( + executor: CapabilityExecutor, + capabilityId: string, + worldId: string, + inputs: any, + runs: number = 10 +): Promise { + const capability = capabilityCatalog.getById(capabilityId); + if (!capability) { + throw new Error(`Capability not found: ${capabilityId}`); + } + + const durations: number[] = []; + let successCount = 0; + + console.log(`\n📊 Measuring ${capability.name} (${runs} runs)...`); + + for (let i = 0; i < runs; i++) { + try { + const result = await executor.execute({ + capabilityId, + worldId, + inputs, + }); + + if (result.status === 'success' && result.durationMs) { + durations.push(result.durationMs); + successCount++; + } + + process.stdout.write('.'); + } catch (error) { + process.stdout.write('x'); + } + } + + console.log(' Done!'); + + // Calculate statistics + const avgDuration = durations.reduce((a, b) => a + b, 0) / durations.length; + const minDuration = Math.min(...durations); + const maxDuration = Math.max(...durations); + + // Calculate standard deviation + const variance = + durations.reduce((sum, d) => sum + Math.pow(d - avgDuration, 2), 0) / + durations.length; + const stdDeviation = Math.sqrt(variance); + + const successRate = (successCount / runs) * 100; + + return { + capabilityId, + capabilityName: capability.name, + runs, + avgDurationMs: Math.round(avgDuration), + minDurationMs: Math.round(minDuration), + maxDurationMs: Math.round(maxDuration), + stdDeviation: Math.round(stdDeviation), + successRate: Math.round(successRate), + }; +} + +async function main() { + console.log('🚀 Performance Baseline Measurement'); + console.log('==================================\n'); + + // Connect to database + console.log('📦 Connecting to database...'); + const mongoUri = process.env.MONGO_URI || 'mongodb://localhost:27017'; + const dbName = process.env.DB_NAME || 'morpheus-test'; + await connectMongo({ uri: mongoUri, dbName }); + + // Initialize OD Registry + console.log('🔧 Initializing OD Registry...'); + initializeODRegistry(); + + // Create test world + console.log('🌍 Creating test 
world...'); + const world = await WorldRepository.createWorld({ + name: `perf-test-${Date.now()}`, + description: 'Performance testing world', + status: 'active', + }); + console.log(`✅ World created: ${world._id}`); + + const executor = new CapabilityExecutor(); + const results: PerformanceResult[] = []; + + // Disable chaos for baseline measurements + process.env.CHAOS_ENABLED = 'false'; + + // Test each capability + const capabilities = [ + { + id: 'inventory-check', + inputs: { sku: 'SKU-001', locationId: 'WH-01' }, + }, + { + id: 'shipment-tracking', + inputs: { shipmentId: 'SHIP-001' }, + }, + { + id: 'equipment-availability-check', + inputs: { equipmentType: 'forklift', zoneId: 'ZONE-A' }, + }, + { + id: 'dock-appointment-scheduling', + inputs: { + date: '2025-11-21', + dockDoorId: 'DOCK-01', + appointmentType: 'inbound', + }, + }, + ]; + + for (const cap of capabilities) { + const result = await measureCapabilityPerformance( + executor, + cap.id, + world._id.toString(), + cap.inputs, + 10 + ); + results.push(result); + } + + // Print results + console.log('\n\n📈 Performance Baseline Results'); + console.log('================================\n'); + + console.table( + results.map((r) => ({ + Capability: r.capabilityName, + 'Avg (ms)': r.avgDurationMs, + 'Min (ms)': r.minDurationMs, + 'Max (ms)': r.maxDurationMs, + 'Std Dev': r.stdDeviation, + 'Success Rate': `${r.successRate}%`, + })) + ); + + // Overall statistics + const totalAvg = + results.reduce((sum, r) => sum + r.avgDurationMs, 0) / results.length; + console.log(`\n📊 Overall Average: ${Math.round(totalAvg)}ms`); + + // Save results to file + const timestamp = new Date().toISOString(); + const report = { + timestamp, + environment: { + nodeVersion: process.version, + platform: process.platform, + chaosEnabled: false, + }, + results, + summary: { + totalCapabilities: results.length, + overallAvgMs: Math.round(totalAvg), + }, + }; + + await Bun.write( + 'config/performance-baselines.json', + JSON.stringify(report, null, 2) + ); + console.log('\n💾 Results saved to config/performance-baselines.json'); + + // Cleanup + console.log('\n🧹 Cleaning up...'); + await WorldRepository.deleteWorld(world._id.toString()); + await disconnectMongo(); + + console.log('✅ Done!\n'); +} + +main().catch((error) => { + console.error('❌ Error:', error); + process.exit(1); +}); diff --git a/packages/controlmart/scripts/migrate-capabilities-to-db.ts b/packages/controlmart/scripts/migrate-capabilities-to-db.ts new file mode 100644 index 0000000000000000000000000000000000000000..c21fbcabc85ede842e63d5cd3d9b573158fed11d --- /dev/null +++ b/packages/controlmart/scripts/migrate-capabilities-to-db.ts @@ -0,0 +1,163 @@ +/** + * Capability Migration Script + * + * Migrates capabilities from src/capabilities/catalog.ts to MongoDB. + * Idempotent - safe to run multiple times. 
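+ * (Re-running skips capabilities that already exist unless --force is supplied, and
+ * --clear-first empties the collection before seeding, so no duplicates are created.)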
+ * + * Usage: + * bun run scripts/migrate-capabilities-to-db.ts [--dry-run] [--force] [--clear-first] + * + * Options: + * --dry-run Preview changes without writing to database + * --force Update existing capabilities instead of skipping + * --clear-first Delete all existing capabilities before migration + */ + +import { connectMongo, createCollectionsIfNotExist } from "../src/services/mongo.service"; +import { Capability } from "../src/models/capability.model"; +import { CapabilityRepository } from "../src/repository/capability.repository"; +import { INITIAL_CAPABILITIES } from "../src/capabilities/catalog"; +import { getErrorMessage } from "../src/utils/error.util"; +import { loadEnv } from "../src/utils/env.util"; + +interface MigrateOptions { + dryRun?: boolean; + force?: boolean; + clearFirst?: boolean; +} + +interface MigrationStats { + created: number; + updated: number; + skipped: number; + errors: number; +} + +async function migrate(options: MigrateOptions = {}): Promise { + console.log("[migrate-capabilities] Starting migration..."); + console.log(`[migrate-capabilities] Options:`, { + dryRun: options.dryRun || false, + force: options.force || false, + clearFirst: options.clearFirst || false, + }); + + // Load environment variables + const env = loadEnv(); + + // Connect to MongoDB + await connectMongo({ + uri: env.MONGO_URI, + dbName: env.DB_NAME, + log: true, + }); + + // Ensure collection exists with indexes + await createCollectionsIfNotExist({ + models: [Capability], + log: true, + }); + + // Clear existing capabilities if requested + if (options.clearFirst) { + if (options.dryRun) { + console.log("[DRY-RUN] Would clear all existing capabilities"); + } else { + console.log("[migrate-capabilities] Clearing existing capabilities..."); + await (Capability as any).deleteMany({}).exec(); + console.log("[migrate-capabilities] Cleared all capabilities"); + } + } + + // Migrate capabilities + console.log(`\n[migrate-capabilities] Migrating ${INITIAL_CAPABILITIES.length} capabilities...\n`); + + const stats: MigrationStats = { + created: 0, + updated: 0, + skipped: 0, + errors: 0, + }; + + for (const capability of INITIAL_CAPABILITIES) { + try { + if (options.dryRun) { + console.log(`[DRY-RUN] Would create/update: ${capability.id} (${capability.name})`); + stats.created++; + } else { + // Check if capability already exists + const existing = await CapabilityRepository.findById(capability.id); + + if (existing && !options.force) { + console.log(`⏭️ Skipping existing: ${capability.id} (${capability.name})`); + stats.skipped++; + } else if (existing && options.force) { + // Update existing capability + await CapabilityRepository.update(capability.id, capability); + console.log(`✏️ Updated: ${capability.id} (${capability.name})`); + stats.updated++; + } else { + // Create new capability + await CapabilityRepository.create(capability); + console.log(`✅ Created: ${capability.id} (${capability.name})`); + stats.created++; + } + } + } catch (error) { + console.error(`❌ Error migrating ${capability.id}:`, getErrorMessage(error)); + stats.errors++; + } + } + + // Print summary + console.log(`\n${"=".repeat(60)}`); + console.log("[migrate-capabilities] Migration complete"); + console.log(`${"=".repeat(60)}`); + console.log(` Created: ${stats.created}`); + console.log(` Updated: ${stats.updated}`); + console.log(` Skipped: ${stats.skipped}`); + console.log(` Errors: ${stats.errors}`); + console.log(` Total: ${INITIAL_CAPABILITIES.length}`); + console.log(`${"=".repeat(60)}\n`); + + if 
(options.dryRun) { + console.log("💡 This was a dry-run. No changes were made to the database."); + console.log(" Run without --dry-run to apply changes.\n"); + } + + // Exit with appropriate code + if (stats.errors > 0) { + console.error("[migrate-capabilities] Migration completed with errors"); + process.exit(1); + } else { + console.log("[migrate-capabilities] Migration successful"); + process.exit(0); + } +} + +// Main execution +async function main() { + // Parse command line arguments + const args = process.argv.slice(2); + const options: MigrateOptions = { + dryRun: args.includes('--dry-run'), + force: args.includes('--force'), + clearFirst: args.includes('--clear-first'), + }; + + // Validate conflicting options + if (options.clearFirst && options.force) { + console.warn("[migrate-capabilities] Warning: --clear-first and --force both specified."); + console.warn(" --clear-first will delete all capabilities before migration."); + console.warn(" Continuing in 3 seconds... (Ctrl+C to cancel)"); + await new Promise(resolve => setTimeout(resolve, 3000)); + } + + // Run migration + await migrate(options); +} + +// Execute main function +main().catch((err) => { + console.error("[migrate-capabilities] Fatal error:", getErrorMessage(err)); + process.exit(1); +}); diff --git a/packages/controlmart/scripts/migrate-knowledge-graph-to-db.ts b/packages/controlmart/scripts/migrate-knowledge-graph-to-db.ts new file mode 100644 index 0000000000000000000000000000000000000000..b401b12aba4a14b1fc937233541fa92fb0bccc5b --- /dev/null +++ b/packages/controlmart/scripts/migrate-knowledge-graph-to-db.ts @@ -0,0 +1,245 @@ +/** + * Knowledge Graph Migration Script + * + * Builds knowledge graph from code annotations (ODs, capabilities, personas, tools) + * and persists it to MongoDB. 
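+ * The graph is rebuilt in memory via knowledgeGraph.buildGraphFromAnnotations() and then
+ * written out with knowledgeGraph.saveToDB(); with --clear-first the previous nodes and
+ * edges are deleted before the new snapshot is saved.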
+ * + * Usage: + * bun run scripts/migrate-knowledge-graph-to-db.ts [--dry-run] [--clear-first] + * + * Options: + * --dry-run Preview graph build without writing to database + * --clear-first Delete existing graph before migration + * + * Dependencies: + * - Capabilities must be migrated first (run migrate-capabilities-to-db.ts) + * - Personas must be migrated first (run migrate-personas-to-db.ts) + */ + +import { connectMongo, createCollectionsIfNotExist } from "../src/services/mongo.service"; +import { KnowledgeGraphNode } from "../src/models/knowledge-graph-node.model"; +import { KnowledgeGraphEdge } from "../src/models/knowledge-graph-edge.model"; +import { knowledgeGraph } from "../src/services/knowledge-graph.service"; +import { capabilityCatalog } from "../src/services/capability-catalog.service"; +import { personaRegistry } from "../src/services/persona-registry.service"; +import { initializeODRegistry } from "../src/ods/index"; +import { getErrorMessage } from "../src/utils/error.util"; +import { loadEnv } from "../src/utils/env.util"; + +interface MigrateOptions { + dryRun?: boolean; + clearFirst?: boolean; +} + +interface MigrationResult { + nodeCount: number; + edgeCount: number; + source: string; + status: 'success' | 'error'; +} + +/** + * Verify that dependencies (capabilities and personas) are already migrated + */ +async function verifyDependencies(): Promise { + console.log("[migrate-knowledge-graph] Verifying dependencies...\n"); + + // Initialize capability catalog + try { + await capabilityCatalog.initialize(); + const capCount = capabilityCatalog.count(); + if (capCount === 0) { + console.error("❌ Error: No capabilities found in database"); + console.error(" Run: bun run scripts/migrate-capabilities-to-db.ts\n"); + process.exit(1); + } + console.log(` ✅ Capabilities: ${capCount} loaded`); + } catch (error) { + console.error("❌ Error: Failed to load capabilities"); + console.error(` ${getErrorMessage(error)}`); + console.error(" Run: bun run scripts/migrate-capabilities-to-db.ts\n"); + process.exit(1); + } + + // Initialize persona registry + try { + await personaRegistry.initialize(); + const personaCount = personaRegistry.getCount(); + if (personaCount === 0) { + console.error("❌ Error: No personas found in database"); + console.error(" Run: bun run scripts/migrate-personas-to-db.ts\n"); + process.exit(1); + } + console.log(` ✅ Personas: ${personaCount} loaded`); + } catch (error) { + console.error("❌ Error: Failed to load personas"); + console.error(` ${getErrorMessage(error)}`); + console.error(" Run: bun run scripts/migrate-personas-to-db.ts\n"); + process.exit(1); + } + + // Initialize OD Registry + try { + initializeODRegistry(); + console.log(` ✅ OD Registry initialized`); + } catch (error) { + console.error("❌ Error: Failed to initialize OD Registry"); + console.error(` ${getErrorMessage(error)}\n`); + process.exit(1); + } + + console.log(""); +} + +/** + * Clear existing knowledge graph from database + */ +async function clearGraph(): Promise { + console.log("[migrate-knowledge-graph] Clearing existing graph..."); + await (KnowledgeGraphNode as any).deleteMany({}).exec(); + await (KnowledgeGraphEdge as any).deleteMany({}).exec(); + console.log("[migrate-knowledge-graph] Cleared all nodes and edges\n"); +} + +/** + * Main migration function + */ +async function migrate(options: MigrateOptions = {}): Promise { + console.log("[migrate-knowledge-graph] Starting migration..."); + console.log(`[migrate-knowledge-graph] Options:`, { + dryRun: options.dryRun || 
false, + clearFirst: options.clearFirst || false, + }); + console.log(""); + + // Load environment variables + const env = loadEnv(); + + // Connect to MongoDB + await connectMongo({ + uri: env.MONGO_URI, + dbName: env.DB_NAME, + log: true, + }); + + // Ensure collections exist with indexes + await createCollectionsIfNotExist({ + models: [KnowledgeGraphNode, KnowledgeGraphEdge], + log: true, + }); + + console.log(""); + + // Verify dependencies (capabilities, personas, ODs) + await verifyDependencies(); + + // Clear existing graph if requested + if (options.clearFirst && !options.dryRun) { + await clearGraph(); + } else if (options.clearFirst && options.dryRun) { + console.log("[DRY-RUN] Would clear existing graph\n"); + } + + // Build knowledge graph from annotations + console.log("[migrate-knowledge-graph] Building knowledge graph from annotations...\n"); + + try { + if (options.dryRun) { + // In dry-run, build graph but don't save + console.log("[DRY-RUN] Building graph (will not save to database)..."); + knowledgeGraph.buildGraphFromAnnotations(); + + const nodeCount = (knowledgeGraph as any).graph.nodeCount(); + const edgeCount = (knowledgeGraph as any).graph.edgeCount(); + + console.log(`[DRY-RUN] Would save: ${nodeCount} nodes, ${edgeCount} edges\n`); + + return { + nodeCount, + edgeCount, + source: 'annotations', + status: 'success', + }; + } else { + // Build graph from annotations + knowledgeGraph.buildGraphFromAnnotations(); + + const nodeCount = (knowledgeGraph as any).graph.nodeCount(); + const edgeCount = (knowledgeGraph as any).graph.edgeCount(); + + console.log(`[KnowledgeGraphService] Built graph: ${nodeCount} nodes, ${edgeCount} edges\n`); + + // Save to MongoDB + console.log("[migrate-knowledge-graph] Saving to MongoDB..."); + await knowledgeGraph.saveToDB(); + console.log("[migrate-knowledge-graph] Successfully saved graph to database\n"); + + return { + nodeCount, + edgeCount, + source: 'annotations', + status: 'success', + }; + } + } catch (error) { + console.error("❌ Error building or saving knowledge graph:"); + console.error(` ${getErrorMessage(error)}\n`); + return { + nodeCount: 0, + edgeCount: 0, + source: 'annotations', + status: 'error', + }; + } +} + +/** + * Main execution + */ +async function main() { + // Parse command line arguments + const args = process.argv.slice(2); + const options: MigrateOptions = { + dryRun: args.includes('--dry-run'), + clearFirst: args.includes('--clear-first'), + }; + + // Run migration + const result = await migrate(options); + + // Print summary + console.log(`${"=".repeat(60)}`); + console.log("[migrate-knowledge-graph] Migration complete"); + console.log(`${"=".repeat(60)}`); + console.log(` Nodes: ${result.nodeCount}`); + console.log(` Edges: ${result.edgeCount}`); + console.log(` Source: ${result.source}`); + console.log(` Status: ${result.status}`); + console.log(`${"=".repeat(60)}\n`); + + if (options.dryRun) { + console.log("💡 This was a dry-run. 
No changes were made to the database."); + console.log(" Run without --dry-run to apply changes.\n"); + } + + if (result.status === 'error') { + console.error("[migrate-knowledge-graph] Migration failed"); + process.exit(1); + } else { + console.log("[migrate-knowledge-graph] Migration successful"); + + if (!options.dryRun) { + console.log("\n💡 Next steps:"); + console.log(" - Restart your application to load the knowledge graph from MongoDB"); + console.log(" - The graph will be loaded via knowledgeGraph.initialize()\n"); + } + + process.exit(0); + } +} + +// Execute main function +main().catch((err) => { + console.error("[migrate-knowledge-graph] Fatal error:", getErrorMessage(err)); + process.exit(1); +}); diff --git a/packages/controlmart/scripts/migrate-personas-to-db.ts b/packages/controlmart/scripts/migrate-personas-to-db.ts new file mode 100644 index 0000000000000000000000000000000000000000..dd1041d85c36ac874dd85e752416bd0c2f1c0c52 --- /dev/null +++ b/packages/controlmart/scripts/migrate-personas-to-db.ts @@ -0,0 +1,163 @@ +/** + * Persona Migration Script + * + * Migrates personas from src/personas/catalog.ts to MongoDB. + * Idempotent - safe to run multiple times. + * + * Usage: + * bun run scripts/migrate-personas-to-db.ts [--dry-run] [--force] [--clear-first] + * + * Options: + * --dry-run Preview changes without writing to database + * --force Update existing personas instead of skipping + * --clear-first Delete all existing personas before migration + */ + +import { connectMongo, createCollectionsIfNotExist } from "../src/services/mongo.service"; +import { Persona } from "../src/models/persona.model"; +import { PersonaRepository } from "../src/repository/persona.repository"; +import { personaCatalog } from "../src/personas/catalog"; +import { getErrorMessage } from "../src/utils/error.util"; +import { loadEnv } from "../src/utils/env.util"; + +interface MigrateOptions { + dryRun?: boolean; + force?: boolean; + clearFirst?: boolean; +} + +interface MigrationStats { + created: number; + updated: number; + skipped: number; + errors: number; +} + +async function migrate(options: MigrateOptions = {}): Promise { + console.log("[migrate-personas] Starting migration..."); + console.log(`[migrate-personas] Options:`, { + dryRun: options.dryRun || false, + force: options.force || false, + clearFirst: options.clearFirst || false, + }); + + // Load environment variables + const env = loadEnv(); + + // Connect to MongoDB + await connectMongo({ + uri: env.MONGO_URI, + dbName: env.DB_NAME, + log: true, + }); + + // Ensure collection exists with indexes + await createCollectionsIfNotExist({ + models: [Persona], + log: true, + }); + + // Clear existing personas if requested + if (options.clearFirst) { + if (options.dryRun) { + console.log("[DRY-RUN] Would clear all existing personas"); + } else { + console.log("[migrate-personas] Clearing existing personas..."); + await (Persona as any).deleteMany({}).exec(); + console.log("[migrate-personas] Cleared all personas"); + } + } + + // Migrate personas + console.log(`\n[migrate-personas] Migrating ${personaCatalog.length} personas...\n`); + + const stats: MigrationStats = { + created: 0, + updated: 0, + skipped: 0, + errors: 0, + }; + + for (const persona of personaCatalog) { + try { + if (options.dryRun) { + console.log(`[DRY-RUN] Would create/update: ${persona.id} (${persona.name}) - ${persona.capabilityIds.length} capabilities`); + stats.created++; + } else { + // Check if persona already exists + const existing = await 
PersonaRepository.findById(persona.id); + + if (existing && !options.force) { + console.log(`⏭️ Skipping existing: ${persona.id} (${persona.name})`); + stats.skipped++; + } else if (existing && options.force) { + // Update existing persona + await PersonaRepository.update(persona.id, persona); + console.log(`✏️ Updated: ${persona.id} (${persona.name}) - ${persona.capabilityIds.length} capabilities`); + stats.updated++; + } else { + // Create new persona + await PersonaRepository.create(persona); + console.log(`✅ Created: ${persona.id} (${persona.name}) - ${persona.capabilityIds.length} capabilities`); + stats.created++; + } + } + } catch (error) { + console.error(`❌ Error migrating ${persona.id}:`, getErrorMessage(error)); + stats.errors++; + } + } + + // Print summary + console.log(`\n${"=".repeat(60)}`); + console.log("[migrate-personas] Migration complete"); + console.log(`${"=".repeat(60)}`); + console.log(` Created: ${stats.created}`); + console.log(` Updated: ${stats.updated}`); + console.log(` Skipped: ${stats.skipped}`); + console.log(` Errors: ${stats.errors}`); + console.log(` Total: ${personaCatalog.length}`); + console.log(`${"=".repeat(60)}\n`); + + if (options.dryRun) { + console.log("💡 This was a dry-run. No changes were made to the database."); + console.log(" Run without --dry-run to apply changes.\n"); + } + + // Exit with appropriate code + if (stats.errors > 0) { + console.error("[migrate-personas] Migration completed with errors"); + process.exit(1); + } else { + console.log("[migrate-personas] Migration successful"); + process.exit(0); + } +} + +// Main execution +async function main() { + // Parse command line arguments + const args = process.argv.slice(2); + const options: MigrateOptions = { + dryRun: args.includes('--dry-run'), + force: args.includes('--force'), + clearFirst: args.includes('--clear-first'), + }; + + // Validate conflicting options + if (options.clearFirst && options.force) { + console.warn("[migrate-personas] Warning: --clear-first and --force both specified."); + console.warn(" --clear-first will delete all personas before migration."); + console.warn(" Continuing in 3 seconds... 
(Ctrl+C to cancel)"); + await new Promise(resolve => setTimeout(resolve, 3000)); + } + + // Run migration + await migrate(options); +} + +// Execute main function +main().catch((err) => { + console.error("[migrate-personas] Fatal error:", getErrorMessage(err)); + process.exit(1); +}); diff --git a/packages/controlmart/scripts/seed-dev-data.ts b/packages/controlmart/scripts/seed-dev-data.ts new file mode 100644 index 0000000000000000000000000000000000000000..136f043251be5fa2a834888beddcd9d77af72a2e --- /dev/null +++ b/packages/controlmart/scripts/seed-dev-data.ts @@ -0,0 +1,436 @@ +/** + * Seed Development Data Script + * + * One-command solution to seed MongoDB with all development data: + * - Capabilities (4) + * - Personas (5) + * - Knowledge Graph (63 nodes, 80 edges) + * - Sample Worlds (5 with diverse sampling strategies) + * + * Usage: + * bun run seed-dev # Interactive with confirmation + * bun run seed-dev --no-confirm # Skip confirmation + * bun run seed-dev --skip-worlds # Skip world creation + * + * Note: This script orchestrates other migration scripts + */ + +import { spawn } from 'child_process'; +import * as readline from 'readline'; +import * as crypto from 'crypto'; +import { connectMongo, createCollectionsIfNotExist } from "../src/services/mongo.service"; +import { World } from "../src/models/world.model"; +import { Capability } from "../src/models/capability.model"; +import { Persona } from "../src/models/persona.model"; +import { KnowledgeGraphNode } from "../src/models/knowledge-graph-node.model"; +import { KnowledgeGraphEdge } from "../src/models/knowledge-graph-edge.model"; +import { WorldRepository } from "../src/repository/world.repository"; +import { capabilitySamplingService } from "../src/services/capability-sampling.service"; +import { capabilityCatalog } from "../src/services/capability-catalog.service"; +import { getErrorMessage } from "../src/utils/error.util"; +import { loadEnv } from "../src/utils/env.util"; +import type { TWorldInput, SamplingStrategy, PersonaConfig } from "../src/models/world.model.type"; + +interface SeedOptions { + noConfirm?: boolean; + skipWorlds?: boolean; +} + +interface SeedResult { + capabilities: number; + personas: number; + knowledgeGraphNodes: number; + knowledgeGraphEdges: number; + worlds: number; + worldDetails: Array<{ name: string; capabilityCount: number }>; + duration: number; +} + +interface WorldSpec { + name: string; + url: string; + description: string; + samplingStrategy: SamplingStrategy; + personas?: PersonaConfig; + mpcCompany?: string; +} + +/** + * Sample world specifications demonstrating different sampling strategies + */ +const SAMPLE_WORLD_SPECS: WorldSpec[] = [ + { + name: 'development-local', + url: 'http://localhost:3000', + description: 'Full capability access for local development', + samplingStrategy: { type: 'all' }, + personas: { + allowedPersonas: ['warehouse-manager', 'system-administrator'] + }, + mpcCompany: 'Morpheus Labs' + }, + { + name: 'staging-integration', + url: 'https://staging.example.com', + description: 'Inventory-focused staging environment', + samplingStrategy: { + type: 'filter', + filter: { domain: ['inventory', 'warehousing'] } + }, + personas: { + allowedPersonas: ['warehouse-manager', 'warehouse-worker', 'store-manager'] + }, + mpcCompany: 'Morpheus Staging' + }, + { + name: 'demo-showcase', + url: 'https://demo.example.com', + description: 'Reproducible demo with seeded capabilities', + samplingStrategy: { + type: 'seeded', + count: 3, + seed: 12345 + }, + personas: { + 
allowedPersonas: ['store-manager', 'customer-service-rep'] + }, + mpcCompany: 'Morpheus Demo' + }, + { + name: 'test-automation', + url: 'http://test.example.com', + description: 'Random capability subset for testing', + samplingStrategy: { + type: 'random', + count: 2 + }, + personas: { + allowedPersonas: ['warehouse-worker', 'store-manager'], + personaOverrides: { + 'warehouse-worker': { + capabilityIds: ['inventory-check'] + } + } + }, + mpcCompany: 'Morpheus Test' + }, + { + name: 'performance-load', + url: 'http://perf.example.com', + description: 'Simple capabilities for load testing', + samplingStrategy: { + type: 'filter', + filter: { complexity: 'simple' } + }, + mpcCompany: 'Morpheus Performance' + } +]; + +/** + * Prompt user for confirmation + */ +async function promptConfirmation(env: ReturnType): Promise { + console.log('\n⚠️ This will clear ALL existing data and reseed the database.'); + console.log(` Database: ${env.DB_NAME} (${env.MONGO_URI})\n`); + console.log(' This will:'); + console.log(' - Delete all capabilities, personas, knowledge graph, and worlds'); + console.log(' - Migrate 4 capabilities'); + console.log(' - Migrate 5 personas'); + console.log(' - Build and save knowledge graph (63 nodes, 80 edges)'); + console.log(' - Create 5 sample worlds with sampling strategies\n'); + + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + + return new Promise((resolve) => { + rl.question('Continue? (y/N): ', (answer) => { + rl.close(); + resolve(answer.toLowerCase() === 'y' || answer.toLowerCase() === 'yes'); + }); + }); +} + +/** + * Run a migration script + */ +async function runScript(scriptPath: string, args: string[] = []): Promise { + return new Promise((resolve, reject) => { + const proc = spawn('bun', ['run', scriptPath, ...args], { + stdio: 'inherit', + cwd: process.cwd() + }); + + proc.on('close', (code) => { + if (code === 0) { + resolve(); + } else { + reject(new Error(`Script ${scriptPath} exited with code ${code}`)); + } + }); + + proc.on('error', (error) => { + reject(error); + }); + }); +} + +/** + * Clear all collections + */ +async function clearAllData(): Promise { + console.log('\n[seed-dev] Clearing existing data...'); + + await (KnowledgeGraphNode as any).deleteMany({}).exec(); + await (KnowledgeGraphEdge as any).deleteMany({}).exec(); + console.log(' ✓ Cleared knowledge graph'); + + await (Persona as any).deleteMany({}).exec(); + console.log(' ✓ Cleared personas'); + + await (Capability as any).deleteMany({}).exec(); + console.log(' ✓ Cleared capabilities'); + + await (World as any).deleteMany({}).exec(); + console.log(' ✓ Cleared worlds'); + + console.log('[seed-dev] All data cleared\n'); +} + +/** + * Generate API credentials + */ +function generateApiKey(): string { + return `api_${crypto.randomBytes(16).toString('hex')}`; +} + +function generateApiSecret(): string { + return crypto.randomBytes(32).toString('hex'); +} + +/** + * Create sample worlds with sampling strategies + */ +async function createSampleWorlds(): Promise> { + console.log('\n[seed-dev] Creating sample worlds with sampling strategies...\n'); + + // Initialize capability catalog (needed for sampling) + await capabilityCatalog.initialize(); + + const worldDetails: Array<{ name: string; capabilityCount: number }> = []; + + for (const spec of SAMPLE_WORLD_SPECS) { + try { + // Apply sampling strategy to get capability IDs + const capabilityIds = capabilitySamplingService.applySamplingStrategy( + spec.samplingStrategy, + spec.personas + ); + + 
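+      // capabilityIds now holds the IDs selected by this spec's sampling strategy,
+      // narrowed to the allowed personas' capabilities when a personas config is given.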
// Create world with sampled capabilities + const worldInput: TWorldInput = { + name: spec.name, + url: spec.url, + apiKey: generateApiKey(), + apiSecret: generateApiSecret(), + description: spec.description, + mpcCompany: spec.mpcCompany, + samplingStrategy: spec.samplingStrategy, + capabilityIds, + personas: spec.personas + }; + + const world = await WorldRepository.createWorld(worldInput); + + const strategyDesc = + spec.samplingStrategy.type === 'all' ? 'ALL' : + spec.samplingStrategy.type === 'filter' ? 'FILTERED' : + spec.samplingStrategy.type === 'seeded' ? 'SEEDED' : + 'RANDOM'; + + console.log(` ✓ Created: ${world.name}`); + console.log(` Strategy: ${strategyDesc} (${capabilityIds.length} capabilities)`); + + worldDetails.push({ + name: world.name, + capabilityCount: capabilityIds.length + }); + } catch (error) { + console.error(` ✗ Failed to create world: ${spec.name}`); + console.error(` Error: ${getErrorMessage(error)}`); + throw error; + } + } + + console.log(''); + return worldDetails; +} + +/** + * Validate seeded data + */ +async function validateData(skipWorlds: boolean): Promise<{ + capabilities: number; + personas: number; + nodes: number; + edges: number; + worlds: number; +}> { + console.log('[seed-dev] Validating seeded data...\n'); + + const capCount = await (Capability as any).countDocuments().exec(); + const personaCount = await (Persona as any).countDocuments().exec(); + const nodeCount = await (KnowledgeGraphNode as any).countDocuments().exec(); + const edgeCount = await (KnowledgeGraphEdge as any).countDocuments().exec(); + const worldCount = await (World as any).countDocuments().exec(); + + console.log(` Capabilities: ${capCount} ${capCount === 4 ? '✓' : '✗'}`); + console.log(` Personas: ${personaCount} ${personaCount === 5 ? '✓' : '✗'}`); + console.log(` KG Nodes: ${nodeCount} ${nodeCount > 0 ? '✓' : '✗'}`); + console.log(` KG Edges: ${edgeCount} ${edgeCount > 0 ? '✓' : '✗'}`); + console.log(` Worlds: ${worldCount} ${skipWorlds || worldCount === 5 ? 
'✓' : '✗'}`); + console.log(''); + + if (capCount !== 4 || personaCount !== 5 || nodeCount === 0 || edgeCount === 0) { + throw new Error('Validation failed: Data counts do not match expected values'); + } + + if (!skipWorlds && worldCount !== 5) { + throw new Error('Validation failed: Expected 5 worlds but found ' + worldCount); + } + + return { + capabilities: capCount, + personas: personaCount, + nodes: nodeCount, + edges: edgeCount, + worlds: worldCount + }; +} + +/** + * Main seeding function + */ +async function seedDevData(options: SeedOptions): Promise { + const startTime = Date.now(); + + console.log('[seed-dev] Starting development data seeding...\n'); + + // Load environment + const env = loadEnv(); + + // Confirmation prompt (unless --no-confirm) + if (!options.noConfirm) { + const confirmed = await promptConfirmation(env); + if (!confirmed) { + console.log('\n[seed-dev] Seeding cancelled by user\n'); + process.exit(0); + } + } + + // Connect to MongoDB + await connectMongo({ + uri: env.MONGO_URI, + dbName: env.DB_NAME, + log: false + }); + + // Ensure collections exist + await createCollectionsIfNotExist({ + models: [World, Capability, Persona, KnowledgeGraphNode, KnowledgeGraphEdge], + log: false + }); + + // Clear existing data + await clearAllData(); + + // Run capability migration + console.log('[seed-dev] Running capability migration...'); + await runScript('scripts/migrate-capabilities-to-db.ts', ['--clear-first']); + + // Run persona migration + console.log('\n[seed-dev] Running persona migration...'); + await runScript('scripts/migrate-personas-to-db.ts', ['--clear-first']); + + // Create sample worlds (unless --skip-worlds) + let worldDetails: Array<{ name: string; capabilityCount: number }> = []; + if (!options.skipWorlds) { + worldDetails = await createSampleWorlds(); + } else { + console.log('\n[seed-dev] Skipping world creation (--skip-worlds)\n'); + } + + // Run knowledge graph migration + console.log('[seed-dev] Running knowledge graph migration...'); + await runScript('scripts/migrate-knowledge-graph-to-db.ts', ['--clear-first']); + + // Validate data + const counts = await validateData(options.skipWorlds || false); + + const duration = (Date.now() - startTime) / 1000; + + return { + capabilities: counts.capabilities, + personas: counts.personas, + knowledgeGraphNodes: counts.nodes, + knowledgeGraphEdges: counts.edges, + worlds: counts.worlds, + worldDetails, + duration + }; +} + +/** + * Main execution + */ +async function main() { + // Parse command line arguments + const args = process.argv.slice(2); + const options: SeedOptions = { + noConfirm: args.includes('--no-confirm'), + skipWorlds: args.includes('--skip-worlds') + }; + + try { + const result = await seedDevData(options); + + // Print summary + console.log(`${"=".repeat(60)}`); + console.log('Development Data Seeding Complete'); + console.log(`${"=".repeat(60)}`); + console.log(` Capabilities: ${result.capabilities} migrated`); + console.log(` Personas: ${result.personas} migrated`); + console.log(` Knowledge Graph: ${result.knowledgeGraphNodes} nodes, ${result.knowledgeGraphEdges} edges`); + console.log(` Worlds: ${result.worlds} created`); + + if (result.worldDetails.length > 0) { + result.worldDetails.forEach(w => { + const strategyType = SAMPLE_WORLD_SPECS.find(s => s.name === w.name)?.samplingStrategy.type || 'unknown'; + const label = strategyType.toUpperCase(); + console.log(` - ${w.name.padEnd(22)} ${label.padEnd(10)} (${w.capabilityCount} caps)`); + }); + } + + console.log(` Duration: 
${result.duration.toFixed(1)}s`); + console.log(` Status: ✅ SUCCESS`); + console.log(`${"=".repeat(60)}\n`); + + if (result.worldDetails.length > 0) { + console.log('💡 Sample worlds demonstrate different sampling strategies:'); + console.log(' - Use \'development-local\' for full access testing'); + console.log(' - Use \'demo-showcase\' for reproducible demos (seeded)'); + console.log(' - Use \'test-automation\' for integration tests\n'); + } + + console.log('[seed-dev] ✅ Development environment ready!\n'); + process.exit(0); + } catch (error) { + console.error('\n[seed-dev] ✗ Seeding failed:'); + console.error(` ${getErrorMessage(error)}\n`); + process.exit(1); + } +} + +// Execute main function +main(); diff --git a/packages/controlmart/scripts/validate-seed-data.ts b/packages/controlmart/scripts/validate-seed-data.ts new file mode 100644 index 0000000000000000000000000000000000000000..6d679e67db3260c9fa60a4d671cb429292eb6e55 --- /dev/null +++ b/packages/controlmart/scripts/validate-seed-data.ts @@ -0,0 +1,76 @@ +/** + * Quick validation script to check seeded data + */ + +import { connectMongo } from "../src/services/mongo.service"; +import { Capability } from "../src/models/capability.model"; +import { Persona } from "../src/models/persona.model"; +import { KnowledgeGraphNode } from "../src/models/knowledge-graph-node.model"; +import { KnowledgeGraphEdge } from "../src/models/knowledge-graph-edge.model"; +import { World } from "../src/models/world.model"; +import { loadEnv } from "../src/utils/env.util"; + +async function validate() { + const env = loadEnv(); + + await connectMongo({ + uri: env.MONGO_URI, + dbName: env.DB_NAME, + log: false + }); + + console.log("=== Test 1 Validation ===\n"); + + const capCount = await (Capability as any).countDocuments().exec(); + const personaCount = await (Persona as any).countDocuments().exec(); + const nodeCount = await (KnowledgeGraphNode as any).countDocuments().exec(); + const edgeCount = await (KnowledgeGraphEdge as any).countDocuments().exec(); + const worldCount = await (World as any).countDocuments().exec(); + + console.log(`Capabilities: ${capCount} ${capCount === 4 ? '✓' : '✗'}`); + console.log(`Personas: ${personaCount} ${personaCount === 5 ? '✓' : '✗'}`); + console.log(`KG Nodes: ${nodeCount} ${nodeCount === 63 ? '✓' : '✗'}`); + console.log(`KG Edges: ${edgeCount} ${edgeCount === 80 ? '✓' : '✗'}`); + console.log(`Worlds: ${worldCount} ${worldCount === 0 ? 
'✓' : '✗'}`); + + console.log("\n=== Sample Capability ==="); + const sampleCap = await (Capability as any).findOne({}).select('id name domain').lean().exec(); + console.log(JSON.stringify(sampleCap, null, 2)); + + console.log("\n=== Sample Persona ==="); + const samplePersona = await (Persona as any).findOne({}).select('id name capabilityIds').lean().exec(); + console.log(JSON.stringify(samplePersona, null, 2)); + + console.log("\n=== KG Node Types ==="); + const nodeTypes = await (KnowledgeGraphNode as any).aggregate([ + { $group: { _id: '$type', count: { $sum: 1 } } }, + { $sort: { _id: 1 } } + ]).exec(); + console.log(JSON.stringify(nodeTypes, null, 2)); + + // Validate worlds if they exist + if (worldCount > 0) { + console.log("\n=== World Details ==="); + const worlds = await (World as any) + .find({}) + .select('name capabilityIds samplingStrategy personas.allowedPersonas apiKey mpcCompany') + .lean() + .exec(); + + for (const world of worlds) { + console.log(`\n${world.name}:`); + console.log(` Company: ${world.mpcCompany || 'N/A'}`); + console.log(` Strategy: ${world.samplingStrategy?.type?.toUpperCase() || 'N/A'}`); + console.log(` Capabilities: ${world.capabilityIds?.length || 0} - [${(world.capabilityIds || []).join(', ')}]`); + console.log(` Personas: ${world.personas?.allowedPersonas?.length || 0} - [${(world.personas?.allowedPersonas || []).join(', ')}]`); + console.log(` API Key: ${world.apiKey ? '✓ Set' : '✗ Missing'}`); + } + } + + process.exit(0); +} + +validate().catch(err => { + console.error("Validation error:", err); + process.exit(1); +}); diff --git a/packages/controlmart/src/__tests__/phase4-integration.test.ts b/packages/controlmart/src/__tests__/phase4-integration.test.ts new file mode 100644 index 0000000000000000000000000000000000000000..23d2af2d6df4e834b556f9d9afa9b6962961b2d1 --- /dev/null +++ b/packages/controlmart/src/__tests__/phase4-integration.test.ts @@ -0,0 +1,773 @@ +/** + * Phase 4 Integration Tests (MORPH-415) + * + * Comprehensive integration tests covering all Phase 4 features: + * - Persona System (MORPH-401-406) + * - World Persona Configuration (MORPH-407) + * - Chaos Cascade (MORPH-412) + * - End-to-End Cross-System Integration + */ + +import { describe, it, expect, beforeAll, afterEach } from 'bun:test'; +import { personaRegistry } from '../services/persona-registry.service'; +import { capabilityCatalog } from '../services/capability-catalog.service'; +import { ChaosConfigRegistry } from '../services/chaos-config.registry'; +import type { TWorld, PersonaConfig } from '../models/world.model.type'; +import type { ChaosPolicy } from '../types/od.type'; + +// Dynamic imports to avoid circular dependency issues +let WorldRepository: any; +let capabilityExecutor: any; + +describe('Phase 4 Integration Tests (MORPH-415)', () => { + // Track created worlds for cleanup + const createdWorldIds: string[] = []; + + // Save original env vars + const originalChaosEnabled = process.env.CHAOS_ENABLED; + const originalChaosPreset = process.env.CHAOS_PRESET; + + beforeAll(async () => { + // Ensure MongoDB is connected + const { connectMongo } = await import('../services/mongo.service'); + const { loadEnv } = await import('../utils/env.util'); + + loadEnv(); + await connectMongo({ + uri: process.env.MONGO_URI || 'mongodb://localhost:27017', + dbName: process.env.DB_NAME || 'morpheus-test', + }); + + // Dynamic imports to defer loading until after MongoDB connection + const worldRepoModule = await import('../repository/world.repository'); + WorldRepository = 
worldRepoModule.WorldRepository; + + const capExecModule = await import('../services/capability-executor.service'); + capabilityExecutor = capExecModule.capabilityExecutor; + }); + + afterEach(async () => { + // Clean up created worlds + for (const worldId of createdWorldIds) { + try { + await WorldRepository.deleteWorld(worldId); + } catch (error) { + // Ignore cleanup errors + } + } + createdWorldIds.length = 0; + + // Clear chaos overrides + ChaosConfigRegistry.clearOverrides(); + + // Restore env vars + if (originalChaosEnabled !== undefined) { + process.env.CHAOS_ENABLED = originalChaosEnabled; + } else { + delete process.env.CHAOS_ENABLED; + } + if (originalChaosPreset !== undefined) { + process.env.CHAOS_PRESET = originalChaosPreset; + } else { + delete process.env.CHAOS_PRESET; + } + }); + + // ============================================================================= + // A. PERSONA SYSTEM INTEGRATION (MORPH-401-406) + // ============================================================================= + + describe('A. Persona System Integration', () => { + it('Test 1: Persona-Capability Bidirectional Lookup', () => { + // 1. Verify all personas exist + const allPersonas = personaRegistry.getAll(); + expect(allPersonas.length).toBeGreaterThanOrEqual(5); + + const personaIds = allPersonas.map((p) => p.id); + expect(personaIds).toContain('store-manager'); + expect(personaIds).toContain('warehouse-worker'); + expect(personaIds).toContain('customer-service-rep'); + expect(personaIds).toContain('warehouse-manager'); + expect(personaIds).toContain('system-administrator'); + + // 2. Test store-manager capabilities + const storeManagerCaps = personaRegistry.getCapabilitiesForPersona('store-manager'); + expect(storeManagerCaps).toContain('inventory-check'); + expect(storeManagerCaps).toContain('shipment-tracking'); + expect(storeManagerCaps.length).toBe(2); + + // 3. Test reverse lookup - inventory-check should list store-manager + const inventoryCheckPersonas = personaRegistry.getPersonasForCapability('inventory-check'); + const inventoryPersonaIds = inventoryCheckPersonas.map((p) => p.id); + expect(inventoryPersonaIds).toContain('store-manager'); + + // 4. Test warehouse-worker capabilities + const warehouseWorkerCaps = personaRegistry.getCapabilitiesForPersona('warehouse-worker'); + expect(warehouseWorkerCaps).toContain('inventory-check'); + expect(warehouseWorkerCaps).toContain('equipment-availability-check'); + expect(warehouseWorkerCaps.length).toBe(2); + + // 5. 
Verify bidirectional consistency for all personas + allPersonas.forEach((persona) => { + const capabilities = personaRegistry.getCapabilitiesForPersona(persona.id); + capabilities.forEach((capId) => { + const personasForCap = personaRegistry.getPersonasForCapability(capId); + const personaIdsForCap = personasForCap.map((p) => p.id); + expect(personaIdsForCap).toContain(persona.id); + }); + }); + }); + + it('Test 2: Persona API Filtering', () => { + // Test filtering by role + const managementPersonas = personaRegistry.filter({ role: 'management' }); + const managementIds = managementPersonas.map((p) => p.id); + expect(managementIds).toContain('store-manager'); + expect(managementIds).toContain('warehouse-manager'); + + // Test filtering by department + const warehousePersonas = personaRegistry.filter({ department: 'warehouse' }); + const warehouseIds = warehousePersonas.map((p) => p.id); + expect(warehouseIds).toContain('warehouse-worker'); + expect(warehouseIds).toContain('warehouse-manager'); + + // Test filtering by access level + const adminPersonas = personaRegistry.filter({ accessLevel: 'admin' }); + expect(adminPersonas.length).toBeGreaterThanOrEqual(1); + const adminIds = adminPersonas.map((p) => p.id); + expect(adminIds).toContain('system-administrator'); + + // Test filtering by tags + const customerFacingPersonas = personaRegistry.filter({ tags: ['customer-facing'] }); + expect(customerFacingPersonas.length).toBeGreaterThan(0); + }); + + it('Test 3: Capability-Persona Integration', () => { + // Test filtering capabilities by single persona + const storeManagerCapabilities = capabilityCatalog.filterByPersona('store-manager'); + expect(storeManagerCapabilities.length).toBe(2); + const storeManagerCapIds = storeManagerCapabilities.map((c) => c.id); + expect(storeManagerCapIds).toContain('inventory-check'); + expect(storeManagerCapIds).toContain('shipment-tracking'); + + // Test system-administrator has access to all capabilities + const adminCapabilities = capabilityCatalog.filterByPersona('system-administrator'); + expect(adminCapabilities.length).toBeGreaterThanOrEqual(4); + + // Verify each capability has correct personas listed + const allCapabilities = capabilityCatalog.getAll(); + allCapabilities.forEach((capability) => { + if (capability.personas && capability.personas.length > 0) { + capability.personas.forEach((personaId) => { + expect(personaRegistry.exists(personaId)).toBe(true); + const personaCaps = personaRegistry.getCapabilitiesForPersona(personaId); + expect(personaCaps).toContain(capability.id); + }); + } + }); + }); + }); + + // ============================================================================= + // B. WORLD PERSONA CONFIGURATION (MORPH-407) + // ============================================================================= + + describe('B. 
World Persona Configuration', () => { + it('Test 4: World Creation with Single Persona Restriction', async () => { + // Create world restricted to store-manager + const world = await WorldRepository.createWorld({ + name: 'Store Manager World', + description: 'World restricted to store manager persona', + samplingStrategy: { type: 'all' }, + personas: { + allowedPersonas: ['store-manager'], + }, + }); + + createdWorldIds.push(world._id.toString()); + + // Verify capabilities are filtered to store-manager's capabilities + expect(world.capabilityIds).toBeDefined(); + expect(world.capabilityIds!.length).toBe(2); + expect(world.capabilityIds).toContain('inventory-check'); + expect(world.capabilityIds).toContain('shipment-tracking'); + + // Verify personas config persisted + expect(world.personas).toBeDefined(); + expect(world.personas!.allowedPersonas).toEqual(['store-manager']); + + // Verify world can be retrieved with persona config + const retrievedWorld = await WorldRepository.findWorldById(world._id.toString()); + expect(retrievedWorld).toBeDefined(); + expect(retrievedWorld!.personas?.allowedPersonas).toEqual(['store-manager']); + }); + + it('Test 5: World Creation with Multiple Personas', async () => { + // Create world for warehouse operations (worker + manager) + const world = await WorldRepository.createWorld({ + name: 'Warehouse Operations World', + description: 'World for warehouse worker and manager', + samplingStrategy: { type: 'all' }, + personas: { + allowedPersonas: ['warehouse-worker', 'warehouse-manager'], + }, + }); + + createdWorldIds.push(world._id.toString()); + + // Warehouse-worker capabilities: inventory-check, equipment-availability-check + // Warehouse-manager capabilities: inventory-check, shipment-tracking, equipment-availability-check, dock-appointment-scheduling + // Union should contain all 4 unique capabilities + expect(world.capabilityIds).toBeDefined(); + expect(world.capabilityIds!.length).toBe(4); + expect(world.capabilityIds).toContain('inventory-check'); + expect(world.capabilityIds).toContain('equipment-availability-check'); + expect(world.capabilityIds).toContain('shipment-tracking'); + expect(world.capabilityIds).toContain('dock-appointment-scheduling'); + + // Verify personas config + expect(world.personas!.allowedPersonas).toEqual([ + 'warehouse-worker', + 'warehouse-manager', + ]); + }); + + it('Test 6: Persona Filtering with Random Sampling', async () => { + // Create world with random sampling + persona restriction + const world = await WorldRepository.createWorld({ + name: 'Random Persona World', + description: 'Random sampling with persona filter', + samplingStrategy: { + type: 'random', + count: 2, + seed: 12345, + }, + personas: { + allowedPersonas: ['customer-service-rep'], + }, + }); + + createdWorldIds.push(world._id.toString()); + + // Customer-service-rep capabilities: inventory-check, shipment-tracking + // Random 2 should be subset of these + expect(world.capabilityIds).toBeDefined(); + expect(world.capabilityIds!.length).toBeLessThanOrEqual(2); + + // Verify all returned capabilities are accessible by customer-service-rep + const csr_capabilities = personaRegistry.getCapabilitiesForPersona('customer-service-rep'); + world.capabilityIds!.forEach((capId) => { + expect(csr_capabilities).toContain(capId); + }); + + // Test seed repeatability + const world2 = await WorldRepository.createWorld({ + name: 'Random Persona World 2', + description: 'Same seed for repeatability test', + samplingStrategy: { + type: 'random', + count: 2, + seed: 
12345, + }, + personas: { + allowedPersonas: ['customer-service-rep'], + }, + }); + + createdWorldIds.push(world2._id.toString()); + + // Same seed should produce same capabilities + expect(world2.capabilityIds).toEqual(world.capabilityIds); + }); + }); + + // ============================================================================= + // C. CHAOS CASCADE INTEGRATION (MORPH-412) + // ============================================================================= + + describe('C. Chaos Cascade Integration', () => { + it('Test 7: World Chaos Persistence and Retrieval', async () => { + const chaosPolicy: ChaosPolicy = { + enabled: true, + probability: 0.3, + scenarios: [], + }; + + // Create world with chaos config + const world = await WorldRepository.createWorld({ + name: 'Chaos Test World', + description: 'World with chaos configuration', + samplingStrategy: { type: 'all' }, + chaos: chaosPolicy, + }); + + createdWorldIds.push(world._id.toString()); + + // Verify chaos config saved + expect(world.chaos).toBeDefined(); + expect(world.chaos!.enabled).toBe(true); + expect(world.chaos!.probability).toBe(0.3); + + // Verify chaos config can be retrieved + const retrievedWorld = await WorldRepository.findWorldById(world._id.toString()); + expect(retrievedWorld).toBeDefined(); + expect(retrievedWorld!.chaos).toBeDefined(); + expect(retrievedWorld!.chaos!.probability).toBe(0.3); + + // Verify chaos loaded into registry (would happen on server startup) + ChaosConfigRegistry.setWorldChaosPolicy(world._id.toString(), chaosPolicy); + + // Resolve chaos policy for this world + const { policy, source } = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: world._id.toString(), + odId: 'test-od', + stepId: 'test-step', + }); + + expect(policy.probability).toBe(0.3); + expect(source).toBe('world'); + }); + + it('Test 8: Chaos Priority Cascade - World vs Capability', async () => { + // Create world with chaos + const world = await WorldRepository.createWorld({ + name: 'Cascade Test World', + description: 'Testing chaos priority cascade', + samplingStrategy: { type: 'all' }, + chaos: { + enabled: true, + probability: 0.2, + scenarios: [], + }, + }); + + createdWorldIds.push(world._id.toString()); + + const worldId = world._id.toString(); + ChaosConfigRegistry.setWorldChaosPolicy(worldId, world.chaos!); + + // Set capability-level chaos override (higher priority) + const capabilityChaos: ChaosPolicy = { + enabled: true, + probability: 0.8, + scenarios: [], + }; + ChaosConfigRegistry.setCapabilityChaos('inventory-check', capabilityChaos); + + // Test 1: Capability with override should use capability-level chaos + const result1 = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + capabilityId: 'inventory-check', + odId: 'test-od', + stepId: 'test-step', + }); + + expect(result1.policy.probability).toBe(0.8); + expect(result1.source).toBe('capability'); + + // Test 2: Capability without override should use world-level chaos + const result2 = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + capabilityId: 'shipment-tracking', + odId: 'test-od', + stepId: 'test-step', + }); + + expect(result2.policy.probability).toBe(0.2); + expect(result2.source).toBe('world'); + }); + + it('Test 9: Master Kill-Switch Override', async () => { + // Set master kill-switch + process.env.CHAOS_ENABLED = 'false'; + + // Create world with chaos enabled + const world = await WorldRepository.createWorld({ + name: 'Kill-Switch Test World', + description: 'Testing master kill-switch', + samplingStrategy: { type: 'all' }, + chaos: 
{ + enabled: true, + probability: 1.0, + scenarios: [], + }, + }); + + createdWorldIds.push(world._id.toString()); + + const worldId = world._id.toString(); + ChaosConfigRegistry.setWorldChaosPolicy(worldId, world.chaos!); + + // Set capability-level chaos too + ChaosConfigRegistry.setCapabilityChaos('inventory-check', { + enabled: true, + probability: 1.0, + scenarios: [], + }); + + // Resolve chaos policy - should be disabled by env var + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + capabilityId: 'inventory-check', + odId: 'test-od', + stepId: 'test-step', + }); + + expect(result.policy.enabled).toBe(false); + expect(result.policy.probability).toBe(0); + expect(result.source).toBe('env'); + }); + + it('Test 10: Chaos CRUD Operations', async () => { + // Create world without chaos + const world = await WorldRepository.createWorld({ + name: 'CRUD Test World', + description: 'Testing chaos CRUD operations', + samplingStrategy: { type: 'all' }, + }); + + createdWorldIds.push(world._id.toString()); + const worldId = world._id.toString(); + + // Initially no chaos + expect(world.chaos).toBeUndefined(); + + // Add chaos policy + const chaosPolicy: ChaosPolicy = { + enabled: true, + probability: 0.4, + scenarios: [], + }; + + const updatedWorld = await WorldRepository.updateWorld(worldId, { + chaos: chaosPolicy, + }); + + expect(updatedWorld.chaos).toBeDefined(); + expect(updatedWorld.chaos!.probability).toBe(0.4); + + // Update registry + ChaosConfigRegistry.setWorldChaosPolicy(worldId, chaosPolicy); + + // Verify chaos applied + let result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + odId: 'test-od', + stepId: 'test-step', + }); + expect(result.policy.probability).toBe(0.4); + expect(result.source).toBe('world'); + + // Remove chaos policy + const worldWithoutChaos = await WorldRepository.updateWorld(worldId, { + chaos: undefined, + }); + + expect(worldWithoutChaos.chaos).toBeUndefined(); + + // Clear from registry + ChaosConfigRegistry.setWorldChaosPolicy(worldId, { + enabled: false, + probability: 0, + scenarios: [], + }); + + // Verify no chaos applied + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + odId: 'test-od', + stepId: 'test-step', + }); + expect(result.policy.enabled).toBe(false); + expect(result.policy.probability).toBe(0); + }); + }); + + // ============================================================================= + // D. END-TO-END INTEGRATION + // ============================================================================= + + describe('D. 
End-to-End Integration', () => { + it('Test 11: Complete Persona + World + Chaos Flow', async () => { + // Get warehouse-manager capabilities + const warehouseManagerCaps = personaRegistry.getCapabilitiesForPersona('warehouse-manager'); + expect(warehouseManagerCaps.length).toBeGreaterThan(0); + + // Create world with persona restriction and chaos + const world = await WorldRepository.createWorld({ + name: 'E2E Test World', + description: 'End-to-end integration test', + samplingStrategy: { type: 'all' }, + personas: { + allowedPersonas: ['warehouse-manager'], + }, + chaos: { + enabled: true, + probability: 0.15, + scenarios: [], + }, + }); + + createdWorldIds.push(world._id.toString()); + const worldId = world._id.toString(); + + // Verify world created correctly + expect(world.capabilityIds).toBeDefined(); + expect(world.personas?.allowedPersonas).toEqual(['warehouse-manager']); + expect(world.chaos).toBeDefined(); + expect(world.chaos!.probability).toBe(0.15); + + // Verify all capabilityIds match warehouse-manager's capabilities + world.capabilityIds!.forEach((capId) => { + expect(warehouseManagerCaps).toContain(capId); + }); + + // Load chaos into registry + ChaosConfigRegistry.setWorldChaosPolicy(worldId, world.chaos!); + + // Verify chaos resolution + const chaosResult = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + capabilityId: world.capabilityIds![0], + odId: 'test-od', + stepId: 'test-step', + }); + + expect(chaosResult.policy.probability).toBe(0.15); + expect(chaosResult.source).toBe('world'); + + // Execute one capability from the world + if (world.capabilityIds!.length > 0) { + const capabilityId = world.capabilityIds![0]; + const capability = capabilityCatalog.getById(capabilityId); + + if (capability) { + const executionResult = await capabilityExecutor.execute({ + capabilityId, + worldId, + inputs: { test: true }, + }); + + // Verify execution succeeded + expect(executionResult).toBeDefined(); + expect(executionResult.worldId).toBe(worldId); + expect(executionResult.capabilityId).toBe(capabilityId); + + // In permissive mode, should succeed even if there are warnings + expect(['success', 'failed']).toContain(executionResult.status); + + // Verify capabilityInWorld flag + expect(executionResult.capabilityInWorld).toBeDefined(); + } + } + }); + + it('Test 12: Persona Restriction Enforcement (Permissive Mode)', async () => { + // Tests permissive mode: capability execution outside world scope should succeed with warning + + // Create world restricted to warehouse-worker + const world = await WorldRepository.createWorld({ + name: 'Permissive Test World', + description: 'Testing permissive mode enforcement', + samplingStrategy: { type: 'all' }, + personas: { + allowedPersonas: ['warehouse-worker'], + }, + }); + + createdWorldIds.push(world._id.toString()); + const worldId = world._id.toString(); + + // warehouse-worker has: inventory-check, equipment-availability-check + // NOT: shipment-tracking + expect(world.capabilityIds).toBeDefined(); + expect(world.capabilityIds).not.toContain('shipment-tracking'); + + // Attempt to execute shipment-tracking (not in warehouse-worker's scope) + const capability = capabilityCatalog.getById('shipment-tracking'); + if (capability) { + const executionResult = await capabilityExecutor.execute({ + capabilityId: 'shipment-tracking', + worldId, + inputs: { test: true }, + }); + + // In permissive mode, should succeed with warning + expect(executionResult).toBeDefined(); + + // capabilityInWorld should be false + 
expect(executionResult.capabilityInWorld).toBe(false); + + // Note: Logs would contain warning about capability not in world scope + } + }); + + it('Test 13: Multi-Level Sampling with Persona Filter', async () => { + // Create world with domain filter + persona restriction + const world = await WorldRepository.createWorld({ + name: 'Multi-Filter World', + description: 'Testing multiple filter levels', + samplingStrategy: { + type: 'filter', + filter: { + domain: ['inventory'], + }, + }, + personas: { + allowedPersonas: ['store-manager', 'customer-service-rep'], + }, + }); + + createdWorldIds.push(world._id.toString()); + + // Both store-manager and customer-service-rep have inventory-check + // Domain filter should include inventory-check + // Result should be intersection: inventory-check only + expect(world.capabilityIds).toBeDefined(); + expect(world.capabilityIds).toContain('inventory-check'); + + // Verify all capabilities are in allowed domain and accessible by allowed personas + const allowedPersonas = ['store-manager', 'customer-service-rep']; + const allowedCaps = new Set(); + + allowedPersonas.forEach((personaId) => { + const caps = personaRegistry.getCapabilitiesForPersona(personaId); + caps.forEach((cap) => allowedCaps.add(cap)); + }); + + world.capabilityIds!.forEach((capId) => { + expect(allowedCaps.has(capId)).toBe(true); + + const capability = capabilityCatalog.getById(capId); + expect(capability).toBeDefined(); + expect(capability!.tags.domain).toContain('inventory'); + }); + }); + + it('Test 14: Chaos Cascade with Multiple Active Levels', async () => { + // Set global preset + process.env.CHAOS_PRESET = 'moderate'; + + // Create world with chaos + const world = await WorldRepository.createWorld({ + name: 'Multi-Level Chaos World', + description: 'Testing chaos cascade with multiple levels', + samplingStrategy: { type: 'all' }, + chaos: { + enabled: true, + probability: 0.25, + scenarios: [], + }, + }); + + createdWorldIds.push(world._id.toString()); + const worldId = world._id.toString(); + + ChaosConfigRegistry.setWorldChaosPolicy(worldId, world.chaos!); + + // Set capability-level chaos + ChaosConfigRegistry.setCapabilityChaos('inventory-check', { + enabled: true, + probability: 0.5, + scenarios: [], + }); + + // Set OD-level chaos (highest priority among these) + ChaosConfigRegistry.setODChaos('test-od', { + enabled: true, + probability: 0.7, + scenarios: [], + }); + + // Test 1: OD-level should win (priority 3) + let result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + capabilityId: 'inventory-check', + odId: 'test-od', + stepId: 'test-step', + }); + expect(result.policy.probability).toBe(0.7); + expect(result.source).toBe('od'); + + // Test 2: Clear OD chaos, capability-level should win (priority 4) + ChaosConfigRegistry.setODChaos('test-od', { + enabled: false, + probability: 0, + scenarios: [], + }); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + capabilityId: 'inventory-check', + odId: 'different-od', + stepId: 'test-step', + }); + expect(result.policy.probability).toBe(0.5); + expect(result.source).toBe('capability'); + + // Test 3: Clear capability chaos, world-level should win (priority 5) + ChaosConfigRegistry.clearOverrides(); + ChaosConfigRegistry.setWorldChaosPolicy(worldId, world.chaos!); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId, + capabilityId: 'shipment-tracking', + odId: 'different-od', + stepId: 'test-step', + }); + expect(result.policy.probability).toBe(0.25); + expect(result.source).toBe('world'); + + 
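+      // Resolution priority exercised across these tests (highest first):
+      // env kill-switch > OD-level > capability-level > world-level > global preset.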
// Test 4: Clear world chaos, global preset should win (priority 6) + ChaosConfigRegistry.clearOverrides(); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'different-world', + capabilityId: 'shipment-tracking', + odId: 'different-od', + stepId: 'test-step', + }); + expect(result.policy.enabled).toBe(true); + expect(result.source).toBe('global'); + }); + + it('Test 15: Persona Catalog Consistency Check', () => { + // Get all personas + const allPersonas = personaRegistry.getAll(); + + // For each persona, verify all their capabilities reference them back + allPersonas.forEach((persona) => { + const capabilityIds = personaRegistry.getCapabilitiesForPersona(persona.id); + + capabilityIds.forEach((capId) => { + const capability = capabilityCatalog.getById(capId); + expect(capability).toBeDefined(); + + // Verify capability has personas field + if (capability && capability.personas) { + expect(capability.personas).toContain(persona.id); + } + }); + }); + + // For each capability, verify all their personas reference them back + const allCapabilities = capabilityCatalog.getAll(); + + allCapabilities.forEach((capability) => { + if (capability.personas) { + capability.personas.forEach((personaId) => { + const persona = personaRegistry.getById(personaId); + expect(persona).toBeDefined(); + + if (persona) { + const personaCaps = personaRegistry.getCapabilitiesForPersona(personaId); + expect(personaCaps).toContain(capability.id); + } + }); + } + }); + + // Verify no orphaned references + console.log( + `✓ Persona-Capability consistency verified: ${allPersonas.length} personas, ${allCapabilities.length} capabilities` + ); + }); + }); +}); diff --git a/packages/controlmart/src/__tests__/phase5-integration.test.ts b/packages/controlmart/src/__tests__/phase5-integration.test.ts new file mode 100644 index 0000000000000000000000000000000000000000..d36c2b43bcb78aed13de58ffd6538dead9e891ce --- /dev/null +++ b/packages/controlmart/src/__tests__/phase5-integration.test.ts @@ -0,0 +1,1252 @@ +/** + * Phase 5 Integration Tests (MORPH-517) + * + * Comprehensive integration tests covering all Phase 5 features: + * - Capability & Persona Repository CRUD (MORPH-501-504) + * - Knowledge Graph Persistence (MORPH-505) + * - Service DB Loading (MORPH-506-508) + * - CRUD Management APIs (MORPH-512-514) + * - Pagination Support (MORPH-516) + * - Performance with 100+ Documents (MORPH-515) + */ + +import { describe, it, expect, beforeAll, afterEach, afterAll } from 'bun:test'; +import { connectMongo, disconnectMongo } from '../services/mongo.service'; +import { CapabilityRepository } from '../repository/capability.repository'; +import { PersonaRepository } from '../repository/persona.repository'; +import { KnowledgeGraphRepository } from '../repository/knowledge-graph.repository'; +import { WorldRepository } from '../repository/world.repository'; +import { capabilityCatalog } from '../services/capability-catalog.service'; +import { personaRegistry } from '../services/persona-registry.service'; +import { knowledgeGraph } from '../services/knowledge-graph.service'; +import { loadEnv } from '../utils/env.util'; +import type { TCapabilityInput } from '../models/capability.model.type'; +import type { TPersonaInput } from '../models/persona.model.type'; +import type { GraphNode, GraphEdge } from '../types/knowledge-graph.type'; + +describe('Phase 5 Integration Tests (MORPH-517)', () => { + // Cleanup tracking + const createdCapabilityIds: string[] = []; + const createdPersonaIds: string[] = []; + const 
createdWorldIds: string[] = []; + + beforeAll(async () => { + // Load environment and connect to test DB + loadEnv(); + await connectMongo({ + uri: process.env.MONGO_URI || 'mongodb://localhost:27017', + dbName: process.env.DB_NAME || 'morpheus-test', + }); + + // Initialize services (load from DB) + await capabilityCatalog.initialize(); + await personaRegistry.initialize(); + await knowledgeGraph.initialize(); + }); + + afterEach(async () => { + // Cleanup created test data + for (const id of createdCapabilityIds) { + try { + await CapabilityRepository.delete(id); + } catch (e) { + // Ignore cleanup errors + } + } + for (const id of createdPersonaIds) { + try { + await PersonaRepository.delete(id); + } catch (e) { + // Ignore cleanup errors + } + } + for (const id of createdWorldIds) { + try { + await WorldRepository.deleteWorld(id); + } catch (e) { + // Ignore cleanup errors + } + } + + // Clear arrays + createdCapabilityIds.length = 0; + createdPersonaIds.length = 0; + createdWorldIds.length = 0; + }); + + afterAll(async () => { + await disconnectMongo(); + }); + + // ============================================================================= + // A. CAPABILITY REPOSITORY CRUD (MORPH-502) + // ============================================================================= + describe('A. Capability Repository CRUD', () => { + it('Test 1: Should create capability', async () => { + // Arrange + const capabilityData: TCapabilityInput = { + id: 'test-capability-create', + name: 'Test Capability Create', + description: 'Test capability for creation', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + personas: ['test-persona'], + patterns: ['sequential'], + }, + personas: ['test-persona'], + odId: 'test-od', + version: '1.0.0', + metadata: { + author: 'test', + createdAt: new Date().toISOString(), + }, + }; + + // Act + const created = await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push(created.id); + + // Assert + expect(created).toBeDefined(); + expect(created.id).toBe('test-capability-create'); + expect(created.name).toBe('Test Capability Create'); + expect(created.tags.complexity).toBe('simple'); + }); + + it('Test 2: Should find capability by ID', async () => { + // Arrange - create test capability + const capabilityData: TCapabilityInput = { + id: 'test-capability-findbyid', + name: 'Test Find By ID', + description: 'Test capability', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + const created = await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push(created.id); + + // Act + const found = await CapabilityRepository.findById('test-capability-findbyid'); + + // Assert + expect(found).toBeDefined(); + expect(found?.id).toBe('test-capability-findbyid'); + expect(found?.name).toBe('Test Find By ID'); + }); + + it('Test 3: Should find multiple capabilities by IDs', async () => { + // Arrange - create 2 test capabilities + const cap1Data: TCapabilityInput = { + id: 'test-cap-1', + name: 'Test Cap 1', + description: 'First capability', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + const cap2Data: TCapabilityInput = { + id: 'test-cap-2', + name: 'Test Cap 2', + description: 'Second capability', + tags: { + domain: ['test'], + complexity: 'medium', + services: ['tms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; 
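+      // Persist both capabilities so findByIds can return them in a single query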
+ await CapabilityRepository.create(cap1Data); + await CapabilityRepository.create(cap2Data); + createdCapabilityIds.push('test-cap-1', 'test-cap-2'); + + // Act + const found = await CapabilityRepository.findByIds(['test-cap-1', 'test-cap-2']); + + // Assert + expect(found).toBeDefined(); + expect(found.length).toBe(2); + expect(found.some(c => c.id === 'test-cap-1')).toBe(true); + expect(found.some(c => c.id === 'test-cap-2')).toBe(true); + }); + + it('Test 4: Should update capability', async () => { + // Arrange - create test capability + const capabilityData: TCapabilityInput = { + id: 'test-capability-update', + name: 'Original Name', + description: 'Original description', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push('test-capability-update'); + + // Act + const updated = await CapabilityRepository.update('test-capability-update', { + name: 'Updated Name', + description: 'Updated description', + }); + + // Assert + expect(updated).toBeDefined(); + expect(updated?.name).toBe('Updated Name'); + expect(updated?.description).toBe('Updated description'); + }); + + it('Test 5: Should delete capability', async () => { + // Arrange - create test capability + const capabilityData: TCapabilityInput = { + id: 'test-capability-delete', + name: 'To Be Deleted', + description: 'This will be deleted', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + + // Act + const deleted = await CapabilityRepository.delete('test-capability-delete'); + + // Assert + expect(deleted).toBe(true); + const found = await CapabilityRepository.findById('test-capability-delete'); + expect(found).toBeNull(); + }); + + it('Test 6: Should filter capabilities by domain', async () => { + // Arrange - create test capabilities with different domains + const cap1: TCapabilityInput = { + id: 'test-filter-1', + name: 'Inventory Capability', + description: 'Test', + tags: { + domain: ['inventory'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + const cap2: TCapabilityInput = { + id: 'test-filter-2', + name: 'Transport Capability', + description: 'Test', + tags: { + domain: ['transportation'], + complexity: 'simple', + services: ['tms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(cap1); + await CapabilityRepository.create(cap2); + createdCapabilityIds.push('test-filter-1', 'test-filter-2'); + + // Act + const result = await CapabilityRepository.getAll({ domain: ['inventory'] }); + + // Assert + expect(result.data).toBeDefined(); + expect(result.data.some(c => c.id === 'test-filter-1')).toBe(true); + expect(result.data.some(c => c.id === 'test-filter-2')).toBe(false); + }); + + it('Test 7: Should search capabilities by text', async () => { + // Arrange - create test capability with searchable text + const capabilityData: TCapabilityInput = { + id: 'test-search-unique-term', + name: 'UniqueSearchTerm Capability', + description: 'Contains a very unique search term for testing', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + 
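+      // Track the created capability so afterEach can clean it up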
createdCapabilityIds.push('test-search-unique-term'); + + // Act + const results = await CapabilityRepository.search('UniqueSearchTerm'); + + // Assert + expect(results).toBeDefined(); + expect(results.length).toBeGreaterThan(0); + expect(results.some(c => c.id === 'test-search-unique-term')).toBe(true); + }); + + it('Test 8: Should handle undefined values in update (unset)', async () => { + // Arrange - create capability with metadata + const capabilityData: TCapabilityInput = { + id: 'test-unset', + name: 'Test Unset', + description: 'Test', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + metadata: { + author: 'test-author', + }, + }; + await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push('test-unset'); + + // Act - update with undefined to unset + const updated = await CapabilityRepository.update('test-unset', { + metadata: undefined, + }); + + // Assert + expect(updated).toBeDefined(); + expect(updated?.metadata).toBeUndefined(); + }); + }); + + // ============================================================================= + // B. PERSONA REPOSITORY CRUD (MORPH-504) + // ============================================================================= + describe('B. Persona Repository CRUD', () => { + it('Test 9: Should create persona', async () => { + // Arrange + const personaData: TPersonaInput = { + id: 'test-persona-create', + name: 'Test Persona Create', + description: 'Test persona for creation', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + metadata: { + accessLevel: 'basic', + }, + }; + + // Act + const created = await PersonaRepository.create(personaData); + createdPersonaIds.push(created.id); + + // Assert + expect(created).toBeDefined(); + expect(created.id).toBe('test-persona-create'); + expect(created.name).toBe('Test Persona Create'); + expect(created.role).toBe('operational'); + }); + + it('Test 10: Should find persona by ID', async () => { + // Arrange + const personaData: TPersonaInput = { + id: 'test-persona-findbyid', + name: 'Test Find By ID', + description: 'Test persona', + role: 'specialist', + department: 'inventory', + capabilityIds: [], + }; + await PersonaRepository.create(personaData); + createdPersonaIds.push('test-persona-findbyid'); + + // Act + const found = await PersonaRepository.findById('test-persona-findbyid'); + + // Assert + expect(found).toBeDefined(); + expect(found?.id).toBe('test-persona-findbyid'); + expect(found?.role).toBe('specialist'); + }); + + it('Test 11: Should update persona', async () => { + // Arrange + const personaData: TPersonaInput = { + id: 'test-persona-update', + name: 'Original Name', + description: 'Original description', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + await PersonaRepository.create(personaData); + createdPersonaIds.push('test-persona-update'); + + // Act + const updated = await PersonaRepository.update('test-persona-update', { + name: 'Updated Persona Name', + role: 'management', + }); + + // Assert + expect(updated).toBeDefined(); + expect(updated?.name).toBe('Updated Persona Name'); + expect(updated?.role).toBe('management'); + }); + + it('Test 12: Should delete persona', async () => { + // Arrange + const personaData: TPersonaInput = { + id: 'test-persona-delete', + name: 'To Be Deleted', + description: 'This will be deleted', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + await 
PersonaRepository.create(personaData); + + // Act + const deleted = await PersonaRepository.delete('test-persona-delete'); + + // Assert + expect(deleted).toBe(true); + const found = await PersonaRepository.findById('test-persona-delete'); + expect(found).toBeNull(); + }); + + it('Test 13: Should filter personas by role', async () => { + // Arrange + const persona1: TPersonaInput = { + id: 'test-filter-role-1', + name: 'Operational Persona', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + const persona2: TPersonaInput = { + id: 'test-filter-role-2', + name: 'Management Persona', + description: 'Test', + role: 'management', + department: 'warehouse', + capabilityIds: [], + }; + await PersonaRepository.create(persona1); + await PersonaRepository.create(persona2); + createdPersonaIds.push('test-filter-role-1', 'test-filter-role-2'); + + // Act + const result = await PersonaRepository.getAll({ role: 'operational' }); + + // Assert + expect(result.data).toBeDefined(); + expect(result.data.some(p => p.id === 'test-filter-role-1')).toBe(true); + expect(result.data.some(p => p.id === 'test-filter-role-2')).toBe(false); + }); + + it('Test 14: Should filter personas by department', async () => { + // Arrange + const persona1: TPersonaInput = { + id: 'test-filter-dept-1', + name: 'Warehouse Persona', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + const persona2: TPersonaInput = { + id: 'test-filter-dept-2', + name: 'Transport Persona', + description: 'Test', + role: 'operational', + department: 'transportation', + capabilityIds: [], + }; + await PersonaRepository.create(persona1); + await PersonaRepository.create(persona2); + createdPersonaIds.push('test-filter-dept-1', 'test-filter-dept-2'); + + // Act + const result = await PersonaRepository.getAll({ department: 'warehouse' }); + + // Assert + expect(result.data).toBeDefined(); + expect(result.data.some(p => p.id === 'test-filter-dept-1')).toBe(true); + expect(result.data.some(p => p.id === 'test-filter-dept-2')).toBe(false); + }); + }); + + // ============================================================================= + // C. KNOWLEDGE GRAPH PERSISTENCE (MORPH-505) + // ============================================================================= + describe('C. 
Knowledge Graph Persistence', () => { + it('Test 15: Should save graph to DB', async () => { + // Arrange + const testNodes: GraphNode[] = [ + { + id: 'test-node-1', + type: 'capability', + label: 'Test Capability 1', + metadata: { test: true }, + }, + { + id: 'test-node-2', + type: 'persona', + label: 'Test Persona 1', + metadata: { test: true }, + }, + ]; + const testEdges: GraphEdge[] = [ + { + from: 'test-node-1', + to: 'test-node-2', + type: 'requires', + metadata: {}, + }, + ]; + + // Act + await KnowledgeGraphRepository.saveGraph(testNodes, testEdges); + + // Assert + const loaded = await KnowledgeGraphRepository.loadGraph(); + expect(loaded.nodes.length).toBeGreaterThanOrEqual(2); + expect(loaded.edges.length).toBeGreaterThanOrEqual(1); + expect(loaded.nodes.some(n => n.id === 'test-node-1')).toBe(true); + }); + + it('Test 16: Should load graph from DB', async () => { + // Act + const loaded = await KnowledgeGraphRepository.loadGraph(); + + // Assert + expect(loaded).toBeDefined(); + expect(loaded.nodes).toBeDefined(); + expect(loaded.edges).toBeDefined(); + expect(Array.isArray(loaded.nodes)).toBe(true); + expect(Array.isArray(loaded.edges)).toBe(true); + }); + + it('Test 17: Should round-trip save and load', async () => { + // Arrange + const originalNodes: GraphNode[] = [ + { + id: 'roundtrip-node-1', + type: 'capability', + label: 'Roundtrip Test', + metadata: { value: 123 }, + }, + ]; + const originalEdges: GraphEdge[] = []; + + // Act - Save + await KnowledgeGraphRepository.saveGraph(originalNodes, originalEdges); + + // Act - Load + const loaded = await KnowledgeGraphRepository.loadGraph(); + + // Assert + const foundNode = loaded.nodes.find(n => n.id === 'roundtrip-node-1'); + expect(foundNode).toBeDefined(); + expect(foundNode?.label).toBe('Roundtrip Test'); + expect(foundNode?.metadata?.value).toBe(123); + }); + + it('Test 18: Should handle large graphs (100+ nodes)', async () => { + // Arrange - Create 100 nodes + const largeNodes: GraphNode[] = []; + for (let i = 0; i < 100; i++) { + largeNodes.push({ + id: `large-graph-node-${i}`, + type: 'capability', + label: `Node ${i}`, + metadata: { index: i }, + }); + } + const largeEdges: GraphEdge[] = []; + + // Act + const startTime = Date.now(); + await KnowledgeGraphRepository.saveGraph(largeNodes, largeEdges); + const saveTime = Date.now() - startTime; + + const loadStartTime = Date.now(); + const loaded = await KnowledgeGraphRepository.loadGraph(); + const loadTime = Date.now() - loadStartTime; + + // Assert + expect(loaded.nodes.length).toBeGreaterThanOrEqual(100); + expect(saveTime).toBeLessThan(2000); // Should save in < 2 seconds + expect(loadTime).toBeLessThan(2000); // Should load in < 2 seconds + }); + }); + + // ============================================================================= + // D. SERVICE DB LOADING (MORPH-506-508) + // ============================================================================= + describe('D. 
Service DB Loading', () => { + it('Test 19: CapabilityCatalog should load from DB', async () => { + // Arrange - Create a test capability + const capabilityData: TCapabilityInput = { + id: 'test-catalog-load', + name: 'Test Catalog Load', + description: 'Test', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push('test-catalog-load'); + + // Act - Reload catalog + await capabilityCatalog.reload(); + + // Assert + const capability = capabilityCatalog.getById('test-catalog-load'); + expect(capability).toBeDefined(); + expect(capability?.name).toBe('Test Catalog Load'); + }); + + it('Test 20: CapabilityCatalog should reload cache', async () => { + // Arrange - Create a new capability + const capabilityData: TCapabilityInput = { + id: 'test-catalog-reload', + name: 'Test Catalog Reload', + description: 'Test', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push('test-catalog-reload'); + + // Before reload - capability should NOT be in cache yet + const beforeReload = capabilityCatalog.getById('test-catalog-reload'); + expect(beforeReload).toBeNull(); + + // Act - Reload + await capabilityCatalog.reload(); + + // Assert - After reload, capability should be in cache + const afterReload = capabilityCatalog.getById('test-catalog-reload'); + expect(afterReload).toBeDefined(); + expect(afterReload?.name).toBe('Test Catalog Reload'); + }); + + it('Test 21: PersonaRegistry should load from DB', async () => { + // Arrange - Create a test persona + const personaData: TPersonaInput = { + id: 'test-registry-load', + name: 'Test Registry Load', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + await PersonaRepository.create(personaData); + createdPersonaIds.push('test-registry-load'); + + // Act - Reload registry + await personaRegistry.reload(); + + // Assert + const persona = personaRegistry.getById('test-registry-load'); + expect(persona).toBeDefined(); + expect(persona?.name).toBe('Test Registry Load'); + }); + + it('Test 22: PersonaRegistry should reload cache', async () => { + // Arrange - Create a new persona + const personaData: TPersonaInput = { + id: 'test-registry-reload', + name: 'Test Registry Reload', + description: 'Test', + role: 'specialist', + department: 'inventory', + capabilityIds: [], + }; + await PersonaRepository.create(personaData); + createdPersonaIds.push('test-registry-reload'); + + // Before reload - persona should NOT be in cache yet + const beforeReload = personaRegistry.getById('test-registry-reload'); + expect(beforeReload).toBeNull(); + + // Act - Reload + await personaRegistry.reload(); + + // Assert - After reload, persona should be in cache + const afterReload = personaRegistry.getById('test-registry-reload'); + expect(afterReload).toBeDefined(); + expect(afterReload?.name).toBe('Test Registry Reload'); + }); + + it('Test 23: Should handle bidirectional persona-capability lookup', async () => { + // Arrange - Create capability and persona + const capData: TCapabilityInput = { + id: 'test-bidirectional-cap', + name: 'Test Bidirectional Capability', + description: 'Test', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: 
['test-bidirectional-persona'], + odId: 'test-od', + version: '1.0.0', + }; + const personaData: TPersonaInput = { + id: 'test-bidirectional-persona', + name: 'Test Bidirectional Persona', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: ['test-bidirectional-cap'], + }; + await CapabilityRepository.create(capData); + await PersonaRepository.create(personaData); + createdCapabilityIds.push('test-bidirectional-cap'); + createdPersonaIds.push('test-bidirectional-persona'); + + // Act - Reload both + await capabilityCatalog.reload(); + await personaRegistry.reload(); + + // Assert - Check both directions + const capability = capabilityCatalog.getById('test-bidirectional-cap'); + expect(capability?.personas).toContain('test-bidirectional-persona'); + + const persona = personaRegistry.getById('test-bidirectional-persona'); + expect(persona?.capabilityIds).toContain('test-bidirectional-cap'); + }); + }); + + // ============================================================================= + // E. CAPABILITY MANAGEMENT API (MORPH-512) + // ============================================================================= + describe('E. Capability Management API', () => { + // Note: These tests would typically use supertest or similar for HTTP testing + // For now, we test the repository layer which is used by the controllers + + it('Test 24: Should create capability via repository (API layer)', async () => { + // Arrange + const capabilityData: TCapabilityInput = { + id: 'test-api-create', + name: 'Test API Create', + description: 'Test API capability creation', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + + // Act + const created = await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push(created.id); + + // Verify cache reload would work + await capabilityCatalog.reload(); + const fromCache = capabilityCatalog.getById('test-api-create'); + + // Assert + expect(created.id).toBe('test-api-create'); + expect(fromCache).toBeDefined(); + expect(fromCache?.name).toBe('Test API Create'); + }); + + it('Test 25: Should update capability via repository (API layer)', async () => { + // Arrange + const capabilityData: TCapabilityInput = { + id: 'test-api-update', + name: 'Original API Name', + description: 'Original', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push('test-api-update'); + + // Act + const updated = await CapabilityRepository.update('test-api-update', { + name: 'Updated API Name', + }); + + // Verify cache reload + await capabilityCatalog.reload(); + const fromCache = capabilityCatalog.getById('test-api-update'); + + // Assert + expect(updated?.name).toBe('Updated API Name'); + expect(fromCache?.name).toBe('Updated API Name'); + }); + + it('Test 26: Should delete capability via repository (API layer)', async () => { + // Arrange + const capabilityData: TCapabilityInput = { + id: 'test-api-delete', + name: 'To Delete', + description: 'Test', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + + // Act + const deleted = await CapabilityRepository.delete('test-api-delete'); + + // Verify cache reload + await capabilityCatalog.reload(); 
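+      // After reload, the deleted capability should no longer be served from the in-memory catalog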
+ const fromCache = capabilityCatalog.getById('test-api-delete'); + + // Assert + expect(deleted).toBe(true); + expect(fromCache).toBeNull(); + }); + + it('Test 27: Should validate required fields', async () => { + // Arrange - missing required field 'name' + const invalidData: any = { + id: 'test-validation', + description: 'Missing name field', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + + // Act & Assert + let errorThrown = false; + try { + await CapabilityRepository.create(invalidData); + } catch (error) { + errorThrown = true; + } + expect(errorThrown).toBe(true); + }); + + it('Test 28: Should reload catalog after mutations', async () => { + // Arrange + const beforeReload = capabilityCatalog.getAll().length; + + const capabilityData: TCapabilityInput = { + id: 'test-reload-check', + name: 'Test Reload', + description: 'Test', + tags: { + domain: ['test'], + complexity: 'simple', + services: ['wms'], + }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }; + await CapabilityRepository.create(capabilityData); + createdCapabilityIds.push('test-reload-check'); + + // Act + await capabilityCatalog.reload(); + + // Assert + const afterReload = capabilityCatalog.getAll().length; + expect(afterReload).toBeGreaterThan(beforeReload); + }); + + it('Test 29: Should return null for missing capability', async () => { + // Act + const found = await CapabilityRepository.findById('non-existent-capability-id'); + + // Assert + expect(found).toBeNull(); + }); + }); + + // ============================================================================= + // F. PERSONA MANAGEMENT API (MORPH-513) + // ============================================================================= + describe('F. 
Persona Management API', () => { + it('Test 30: Should create persona via repository (API layer)', async () => { + // Arrange + const personaData: TPersonaInput = { + id: 'test-persona-api-create', + name: 'Test Persona API Create', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + + // Act + const created = await PersonaRepository.create(personaData); + createdPersonaIds.push(created.id); + + // Verify cache reload + await personaRegistry.reload(); + const fromCache = personaRegistry.getById('test-persona-api-create'); + + // Assert + expect(created.id).toBe('test-persona-api-create'); + expect(fromCache).toBeDefined(); + }); + + it('Test 31: Should update persona via repository (API layer)', async () => { + // Arrange + const personaData: TPersonaInput = { + id: 'test-persona-api-update', + name: 'Original Persona Name', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + await PersonaRepository.create(personaData); + createdPersonaIds.push('test-persona-api-update'); + + // Act + const updated = await PersonaRepository.update('test-persona-api-update', { + name: 'Updated Persona Name', + }); + + // Verify cache reload + await personaRegistry.reload(); + const fromCache = personaRegistry.getById('test-persona-api-update'); + + // Assert + expect(updated?.name).toBe('Updated Persona Name'); + expect(fromCache?.name).toBe('Updated Persona Name'); + }); + + it('Test 32: Should delete persona via repository (API layer)', async () => { + // Arrange + const personaData: TPersonaInput = { + id: 'test-persona-api-delete', + name: 'To Delete', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }; + await PersonaRepository.create(personaData); + + // Act + const deleted = await PersonaRepository.delete('test-persona-api-delete'); + + // Verify cache reload + await personaRegistry.reload(); + const fromCache = personaRegistry.getById('test-persona-api-delete'); + + // Assert + expect(deleted).toBe(true); + expect(fromCache).toBeNull(); + }); + + it('Test 33: Should validate required fields', async () => { + // Arrange - missing required field 'role' + const invalidData: any = { + id: 'test-persona-validation', + name: 'Test Persona', + description: 'Missing role field', + department: 'warehouse', + capabilityIds: [], + }; + + // Act & Assert + let errorThrown = false; + try { + await PersonaRepository.create(invalidData); + } catch (error) { + errorThrown = true; + } + expect(errorThrown).toBe(true); + }); + + it('Test 34: Should reload registry after mutations', async () => { + // Arrange + const beforeReload = personaRegistry.getAll().length; + + const personaData: TPersonaInput = { + id: 'test-persona-reload-check', + name: 'Test Reload', + description: 'Test', + role: 'specialist', + department: 'inventory', + capabilityIds: [], + }; + await PersonaRepository.create(personaData); + createdPersonaIds.push('test-persona-reload-check'); + + // Act + await personaRegistry.reload(); + + // Assert + const afterReload = personaRegistry.getAll().length; + expect(afterReload).toBeGreaterThan(beforeReload); + }); + + it('Test 35: Should return null for missing persona', async () => { + // Act + const found = await PersonaRepository.findById('non-existent-persona-id'); + + // Assert + expect(found).toBeNull(); + }); + }); + + // ============================================================================= + // G. 
BULK OPERATIONS (MORPH-514) + // ============================================================================= + describe('G. Bulk Operations', () => { + it('Test 36: Should bulk create multiple capabilities', async () => { + // Arrange + const capabilities: TCapabilityInput[] = [ + { + id: 'bulk-cap-1', + name: 'Bulk Capability 1', + description: 'Test', + tags: { domain: ['test'], complexity: 'simple', services: ['wms'] }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }, + { + id: 'bulk-cap-2', + name: 'Bulk Capability 2', + description: 'Test', + tags: { domain: ['test'], complexity: 'simple', services: ['wms'] }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }, + { + id: 'bulk-cap-3', + name: 'Bulk Capability 3', + description: 'Test', + tags: { domain: ['test'], complexity: 'simple', services: ['wms'] }, + personas: [], + odId: 'test-od', + version: '1.0.0', + }, + ]; + + // Act + for (const cap of capabilities) { + await CapabilityRepository.create(cap); + createdCapabilityIds.push(cap.id); + } + + // Assert + const cap1 = await CapabilityRepository.findById('bulk-cap-1'); + const cap2 = await CapabilityRepository.findById('bulk-cap-2'); + const cap3 = await CapabilityRepository.findById('bulk-cap-3'); + + expect(cap1).toBeDefined(); + expect(cap2).toBeDefined(); + expect(cap3).toBeDefined(); + }); + + it('Test 37: Should retrieve all capabilities (export simulation)', async () => { + // Act + const result = await CapabilityRepository.getAll(); + + // Assert + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + expect(result.data.length).toBeGreaterThan(0); + }); + + it('Test 38: Should bulk create multiple personas', async () => { + // Arrange + const personas: TPersonaInput[] = [ + { + id: 'bulk-persona-1', + name: 'Bulk Persona 1', + description: 'Test', + role: 'operational', + department: 'warehouse', + capabilityIds: [], + }, + { + id: 'bulk-persona-2', + name: 'Bulk Persona 2', + description: 'Test', + role: 'specialist', + department: 'inventory', + capabilityIds: [], + }, + ]; + + // Act + for (const persona of personas) { + await PersonaRepository.create(persona); + createdPersonaIds.push(persona.id); + } + + // Assert + const p1 = await PersonaRepository.findById('bulk-persona-1'); + const p2 = await PersonaRepository.findById('bulk-persona-2'); + + expect(p1).toBeDefined(); + expect(p2).toBeDefined(); + }); + + it('Test 39: Should retrieve all personas (export simulation)', async () => { + // Act + const result = await PersonaRepository.getAll(); + + // Assert + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + expect(result.data.length).toBeGreaterThan(0); + }); + }); + + // ============================================================================= + // H. PAGINATION (MORPH-516) + // ============================================================================= + describe('H. 
Pagination', () => { + it('Test 40: Should paginate capabilities', async () => { + // Act + const result = await CapabilityRepository.getAll(undefined, { page: 1, limit: 2 }); + + // Assert + expect(result.data).toBeDefined(); + expect(result.pagination).toBeDefined(); + expect(result.pagination?.page).toBe(1); + expect(result.pagination?.limit).toBe(2); + expect(result.pagination?.total).toBeGreaterThanOrEqual(0); + expect(typeof result.pagination?.hasNext).toBe('boolean'); + expect(typeof result.pagination?.hasPrevious).toBe('boolean'); + }); + + it('Test 41: Should paginate personas', async () => { + // Act + const result = await PersonaRepository.getAll(undefined, { page: 1, limit: 2 }); + + // Assert + expect(result.data).toBeDefined(); + expect(result.pagination).toBeDefined(); + expect(result.pagination?.page).toBe(1); + expect(result.pagination?.limit).toBe(2); + expect(result.pagination?.total).toBeGreaterThanOrEqual(0); + expect(typeof result.pagination?.hasNext).toBe('boolean'); + }); + + it('Test 42: Should paginate worlds', async () => { + // Act + const result = await WorldRepository.getAllWorlds(undefined, { page: 1, limit: 2 }); + + // Assert + expect(result.data).toBeDefined(); + expect(result.pagination).toBeDefined(); + expect(result.pagination?.page).toBe(1); + expect(result.pagination?.limit).toBe(2); + expect(result.pagination?.total).toBeGreaterThanOrEqual(0); + }); + }); + + // ============================================================================= + // I. PERFORMANCE TESTS (MORPH-515) + // ============================================================================= + describe('I. Performance Tests', () => { + it('Test 43: Should load capabilities in < 100ms', async () => { + // Act + const startTime = Date.now(); + await CapabilityRepository.getAll(); + const duration = Date.now() - startTime; + + // Assert + expect(duration).toBeLessThan(100); + }); + + it('Test 44: Should search capabilities in < 200ms', async () => { + // Act + const startTime = Date.now(); + await CapabilityRepository.search('test'); + const duration = Date.now() - startTime; + + // Assert + expect(duration).toBeLessThan(200); + }); + }); + + // ============================================================================= + // J. DATABASE INDEXES (MORPH-515) + // ============================================================================= + describe('J. 
Database Indexes', () => { + it('Test 45: Should verify capability collection has indexes', async () => { + // This test verifies that indexes were created on the Capability model + // In a real scenario, you would query MongoDB's index information + // For now, we verify the model exists and queries work + + const result = await CapabilityRepository.getAll({ domain: ['inventory'] }); + expect(result).toBeDefined(); + }); + + it('Test 46: Should verify persona collection has indexes', async () => { + // Verify persona indexes exist by testing filtered queries + const result = await PersonaRepository.getAll({ role: 'operational' }); + expect(result).toBeDefined(); + }); + + it('Test 47: Should verify world collection has indexes', async () => { + // Verify world indexes exist by testing filtered queries + const result = await WorldRepository.getAllWorlds({ is_default: true }); + expect(result).toBeDefined(); + }); + }); +}); diff --git a/packages/controlmart/src/application/application.app.ts b/packages/controlmart/src/application/application.app.ts new file mode 100644 index 0000000000000000000000000000000000000000..30b8e37e57b6514bda102a3f7d58592f9ceb30a8 --- /dev/null +++ b/packages/controlmart/src/application/application.app.ts @@ -0,0 +1,89 @@ +import express, { type Application, type Request, type Response } from "express"; +import compression from "compression"; +import helmet from "helmet"; +import path from "path"; +import { existsSync } from "fs"; +import type { Logger } from "pino"; + +import registryRouter from "../routes/registry.route"; +import { globalErrorHandler } from "../utils/error.util"; + +type TAppOptions = { + port?: number; + host?: string; + env?: "development" | "production" | "test"; + log?: boolean; + logger: Logger; + httpLogger: express.RequestHandler; +}; + +export const createApplication = ({ + port = 3000, + host = "localhost", + env = "development", + log = true, + logger, + httpLogger, +}: TAppOptions): Application => { + const app = express(); + + app.use(httpLogger); + app.use(express.json({ limit: "50mb" })); + app.use(express.urlencoded({ extended: true })); + app.use(compression()); + app.use( + helmet({ + crossOriginOpenerPolicy: false, + crossOriginEmbedderPolicy: false, + crossOriginResourcePolicy: false, + hsts: false, + contentSecurityPolicy: false, + }), + ); + + app.get("/health", (_: Request, res: Response) => + res.status(200).json({ + success: true, + status: "ok", + uptime: process.uptime(), + timestamp: new Date().toISOString(), + }), + ); + app.use("/", registryRouter); + + // Serve UI static files + // Check relative to executable first (for binary distribution) + const execDir = path.dirname(process.execPath); + const localUiPath = path.join(execDir, "ui"); // User should copy dist/ui to ./ui next to binary + const distUiPath = path.join(__dirname, "../../dist/ui"); // Dev/Source mode + + const uiDistPath = existsSync(localUiPath) ? localUiPath : distUiPath; + + if (existsSync(uiDistPath)) { + app.use("/admin", express.static(uiDistPath)); + // SPA fallback - serve index.html for any /admin routes + app.get("/admin", (_: Request, res: Response) => { + res.sendFile(path.join(uiDistPath, "index.html")); + }); + app.get(/^\/admin\/.*/, (_: Request, res: Response) => { + res.sendFile(path.join(uiDistPath, "index.html")); + }); + if (log) logger.info(`[app] UI available at /admin`); + } else { + logger.warn(`[app] UI NOT FOUND. 
Expecting 'ui' folder at ${localUiPath} or 'dist/ui' at ${distUiPath}`); + } + + app.use((_: Request, res: Response) => + res.status(404).json({ success: false, error: "Not Found" }), + ); + + app.use(globalErrorHandler); + + if (log) logger.info(`[app] Environment: ${env}`); + + app.listen(port, host, () => { + if (log) logger.info(`[app] Server running on http://${host}:${port}`); + }); + + return app; +}; diff --git a/packages/controlmart/src/application/bootcheck.app.ts b/packages/controlmart/src/application/bootcheck.app.ts new file mode 100644 index 0000000000000000000000000000000000000000..4abbde435eccb694ad9b920c7d08cfe83aea4b47 --- /dev/null +++ b/packages/controlmart/src/application/bootcheck.app.ts @@ -0,0 +1,33 @@ +import { connectMongo, checkMongoConnection, disconnectMongo } from "../services/mongo.service"; +import { loadEnv } from "../utils/env.util"; + +export const runBootCheck = async () => { + try { + const envs = loadEnv(); + + console.log("[bootcheck] Starting system checks..."); + + await connectMongo({ + uri: envs.MONGO_URI, + dbName: envs.DB_NAME, + log: false, + }); + const mongo = checkMongoConnection(); + + if (!mongo.isConnected) throw new Error("[bootcheck] MongoDB connection failed"); + + console.log(`[bootcheck] Mongo connected: ${mongo.dbName}`); + console.log("[bootcheck] Environment validated"); + console.log("[bootcheck] All systems operational"); + + await disconnectMongo(false); + process.exit(0); + } catch (err) { + console.error("[bootcheck] Boot check failed:", (err as Error).message); + process.exit(1); + } +}; + +if (import.meta.main) { + await runBootCheck(); +} diff --git a/packages/controlmart/src/application/setup.app.ts b/packages/controlmart/src/application/setup.app.ts new file mode 100644 index 0000000000000000000000000000000000000000..fc451560da9c495c6a18a5b28fdced2c532e4e31 --- /dev/null +++ b/packages/controlmart/src/application/setup.app.ts @@ -0,0 +1,140 @@ +import { $ } from "bun"; +import { writeFileSync } from "node:fs"; +import path from "path"; + +const SETUP_PORT = 8283; + +export const startSetup = async () => { + console.log(`[setup] Starting Setup Mode on http://localhost:${SETUP_PORT}`); + Bun.spawn(["open", `http://localhost:${SETUP_PORT}`]); + + // Open browser + setTimeout(() => { + $`open http://localhost:${SETUP_PORT}`; + }, 1000); + + const server = Bun.serve({ + port: SETUP_PORT, + async fetch(req) { + const url = new URL(req.url); + + if (req.method === "GET" && url.pathname === "/") { + return new Response(getHtml(), { + headers: { "Content-Type": "text/html" }, + }); + } + + if (req.method === "POST" && url.pathname === "/save") { + const formData = await req.formData(); + const envContent = [ + `MONGO_URI=${formData.get("mongoUri")}`, + `OPENAI_API_KEY=${formData.get("openaiKey")}`, + `DB_NAME=${formData.get("dbName") || "morpheus"}`, + `PORT=${formData.get("port") || "8282"}`, + `NODE_ENV=production` + ].join("\n"); + + // Calculate path relative to the executable + const execDir = path.dirname(process.execPath); + // In setup mode, we prefer writing to the local dir if we are running as binary + // But if we are in dev (bun run), we might want project root. + // Simple heuristic: if we are in a build (not typical node_modules structure), write local. 
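+        // For now we always write the .env next to the executable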
+ const envPath = path.join(execDir, ".env"); + + console.log(`[setup] Writing configuration to: ${envPath}`); + writeFileSync(envPath, envContent); + + // Auto-restart after save + setTimeout(() => { + console.log("[setup] Restarting application..."); + process.exit(100); + }, 1000); + + return new Response(getSuccessHtml(formData.get("port")?.toString() || "8282"), { + headers: { "Content-Type": "text/html" }, + }); + } + + if (req.method === "POST" && url.pathname === "/restart") { + // Exit to let launcher restart or user manually restart + setTimeout(() => process.exit(100), 500); + return new Response("Restarting...", { status: 200 }); + } + + return new Response("Not Found", { status: 404 }); + }, + }); +}; + +const getHtml = () => ` + + + + Morpheus Setup + + + +
+  <h1>Welcome to Morpheus</h1>
+  <p>Please configure the application to get started.</p>
+  <form method="POST" action="/save">
+    <label>MongoDB URI <input name="mongoUri" placeholder="mongodb://localhost:27017" required /></label>
+    <label>OpenAI API Key <input name="openaiKey" type="password" /></label>
+    <label>Database Name <input name="dbName" placeholder="morpheus" /></label>
+    <label>Port <input name="port" placeholder="8282" /></label>
+    <button type="submit">Save Configuration</button>
+  </form>
+`;
+
+const getSuccessHtml = (port: string) => `
+  <title>Configuration Saved</title>
+  <h1>Configuration Saved</h1>
+  <p>Morpheus is restarting...</p>
+  <p>Redirecting you to the application...</p>
+  <!-- assumed: client-side redirect to the configured port once the server is back up -->
+  <script>setTimeout(() => { window.location.href = "http://localhost:${port}"; }, 2000);</script>

+
+ + +`; diff --git a/packages/controlmart/src/business-rules/actions/create-record.action.ts b/packages/controlmart/src/business-rules/actions/create-record.action.ts new file mode 100644 index 0000000000000000000000000000000000000000..1c4d5b8ef0cdc90f7ea1f4e5a16e3935ffc91568 --- /dev/null +++ b/packages/controlmart/src/business-rules/actions/create-record.action.ts @@ -0,0 +1,64 @@ +import type { ActionHandler, ExecutionContext } from "../types/rule.type"; +import type { TBusinessRuleModel } from "../../models/business-rule.model"; +import { resolveTemplateBindings } from "../rule-evaluator"; +// import { getAgenda } from "../../services/agenda.service"; + +/** + * CreateRecord action handler + * Asynchronously creates a new record in the specified collection + * + * Config structure: + * { + * collection: "Task", + * data: { + * field1: "{{template}}", + * field2: "static value", + * ... + * }, + * delay?: 1000 // Optional delay in milliseconds before creating + * } + */ +export class CreateRecordActionHandler implements ActionHandler { + async execute( + action: { type: string; config: any }, + context: ExecutionContext, + rule: TBusinessRuleModel, + ): Promise { + const { collection, data, delay } = action.config; + + if (!collection) { + throw new Error("CreateRecord action requires collection in config"); + } + + if (!data) { + throw new Error("CreateRecord action requires data in config"); + } + + // Resolve template bindings in the data + const resolvedData = resolveTemplateBindings(data, context); + + // Get agenda instance + // const agenda = getAgenda(); + + // Queue the job + const jobData = { + ruleId: rule.ruleId, + ruleName: rule.name, + collection, + data: resolvedData, + worldId: context.worldId, + domain: context.domain, + }; + + if (delay && delay > 0) { + // Schedule job with delay + // await agenda.schedule(new Date(Date.now() + delay), "business-rule-create-record", jobData); + } else { + // Execute immediately (asynchronously) + // await agenda.now("business-rule-create-record", jobData); + } + } +} + +// Export singleton instance +export const createRecordActionHandler = new CreateRecordActionHandler(); diff --git a/packages/controlmart/src/business-rules/actions/index.ts b/packages/controlmart/src/business-rules/actions/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..8f15101cb915c158c8d41f93451d9ce0d09a6a70 --- /dev/null +++ b/packages/controlmart/src/business-rules/actions/index.ts @@ -0,0 +1,41 @@ +import type { ActionHandler } from "../types/rule.type"; +import { validateActionHandler } from "./validate.action"; +import { transformActionHandler } from "./transform.action"; +import { createRecordActionHandler } from "./create-record.action"; +import { triggerODActionHandler } from "./trigger-od.action"; +import { logActionHandler } from "./log.action"; + +/** + * Registry of all action handlers + * Maps action type to handler implementation + */ +export const actionHandlers: Record = { + validate: validateActionHandler, + transform: transformActionHandler, + createRecord: createRecordActionHandler, + triggerOD: triggerODActionHandler, + log: logActionHandler, +}; + +/** + * Get action handler by type + * @param type - Action type + * @returns Action handler instance + * @throws Error if action type is not supported + */ +export function getActionHandler(type: string): ActionHandler { + const handler = actionHandlers[type]; + if (!handler) { + throw new Error( + `Unknown action type: ${type}. 
Supported types: ${Object.keys(actionHandlers).join(", ")}`, + ); + } + return handler; +} + +// Re-export individual handlers for direct access if needed +export { validateActionHandler } from "./validate.action"; +export { transformActionHandler } from "./transform.action"; +export { createRecordActionHandler } from "./create-record.action"; +export { triggerODActionHandler } from "./trigger-od.action"; +export { logActionHandler } from "./log.action"; diff --git a/packages/controlmart/src/business-rules/actions/log.action.ts b/packages/controlmart/src/business-rules/actions/log.action.ts new file mode 100644 index 0000000000000000000000000000000000000000..74ae8bd8f21f348cd2cbb99462ed02331b409197 --- /dev/null +++ b/packages/controlmart/src/business-rules/actions/log.action.ts @@ -0,0 +1,62 @@ +import type { ActionHandler, ExecutionContext } from "../types/rule.type"; +import type { TBusinessRuleModel } from "../../models/business-rule.model"; +import { resolveTemplateBindings } from "../rule-evaluator"; +import { createAppLogger } from "../../utils/logger.util"; + +// Create logger for business rules +const logger = createAppLogger({ service: "business-rules" }); + +/** + * Log action handler + * Writes structured log entries for audit and monitoring + * + * Config structure: + * { + * level: "info" | "warn" | "error" | "debug", + * message: "Message with {{template}} support", + * metadata?: { any additional context } + * } + */ +export class LogActionHandler implements ActionHandler { + async execute( + action: { type: string; config: any }, + context: ExecutionContext, + rule: TBusinessRuleModel, + ): Promise { + const { level, message, metadata } = action.config; + + if (!message) { + throw new Error("Log action requires message in config"); + } + + // Validate log level + const validLevels = ["info", "warn", "error", "debug"]; + const logLevel = level || "info"; + if (!validLevels.includes(logLevel)) { + throw new Error(`Invalid log level: ${logLevel}. Must be one of: ${validLevels.join(", ")}`); + } + + // Resolve template bindings in message + const resolvedMessage = resolveTemplateBindings(message, context); + + // Resolve template bindings in metadata if present + const resolvedMetadata = metadata ? 
resolveTemplateBindings(metadata, context) : {}; + + // Create log entry + const logData = { + ruleId: rule.ruleId, + ruleName: rule.name, + domain: context.domain, + collection: context.collection, + trigger: context.trigger, + worldId: context.worldId, + ...resolvedMetadata, + }; + + // Write log using pino + logger[logLevel as "info" | "warn" | "error" | "debug"](logData, resolvedMessage); + } +} + +// Export singleton instance +export const logActionHandler = new LogActionHandler(); diff --git a/packages/controlmart/src/business-rules/actions/transform.action.ts b/packages/controlmart/src/business-rules/actions/transform.action.ts new file mode 100644 index 0000000000000000000000000000000000000000..c19483734789ef67ed210be1f18b5f7ec47acecd --- /dev/null +++ b/packages/controlmart/src/business-rules/actions/transform.action.ts @@ -0,0 +1,129 @@ +import jmespath from "jmespath"; + +import type { ActionHandler, ExecutionContext } from "../types/rule.type"; +import type { TBusinessRuleModel } from "../../models/business-rule.model"; +import { resolveTemplateBindings } from "../rule-evaluator"; + +/** + * Transform action handler + * Modifies the data before it's saved + * + * Config structure: + * { + * type: "set" | "javascript", + * // For type: "set" + * field: "path.to.field", + * value: "static value or {{template}}", + * // For type: "javascript" + * script: "function(data, ctx) { ... return modifiedData; }" + * } + */ +export class TransformActionHandler implements ActionHandler { + async execute( + action: { type: string; config: any }, + context: ExecutionContext, + rule: TBusinessRuleModel, + ): Promise { + const { type, field, value, script } = action.config; + + if (type === "set") { + return this.transformWithSet(field, value, context); + } else if (type === "javascript") { + return this.transformWithJavaScript(script, context); + } else { + throw new Error(`Unknown transform type: ${type}. 
Use "set" or "javascript"`); + } + } + + /** + * Transform data by setting a field value + */ + private transformWithSet(field: string, value: any, context: ExecutionContext): any { + if (!field) { + throw new Error('Transform action with type "set" requires field in config'); + } + + // Resolve template bindings in the value + const resolvedValue = resolveTemplateBindings(value, context); + + // Clone the data to avoid mutations + const modifiedData = structuredClone(context.data); + + // Set the field using path notation (e.g., "inventory.availableQty") + this.setNestedProperty(modifiedData, field, resolvedValue); + + return modifiedData; + } + + /** + * Transform data using a JavaScript function + */ + private transformWithJavaScript(script: string, context: ExecutionContext): any { + if (!script) { + throw new Error('Transform action with type "javascript" requires script in config'); + } + + // Create safe execution context + const safeContext = { + data: structuredClone(context.data), + originalData: context.originalData, + domain: context.domain, + collection: context.collection, + trigger: context.trigger, + worldId: context.worldId, + metadata: context.metadata, + }; + + try { + // Create transform function + const transformFunction = new Function( + "data", + "ctx", + "helpers", + ` + return (${script})(data, ctx, helpers); + `, + ); + + const helpers = { + jmespath: (query: string) => jmespath.search(safeContext, query), + clone: (obj: any) => structuredClone(obj), + }; + + const result = transformFunction(safeContext.data, safeContext, helpers); + + if (!result) { + throw new Error("Transform script must return the modified data"); + } + + return result; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new Error(`Transform script execution failed: ${message}`); + } + } + + /** + * Set a nested property using dot notation + * Example: setNestedProperty(obj, "a.b.c", 123) sets obj.a.b.c = 123 + */ + private setNestedProperty(obj: any, path: string, value: any): void { + const parts = path.split("."); + const last = parts.pop()!; + let current = obj; + + // Navigate to the parent object + for (const part of parts) { + if (!(part in current)) { + current[part] = {}; + } + current = current[part]; + } + + // Set the value + current[last] = value; + } +} + +// Export singleton instance +export const transformActionHandler = new TransformActionHandler(); diff --git a/packages/controlmart/src/business-rules/actions/trigger-od.action.ts b/packages/controlmart/src/business-rules/actions/trigger-od.action.ts new file mode 100644 index 0000000000000000000000000000000000000000..1c3a5de58921d14c2591e70ca61d621b193f0415 --- /dev/null +++ b/packages/controlmart/src/business-rules/actions/trigger-od.action.ts @@ -0,0 +1,59 @@ +import type { ActionHandler, ExecutionContext } from "../types/rule.type"; +import type { TBusinessRuleModel } from "../../models/business-rule.model"; +import { resolveTemplateBindings } from "../rule-evaluator"; +// import { getAgenda } from "../../services/agenda.service"; + +/** + * TriggerOD action handler + * Asynchronously executes an Operational Descriptor + * + * Config structure: + * { + * odName: "allocation-workflow", + * input: { + * orderId: "{{data.orderId}}", + * ... 
+ * }, + * delay?: 1000 // Optional delay in milliseconds before execution + * } + */ +export class TriggerODActionHandler implements ActionHandler { + async execute( + action: { type: string; config: any }, + context: ExecutionContext, + rule: TBusinessRuleModel, + ): Promise { + const { odName, input, delay } = action.config; + + if (!odName) { + throw new Error("TriggerOD action requires odName in config"); + } + + // Resolve template bindings in the input + const resolvedInput = input ? resolveTemplateBindings(input, context) : {}; + + // Get agenda instance + // const agenda = getAgenda(); + + // Queue the job + const jobData = { + ruleId: rule.ruleId, + ruleName: rule.name, + odName, + input: resolvedInput, + worldId: context.worldId, + domain: context.domain, + }; + + if (delay && delay > 0) { + // Schedule job with delay + // await agenda.schedule(new Date(Date.now() + delay), "business-rule-trigger-od", jobData); + } else { + // Execute immediately (asynchronously) + // await agenda.now("business-rule-trigger-od", jobData); + } + } +} + +// Export singleton instance +export const triggerODActionHandler = new TriggerODActionHandler(); diff --git a/packages/controlmart/src/business-rules/actions/validate.action.ts b/packages/controlmart/src/business-rules/actions/validate.action.ts new file mode 100644 index 0000000000000000000000000000000000000000..5246e16b5ce3a775fc4d1f35c5a814f15a491f26 --- /dev/null +++ b/packages/controlmart/src/business-rules/actions/validate.action.ts @@ -0,0 +1,47 @@ +import type { ActionHandler, ExecutionContext } from "../types/rule.type"; +import { BusinessRuleError } from "../types/rule.type"; +import type { TBusinessRuleModel } from "../../models/business-rule.model"; +import { resolveTemplateBindings } from "../rule-evaluator"; + +/** + * Validate action handler + * Blocks the operation by throwing an error when validation fails + * + * Config structure: + * { + * errorMessage: "Error message with {{template}} support", + * errorCode?: "CUSTOM_ERROR_CODE", + * details?: { any additional context } + * } + */ +export class ValidateActionHandler implements ActionHandler { + async execute( + action: { type: string; config: any }, + context: ExecutionContext, + rule: TBusinessRuleModel, + ): Promise { + const { errorMessage, errorCode, details } = action.config; + + if (!errorMessage) { + throw new Error("Validate action requires errorMessage in config"); + } + + // Resolve template bindings in error message + const resolvedMessage = resolveTemplateBindings(errorMessage, context); + + // Resolve template bindings in details if present + const resolvedDetails = details ? 
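// Illustrative config: errorCode/errorMessage mirror the WMS seed rules below; the details entry is a hypothetical addition.
// {
//   errorCode: "WMS_NEGATIVE_STOCK",
//   errorMessage: "Inventory quantities cannot be negative",
//   details: { sku: "{{data.sku}}" }
// }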
resolveTemplateBindings(details, context) : undefined; + + // Throw BusinessRuleError to block the operation + throw new BusinessRuleError( + resolvedMessage, + rule.ruleId, + rule.name, + errorCode || "VALIDATION_FAILED", + resolvedDetails, + ); + } +} + +// Export singleton instance +export const validateActionHandler = new ValidateActionHandler(); diff --git a/packages/controlmart/src/business-rules/mongoose-hooks.ts b/packages/controlmart/src/business-rules/mongoose-hooks.ts new file mode 100644 index 0000000000000000000000000000000000000000..005f3bddbaa6d0d571154b2bd034cadefa924fbe --- /dev/null +++ b/packages/controlmart/src/business-rules/mongoose-hooks.ts @@ -0,0 +1,430 @@ +import type { Schema } from "mongoose"; + +import { businessRuleEngine } from "./rule-engine"; +import { createAppLogger } from "../utils/logger.util"; + +const logger = createAppLogger({ service: "mongoose-hooks" }); + +/** + * Attach business rule hooks to a Mongoose schema + * This enables automatic business rule execution on database operations + * + * @param schema - Mongoose schema to attach hooks to + * @param domain - Domain (ERP, WMS, TMS, EDI) + * @param collection - Collection name (e.g., Inventory, Order) + */ +export function attachBusinessRuleHooks(schema: Schema, domain: string, collection: string): void { + // Store isNew state for post-save hook + schema.pre("save", function (next) { + (this as any)._wasNew = this.isNew; + next(); + }); + + // Pre-save hook: before_insert or before_update + schema.pre("save", async function (next) { + const doc = this; + const isNew = doc.isNew; + const trigger = isNew ? "before_insert" : "before_update"; + + try { + // Extract worldId from document (if exists) + const worldId = (doc as any).worldRef?.worldId || null; + + // Convert Mongoose document to plain object + const data = doc.toObject(); + + // Get original data for updates + let originalData; + if (!isNew) { + try { + originalData = await (doc.constructor as any).findById(doc._id).lean(); + } catch (error) { + // If we can't fetch original, continue without it + originalData = undefined; + } + } + + logger.debug( + { + domain, + collection, + trigger, + worldId, + docId: doc._id?.toString(), + }, + "Executing pre-save business rules", + ); + + // Execute rules + const result = await businessRuleEngine.executeRules(domain, collection, trigger, data, { + skipAsync: false, + context: { + worldId, + originalData, + }, + }); + + // If rules modified data (transform actions), update document + if (result.data) { + // Merge transformed data back into the document + Object.assign(doc, result.data); + } + + // Continue with save + next(); + } catch (error) { + // If sync rule failed validation, block the save + logger.error( + { + domain, + collection, + trigger, + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + docId: doc._id?.toString(), + }, + "Business rule validation failed (pre-save)", + ); + next(error as Error); + } + }); + + // Post-save hook: after_insert or after_update + schema.post("save", async function (doc) { + // Use stored _wasNew state to determine trigger + const trigger = (doc as any)._wasNew ? 
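// doc.isNew is always false once post("save") runs, so the _wasNew flag captured in the pre("save") hook above is the only reliable way to distinguish inserts from updates here.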
"after_insert" : "after_update"; + + try { + const worldId = (doc as any).worldRef?.worldId || null; + const data = doc.toObject(); + + logger.debug( + { + domain, + collection, + trigger, + worldId, + docId: doc._id?.toString(), + }, + "Executing post-save business rules", + ); + + // Execute rules (async rules will be queued) + await businessRuleEngine.executeRules(domain, collection, trigger, data, { + skipAsync: false, + context: { worldId }, + }); + } catch (error) { + // Log error but don't block (already saved) + logger.error( + { + domain, + collection, + trigger, + error: error instanceof Error ? error.message : String(error), + }, + "Post-save business rule execution failed", + ); + } + }); + + // Pre-remove hook: before_delete + schema.pre("findOneAndDelete", async function (next) { + const doc = this; + + try { + const worldId = (doc as any).worldRef?.worldId || null; + const data = doc.toObject(); + + logger.debug( + { + domain, + collection, + trigger: "before_delete", + worldId, + docId: doc._id?.toString(), + }, + "Executing pre-remove business rules", + ); + + await businessRuleEngine.executeRules(domain, collection, "before_delete", data, { + skipAsync: false, + context: { worldId }, + }); + + next(); + } catch (error) { + logger.error( + { + domain, + collection, + trigger: "before_delete", + error: error instanceof Error ? error.message : String(error), + }, + "Business rule validation failed", + ); + next(error as Error); + } + }); + + // Post-remove hook: after_delete + schema.post("remove", async function (doc) { + try { + const worldId = (doc as any).worldRef?.worldId || null; + const data = doc.toObject(); + + logger.debug( + { + domain, + collection, + trigger: "after_delete", + worldId, + docId: doc._id?.toString(), + }, + "Executing post-remove business rules", + ); + + await businessRuleEngine.executeRules(domain, collection, "after_delete", data, { + skipAsync: false, + context: { worldId }, + }); + } catch (error) { + logger.error( + { + domain, + collection, + trigger: "after_delete", + error: error instanceof Error ? 
error.message : String(error), + }, + "Post-remove business rule execution failed", + ); + } + }); + + // Query middleware for update operations + const updateOps = [ + "findOneAndUpdate", + "findOneAndReplace", + "updateOne", + "updateMany", + "replaceOne", + ] as const; + + for (const op of updateOps) { + schema.pre(op, async function () { + const query = this.getQuery(); + + // Fetch document(s) before update + if (op === "updateMany") { + const docs = await this.model.find(query).lean(); + (this as any)._beforeDocs = docs; + } else { + const doc = await this.model.findOne(query).lean(); + (this as any)._beforeDoc = doc; + } + }); + + schema.post(op, async function (res: any) { + const query = this.getQuery(); + + try { + // Handle updateMany (multiple documents) + if (op === "updateMany") { + const beforeDocs = (this as any)._beforeDocs || []; + const afterDocs = await this.model.find(query).lean(); + + for (const beforeDoc of beforeDocs) { + const afterDoc = afterDocs.find( + (d: any) => d._id.toString() === beforeDoc._id.toString(), + ); + + if (!afterDoc) continue; + + const worldId = afterDoc.worldRef?.worldId || null; + + logger.debug( + { + domain, + collection, + trigger: "before_update", + worldId, + docId: afterDoc._id?.toString(), + }, + "Executing query update business rules", + ); + + // Execute before_update rules + await businessRuleEngine.executeRules(domain, collection, "before_update", afterDoc, { + skipAsync: false, + context: { + worldId, + originalData: beforeDoc, + }, + }); + + // Execute after_update rules + await businessRuleEngine.executeRules(domain, collection, "after_update", afterDoc, { + skipAsync: false, + context: { worldId }, + }); + } + return; + } + + // Handle single document operations + const beforeDoc = (this as any)._beforeDoc; + if (!beforeDoc) return; + + // Get the updated document + let afterDoc: any; + if (res && typeof res.toObject === "function") { + afterDoc = res.toObject({ depopulate: true }); + } else { + afterDoc = await this.model.findOne(query).lean(); + } + + if (!afterDoc) return; + + const worldId = afterDoc.worldRef?.worldId || null; + + logger.debug( + { + domain, + collection, + trigger: "before_update", + worldId, + docId: afterDoc._id?.toString(), + }, + "Executing query update business rules", + ); + + // Execute before_update rules (validation) + await businessRuleEngine.executeRules(domain, collection, "before_update", afterDoc, { + skipAsync: false, + context: { + worldId, + originalData: beforeDoc, + }, + }); + + // Execute after_update rules (async actions) + await businessRuleEngine.executeRules(domain, collection, "after_update", afterDoc, { + skipAsync: false, + context: { worldId }, + }); + } catch (error) { + logger.error( + { + domain, + collection, + trigger: "update_query", + error: error instanceof Error ? 
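// Note: for query-based updates these "before_update" rules run in post middleware, after the write has already been applied, so a failing validate action is logged here rather than blocking the update (unlike the document pre("save") path).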
error.message : String(error), + }, + "Query update business rule execution failed", + ); + } + }); + } + + // Query middleware for delete operations + const deleteOps = ["deleteOne", "deleteMany", "findOneAndDelete"] as const; + + for (const op of deleteOps) { + schema.pre(op, async function () { + const query = this.getQuery(); + + // Fetch document(s) before delete + if (op === "deleteMany") { + const docs = await this.model.find(query).lean(); + (this as any)._beforeDocs = docs; + } else { + const doc = await this.model.findOne(query).lean(); + (this as any)._beforeDoc = doc; + } + }); + + schema.post(op, async function () { + try { + // Handle deleteMany (multiple documents) + if (op === "deleteMany") { + const beforeDocs = (this as any)._beforeDocs || []; + + for (const doc of beforeDocs) { + const worldId = doc.worldRef?.worldId || null; + + logger.debug( + { + domain, + collection, + trigger: "before_delete", + worldId, + docId: doc._id?.toString(), + }, + "Executing query delete business rules", + ); + + // Execute before_delete rules + await businessRuleEngine.executeRules(domain, collection, "before_delete", doc, { + skipAsync: false, + context: { worldId }, + }); + + // Execute after_delete rules + await businessRuleEngine.executeRules(domain, collection, "after_delete", doc, { + skipAsync: false, + context: { worldId }, + }); + } + return; + } + + // Handle single document operations + const doc = (this as any)._beforeDoc; + if (!doc) return; + + const worldId = doc.worldRef?.worldId || null; + + logger.debug( + { + domain, + collection, + trigger: "before_delete", + worldId, + docId: doc._id?.toString(), + }, + "Executing query delete business rules", + ); + + // Execute before_delete rules + await businessRuleEngine.executeRules(domain, collection, "before_delete", doc, { + skipAsync: false, + context: { worldId }, + }); + + // Execute after_delete rules + await businessRuleEngine.executeRules(domain, collection, "after_delete", doc, { + skipAsync: false, + context: { worldId }, + }); + } catch (error) { + logger.error( + { + domain, + collection, + trigger: "delete_query", + error: error instanceof Error ? 
error.message : String(error), + }, + "Query delete business rule execution failed", + ); + } + }); + } + + logger.debug( + { + domain, + collection, + }, + "Business rule hooks attached to schema", + ); +} diff --git a/packages/controlmart/src/business-rules/rule-engine.ts b/packages/controlmart/src/business-rules/rule-engine.ts new file mode 100644 index 0000000000000000000000000000000000000000..96cb6ee635d6e87793c01bc3eee582236d891259 --- /dev/null +++ b/packages/controlmart/src/business-rules/rule-engine.ts @@ -0,0 +1,261 @@ +import type { TBusinessRuleModel } from "../models/business-rule.model"; +import { ruleRegistry } from "./rule-registry"; +import { evaluateCondition } from "./rule-evaluator"; +import { getActionHandler } from "./actions"; +import type { + ExecutionContext, + RuleExecutionOptions, + RuleExecutionResult, +} from "./types/rule.type"; +import { BusinessRuleError } from "./types/rule.type"; +import { createAppLogger } from "../utils/logger.util"; + +const logger = createAppLogger({ service: "business-rules-engine" }); + +/** + * Business Rule Engine + * Orchestrates rule execution for database operations + */ +export class BusinessRuleEngine { + /** + * Execute rules for a specific trigger + * @param domain - Domain (ERP, WMS, TMS, EDI) + * @param collection - Collection name + * @param trigger - Trigger type (e.g., before_insert, after_update) + * @param data - The data being operated on + * @param options - Execution options + * @returns Execution result with modified data and metrics + */ + async executeRules( + domain: string, + collection: string, + trigger: string, + data: any, + options: RuleExecutionOptions = {}, + ): Promise { + const startTime = Date.now(); + + // Build execution context + const context: ExecutionContext = { + data: structuredClone(data), // Clone to avoid mutations + originalData: structuredClone(data), + worldId: options.context?.worldId || null, + domain, + collection, + trigger, + actor: options.context?.actor, + metadata: options.context, + }; + + const executedRules: RuleExecutionResult["executedRules"] = []; + const errors: RuleExecutionResult["errors"] = []; + + try { + // Load applicable rules + const rules = await this.loadRules(domain, collection, context.worldId); + + // Filter rules by trigger + const applicableRules = this.filterRulesByTrigger(rules, trigger); + + logger.debug( + { + domain, + collection, + trigger, + worldId: context.worldId, + totalRules: rules.length, + applicableRules: applicableRules.length, + }, + `Executing business rules`, + ); + + // Execute sync rules first + const syncRules = applicableRules.filter((r) => r.executionMode === "sync"); + for (const rule of syncRules) { + await this.executeRule(rule, context, options, executedRules, errors); + } + + // Execute async rules if not skipped + if (!options.skipAsync) { + const asyncRules = applicableRules.filter((r) => r.executionMode === "async"); + for (const rule of asyncRules) { + // Fire and forget for async rules - don't wait for completion + this.executeRule(rule, context, options, executedRules, errors).catch((error) => { + logger.error( + { + ruleId: rule.ruleId, + ruleName: rule.name, + error: error instanceof Error ? error.message : String(error), + }, + `Async rule execution failed`, + ); + }); + } + } + + const executionTimeMs = Date.now() - startTime; + + // Return success result + return { + success: errors.length === 0, + data: context.data, + errors: errors.length > 0 ? 
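// Only async rules push into this errors array (sync failures are thrown and handled in the catch below); because async rules are fire-and-forget, most of their failures are logged rather than reported in this result.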
errors : undefined, + executedRules, + metrics: { + totalRules: applicableRules.length, + executedRules: executedRules.length, + syncRules: syncRules.length, + asyncRules: options.skipAsync ? 0 : applicableRules.length - syncRules.length, + executionTimeMs, + }, + }; + } catch (error) { + const executionTimeMs = Date.now() - startTime; + + // If it's a BusinessRuleError, propagate it + if (error instanceof BusinessRuleError) { + throw error; + } + + // Otherwise, log and return error result + logger.error( + { + domain, + collection, + trigger, + error: error instanceof Error ? error.message : String(error), + }, + `Rule execution failed`, + ); + + return { + success: false, + errors: [ + { + ruleId: "system", + ruleName: "Rule Engine", + error: error instanceof Error ? error.message : String(error), + }, + ], + executedRules, + metrics: { + totalRules: 0, + executedRules: executedRules.length, + syncRules: 0, + asyncRules: 0, + executionTimeMs, + }, + }; + } + } + + /** + * Load rules from registry + */ + private async loadRules( + domain: string, + collection: string, + worldId: string | null, + ): Promise { + return await ruleRegistry.getRules(domain, collection, worldId); + } + + /** + * Filter rules by trigger type + */ + private filterRulesByTrigger(rules: TBusinessRuleModel[], trigger: string): TBusinessRuleModel[] { + return rules.filter((rule) => rule.trigger.includes(trigger)); + } + + /** + * Execute a single rule + */ + private async executeRule( + rule: TBusinessRuleModel, + context: ExecutionContext, + options: RuleExecutionOptions, + executedRules: RuleExecutionResult["executedRules"], + errors: RuleExecutionResult["errors"], + ): Promise { + try { + // Evaluate condition + const conditionMet = evaluateCondition(rule.condition, context); + + // Track rule execution + executedRules.push({ + ruleId: rule.ruleId, + ruleName: rule.name, + conditionMet, + actions: rule.actions.map((a: any) => a.type), + }); + + // Skip if condition not met + if (!conditionMet) { + logger.debug( + { + ruleId: rule.ruleId, + ruleName: rule.name, + }, + `Rule condition not met, skipping`, + ); + return; + } + + logger.debug( + { + ruleId: rule.ruleId, + ruleName: rule.name, + actions: rule.actions.map((a: any) => a.type), + }, + `Executing rule actions`, + ); + + // Execute actions in order + for (const action of rule.actions) { + if (options.dryRun) { + logger.info( + { + ruleId: rule.ruleId, + actionType: action.type, + }, + `[DRY RUN] Would execute action`, + ); + continue; + } + + const handler = getActionHandler(action.type); + const result = await handler.execute(action, context, rule); + + // For transform actions, update context data + if (action.type === "transform" && result !== undefined) { + context.data = result; + } + } + } catch (error) { + // Handle errors based on rule mode + if (rule.executionMode === "sync") { + // Sync rules: propagate error (blocks operation) + throw error; + } else { + // Async rules: log error and continue + errors?.push({ + ruleId: rule.ruleId, + ruleName: rule.name, + error: error instanceof Error ? error.message : String(error), + }); + + logger.error( + { + ruleId: rule.ruleId, + ruleName: rule.name, + error: error instanceof Error ? 
error.message : String(error), + }, + `Async rule execution failed`, + ); + } + } + } +} + +// Export singleton instance +export const businessRuleEngine = new BusinessRuleEngine(); diff --git a/packages/controlmart/src/business-rules/rule-evaluator.ts b/packages/controlmart/src/business-rules/rule-evaluator.ts new file mode 100644 index 0000000000000000000000000000000000000000..28c51d190349ea49f06e6c97c8cc234fe67b5f98 --- /dev/null +++ b/packages/controlmart/src/business-rules/rule-evaluator.ts @@ -0,0 +1,126 @@ +import jmespath from "jmespath"; + +import type { ExecutionContext } from "./types/rule.type"; + +/** + * Evaluate a condition expression against execution context + * @param condition - Condition with language and expression + * @param context - Execution context + * @returns Boolean result of condition evaluation + */ +export const evaluateCondition = ( + condition: { language: string; expression: string }, + context: ExecutionContext, +): boolean => { + try { + switch (condition.language) { + case "jmespath": + return evaluateJMESPathCondition(condition.expression, context); + + case "javascript": + return evaluateJavaScriptCondition(condition.expression, context); + + default: + throw new Error(`Unsupported condition language: ${condition.language}`); + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new Error(`Condition evaluation failed: ${message}`); + } +}; + +/** + * Evaluate JMESPath expression + */ +const evaluateJMESPathCondition = (expression: string, context: ExecutionContext): boolean => { + const result = jmespath.search(context, expression); + return Boolean(result); +}; + +/** + * Evaluate JavaScript expression safely + * Uses Function constructor similar to OD Script steps + */ +const evaluateJavaScriptCondition = (expression: string, context: ExecutionContext): boolean => { + // Create safe execution context (remove sensitive data) + const safeContext = { + data: context.data, + originalData: context.originalData, + domain: context.domain, + collection: context.collection, + trigger: context.trigger, + worldId: context.worldId, + metadata: context.metadata, + }; + + try { + // Create function with context and helper utilities + const evalFunction = new Function( + "ctx", + "helpers", + ` + const { data, originalData, domain, collection, trigger, worldId, metadata } = ctx; + return (${expression}); + `, + ); + + const helpers = { + jmespath: (query: string) => jmespath.search(safeContext, query), + }; + + const result = evalFunction(safeContext, helpers); + return Boolean(result); + } catch (error) { + const message = error instanceof Error ? 
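// Example of an expression this wraps (hypothetical): "data.quantityAllocated > data.quantityOnHand"; data, originalData, etc. are in scope via the destructuring above, and helpers.jmespath(...) is available as the second argument.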
error.message : String(error); + throw new Error(`JavaScript evaluation failed: ${message}`); + } +}; + +/** + * Resolve template bindings in a value + * Replaces {{expression}} patterns with JMESPath results + * @param value - Value with potential template bindings + * @param context - Execution context for binding resolution + * @returns Resolved value + */ +export const resolveTemplateBindings = (value: any, context: ExecutionContext): any => { + const resolveValue = (val: any): any => { + if (typeof val === "string") { + // Case 1: Whole string is a single {{expr}} - return the result directly + const fullMatch = val.match(/^\s*\{\{(.*?)\}\}\s*$/); + if (fullMatch) { + const expr = fullMatch[1]!.trim(); + return jmespath.search(context, expr); + } + + // Case 2: Inline replacements in a larger string + return val.replace(/\{\{(.*?)\}\}/g, (_, expr) => { + const trimmed = expr.trim(); + const result = jmespath.search(context, trimmed); + if (typeof result === "object" && result !== null) { + return JSON.stringify(result); + } + return result ?? ""; + }); + } + + // Case 3: Array - recurse through elements + if (Array.isArray(val)) { + return val.map((v) => resolveValue(v)); + } + + // Case 4: Object - recurse through properties + if (val && typeof val === "object") { + const obj: Record = {}; + for (const [k, v] of Object.entries(val)) { + obj[k] = resolveValue(v); + } + return obj; + } + + // Case 5: Primitive fallback + return val; + }; + + return resolveValue(value); +}; diff --git a/packages/controlmart/src/business-rules/rule-registry.ts b/packages/controlmart/src/business-rules/rule-registry.ts new file mode 100644 index 0000000000000000000000000000000000000000..7b079394a829fb3273c3e95e94e131e394f99e0f --- /dev/null +++ b/packages/controlmart/src/business-rules/rule-registry.ts @@ -0,0 +1,164 @@ +import { BusinessRule } from "../models/business-rule.model"; +import type { TBusinessRuleModel } from "../models/business-rule.model"; + +/** + * Registry for managing and caching business rules + * Provides efficient rule lookup with in-memory caching + */ +export class RuleRegistry { + private cache: Map = new Map(); + + /** + * Get rules for a specific domain, collection, and world + * @param domain - Domain (ERP, WMS, TMS, EDI) + * @param collection - Collection name (e.g., Inventory, Order) + * @param worldId - World ID (null for global rules) + * @returns Array of applicable rules sorted by priority + */ + async getRules( + domain: string, + collection: string, + worldId: string | null, + ): Promise { + const cacheKey = this.getCacheKey(domain, collection, worldId); + + // Return cached rules if available + if (this.cache.has(cacheKey)) { + return this.cache.get(cacheKey)!; + } + + // Load rules from database + const rules = await this.loadRules(domain, collection, worldId); + + // Cache the results + this.cache.set(cacheKey, rules); + + return rules; + } + + /** + * Load rules from database + * Merges global rules with world-specific rules (world-specific takes precedence) + */ + private async loadRules( + domain: string, + collection: string, + worldId: string | null, + ): Promise { + // Load global rules (worldId = null) + const globalRules = await BusinessRule.find({ + domain, + targetCollection: collection, + worldId: null, + enabled: true, + }).lean(); + + // If no worldId specified, return only global rules + if (!worldId) { + return this.sortRulesByPriority(globalRules); + } + + // Load world-specific rules + const worldRules = await BusinessRule.find({ + domain, + 
targetCollection: collection, + worldId, + enabled: true, + }).lean(); + + // Merge rules: world-specific overrides global + const mergedRules = this.mergeRules(globalRules, worldRules); + + return this.sortRulesByPriority(mergedRules); + } + + /** + * Merge global and world-specific rules + * If a world rule has the same ruleId as a global rule, the world rule takes precedence + */ + private mergeRules( + globalRules: TBusinessRuleModel[], + worldRules: TBusinessRuleModel[], + ): TBusinessRuleModel[] { + const worldRuleIds = new Set(worldRules.map((r) => r.ruleId)); + + // Filter out global rules that are overridden by world rules + const activeGlobalRules = globalRules.filter((r) => !worldRuleIds.has(r.ruleId)); + + // Combine world rules and non-overridden global rules + return [...worldRules, ...activeGlobalRules]; + } + + /** + * Sort rules by priority (lower number = higher priority) + */ + private sortRulesByPriority(rules: TBusinessRuleModel[]): TBusinessRuleModel[] { + return rules.sort((a, b) => a.priority - b.priority); + } + + /** + * Generate cache key for rules + */ + private getCacheKey(domain: string, collection: string, worldId: string | null): string { + return `${domain}:${collection}:${worldId || "global"}`; + } + + /** + * Invalidate cache for specific domain/collection/world + */ + invalidateCache(domain?: string, collection?: string, worldId?: string | null): void { + if (!domain) { + // Clear entire cache + this.cache.clear(); + return; + } + + if (!collection) { + // Clear all entries for this domain + const keysToDelete: string[] = []; + for (const key of this.cache.keys()) { + if (key.startsWith(`${domain}:`)) { + keysToDelete.push(key); + } + } + keysToDelete.forEach((key) => this.cache.delete(key)); + return; + } + + if (worldId === undefined) { + // Clear all entries for this domain/collection (all worlds) + const keysToDelete: string[] = []; + for (const key of this.cache.keys()) { + if (key.startsWith(`${domain}:${collection}:`)) { + keysToDelete.push(key); + } + } + keysToDelete.forEach((key) => this.cache.delete(key)); + return; + } + + // Clear specific cache entry + const cacheKey = this.getCacheKey(domain, collection, worldId); + this.cache.delete(cacheKey); + } + + /** + * Clear entire cache + */ + clearCache(): void { + this.cache.clear(); + } + + /** + * Get cache statistics + */ + getCacheStats(): { size: number; keys: string[] } { + return { + size: this.cache.size, + keys: Array.from(this.cache.keys()), + }; + } +} + +// Singleton instance +export const ruleRegistry = new RuleRegistry(); diff --git a/packages/controlmart/src/business-rules/rules/wms-rules.ts b/packages/controlmart/src/business-rules/rules/wms-rules.ts new file mode 100644 index 0000000000000000000000000000000000000000..c2dee60dbe47083e5bfb57be9ff8d0b856598514 --- /dev/null +++ b/packages/controlmart/src/business-rules/rules/wms-rules.ts @@ -0,0 +1,253 @@ +import type { TBusinessRuleModel } from "../../models/business-rule.model"; + +/** + * Rule 1: Prevent Negative Stock + * Validates that inventory quantities never go below zero + */ +export const wmsPreventNegativeStock: Partial = { + ruleId: "wms-prevent-negative-stock", + name: "Prevent Negative Stock", + description: "Ensure inventory quantities never go below zero", + domain: "WMS", + targetCollection: "Inventory", + trigger: ["before_insert", "before_update"], + executionMode: "sync", + priority: 100, + condition: { + language: "jmespath", + expression: "data.quantityOnHand < `0` || data.quantityAllocated < `0`", + }, + 
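// Backtick values are JMESPath literals; the expression is evaluated against the full ExecutionContext, which is why fields are addressed as data.<field>.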
actions: [ + { + type: "validate", + config: { + errorCode: "WMS_NEGATIVE_STOCK", + errorMessage: "Inventory quantities cannot be negative", + }, + }, + ], + enabled: true, + worldId: null, + version: 1, +}; + +/** + * Rule 2: Auto-Calculate Available Quantity + * Automatically calculates quantityAvailable = quantityOnHand - quantityAllocated + * Note: This is redundant with the Inventory model's pre-save hook, + * but demonstrates the transform action capability + */ +export const wmsCalculateAvailable: Partial = { + ruleId: "wms-calculate-available", + name: "Calculate Available Quantity", + description: "Automatically calculate quantityAvailable = quantityOnHand - quantityAllocated", + domain: "WMS", + targetCollection: "Inventory", + trigger: ["before_insert", "before_update"], + executionMode: "sync", + priority: 50, + condition: { + language: "jmespath", + expression: "data.quantityOnHand != `null` && data.quantityAllocated != `null`", + }, + actions: [ + { + type: "transform", + config: { + type: "javascript", + script: + "(data) => { data.quantityAvailable = data.quantityOnHand - data.quantityAllocated; return data; }", + }, + }, + ], + enabled: true, + worldId: null, + version: 1, +}; + +/** + * Rule 3: Validate Allocation + * Prevents allocating more inventory than available on hand + */ +export const wmsValidateAllocation: Partial = { + ruleId: "wms-validate-allocation", + name: "Validate Allocation Does Not Exceed Available", + description: "Prevent allocating more inventory than available", + domain: "WMS", + targetCollection: "Inventory", + trigger: ["before_insert", "before_update"], + executionMode: "sync", + priority: 100, + condition: { + language: "jmespath", + expression: "data.quantityAllocated > data.quantityOnHand", + }, + actions: [ + { + type: "validate", + config: { + errorCode: "WMS_ALLOCATION_EXCEEDS_AVAILABLE", + errorMessage: + "Cannot allocate more than available inventory (allocated: {{data.quantityAllocated}}, on hand: {{data.quantityOnHand}})", + }, + }, + ], + enabled: true, + worldId: null, + version: 1, +}; + +/** + * Rule 4: Auto-Allocate Inventory on Order Creation + * Triggers allocation workflow when outbound order is created + */ +export const wmsAutoAllocateOrder: Partial = { + ruleId: "wms-auto-allocate-order", + name: "Auto-Allocate Inventory on Order Creation", + description: "Automatically allocate inventory when outbound order is created", + domain: "WMS", + targetCollection: "OutboundOrder", + trigger: ["after_insert"], + executionMode: "async", + priority: 200, + condition: { + language: "jmespath", + expression: "data.orderStatus == `CREATED`", + }, + actions: [ + { + type: "triggerOD", + config: { + odName: "wms-inventory-allocation", + input: { + orderId: "{{data._id}}", + orderNumber: "{{data.orderNumber}}", + lines: "{{data.lines}}", + worldId: "{{data.worldRef.worldId}}", + }, + }, + }, + { + type: "log", + config: { + level: "info", + message: "Triggered inventory allocation workflow for order {{data.orderNumber}}", + metadata: { + orderId: "{{data._id}}", + orderNumber: "{{data.orderNumber}}", + }, + }, + }, + ], + enabled: true, + worldId: null, + version: 1, +}; + +/** + * Rule 5: Trigger Replenishment When Low Stock + * Creates replenishment task when inventory falls below threshold + */ +export const wmsTriggerReplenishment: Partial = { + ruleId: "wms-trigger-replenishment", + name: "Trigger Replenishment on Low Stock", + description: "Create replenishment task when inventory falls below threshold", + domain: "WMS", + 
targetCollection: "Inventory", + trigger: ["after_insert", "after_update"], + executionMode: "async", + priority: 150, + condition: { + language: "jmespath", + expression: "data.quantityAvailable < `10` && data.inventoryStatus == `AVAILABLE`", + }, + actions: [ + { + type: "createRecord", + config: { + collection: "Task", + data: { + taskId: "TASK-REPL-{{data.sku}}", + taskType: "REPLENISHMENT", + sku: "{{data.sku}}", + productId: "{{data.productId}}", + from: { + binId: "BULK_STORAGE", + }, + to: { + binId: "{{data.binId}}", + }, + quantity: { + requested: 50, + uom: "{{data.uom}}", + }, + priority: 50, + taskStatus: "CREATED", + worldRef: { + worldId: "{{data.worldRef.worldId}}", + dcId: "{{data.worldRef.dcId}}", + }, + }, + }, + }, + { + type: "log", + config: { + level: "warn", + message: "Low stock detected for SKU {{data.sku}}, replenishment task created", + metadata: { + sku: "{{data.sku}}", + quantityAvailable: "{{data.quantityAvailable}}", + binId: "{{data.binId}}", + }, + }, + }, + ], + enabled: true, + worldId: null, + version: 2, +}; + +/** + * Rule 6: Prevent Allocation of Quarantined/Damaged Inventory + * Blocks allocation of inventory not in AVAILABLE status + */ +export const wmsPreventBadInventoryAllocation: Partial = { + ruleId: "wms-prevent-bad-inventory-allocation", + name: "Prevent Allocation of Unusable Inventory", + description: "Block allocation of inventory in QUARANTINE, DAMAGED, or EXPIRED status", + domain: "WMS", + targetCollection: "Inventory", + trigger: ["before_update"], + executionMode: "sync", + priority: 90, + condition: { + language: "jmespath", + expression: "data.inventoryStatus != `AVAILABLE` && data.quantityAllocated > `0`", + }, + actions: [ + { + type: "validate", + config: { + errorCode: "WMS_CANNOT_ALLOCATE_UNUSABLE_INVENTORY", + errorMessage: + "Cannot allocate inventory that is not in AVAILABLE status (current status: {{data.inventoryStatus}})", + }, + }, + ], + enabled: true, + worldId: null, + version: 1, +}; + +/** + * All WMS rules for seeding + */ +export const allWmsRules = [ + wmsPreventNegativeStock, + wmsCalculateAvailable, + wmsValidateAllocation, + wmsAutoAllocateOrder, + wmsTriggerReplenishment, + wmsPreventBadInventoryAllocation, +]; diff --git a/packages/controlmart/src/business-rules/seed-rules.ts b/packages/controlmart/src/business-rules/seed-rules.ts new file mode 100644 index 0000000000000000000000000000000000000000..c3e93088cbcf308d704e20e981c6bc4e7cf7db92 --- /dev/null +++ b/packages/controlmart/src/business-rules/seed-rules.ts @@ -0,0 +1,92 @@ +import { BusinessRule } from "../models/business-rule.model"; +import { allWmsRules } from "./rules/wms-rules"; +import { createAppLogger } from "../utils/logger.util"; + +const logger = createAppLogger({ service: "seed-rules" }); + +/** + * Seed business rules into the database + * Uses version-aware upsert logic: + * - If rule doesn't exist → create it + * - If rule exists but version changed → update it + * - If rule exists and version matches → skip it + */ +export async function seedBusinessRules(): Promise { + try { + logger.info("Starting business rules seeding..."); + + let created = 0; + let updated = 0; + let skipped = 0; + + for (const ruleData of allWmsRules) { + try { + const existing = await BusinessRule.findOne({ ruleId: ruleData.ruleId }); + + if (!existing) { + // Create new rule + await BusinessRule.create(ruleData); + created++; + logger.info( + { + ruleId: ruleData.ruleId, + name: ruleData.name, + version: ruleData.version, + }, + "Created business rule", + ); + 
} else if (existing.version !== ruleData.version) { + // Update existing rule (version changed) + await BusinessRule.findByIdAndUpdate(existing._id, ruleData); + updated++; + logger.info( + { + ruleId: ruleData.ruleId, + name: ruleData.name, + oldVersion: existing.version, + newVersion: ruleData.version, + }, + "Updated business rule", + ); + } else { + // Skip (already exists with same version) + skipped++; + logger.debug( + { + ruleId: ruleData.ruleId, + version: ruleData.version, + }, + "Skipped business rule (already exists)", + ); + } + } catch (error) { + logger.error( + { + ruleId: ruleData.ruleId, + error: error instanceof Error ? error.message : String(error), + }, + "Failed to seed business rule", + ); + // Continue with next rule instead of failing entire seeding + } + } + + logger.info( + { + created, + updated, + skipped, + total: allWmsRules.length, + }, + "Business rules seeding complete", + ); + } catch (error) { + logger.error( + { + error: error instanceof Error ? error.message : String(error), + }, + "Business rules seeding failed", + ); + // Don't throw - allow server to start even if seeding fails + } +} diff --git a/packages/controlmart/src/business-rules/types/rule.type.ts b/packages/controlmart/src/business-rules/types/rule.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..be25310f0a763d92e32da90e5dce50a9e8ebe8b4 --- /dev/null +++ b/packages/controlmart/src/business-rules/types/rule.type.ts @@ -0,0 +1,138 @@ +import type { TBusinessRuleModel } from "../../models/business-rule.model"; + +/** + * Execution context passed to rule evaluators and action handlers + */ +export interface ExecutionContext { + /** The data being operated on */ + data: any; + + /** Original data before any transformations */ + originalData?: any; + + /** World ID for multi-tenancy */ + worldId: string | null; + + /** Domain (ERP, WMS, TMS, EDI) */ + domain: string; + + /** Collection/model name */ + collection: string; + + /** Trigger type (before_insert, after_update, etc.) 
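 * Values emitted by the mongoose hooks: before_insert, after_insert, before_update, after_update, before_delete, after_delete.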
*/ + trigger: string; + + /** User or system that initiated the operation */ + actor?: string; + + /** Additional metadata */ + metadata?: Record; +} + +/** + * Options for rule execution + */ +export interface RuleExecutionOptions { + /** Skip async rule execution */ + skipAsync?: boolean; + + /** Dry run mode (don't execute actions, just evaluate) */ + dryRun?: boolean; + + /** Maximum execution time in milliseconds */ + timeout?: number; + + /** Additional context to pass to rules */ + context?: Record; +} + +/** + * Result of rule execution + */ +export interface RuleExecutionResult { + /** Whether execution was successful */ + success: boolean; + + /** Modified data (for transform actions) */ + data?: any; + + /** Errors that occurred */ + errors?: Array<{ + ruleId: string; + ruleName: string; + error: string; + action?: string; + }>; + + /** Rules that were executed */ + executedRules: Array<{ + ruleId: string; + ruleName: string; + conditionMet: boolean; + actions: string[]; + }>; + + /** Execution metrics */ + metrics?: { + totalRules: number; + executedRules: number; + syncRules: number; + asyncRules: number; + executionTimeMs: number; + }; +} + +/** + * Action handler interface + */ +export interface ActionHandler { + /** + * Execute the action + * @param action - Action configuration from rule + * @param context - Execution context + * @param rule - The rule being executed + * @returns Modified data (for sync actions) or void (for async actions) + */ + execute( + action: { type: string; config: any }, + context: ExecutionContext, + rule: TBusinessRuleModel, + ): Promise; +} + +/** + * Custom error class for business rule violations + */ +export class BusinessRuleError extends Error { + public readonly code: string; + public readonly ruleId: string; + public readonly ruleName: string; + public readonly details?: any; + + constructor(message: string, ruleId: string, ruleName: string, code?: string, details?: any) { + super(message); + this.name = "BusinessRuleError"; + this.code = code || "BUSINESS_RULE_VIOLATION"; + this.ruleId = ruleId; + this.ruleName = ruleName; + this.details = details; + + // Maintains proper stack trace for where error was thrown + if (Error.captureStackTrace) { + Error.captureStackTrace(this, BusinessRuleError); + } + } + + /** + * Convert error to JSON for API responses + */ + toJSON() { + return { + error: this.message, + code: this.code, + ruleId: this.ruleId, + ruleName: this.ruleName, + details: this.details, + }; + } +} diff --git a/packages/controlmart/src/capabilities/catalog.ts b/packages/controlmart/src/capabilities/catalog.ts new file mode 100644 index 0000000000000000000000000000000000000000..c549881e2170ba4ed7ea6bdddb9e5aea09dda7d6 --- /dev/null +++ b/packages/controlmart/src/capabilities/catalog.ts @@ -0,0 +1,389 @@ +/** + * Initial Capability Catalog + * + * Catalog of capabilities mapped 1:1 to ODs. + * Includes capabilities for both perishables-food-manufacturer and manufacturing-unit worlds. + */ + +import type { Capability } from '../types/capability.type'; + +export const INITIAL_CAPABILITIES: Capability[] = [ + // === INVENTORY & WAREHOUSE ODs === + { + id: 'aging-inventory-check', + name: 'Aging Inventory Check', + description: + 'Check for expired inventory and write off spoilage. 
Background job that identifies perishable items past their shelf life and records inventory adjustments.', + tags: { + domain: ['inventory', 'quality-control'], + complexity: 'medium', + services: ['erp', 'wms', 'finance'], + personas: ['quality-control-specialist'], + patterns: ['background-job'], + }, + personas: ['warehouse-manager', 'system-administrator'], + odId: 'aging-inventory-check', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 5000, + }, + }, + + { + id: 'daily-inventory-check', + name: 'Daily Inventory Check', + description: + 'Daily cycle count, inventory verification, and metrics recording. Background job that reconciles inventory levels and generates daily warehouse metrics.', + tags: { + domain: ['inventory', 'warehousing'], + complexity: 'medium', + services: ['wms', 'finance'], + personas: ['inventory-manager'], + patterns: ['background-job'], + }, + personas: ['warehouse-manager', 'system-administrator'], + odId: 'daily-inventory-check', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 5000, + }, + }, + + { + id: 'putaway-process', + name: 'Putaway Process', + description: + 'Move received inventory from Dock to Storage locations. Assigns warehouse tasks to transfer goods from receiving area to appropriate bin locations.', + tags: { + domain: ['warehousing', 'logistics'], + complexity: 'medium', + services: ['wms'], + personas: ['forklift-operator'], + patterns: ['sequential'], + }, + personas: ['warehouse-worker', 'warehouse-manager'], + odId: 'putaway-process', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 4000, + }, + }, + + // === SALES & FULFILLMENT ODs === + { + id: 'customer-full-edi-cycle', + name: 'Customer Full EDI Cycle', + description: + 'Complete EDI flow: Customer places PO (850) -> Sales Order -> Ack (855) -> Shipment -> ASN (856) -> Invoice (810). End-to-end B2B transaction with EDI document exchange.', + tags: { + domain: ['sales', 'edi', 'fulfillment'], + complexity: 'complex', + services: ['erp', 'wms', 'tms', 'edi', 'finance'], + personas: ['system-integrator'], + patterns: ['sequential', 'multi-service'], + }, + personas: ['system-administrator'], + odId: 'customer-full-edi-cycle', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 15000, + }, + }, + + { + id: 'pick-pack-ship', + name: 'Pick Pack and Ship', + description: + 'Fulfill sales order - pick inventory, pack, and ship to customer. Creates warehouse tasks, deducts inventory, schedules carrier, and generates ASN.', + tags: { + domain: ['fulfillment', 'warehousing', 'shipping'], + complexity: 'complex', + services: ['wms', 'erp', 'tms', 'edi'], + personas: ['fulfillment-associate'], + patterns: ['sequential', 'multi-service'], + }, + personas: ['warehouse-worker', 'warehouse-manager'], + odId: 'pick-pack-ship', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 10000, + }, + }, + + { + id: 'invoice-and-payment', + name: 'Invoice and Payment', + description: + 'Invoice customer for shipped order and record payment receipt. 
Creates invoice, sends EDI 810, and processes customer payment.', + tags: { + domain: ['sales', 'finance', 'edi'], + complexity: 'medium', + services: ['erp', 'edi', 'finance'], + personas: ['accounts-receivable-clerk'], + patterns: ['sequential'], + }, + personas: ['store-manager', 'system-administrator'], + odId: 'invoice-and-payment', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 6000, + }, + }, + + // === PROCUREMENT & RECEIVING ODs === + { + id: 'raw-material-procurement', + name: 'Raw Material Procurement', + description: + 'MPC orders raw materials from suppliers when inventory is low. Creates purchase orders with EDI 850 and coordinates with suppliers.', + tags: { + domain: ['procurement', 'purchasing'], + complexity: 'medium', + services: ['erp', 'edi', 'finance'], + personas: ['procurement-manager'], + patterns: ['sequential'], + }, + personas: ['warehouse-manager', 'system-administrator'], + odId: 'raw-material-procurement', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 8000, + }, + }, + + { + id: 'receive-supplier-shipment', + name: 'Receive Supplier Shipment', + description: + 'Receive raw materials at warehouse and process supplier payment. Creates receiving transactions, updates inventory, and records accounts payable.', + tags: { + domain: ['receiving', 'procurement'], + complexity: 'medium', + services: ['erp', 'wms', 'finance'], + personas: ['dock-receiver'], + patterns: ['sequential'], + }, + personas: ['warehouse-worker', 'warehouse-manager'], + odId: 'receive-supplier-shipment', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 6000, + }, + }, + + { + id: 'supplier-reorder-trigger', + name: 'Supplier Reorder Trigger', + description: + 'Check inventory levels and trigger reorder when stock is low. Background job that monitors inventory and initiates procurement when thresholds are crossed.', + tags: { + domain: ['procurement', 'inventory'], + complexity: 'medium', + services: ['wms', 'erp', 'edi', 'finance'], + personas: ['procurement-bot'], + patterns: ['background-job'], + }, + personas: ['system-administrator'], + odId: 'supplier-reorder-trigger', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 10000, + }, + }, + + { + id: 'inbound-asn-process', + name: 'Inbound ASN Process', + description: + 'Receive Advanced Shipping Notice (ASN) from supplier and create Inbound Order. Processes EDI 856 and prepares warehouse for incoming shipment.', + tags: { + domain: ['logistics', 'receiving', 'edi'], + complexity: 'medium', + services: ['erp', 'wms', 'edi'], + personas: ['logistics-coordinator'], + patterns: ['sequential'], + }, + personas: ['warehouse-manager', 'system-administrator'], + odId: 'inbound-asn-process', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 5000, + }, + }, + + // === MANUFACTURING ODs === + { + id: 'production-order', + name: 'Production Order', + description: + 'Create production order for chips or ice cream based on product BOM. 
Schedules manufacturing run and reserves raw materials.', + tags: { + domain: ['manufacturing', 'production'], + complexity: 'medium', + services: ['erp', 'wms'], + personas: ['production-planner'], + patterns: ['sequential'], + }, + personas: ['warehouse-manager', 'system-administrator'], + odId: 'production-order', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 5000, + }, + }, + + { + id: 'inventory-consumption', + name: 'Inventory Consumption', + description: + 'Consume raw materials for production and add finished goods. Deducts BOM components and creates inventory for manufactured products.', + tags: { + domain: ['manufacturing', 'inventory'], + complexity: 'medium', + services: ['erp', 'wms'], + personas: ['production-operator'], + patterns: ['sequential'], + }, + personas: ['warehouse-worker', 'warehouse-manager'], + odId: 'inventory-consumption', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 5000, + }, + }, + + { + id: 'material-pick-for-production', + name: 'Material Pick for Production', + description: + 'Pick raw materials from warehouse storage for production orders. Creates warehouse tasks to transfer materials to production staging area.', + tags: { + domain: ['warehouse', 'manufacturing'], + complexity: 'medium', + services: ['wms', 'erp'], + personas: ['forklift-operator'], + patterns: ['pick-operation'], + }, + personas: ['warehouse-worker', 'production-manager'], + odId: 'material-pick-for-production', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 4000, + }, + }, + + { + id: 'manufacturing-execution', + name: 'Manufacturing Execution', + description: + 'Execute production runs and consume raw materials to create finished goods. Manages BOM consumption and production output.', + tags: { + domain: ['manufacturing'], + complexity: 'complex', + services: ['erp', 'wms'], + personas: ['production-operator'], + patterns: ['production-run'], + }, + personas: ['production-manager', 'plant-manager'], + odId: 'manufacturing-execution', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 8000, + }, + }, + + { + id: 'manufacturing-execution-detailed', + name: 'Manufacturing Execution (Detailed)', + description: + 'Detailed manufacturing execution with step-by-step tracking through MES integration. Includes quality checkpoints and production metrics.', + tags: { + domain: ['manufacturing'], + complexity: 'complex', + services: ['erp', 'wms', 'mes'], + personas: ['production-operator'], + patterns: ['production-run', 'detailed-tracking'], + }, + personas: ['production-manager', 'plant-manager'], + odId: 'manufacturing-execution-detailed', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 12000, + }, + }, + + { + id: 'finished-goods-receipt', + name: 'Finished Goods Receipt', + description: + 'Receive finished goods from production into warehouse inventory. 
Creates putaway tasks and updates inventory with manufactured products.', + tags: { + domain: ['warehouse', 'manufacturing'], + complexity: 'medium', + services: ['wms', 'erp'], + personas: ['forklift-operator'], + patterns: ['receipt-operation'], + }, + personas: ['warehouse-worker', 'production-manager'], + odId: 'finished-goods-receipt', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 4000, + }, + }, + + // === FINANCE ODs === + { + id: 'financial-reconciliation', + name: 'Financial Reconciliation', + description: + 'Daily financial reconciliation and reporting. Background job that aggregates transactions, updates ledger, and generates financial summaries.', + tags: { + domain: ['finance', 'accounting'], + complexity: 'medium', + services: ['finance'], + personas: ['financial-controller'], + patterns: ['background-job'], + }, + personas: ['store-manager', 'system-administrator'], + odId: 'financial-reconciliation', + version: '1.0.0', + metadata: { + author: 'morpheus-team', + createdAt: new Date('2025-01-01'), + estimatedDuration: 5000, + }, + }, +]; diff --git a/packages/controlmart/src/controller/auditlog.controller.ts b/packages/controlmart/src/controller/auditlog.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..c36e623f245a5303c5b68601a8d32018b341132f --- /dev/null +++ b/packages/controlmart/src/controller/auditlog.controller.ts @@ -0,0 +1,26 @@ +import type { Request, Response } from "express"; + +import { AuditRecordRepository } from "../repository"; +import { asyncHandler, sendResponse } from "../utils/http.util"; + +export const getAuditLogsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { model, documentId, dateStart, dateEnd } = req.query; + + if (!worldId) { + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + } + + const auditLogs = await AuditRecordRepository.getAuditLogs({ + worldId, + model: model as string, + documentId: documentId as string, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? 
new Date(dateEnd as string) : undefined, + }); + return sendResponse({ res, status: 200, data: auditLogs }); +}); diff --git a/packages/controlmart/src/controller/business-rules.controller.ts b/packages/controlmart/src/controller/business-rules.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..67804dd35c4e8accb6938d59ffdc97fc0f90a4ef --- /dev/null +++ b/packages/controlmart/src/controller/business-rules.controller.ts @@ -0,0 +1,482 @@ +import type { Request, Response } from "express"; + +import { BusinessRule } from "../models/business-rule.model"; +import { businessRuleEngine } from "../business-rules/rule-engine"; +import { ruleRegistry } from "../business-rules/rule-registry"; +import { sendResponse } from "../utils/http.util"; +import { getErrorMessage } from "../utils/error.util"; +import { createAppLogger } from "../utils/logger.util"; + +const logger = createAppLogger({ service: "business-rules-controller" }); + +/** + * Business Rules Controller + * Phase 4: REST API for Business Rule Management + */ +export class BusinessRulesController { + /** + * POST /rules - Create a new business rule + */ + static async createRuleController(req: Request, res: Response) { + try { + const ruleData = req.body; + + // Check if rule with this ID already exists + const existing = await BusinessRule.findOne({ ruleId: ruleData.ruleId }); + if (existing) { + return sendResponse({ + res, + status: 409, + error: `Business rule with ID '${ruleData.ruleId}' already exists`, + }); + } + + // Create the rule + const rule = await BusinessRule.create(ruleData); + + // Invalidate cache + ruleRegistry.invalidateCache(rule.domain, rule.targetCollection, rule.worldId); + + logger.info( + { + ruleId: rule.ruleId, + domain: rule.domain, + targetCollection: rule.targetCollection, + }, + "Business rule created via API", + ); + + sendResponse({ + res, + status: 201, + data: rule, + meta: { message: "Business rule created successfully" }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to create business rule"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * GET /rules - List business rules with optional filters + */ + static async listRulesController(req: Request, res: Response) { + try { + const { + domain, + targetCollection, + worldId, + enabled, + trigger, + limit = 100, + skip = 0, + } = req.query; + + // Build filter + const filter: any = {}; + if (domain) filter.domain = domain; + if (targetCollection) filter.targetCollection = targetCollection; + if (worldId !== undefined) filter.worldId = worldId === "null" ? 
null : worldId; + if (enabled !== undefined) filter.enabled = enabled === true || enabled === "true"; + if (trigger) filter.trigger = trigger; + + // Query with pagination + const rules = await BusinessRule.find(filter) + .sort({ domain: 1, targetCollection: 1, priority: 1 }) + .limit(Number(limit)) + .skip(Number(skip)) + .lean(); + + const count = await BusinessRule.countDocuments(filter); + + sendResponse({ + res, + status: 200, + data: rules, + meta: { + count, + limit: Number(limit), + skip: Number(skip), + }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to list business rules"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * GET /rules/:ruleId - Get a single business rule + */ + static async getRuleController(req: Request, res: Response) { + try { + const { ruleId } = req.params; + + const rule = await BusinessRule.findOne({ ruleId }).lean(); + if (!rule) { + return sendResponse({ + res, + status: 404, + error: `Business rule '${ruleId}' not found`, + }); + } + + sendResponse({ + res, + status: 200, + data: rule, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to get business rule"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * PUT /rules/:ruleId - Update a business rule + */ + static async updateRuleController(req: Request, res: Response) { + try { + const { ruleId } = req.params; + const updates = req.body; + + const rule = await BusinessRule.findOne({ ruleId }); + if (!rule) { + return sendResponse({ + res, + status: 404, + error: `Business rule '${ruleId}' not found`, + }); + } + + // Apply updates + Object.assign(rule, updates); + await rule.save(); + + // Invalidate cache + ruleRegistry.invalidateCache(rule.domain, rule.targetCollection, rule.worldId); + + logger.info( + { + ruleId: rule.ruleId, + domain: rule.domain, + targetCollection: rule.targetCollection, + }, + "Business rule updated via API", + ); + + sendResponse({ + res, + status: 200, + data: rule, + meta: { message: "Business rule updated successfully" }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to update business rule"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * DELETE /rules/:ruleId - Delete a business rule + */ + static async deleteRuleController(req: Request, res: Response) { + try { + const { ruleId } = req.params; + + const rule = await BusinessRule.findOne({ ruleId }); + if (!rule) { + return sendResponse({ + res, + status: 404, + error: `Business rule '${ruleId}' not found`, + }); + } + + const domain = rule.domain; + const targetCollection = rule.targetCollection; + const worldId = rule.worldId; + + await BusinessRule.deleteOne({ ruleId }); + + // Invalidate cache + ruleRegistry.invalidateCache(domain, targetCollection, worldId); + + logger.info( + { + ruleId, + domain, + targetCollection, + }, + "Business rule deleted via API", + ); + + sendResponse({ + res, + status: 200, + data: { ruleId }, + meta: { message: "Business rule deleted successfully" }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to delete business rule"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * POST /rules/:ruleId/enable - Enable a business rule + */ + static async enableRuleController(req: Request, res: Response) { + try { + const { ruleId } = req.params; + + const rule = await 
BusinessRule.findOne({ ruleId }); + if (!rule) { + return sendResponse({ + res, + status: 404, + error: `Business rule '${ruleId}' not found`, + }); + } + + rule.enabled = true; + await rule.save(); + + // Invalidate cache + ruleRegistry.invalidateCache(rule.domain, rule.targetCollection, rule.worldId); + + logger.info( + { + ruleId: rule.ruleId, + domain: rule.domain, + targetCollection: rule.targetCollection, + }, + "Business rule enabled via API", + ); + + sendResponse({ + res, + status: 200, + data: rule, + meta: { message: "Business rule enabled successfully" }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to enable business rule"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * POST /rules/:ruleId/disable - Disable a business rule + */ + static async disableRuleController(req: Request, res: Response) { + try { + const { ruleId } = req.params; + + const rule = await BusinessRule.findOne({ ruleId }); + if (!rule) { + return sendResponse({ + res, + status: 404, + error: `Business rule '${ruleId}' not found`, + }); + } + + rule.enabled = false; + await rule.save(); + + // Invalidate cache + ruleRegistry.invalidateCache(rule.domain, rule.targetCollection, rule.worldId); + + logger.info( + { + ruleId: rule.ruleId, + domain: rule.domain, + targetCollection: rule.targetCollection, + }, + "Business rule disabled via API", + ); + + sendResponse({ + res, + status: 200, + data: rule, + meta: { message: "Business rule disabled successfully" }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to disable business rule"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * POST /rules/test - Test a rule against sample data + */ + static async testRuleController(req: Request, res: Response) { + try { + const { rule, sampleData, context } = req.body; + + // Execute the rule in test mode (no side effects) + const result = await businessRuleEngine.executeRules( + rule.domain, + rule.targetCollection, + rule.trigger[0] || "test_trigger", + sampleData, + { + skipAsync: true, // Don't execute async actions in test mode + context: context || {}, + }, + ); + + sendResponse({ + res, + status: 200, + data: { + applicable: result.rulesExecuted > 0, + result, + }, + meta: { message: "Rule tested successfully" }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to test business rule"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * POST /rules/bulk - Bulk create business rules + */ + static async bulkCreateRulesController(req: Request, res: Response) { + try { + const { rules } = req.body; + + const results = { + created: 0, + failed: 0, + errors: [] as Array<{ ruleId: string; error: string }>, + }; + + for (const ruleData of rules) { + try { + // Check if rule exists + const existing = await BusinessRule.findOne({ ruleId: ruleData.ruleId }); + if (existing) { + results.failed++; + results.errors.push({ + ruleId: ruleData.ruleId, + error: "Rule already exists", + }); + continue; + } + + // Create rule + await BusinessRule.create(ruleData); + results.created++; + + // Invalidate cache + ruleRegistry.invalidateCache( + ruleData.domain, + ruleData.targetCollection, + ruleData.worldId, + ); + } catch (err) { + results.failed++; + results.errors.push({ + ruleId: ruleData.ruleId, + error: getErrorMessage(err), + }); + } + } + + logger.info(results, "Bulk create business rules 
completed"); + + sendResponse({ + res, + status: 200, + data: results, + meta: { + message: `Bulk create completed: ${results.created} created, ${results.failed} failed`, + }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to bulk create business rules"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } + + /** + * DELETE /rules/bulk - Bulk delete business rules + */ + static async bulkDeleteRulesController(req: Request, res: Response) { + try { + const { ruleIds } = req.body; + + const result = await BusinessRule.deleteMany({ ruleId: { $in: ruleIds } }); + + // Invalidate entire cache (can't track individual rules) + ruleRegistry.invalidateCache(); + + logger.info( + { + ruleIds, + deletedCount: result.deletedCount, + }, + "Bulk delete business rules completed", + ); + + sendResponse({ + res, + status: 200, + data: { + deleted: result.deletedCount, + requested: ruleIds.length, + }, + meta: { message: `Bulk delete completed: ${result.deletedCount} rules deleted` }, + }); + } catch (err) { + logger.error({ error: getErrorMessage(err) }, "Failed to bulk delete business rules"); + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); + } + } +} diff --git a/packages/controlmart/src/controller/capabilities.controller.ts b/packages/controlmart/src/controller/capabilities.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..fa9595aa1c53b58148da54d13afff28ca448dbc1 --- /dev/null +++ b/packages/controlmart/src/controller/capabilities.controller.ts @@ -0,0 +1,241 @@ +import type { Request, Response } from "express"; +import { CapabilityRepository } from "../repository/capability.repository"; +import { capabilityCatalog } from "../services/capability-catalog.service"; +import { CapabilityCreateSchema, CapabilityUpdateSchema } from "../utils/validators/capability.validator"; +import { sendResponse } from "../utils/http.util"; +import { getErrorMessage } from "../utils/error.util"; +import { RepositoryError } from "../utils/error.util"; +import { ZodError } from "zod"; + +/** + * Create a new capability + * POST /api/capabilities + */ +export const createCapabilityController = async (req: Request, res: Response) => { + try { + // Validate request body + const validated = CapabilityCreateSchema.parse(req.body); + + // Create capability + const capability = await CapabilityRepository.create(validated); + + // Reload catalog cache + await capabilityCatalog.reload(); + + return sendResponse({ + res, + statusCode: 201, + data: { + success: true, + message: "Capability created successfully", + data: capability, + }, + }); + } catch (error) { + if (error instanceof ZodError) { + return sendResponse({ + res, + statusCode: 400, + data: { + success: false, + error: "Validation failed", + details: error.errors, + }, + }); + } + + if (error instanceof RepositoryError) { + const statusCode = error.code === "VALIDATION_ERROR" ? 
400 : 500; + return sendResponse({ + res, + statusCode, + data: { + success: false, + error: error.message, + }, + }); + } + + return sendResponse({ + res, + statusCode: 500, + data: { + success: false, + error: "Failed to create capability", + details: getErrorMessage(error), + }, + }); + } +}; + +/** + * Update an existing capability + * PUT /api/capabilities/:id + */ +export const updateCapabilityController = async (req: Request, res: Response) => { + try { + const { id } = req.params; + + if (!id) { + return sendResponse({ + res, + statusCode: 400, + data: { + success: false, + error: "Capability ID is required", + }, + }); + } + + // Validate request body + const validated = CapabilityUpdateSchema.parse(req.body); + + // Check if capability exists + const existing = await CapabilityRepository.findById(id); + if (!existing) { + return sendResponse({ + res, + statusCode: 404, + data: { + success: false, + error: `Capability '${id}' not found`, + }, + }); + } + + // Update capability + const updated = await CapabilityRepository.update(id, validated); + + // Reload catalog cache + await capabilityCatalog.reload(); + + return sendResponse({ + res, + statusCode: 200, + data: { + success: true, + message: "Capability updated successfully", + data: updated, + }, + }); + } catch (error) { + if (error instanceof ZodError) { + return sendResponse({ + res, + statusCode: 400, + data: { + success: false, + error: "Validation failed", + details: error.errors, + }, + }); + } + + if (error instanceof RepositoryError) { + const statusCode = error.code === "NOT_FOUND_ERROR" ? 404 : + error.code === "VALIDATION_ERROR" ? 400 : 500; + return sendResponse({ + res, + statusCode, + data: { + success: false, + error: error.message, + }, + }); + } + + return sendResponse({ + res, + statusCode: 500, + data: { + success: false, + error: "Failed to update capability", + details: getErrorMessage(error), + }, + }); + } +}; + +/** + * Delete a capability + * DELETE /api/capabilities/:id + */ +export const deleteCapabilityController = async (req: Request, res: Response) => { + try { + const { id } = req.params; + + if (!id) { + return sendResponse({ + res, + statusCode: 400, + data: { + success: false, + error: "Capability ID is required", + }, + }); + } + + // Check if capability exists + const existing = await CapabilityRepository.findById(id); + if (!existing) { + return sendResponse({ + res, + statusCode: 404, + data: { + success: false, + error: `Capability '${id}' not found`, + }, + }); + } + + // Delete capability + const deleted = await CapabilityRepository.delete(id); + + if (!deleted) { + return sendResponse({ + res, + statusCode: 500, + data: { + success: false, + error: "Failed to delete capability", + }, + }); + } + + // Reload catalog cache + await capabilityCatalog.reload(); + + return sendResponse({ + res, + statusCode: 200, + data: { + success: true, + message: `Capability '${id}' deleted successfully`, + data: { id, deleted: true }, + }, + }); + } catch (error) { + if (error instanceof RepositoryError) { + const statusCode = error.code === "NOT_FOUND_ERROR" ? 404 : + error.code === "VALIDATION_ERROR" ? 
400 : 500; + return sendResponse({ + res, + statusCode, + data: { + success: false, + error: error.message, + }, + }); + } + + return sendResponse({ + res, + statusCode: 500, + data: { + success: false, + error: "Failed to delete capability", + details: getErrorMessage(error), + }, + }); + } +}; diff --git a/packages/controlmart/src/controller/capability-graph.controller.ts b/packages/controlmart/src/controller/capability-graph.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..3ab60961adb0711aea249a4ec88d37f83b0668dd --- /dev/null +++ b/packages/controlmart/src/controller/capability-graph.controller.ts @@ -0,0 +1,209 @@ +/** + * Capability Graph Controllers + * + * API controllers for knowledge graph queries on capabilities: + * - Validation: Check if a capability's OD is executable + * - Dependencies: Get comprehensive dependency information + * - Related: Find related capabilities based on graph relationships + */ + +import type { Request, Response } from 'express'; +import { knowledgeGraph } from '../services/knowledge-graph.service'; +import { capabilityCatalog } from '../services/capability-catalog.service'; +import { sendResponse } from '../utils/http.util'; +import { getErrorMessage } from '../utils/error.util'; + +/** + * Validate Capability Controller + * + * GET /api/capabilities/:id/validate + * + * Validates if a capability's OD can be executed by checking: + * - All required tools exist in the knowledge graph + * - All required services are available + * - Entity dependencies are satisfied + * + * @param req - Express request with capability ID in params + * @param res - Express response + * @returns ValidationResult with executable status and warnings + */ +export async function validateCapabilityController( + req: Request, + res: Response +): Promise { + try { + const { id } = req.params; + + // Validate capability ID + if (!id) { + return sendResponse({ + res, + status: 400, + error: 'Capability ID is required', + }); + } + + // Check if capability exists + const capability = capabilityCatalog.getById(id); + if (!capability) { + return sendResponse({ + res, + status: 404, + error: `Capability not found: ${id}`, + }); + } + + // Validate the capability's OD + const validation = knowledgeGraph.validateOD(capability.odId); + + // Set the capability ID in the result + validation.capabilityId = id; + + // Return validation result + sendResponse({ + res, + status: 200, + data: validation, + meta: { + capabilityId: id, + odId: capability.odId, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to validate capability: ${getErrorMessage(error)}`, + }); + } +} + +/** + * Get Capability Dependencies Controller + * + * GET /api/capabilities/:id/dependencies + * + * Returns comprehensive dependency information for a capability: + * - All tools used with execution order + * - Services required + * - Input entities (required externally) + * - Output entities (produced by the capability) + * + * @param req - Express request with capability ID in params + * @param res - Express response + * @returns DependencyInfo with tools, services, and entities + */ +export async function getCapabilityDependenciesController( + req: Request, + res: Response +): Promise { + try { + const { id } = req.params; + + // Validate capability ID + if (!id) { + return sendResponse({ + res, + status: 400, + error: 'Capability ID is required', + }); + } + + // Check if capability exists + const capability = capabilityCatalog.getById(id); + if 
(!capability) { + return sendResponse({ + res, + status: 404, + error: `Capability not found: ${id}`, + }); + } + + // Get dependency information + const dependencies = knowledgeGraph.getDependencies(id); + + // Return dependency information + sendResponse({ + res, + status: 200, + data: dependencies, + meta: { + capabilityId: id, + toolCount: dependencies.tools.length, + serviceCount: dependencies.services.length, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get dependencies: ${getErrorMessage(error)}`, + }); + } +} + +/** + * Get Related Capabilities Controller + * + * GET /api/capabilities/:id/related + * + * Finds capabilities related to the given capability based on: + * - Shared services (both use same service) + * - Shared entities (both work with same entities) + * - Prerequisites (produce entities this requires) + * - Suggested next (require entities this produces) + * + * @param req - Express request with capability ID in params + * @param res - Express response + * @returns Array of RelatedCapability with similarity scores + */ +export async function getRelatedCapabilitiesController( + req: Request, + res: Response +): Promise { + try { + const { id } = req.params; + + // Validate capability ID + if (!id) { + return sendResponse({ + res, + status: 400, + error: 'Capability ID is required', + }); + } + + // Check if capability exists + const capability = capabilityCatalog.getById(id); + if (!capability) { + return sendResponse({ + res, + status: 404, + error: `Capability not found: ${id}`, + }); + } + + // Find related capabilities + const related = knowledgeGraph.findRelatedCapabilities(id); + + // Return formatted response + sendResponse({ + res, + status: 200, + data: { + capabilityId: id, + count: related.length, + related, + }, + meta: { + capabilityId: id, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to find related capabilities: ${getErrorMessage(error)}`, + }); + } +} diff --git a/packages/controlmart/src/controller/docs.controller.ts b/packages/controlmart/src/controller/docs.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..919196976b45c16f1ebac125f3f22511e91157d7 --- /dev/null +++ b/packages/controlmart/src/controller/docs.controller.ts @@ -0,0 +1,54 @@ + +import { type Request, type Response } from "express"; +import { swaggerSpec } from "../utils/swagger.util"; +import { ServiceMesh, type THttpMethod } from "../utils/service-mesh.util"; +import { apiReference } from "@scalar/express-api-reference"; + +export const getSwaggerSpec = (_: Request, res: Response) => { + res.setHeader("Content-Type", "application/json"); + res.send(swaggerSpec); +}; + +export const getApiReference = apiReference({ + title: "ControlMart API Reference", + theme: "purple", + url: "/docs/swagger.json", +}); + +export const getServiceMeshDocs = (req: Request, res: Response) => { + try { + const { service, action, method, includeExamples } = req.query; + + if (!service || typeof service !== 'string') { + res.status(400).json({ + error: "Service parameter is required", + availableServices: Object.keys(ServiceMesh.getRegistry()) + }); + return; + } + + const filters: { action?: string; method?: THttpMethod } = {}; + if (action && typeof action === 'string') filters.action = action; + if (method && typeof method === 'string') filters.method = method as THttpMethod; + + const endpoints = ServiceMesh.findEndpoints(service, filters); + + const formattedDocs = endpoints.map(ep => ({ + 
path: ep.path, + method: ep.method, + summary: ep.summary, + formatted: ServiceMesh.getFormattedEndpointDocs(ep, { + includeExamples: includeExamples !== 'false' + }) + })); + + res.json({ + service, + filters, + count: endpoints.length, + endpoints: formattedDocs + }); + } catch (error) { + res.status(500).json({ error: "Failed to retrieve documentation" }); + } +}; diff --git a/packages/controlmart/src/controller/edi.controller.ts b/packages/controlmart/src/controller/edi.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..2beabc641d18ec30f6a6fb97bbc182359c3e91e5 --- /dev/null +++ b/packages/controlmart/src/controller/edi.controller.ts @@ -0,0 +1,295 @@ +import type { Request, Response } from "express"; + +import { EdiTransactionRepository } from "../repository/edi/edi.repository"; +import { sendResponse, asyncHandler } from "../utils/http.util"; + +export const getEdiDashboardController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = EdiTransactionRepository(worldId); + const dashboard = await repo.getEdiDashboard(); + sendResponse({ res, data: dashboard }); +}); + +export const createEdiTransactionController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + const repo = EdiTransactionRepository(worldId); + const ediTransaction = await repo.createEdiTransaction(data); + sendResponse({ res, data: ediTransaction, status: 201 }); +}); + +export const getEdiTransactionByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are required", + }); + + const repo = EdiTransactionRepository(worldId); + const ediTransaction = await repo.getEdiTransactionById(transactionId); + if (!ediTransaction) + return sendResponse({ + res, + status: 404, + error: `EDI Transaction ${transactionId} not found`, + }); + + sendResponse({ res, data: ediTransaction }); +}); + +export const getAllEdiTransactionsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + partnerId: req.query.partnerId as string, + customerId: req.query.customerId as string, + docType: req.query.docType as string, + direction: req.query.direction as "INBOUND" | "OUTBOUND", + status: req.query.status as string, + flowId: req.query.flowId as string, + dateStart: req.query.dateStart ? new Date(req.query.dateStart as string) : undefined, + dateEnd: req.query.dateEnd ? new Date(req.query.dateEnd as string) : undefined, + limit: req.query.limit ? parseInt(req.query.limit as string, 10) : undefined, + cursor: req.query.cursor ? 
(req.query.cursor as string) : undefined, + }; + + const repo = EdiTransactionRepository(worldId); + const result = await repo.getAllEdiTransactions(filters); + sendResponse({ + res, + data: result.items, + pagination: { + totalCount: result.totalCount, + limit: result.limit, + hasMore: result.hasMore, + nextCursor: result.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); +}); + +export const getAllEdiTransactionsControllerByPagesDeprecated = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + partnerId: req.query.partnerId as string, + customerId: req.query.customerId as string, + docType: req.query.docType as string, + direction: req.query.direction as "INBOUND" | "OUTBOUND", + status: req.query.status as string, + flowId: req.query.flowId as string, + dateStart: req.query.dateStart ? new Date(req.query.dateStart as string) : undefined, + dateEnd: req.query.dateEnd ? new Date(req.query.dateEnd as string) : undefined, + limit: req.query.limit ? parseInt(req.query.limit as string, 10) : undefined, + page: req.query.page ? parseInt(req.query.page as string, 10) : undefined, + pageSize: req.query.pageSize ? parseInt(req.query.pageSize as string, 10) : undefined, + }; + + const repo = EdiTransactionRepository(worldId); + const result = await repo.getEdiTransactionsByPageNumber(filters); + sendResponse({ + res, + data: result.items, + pagination: { + totalCount: result.totalCount, + limit: result.limit, + hasMore: result.hasMore, + nextCursor: null, + previousCursor: null, + }, + }); + }, +); + +export const updateEdiTransactionController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + const updateData = req.body; + + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are required", + }); + + const repo = EdiTransactionRepository(worldId); + const updated = await repo.updateEdiTransaction(transactionId, updateData); + sendResponse({ res, data: updated }); +}); + +export const updateEdiStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + const { status, errorReason, errorDetails } = req.body; + + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are required", + }); + if (!status) return sendResponse({ res, status: 400, error: "status is required" }); + + const repo = EdiTransactionRepository(worldId); + const updated = await repo.updateEdiStatus(transactionId, status, errorReason, errorDetails); + sendResponse({ res, data: updated }); +}); + +export const requeueEdiTransactionController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are required", + }); + + const repo = EdiTransactionRepository(worldId); + const requeued = await repo.requeueEdiTransaction(transactionId); + sendResponse({ res, data: requeued }); +}); + +export const deleteEdiTransactionController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are 
required", + }); + + const repo = EdiTransactionRepository(worldId); + const success = await repo.deleteEdiTransaction(transactionId); + if (!success) + return sendResponse({ + res, + status: 404, + error: `EDI Transaction ${transactionId} not found`, + }); + + sendResponse({ res, data: { deleted: true } }); +}); + +export const topEdiErrorsStatsByDocTypeController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { topLimit } = req.query; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = EdiTransactionRepository(worldId); + const stats = await repo.getTopEdiErrorsStatsByDocType( + topLimit ? parseInt(topLimit as string, 10) : 10, + ); + sendResponse({ res, data: { topEdiErrorsStatsByDocType: stats } }); + }, +); + +export const topEdiErrorStatsByPartnersController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { topLimit } = req.query; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = EdiTransactionRepository(worldId); + const stats = await repo.getTopEdiErrorStatsByPartners( + topLimit ? parseInt(topLimit as string, 10) : 10, + ); + sendResponse({ res, data: { topEdiErrorStatsByPartners: stats } }); + }, +); + +export const ediErrorStatsAggregationController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { aggregationType, dateStart, dateEnd } = req.query; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = EdiTransactionRepository(worldId); + let stats; + if (aggregationType === "by-doctype") { + stats = await repo.getTopEdiErrorsStatsByDocType( + 100, + dateStart ? new Date(dateStart as string) : undefined, + dateEnd ? new Date(dateEnd as string) : undefined, + ); + } else if (aggregationType === "by-partners") { + stats = await repo.getTopEdiErrorStatsByPartners( + 100, + dateStart ? new Date(dateStart as string) : undefined, + dateEnd ? new Date(dateEnd as string) : undefined, + ); + } else { + return sendResponse({ + res, + status: 400, + error: "Invalid aggregationType. Use 'by-doctype' or 'by-partners'.", + }); + } + + sendResponse({ res, data: { ediErrorStats: stats } }); + }, +); + +export const invoiceStatisticsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { dateStart, dateEnd } = req.query; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = EdiTransactionRepository(worldId); + const stats = await repo.ediInvoiceStatistics( + dateStart ? new Date(dateStart as string) : undefined, + dateEnd ? new Date(dateEnd as string) : undefined, + ); + sendResponse({ res, data: { invoiceStatistics: stats } }); +}); + +export const getEdiDollarAmountStatisticsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { aggregationType } = req.query; + const { dateStart, dateEnd } = req.query; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = EdiTransactionRepository(worldId); + if (aggregationType === "by-partners") { + const stats = await repo.getEdiDollarAmountExposureByPartners( + 100, + dateStart ? new Date(dateStart as string) : undefined, + dateEnd ? 
new Date(dateEnd as string) : undefined, + ); + sendResponse({ res, data: { ediDollarAmountExposureByPartners: stats } }); + return; + } else if (aggregationType === "by-document-type") { + const stats = await repo.getEdiDollarAmountExposureByDocumentType( + 100, + dateStart ? new Date(dateStart as string) : undefined, + dateEnd ? new Date(dateEnd as string) : undefined, + ); + sendResponse({ res, data: { ediDollarAmountExposureByDocumentType: stats } }); + return; + } else { + sendResponse({ + res, + status: 400, + error: "Invalid aggregationType. Use 'by-partners' or 'by-document-type'.", + }); + return; + } + }, +); diff --git a/packages/controlmart/src/controller/erp/company.controller.ts b/packages/controlmart/src/controller/erp/company.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..c4c71290424a884872903e2d3ef33301d50b9639 --- /dev/null +++ b/packages/controlmart/src/controller/erp/company.controller.ts @@ -0,0 +1,315 @@ +import type { Request, Response } from "express"; + +import { CompanyRepository } from "../../repository"; +import { sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createERPCompanyController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = CompanyRepository(worldId); + const company = await repo.createCompany(data); + sendResponse({ + res, + data: { ...company, id: getIdFromMongoObject(company) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create company: ${getErrorMessage(error)}`, + }); + } +}; + +export const getERPCompanyByIdController = async (req: Request, res: Response) => { + const { worldId, companyId } = req.params; + + if (!worldId || !companyId) + return sendResponse({ + res, + status: 400, + error: "worldId and companyId are required", + }); + + try { + const repo = CompanyRepository(worldId); + const company = await repo.getCompanyById(companyId); + + if (!company) { + return sendResponse({ + res, + status: 404, + error: "Company not found", + }); + } + + sendResponse({ + res, + data: { ...company, id: getIdFromMongoObject(company) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get company: ${getErrorMessage(error)}`, + }); + } +}; + +export const getERPCompanyByDunsNumberController = async (req: Request, res: Response) => { + const { worldId, dunsNumber } = req.params; + + if (!worldId || !dunsNumber) + return sendResponse({ + res, + status: 400, + error: "worldId and dunsNumber are required", + }); + + try { + const repo = CompanyRepository(worldId); + const company = await repo.getCompanyByDunsNumber(dunsNumber); + + if (!company) { + return sendResponse({ + res, + status: 404, + error: "Company not found", + }); + } + + sendResponse({ + res, + data: { ...company, id: getIdFromMongoObject(company) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get company by DUNS number: ${getErrorMessage(error)}`, + }); + } +}; + +export const getAllERPCompaniesController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if 
(!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + status: req.query.status as string | undefined, + companyType: req.query.companyType as string | undefined, + currency: req.query.currency as string | undefined, + isMpcCompany: req.query.isMpcCompany === "true", + search: req.query.search as string | undefined, + }; + + try { + const repo = CompanyRepository(worldId); + const companies = await repo.getAllCompanies(filters); + sendResponse({ + res, + data: companies.items.map((company) => ({ + ...company, + id: getIdFromMongoObject(company), + })), + pagination: { + limit: companies.limit, + previousCursor: req.query.cursor as string | null, + totalCount: companies.totalCount, + hasMore: companies.hasMore, + nextCursor: companies.nextCursor || null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get companies: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPCompanyController = async (req: Request, res: Response) => { + const { worldId, companyId } = req.params; + const data = req.body; + + if (!worldId || !companyId) + return sendResponse({ + res, + status: 400, + error: "worldId and companyId are required", + }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = CompanyRepository(worldId); + const company = await repo.updateCompany(companyId, data); + + if (!company) { + return sendResponse({ + res, + status: 404, + error: "Company not found", + }); + } + + sendResponse({ + res, + data: { ...company, id: getIdFromMongoObject(company) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update company: ${getErrorMessage(error)}`, + }); + } +}; + +export const deleteERPCompanyController = async (req: Request, res: Response) => { + const { worldId, companyId } = req.params; + + if (!worldId || !companyId) + return sendResponse({ + res, + status: 400, + error: "worldId and companyId are required", + }); + + try { + const repo = CompanyRepository(worldId); + const deleted = await repo.deleteCompany(companyId); + + if (!deleted) { + return sendResponse({ + res, + status: 404, + error: "Company not found", + }); + } + + sendResponse({ + res, + data: { message: "Company deleted successfully" }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to delete company: ${getErrorMessage(error)}`, + }); + } +}; + +export const getMpcERPCompanyController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + try { + const repo = CompanyRepository(worldId); + const company = await repo.getMpcCompany(); + + if (!company) { + return sendResponse({ + res, + status: 404, + error: "MPC company not found", + }); + } + + sendResponse({ + res, + data: { ...company, id: getIdFromMongoObject(company) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get MPC company: ${getErrorMessage(error)}`, + }); + } +}; + +export const getRandomERPCompanyController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const { type } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const companyType = (type as "npc" | "mpc") || "npc"; + + try { + const repo = CompanyRepository(worldId); + const company 
= await repo.getRandomCompany(companyType); + + if (!company) { + return sendResponse({ + res, + status: 404, + error: `No ${companyType} companies found`, + }); + } + + sendResponse({ + res, + data: { ...company, id: getIdFromMongoObject(company) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get random company: ${getErrorMessage(error)}`, + }); + } +}; + +export const bulkUpsertERPCompaniesController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const { companies } = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!companies || !Array.isArray(companies) || companies.length === 0) + return sendResponse({ + res, + status: 400, + error: "Companies array is required", + }); + + try { + const repo = CompanyRepository(worldId); + const count = await repo.bulkUpsertCompanies(companies); + sendResponse({ + res, + data: { message: `${count} companies processed successfully` }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to bulk upsert companies: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/erp/index.ts b/packages/controlmart/src/controller/erp/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..9b36d3bad81f060fc2300826716a668a44d2c251 --- /dev/null +++ b/packages/controlmart/src/controller/erp/index.ts @@ -0,0 +1,7 @@ +export * from "./company.controller"; +export * from "./product.controller"; +export * from "./order.controller"; +export * from "./invoice.controller"; +export * from "./shipment.controller"; +export * from "./payment.controller"; +export * from "./operations_dashboard.controller"; diff --git a/packages/controlmart/src/controller/erp/invoice.controller.ts b/packages/controlmart/src/controller/erp/invoice.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..a4200c69c20828520bdd953ef8d81b0ed8e702cf --- /dev/null +++ b/packages/controlmart/src/controller/erp/invoice.controller.ts @@ -0,0 +1,233 @@ +import type { Request, Response } from "express"; + +import { InvoiceRepository } from "../../repository"; +import { sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createERPInvoiceController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = InvoiceRepository(worldId); + const invoice = await repo.createInvoice(data); + sendResponse({ + res, + data: { ...invoice, id: getIdFromMongoObject(invoice) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create invoice: ${getErrorMessage(error)}`, + }); + } +}; + +export const getERPInvoiceByIdController = async (req: Request, res: Response) => { + const { worldId, invoiceId } = req.params; + + if (!worldId || !invoiceId) + return sendResponse({ + res, + status: 400, + error: "worldId and invoiceId are required", + }); + + try { + const repo = InvoiceRepository(worldId); + const invoice = await repo.getInvoiceById(invoiceId); + + if (!invoice) { + return sendResponse({ + res, + status: 404, + 
error: "Invoice not found", + }); + } + + sendResponse({ + res, + data: { ...invoice, id: getIdFromMongoObject(invoice) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get invoice: ${getErrorMessage(error)}`, + }); + } +}; + +export const getAllERPInvoicesController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + status: req.query.status as string | undefined, + customerId: req.query.customerId as string | undefined, + partnerId: req.query.partnerId as string | undefined, + cursor: req.query.cursor as string | undefined, + limit: req.query.limit ? parseInt(req.query.limit as string) : undefined, + dateStart: req.query.dateStart ? new Date(req.query.dateStart as string) : undefined, + dateEnd: req.query.dateEnd ? new Date(req.query.dateEnd as string) : undefined, + }; + + try { + const repo = InvoiceRepository(worldId); + const invoices = await repo.getAllInvoices(filters); + sendResponse({ + res, + data: invoices.items.map((invoice) => ({ + ...invoice, + id: getIdFromMongoObject(invoice), + })), + pagination: { + limit: invoices.limit, + previousCursor: req.query.cursor as string | null, + totalCount: invoices.totalCount, + hasMore: invoices.hasMore, + nextCursor: invoices.nextCursor || null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get invoices: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPInvoiceController = async (req: Request, res: Response) => { + const { worldId, invoiceId } = req.params; + const data = req.body; + + if (!worldId || !invoiceId) + return sendResponse({ + res, + status: 400, + error: "worldId and invoiceId are required", + }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = InvoiceRepository(worldId); + const invoice = await repo.updateInvoice(invoiceId, data); + + if (!invoice) { + return sendResponse({ + res, + status: 404, + error: "Invoice not found", + }); + } + + sendResponse({ + res, + data: { ...invoice, id: getIdFromMongoObject(invoice) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update invoice: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPInvoiceStatusController = async (req: Request, res: Response) => { + const { worldId, invoiceId } = req.params; + const { status } = req.body; + + if (!worldId || !invoiceId) + return sendResponse({ + res, + status: 400, + error: "worldId and invoiceId are required", + }); + + if (!status) + return sendResponse({ + res, + status: 400, + error: "Status is required", + }); + + try { + const repo = InvoiceRepository(worldId); + const invoice = await repo.updateInvoiceStatus(invoiceId, status); + + if (!invoice) { + return sendResponse({ + res, + status: 404, + error: "Invoice not found", + }); + } + + sendResponse({ + res, + data: { ...invoice, id: getIdFromMongoObject(invoice) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update invoice status: ${getErrorMessage(error)}`, + }); + } +}; + +export const deleteERPInvoiceController = async (req: Request, res: Response) => { + const { worldId, invoiceId } = req.params; + + if (!worldId || !invoiceId) + return sendResponse({ + res, + status: 400, + error: "worldId and invoiceId are required", + }); + + try { + 
const repo = InvoiceRepository(worldId); + const deleted = await repo.deleteInvoice(invoiceId); + + if (!deleted) { + return sendResponse({ + res, + status: 404, + error: "Invoice not found", + }); + } + + sendResponse({ + res, + data: { message: "Invoice deleted successfully" }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to delete invoice: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/erp/operations_dashboard.controller.ts b/packages/controlmart/src/controller/erp/operations_dashboard.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..76e930887693fcb4a95f6f4c45357649d86f142b --- /dev/null +++ b/packages/controlmart/src/controller/erp/operations_dashboard.controller.ts @@ -0,0 +1,422 @@ +import type { Request, Response } from "express"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; +import { PurchaseOrder } from "../../models/erp/orders.erp.model"; +import { Invoice } from "../../models/erp/invoice.erp.model"; +import { Product } from "../../models/erp/product.erp.model"; +import { Company } from "../../models/erp/company.erp.model"; + +/** + * ERP Operations Dashboard Controller + * + * Provides aggregated metrics for the ERP Command Center dashboard. + * This is a single endpoint that returns all KPIs needed for the UI. + */ + +export type TERPOperationsDashboardResponse = { + orders: { + purchaseOrders: { + total: number; + byStatus: Record; + recentOrders: number; + totalValue: number; + averageOrderValue: number; + }; + salesOrders: { + total: number; + byStatus: Record; + recentOrders: number; + totalValue: number; + averageOrderValue: number; + }; + }; + invoices: { + total: number; + byStatus: Record; + totalOutstanding: number; + overdueCount: number; + paidThisMonth: number; + }; + companies: { + total: number; + byType: Record; + activeCustomers: number; + activeSuppliers: number; + activeCompanies: number; + }; + products: { + total: number; + activeProducts: number; + discontinuedProducts: number; + }; +}; + +export const getERPOperationsDashboardController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + // Run all aggregations in parallel for performance + const [ordersStats, invoicesStats, companiesStats, productsStats] = await Promise.all([ + // Orders aggregation - now returns both PO and SO + (async () => ({ + purchaseOrders: await aggregatePurchaseOrdersStats(worldId), + salesOrders: await aggregateSalesOrdersStats(worldId), + }))(), + // Invoices aggregation + aggregateInvoicesStats(worldId), + // Companies aggregation + aggregateCompaniesStats(worldId), + // Products aggregation + aggregateProductsStats(worldId), + ]); + + const dashboard: TERPOperationsDashboardResponse = { + orders: ordersStats, + invoices: invoicesStats, + companies: companiesStats, + products: productsStats, + }; + + sendResponse({ res, data: dashboard }); + }, +); + +async function aggregatePurchaseOrdersStats(worldId: string) { + const thirtyDaysAgo = new Date(); + thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30); + + const result = await PurchaseOrder.aggregate([ + { + $match: { + "worldRef.worldId": worldId, + direction: "INBOUND" // Filter for purchase orders + } + }, + { + $group: { + _id: null, + total: { $sum: 1 }, + statusCounts: { $push: "$status" }, + totalValue: { $sum: { $ifNull: ["$totalAmount", 0] } 
}, + // Count orders in the last 30 days + recentOrders: { + $sum: { + $cond: [ + { $gte: ["$orderDate", thirtyDaysAgo] }, + 1, + 0 + ] + } + }, + // Sum for average calculation + ordersWithValue: { + $sum: { + $cond: [ + { $gt: ["$totalAmount", 0] }, + 1, + 0 + ] + } + }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 0, + byStatus: {}, + recentOrders: 0, + totalValue: 0, + averageOrderValue: 0, + }; + } + + const data = result[0]; + + // Process status counts + const byStatus: Record = {}; + data.statusCounts.forEach((status: string) => { + byStatus[status] = (byStatus[status] || 0) + 1; + }); + + const totalValue = data.totalValue || 0; + const ordersWithValue = data.ordersWithValue || data.total || 1; + + return { + total: data.total || 0, + byStatus, + recentOrders: data.recentOrders || 0, + totalValue: Math.round(totalValue * 100) / 100, + averageOrderValue: Math.round((totalValue / ordersWithValue) * 100) / 100, + }; +} + +async function aggregateSalesOrdersStats(worldId: string) { + const thirtyDaysAgo = new Date(); + thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30); + + const result = await PurchaseOrder.aggregate([ + { + $match: { + "worldRef.worldId": worldId, + direction: "OUTBOUND" // Filter for sales orders + } + }, + { + $group: { + _id: null, + total: { $sum: 1 }, + statusCounts: { $push: "$status" }, + totalValue: { $sum: { $ifNull: ["$totalAmount", 0] } }, + // Count orders in the last 30 days + recentOrders: { + $sum: { + $cond: [ + { $gte: ["$orderDate", thirtyDaysAgo] }, + 1, + 0 + ] + } + }, + // Sum for average calculation + ordersWithValue: { + $sum: { + $cond: [ + { $gt: ["$totalAmount", 0] }, + 1, + 0 + ] + } + }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 0, + byStatus: {}, + recentOrders: 0, + totalValue: 0, + averageOrderValue: 0, + }; + } + + const data = result[0]; + + // Process status counts + const byStatus: Record = {}; + data.statusCounts.forEach((status: string) => { + byStatus[status] = (byStatus[status] || 0) + 1; + }); + + const totalValue = data.totalValue || 0; + const ordersWithValue = data.ordersWithValue || data.total || 1; + + return { + total: data.total || 0, + byStatus, + recentOrders: data.recentOrders || 0, + totalValue: Math.round(totalValue * 100) / 100, + averageOrderValue: Math.round((totalValue / ordersWithValue) * 100) / 100, + }; +} + +async function aggregateInvoicesStats(worldId: string) { + const today = new Date(); + const startOfMonth = new Date(today.getFullYear(), today.getMonth(), 1); + + const result = await Invoice.aggregate([ + { $match: { "worldRef.worldId": worldId } }, + { + $group: { + _id: null, + total: { $sum: 1 }, + statusCounts: { $push: "$status" }, + // Total outstanding balance (invoices not fully paid) + totalOutstanding: { + $sum: { + $cond: [ + { $in: ["$status", ["SENT", "VALIDATED", "PARTIALLY_PAID"]] }, + { $ifNull: ["$balanceDue", 0] }, + 0 + ] + } + }, + // Count overdue invoices (due date passed and not paid) + overdueCount: { + $sum: { + $cond: [ + { + $and: [ + { $lt: ["$dueDate", today] }, + { $in: ["$status", ["SENT", "VALIDATED", "PARTIALLY_PAID"]] }, + ] + }, + 1, + 0 + ] + } + }, + // Count paid invoices this month + paidThisMonth: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$status", "PAID"] }, + { $gte: ["$updatedAt", startOfMonth] }, + ] + }, + 1, + 0 + ] + } + }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 0, + byStatus: {}, + totalOutstanding: 0, + overdueCount: 0, + paidThisMonth: 0, + }; + } + + const data = 
result[0]; + + // Process status counts + const byStatus: Record = {}; + data.statusCounts.forEach((status: string) => { + byStatus[status] = (byStatus[status] || 0) + 1; + }); + + return { + total: data.total || 0, + byStatus, + totalOutstanding: Math.round((data.totalOutstanding || 0) * 100) / 100, + overdueCount: data.overdueCount || 0, + paidThisMonth: data.paidThisMonth || 0, + }; +} + +async function aggregateCompaniesStats(worldId: string) { + const result = await Company.aggregate([ + { $match: { "worldRef.worldId": worldId } }, + { + $group: { + _id: null, + total: { $sum: 1 }, + typeCounts: { $push: "$companyType" }, + // Count active customers + activeCustomers: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$companyType", "CUSTOMER"] }, + { $eq: ["$status", "ACTIVE"] }, + ] + }, + 1, + 0 + ] + } + }, + // Count active suppliers + activeSuppliers: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$companyType", "SUPPLIER"] }, + { $eq: ["$status", "ACTIVE"] }, + ] + }, + 1, + 0 + ] + } + }, + // Count all active companies (regardless of type) + activeCompanies: { + $sum: { + $cond: [ + { $eq: ["$status", "ACTIVE"] }, + 1, + 0 + ] + } + }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 0, + byType: {}, + activeCustomers: 0, + activeSuppliers: 0, + activeCompanies: 0, + }; + } + + const data = result[0]; + + // Process type counts + const byType: Record = {}; + data.typeCounts.forEach((type: string) => { + byType[type] = (byType[type] || 0) + 1; + }); + + return { + total: data.total || 0, + byType, + activeCustomers: data.activeCustomers || 0, + activeSuppliers: data.activeSuppliers || 0, + activeCompanies: data.activeCompanies || 0, + }; +} + +async function aggregateProductsStats(worldId: string) { + const result = await Product.aggregate([ + { $match: { "worldRef.worldId": worldId } }, + { + $group: { + _id: null, + total: { $sum: 1 }, + activeProducts: { + $sum: { $cond: [{ $eq: ["$status", "ACTIVE"] }, 1, 0] } + }, + discontinuedProducts: { + $sum: { $cond: [{ $eq: ["$status", "DISCONTINUED"] }, 1, 0] } + }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 0, + activeProducts: 0, + discontinuedProducts: 0, + }; + } + + const data = result[0]; + + return { + total: data.total || 0, + activeProducts: data.activeProducts || 0, + discontinuedProducts: data.discontinuedProducts || 0, + }; +} diff --git a/packages/controlmart/src/controller/erp/order.controller.ts b/packages/controlmart/src/controller/erp/order.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..f6f6bf3652f36a28d69f28c1f0a9f4e80937e5d1 --- /dev/null +++ b/packages/controlmart/src/controller/erp/order.controller.ts @@ -0,0 +1,251 @@ +import type { Request, Response } from "express"; + +import { OrderRepository } from "../../repository"; +import { sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createERPOrderController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = OrderRepository(worldId); + const order = await repo.createOrder(data); + sendResponse({ + res, + data: { ...order, id: getIdFromMongoObject(order) }, 
+ status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create order: ${getErrorMessage(error)}`, + }); + } +}; + +export const getERPOrderByIdController = async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + + if (!worldId || !orderId) + return sendResponse({ + res, + status: 400, + error: "worldId and orderId are required", + }); + + try { + const repo = OrderRepository(worldId); + const order = await repo.getOrderById(orderId); + + if (!order) { + return sendResponse({ + res, + status: 404, + error: "Order not found", + }); + } + + sendResponse({ + res, + data: { ...order, id: getIdFromMongoObject(order) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get order: ${getErrorMessage(error)}`, + }); + } +}; + +export const getAllERPOrdersController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const { + status, + customerId, + partnerId, + orderId, + direction, + orderdateStart, + orderdateEnd, + requesteddateStart, + requesteddateEnd, + cursor, + limit, + } = req.query; + + const filters = { + ...(status && { status: status as string }), + ...(customerId && { customerId: customerId as string }), + ...(partnerId && { partnerId: partnerId as string }), + ...(orderId && { orderId: orderId as string }), + ...(direction && { direction: direction as string }), + ...(orderdateStart && { orderDateStart: new Date(orderdateStart as string) }), + ...(orderdateEnd && { orderDateEnd: new Date(orderdateEnd as string) }), + ...(requesteddateStart && { requestedDateStart: new Date(requesteddateStart as string) }), + ...(requesteddateEnd && { requestedDateEnd: new Date(requesteddateEnd as string) }), + ...(cursor && { cursor: cursor as string }), + ...(limit && { limit: parseInt(limit as string) }), + }; + + try { + const repo = OrderRepository(worldId); + const orders = await repo.getAllOrders(filters); + sendResponse({ + res, + data: orders.items.map((order) => ({ + ...order, + id: getIdFromMongoObject(order), + })), + pagination: { + limit: orders.limit, + previousCursor: req.query.cursor as string | null, + totalCount: orders.totalCount, + hasMore: orders.hasMore, + nextCursor: orders.nextCursor || null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get orders: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPOrderController = async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + const data = req.body; + + if (!worldId || !orderId) + return sendResponse({ + res, + status: 400, + error: "worldId and orderId are required", + }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = OrderRepository(worldId); + const order = await repo.updateOrder(orderId, data); + + if (!order) { + return sendResponse({ + res, + status: 404, + error: "Order not found", + }); + } + + sendResponse({ + res, + data: { ...order, id: getIdFromMongoObject(order) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update order: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPOrderStatusController = async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + const { status } = req.body; + + if (!worldId || 
!orderId) + return sendResponse({ + res, + status: 400, + error: "worldId and orderId are required", + }); + + if (!status) + return sendResponse({ + res, + status: 400, + error: "Status is required", + }); + + try { + const repo = OrderRepository(worldId); + const order = await repo.updateOrderStatus(orderId, status); + + if (!order) { + return sendResponse({ + res, + status: 404, + error: "Order not found", + }); + } + + sendResponse({ + res, + data: { ...order, id: getIdFromMongoObject(order) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update order status: ${getErrorMessage(error)}`, + }); + } +}; + +export const deleteERPOrderController = async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + + if (!worldId || !orderId) + return sendResponse({ + res, + status: 400, + error: "worldId and orderId are required", + }); + + try { + const repo = OrderRepository(worldId); + const deleted = await repo.deleteOrder(orderId); + + if (!deleted) { + return sendResponse({ + res, + status: 404, + error: "Order not found", + }); + } + + sendResponse({ + res, + data: { message: "Order deleted successfully" }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to delete order: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/erp/payment.controller.ts b/packages/controlmart/src/controller/erp/payment.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..aed925c547f9f8b97bff285d61dbc66bd9a13eea --- /dev/null +++ b/packages/controlmart/src/controller/erp/payment.controller.ts @@ -0,0 +1,277 @@ +import type { Request, Response } from "express"; + +import { PaymentRepository } from "../../repository"; +import { sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; +import type { TPaymentModel } from "../../models/erp/payment.erp.model"; + +export const createERPPaymentController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = PaymentRepository(worldId); + const payment = await repo.createPayment(data); + sendResponse({ + res, + data: { ...payment, id: getIdFromMongoObject(payment) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create payment: ${getErrorMessage(error)}`, + }); + } +}; + +export const getERPPaymentByIdController = async (req: Request, res: Response) => { + const { worldId, paymentId } = req.params; + + if (!worldId || !paymentId) + return sendResponse({ + res, + status: 400, + error: "worldId and paymentId are required", + }); + + try { + const repo = PaymentRepository(worldId); + const payment = await repo.getPaymentById(paymentId); + + if (!payment) { + return sendResponse({ + res, + status: 404, + error: "Payment not found", + }); + } + + sendResponse({ + res, + data: { ...payment, id: getIdFromMongoObject(payment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get payment: ${getErrorMessage(error)}`, + }); + } +}; + +export const getAllERPPaymentsController = async (req: Request, res: Response) => { + const 
{ worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + status: req.query.status as string | undefined, + customerId: req.query.customerId as string | undefined, + partnerId: req.query.partnerId as string | undefined, + dateStart: req.query.dateStart ? new Date(req.query.dateStart as string) : undefined, + dateEnd: req.query.dateEnd ? new Date(req.query.dateEnd as string) : undefined, + cursor: req.query.cursor as string | undefined, + limit: req.query.limit ? parseInt(req.query.limit as string) : undefined, + }; + + try { + const repo = PaymentRepository(worldId); + const payments = await repo.getAllPayments(filters); + sendResponse({ + res, + data: payments.items.map((payment: TPaymentModel) => ({ + ...payment, + id: getIdFromMongoObject(payment), + })), + pagination: { + limit: payments.limit, + previousCursor: req.query.cursor as string | null, + totalCount: payments.totalCount, + hasMore: payments.hasMore, + nextCursor: payments.nextCursor || null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get payments: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPPaymentController = async (req: Request, res: Response) => { + const { worldId, paymentId } = req.params; + const data = req.body; + + if (!worldId || !paymentId) + return sendResponse({ + res, + status: 400, + error: "worldId and paymentId are required", + }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = PaymentRepository(worldId); + const payment = await repo.updatePayment(paymentId, data); + + if (!payment) { + return sendResponse({ + res, + status: 404, + error: "Payment not found", + }); + } + + sendResponse({ + res, + data: { ...payment, id: getIdFromMongoObject(payment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update payment: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPPaymentStatusController = async (req: Request, res: Response) => { + const { worldId, paymentId } = req.params; + const { status } = req.body; + + if (!worldId || !paymentId) + return sendResponse({ + res, + status: 400, + error: "worldId and paymentId are required", + }); + + if (!status) + return sendResponse({ + res, + status: 400, + error: "Status is required", + }); + + try { + const repo = PaymentRepository(worldId); + const payment = await repo.updatePaymentStatus(paymentId, status); + + if (!payment) { + return sendResponse({ + res, + status: 404, + error: "Payment not found", + }); + } + + sendResponse({ + res, + data: { ...payment, id: getIdFromMongoObject(payment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update payment status: ${getErrorMessage(error)}`, + }); + } +}; + +export const applyERPPaymentAllocationsController = async (req: Request, res: Response) => { + const { worldId, paymentId } = req.params; + const { allocations } = req.body; + + if (!worldId || !paymentId) + return sendResponse({ + res, + status: 400, + error: "worldId and paymentId are required", + }); + + if (!allocations || !Array.isArray(allocations)) + return sendResponse({ + res, + status: 400, + error: "Allocations array is required", + }); + + try { + const repo = PaymentRepository(worldId); + const payment = await repo.applyAllocations(paymentId, allocations); + + if (!payment) { + return 
sendResponse({ + res, + status: 404, + error: "Payment not found", + }); + } + + sendResponse({ + res, + data: { ...payment, id: getIdFromMongoObject(payment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to apply payment allocations: ${getErrorMessage(error)}`, + }); + } +}; + +export const deleteERPPaymentController = async (req: Request, res: Response) => { + const { worldId, paymentId } = req.params; + + if (!worldId || !paymentId) + return sendResponse({ + res, + status: 400, + error: "worldId and paymentId are required", + }); + + try { + const repo = PaymentRepository(worldId); + const deleted = await repo.deletePayment(paymentId); + + if (!deleted) { + return sendResponse({ + res, + status: 404, + error: "Payment not found", + }); + } + + sendResponse({ + res, + data: { message: "Payment deleted successfully" }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to delete payment: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/erp/product.controller.ts b/packages/controlmart/src/controller/erp/product.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..78a47bff020faa8fcf986b8717b748dd93ccefdf --- /dev/null +++ b/packages/controlmart/src/controller/erp/product.controller.ts @@ -0,0 +1,242 @@ +import type { Request, Response } from "express"; + +import { ProductRepository } from "../../repository"; +import { sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createERPProductController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = ProductRepository(worldId); + const product = await repo.createProduct(data); + sendResponse({ + res, + data: { ...product, id: getIdFromMongoObject(product) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create product: ${getErrorMessage(error)}`, + }); + } +}; + +export const getERPProductByIdController = async (req: Request, res: Response) => { + const { worldId, productId } = req.params; + + if (!worldId || !productId) + return sendResponse({ + res, + status: 400, + error: "worldId and productId are required", + }); + + try { + const repo = ProductRepository(worldId); + const product = await repo.getProductById(productId); + + if (!product) { + return sendResponse({ + res, + status: 404, + error: "Product not found", + }); + } + + sendResponse({ + res, + data: { ...product, id: getIdFromMongoObject(product) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get product: ${getErrorMessage(error)}`, + }); + } +}; + +export const getAllERPProductsController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + ...(req.query.status ? { status: req.query.status as string } : {}), + ...(req.query.inventoryTracking ? { inventoryTracking: req.query.inventoryTracking === "true" } : {}), + ...(req.query.minPrice ? 
{ minPrice: Number(req.query.minPrice) } : {}), + ...(req.query.maxPrice ? { maxPrice: Number(req.query.maxPrice) } : {}), + ...(req.query.searchText ? { searchText: req.query.searchText as string } : {}), + ...(req.query.cursor ? { cursor: req.query.cursor as string } : {}), + ...(req.query.limit ? { limit: parseInt(req.query.limit as string) } : {}), + }; + + try { + const repo = ProductRepository(worldId); + const products = await repo.getAllProducts(filters); + sendResponse({ + res, + data: products.items.map((product) => ({ + ...product, + id: getIdFromMongoObject(product), + })), + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get products: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPProductController = async (req: Request, res: Response) => { + const { worldId, productId } = req.params; + const data = req.body; + + if (!worldId || !productId) + return sendResponse({ + res, + status: 400, + error: "worldId and productId are required", + }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = ProductRepository(worldId); + const product = await repo.updateProduct(productId, data); + + if (!product) { + return sendResponse({ + res, + status: 404, + error: "Product not found", + }); + } + + sendResponse({ + res, + data: { ...product, id: getIdFromMongoObject(product) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update product: ${getErrorMessage(error)}`, + }); + } +}; + +export const deleteERPProductController = async (req: Request, res: Response) => { + const { worldId, productId } = req.params; + + if (!worldId || !productId) + return sendResponse({ + res, + status: 400, + error: "worldId and productId are required", + }); + + try { + const repo = ProductRepository(worldId); + const deleted = await repo.deleteProduct(productId); + + if (!deleted) { + return sendResponse({ + res, + status: 404, + error: "Product not found", + }); + } + + sendResponse({ + res, + data: { message: "Product deleted successfully" }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to delete product: ${getErrorMessage(error)}`, + }); + } +}; + +export const bulkUpsertERPProductsController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const { products } = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!products || !Array.isArray(products) || products.length === 0) + return sendResponse({ + res, + status: 400, + error: "Products array is required", + }); + + try { + const repo = ProductRepository(worldId); + const count = await repo.bulkUpsertProducts(products); + sendResponse({ + res, + data: { message: `${count} products processed successfully` }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to bulk upsert products: ${getErrorMessage(error)}`, + }); + } +}; + +export const getRandomERPProductController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + try { + const repo = ProductRepository(worldId); + const product = await repo.getRandomProduct(); + + if (!product) { + return sendResponse({ + res, + status: 404, + error: "No products found", + }); + } + + sendResponse({ + res, + data: { ...product, id: 
getIdFromMongoObject(product) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get random product: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/erp/shipment.controller.ts b/packages/controlmart/src/controller/erp/shipment.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..30815144afe095b4f95abfbd0f92a3c5a729eb8e --- /dev/null +++ b/packages/controlmart/src/controller/erp/shipment.controller.ts @@ -0,0 +1,438 @@ +import type { Request, Response } from "express"; + +import { ERPShipmentRepository } from "../../repository"; +import { sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createERPShipmentController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.createShipment(data); + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create shipment: ${getErrorMessage(error)}`, + }); + } +}; + +export const getERPShipmentByIdController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.getShipmentById(shipmentId); + + if (!shipment) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get shipment: ${getErrorMessage(error)}`, + }); + } +}; + +export const getAllERPShipmentsController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const { status, productId, carrierName, dateStart, dateEnd, cursor, limit, shipmentId } = + req.query; + + const filters = { + ...(status && { status: status as string }), + ...(productId && { productId: productId as string }), + ...(carrierName && { carrierName: carrierName as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(cursor && { cursor: cursor as string }), + ...(limit && { limit: parseInt(limit as string) }), + ...(shipmentId && { shipmentId: shipmentId as string }), + }; + + try { + const repo = ERPShipmentRepository(worldId); + const shipments = await repo.getAllShipments(filters); + sendResponse({ + res, + data: shipments.items.map((shipment) => ({ + ...shipment, + id: getIdFromMongoObject(shipment), + })), + pagination: { + limit: shipments.limit, + previousCursor: req.query.cursor as string | null, + totalCount: shipments.totalCount, + hasMore: shipments.hasMore, + nextCursor: shipments.nextCursor || null, + }, + }); + } catch (error) { + 
sendResponse({ + res, + status: 500, + error: `Failed to get shipments: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPShipmentController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const data = req.body; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.updateShipment(shipmentId, data); + + if (!shipment) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update shipment: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPShipmentStatusController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const { status } = req.body; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!status) + return sendResponse({ + res, + status: 400, + error: "Status is required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.updateShipmentStatus(shipmentId, status); + + if (!shipment) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update shipment status: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPShipmentTrackingController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const { carrier, trackingNumber } = req.body; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!carrier) + return sendResponse({ + res, + status: 400, + error: "Carrier information is required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.updateTrackingDetails(shipmentId, carrier, trackingNumber); + + if (!shipment) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update tracking details: ${getErrorMessage(error)}`, + }); + } +}; + +export const addERPShipmentEventController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const event = req.body; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!event || !event.ts || !event.location || !event.status) + return sendResponse({ + res, + status: 400, + error: "Event with ts, location, and status are required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.addShipmentEvent(shipmentId, event); + + if (!shipment) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { 
...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to add shipment event: ${getErrorMessage(error)}`, + }); + } +}; + +export const addERPShipmentDocumentController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const { documentUrl } = req.body; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!documentUrl) + return sendResponse({ + res, + status: 400, + error: "documentUrl is required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.addShipmentDocument(shipmentId, documentUrl); + + if (!shipment) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to add shipment document: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateERPShipmentLinesController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const { lines } = req.body; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!lines || !Array.isArray(lines)) + return sendResponse({ + res, + status: 400, + error: "Lines array is required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const shipment = await repo.updateShipmentLines(shipmentId, lines); + + if (!shipment) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update shipment lines: ${getErrorMessage(error)}`, + }); + } +}; + +export const bulkUpsertERPShipmentsController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const { shipments } = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!shipments || !Array.isArray(shipments) || shipments.length === 0) + return sendResponse({ + res, + status: 400, + error: "Shipments array is required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const count = await repo.bulkUpsertShipments(shipments); + sendResponse({ + res, + data: { message: `${count} shipments processed successfully` }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to bulk upsert shipments: ${getErrorMessage(error)}`, + }); + } +}; + +export const deleteERPShipmentController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + try { + const repo = ERPShipmentRepository(worldId); + const deleted = await repo.deleteShipment(shipmentId); + + if (!deleted) { + return sendResponse({ + res, + status: 404, + error: "Shipment not found", + }); + } + + sendResponse({ + res, + data: { message: "Shipment deleted successfully" }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to delete shipment: ${getErrorMessage(error)}`, + }); + } +}; diff --git 
a/packages/controlmart/src/controller/finance/finance.controller.ts b/packages/controlmart/src/controller/finance/finance.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..9e50fcbd7e8a0c36383fe49910a2324b6af91333 --- /dev/null +++ b/packages/controlmart/src/controller/finance/finance.controller.ts @@ -0,0 +1,266 @@ +import type { Request, Response } from "express"; + +import { FinanceRepository } from "../../repository/finance/finance.repository"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createFinanceTransactionController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + const repo = FinanceRepository(worldId); + const transaction = await repo.createTransaction(data); + sendResponse({ res, data: transaction, status: 201 }); + }, +); + +export const getFinanceTransactionByIdController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are required", + }); + + const repo = FinanceRepository(worldId); + const transaction = await repo.getTransactionById(transactionId); + if (!transaction) + return sendResponse({ + res, + status: 404, + error: `Finance Transaction ${transactionId} not found`, + }); + + sendResponse({ res, data: transaction }); + }, +); + +export const getAllFinanceTransactionsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + partnerId: req.query.partnerId as string, + type: req.query.type as "payment_in" | "payment_out", + sourceType: req.query.sourceType as "invoice" | "bill" | "manual" | "interest" | "payment", + sourceId: req.query.sourceId as string, + amountMin: req.query.amountMin ? parseFloat(req.query.amountMin as string) : undefined, + amountMax: req.query.amountMax ? parseFloat(req.query.amountMax as string) : undefined, + dateStart: req.query.dateStart ? new Date(req.query.dateStart as string) : undefined, + dateEnd: req.query.dateEnd ? new Date(req.query.dateEnd as string) : undefined, + search: req.query.search as string, + limit: req.query.limit ? parseInt(req.query.limit as string, 10) : undefined, + cursor: req.query.cursor ? (req.query.cursor as string) : null, + }; + + const repo = FinanceRepository(worldId); + const result = await repo.getTransactions(filters); + + sendResponse({ + res, + data: result.transactions, + pagination: { + totalCount: result.transactions.length, + limit: filters.limit || result.transactions.length, + hasMore: !!result.nextCursor, + nextCursor: result.nextCursor || null, + previousCursor: req.query.cursor ? 
(req.query.cursor as string) : null, + }, + }); + }, +); + +export const getTransactionsBySourceController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, sourceType, sourceId } = req.params; + const { limit } = req.query; + + if (!worldId || !sourceType || !sourceId) + return sendResponse({ + res, + status: 400, + error: "worldId, sourceType, and sourceId are required", + }); + + const repo = FinanceRepository(worldId); + const result = await repo.getTransactionsBySource(sourceType, sourceId, { + limit: limit ? parseInt(limit as string, 10) : undefined, + }); + + sendResponse({ res, data: result.transactions }); + }, +); + +export const updateFinanceTransactionController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + const updateData = req.body; + + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are required", + }); + + const repo = FinanceRepository(worldId); + const updated = await repo.updateTransaction(transactionId, updateData); + + if (!updated) + return sendResponse({ + res, + status: 404, + error: `Finance Transaction ${transactionId} not found`, + }); + + sendResponse({ res, data: updated }); + }, +); + +export const deleteFinanceTransactionController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + + if (!worldId || !transactionId) + return sendResponse({ + res, + status: 400, + error: "worldId and transactionId are required", + }); + + const repo = FinanceRepository(worldId); + const success = await repo.deleteTransaction(transactionId); + + if (!success) + return sendResponse({ + res, + status: 404, + error: `Finance Transaction ${transactionId} not found`, + }); + + sendResponse({ res, data: { deleted: true } }); + }, +); + +export const bulkInsertTransactionsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { transactions } = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!Array.isArray(transactions) || transactions.length === 0) + return sendResponse({ + res, + status: 400, + error: "transactions array is required and cannot be empty", + }); + + const repo = FinanceRepository(worldId); + const insertedCount = await repo.bulkInsertTransactions(transactions); + + sendResponse({ + res, + data: { insertedCount }, + status: 201, + }); + }, +); + +export const getFinanceStatsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { partnerId, dateStart, dateEnd } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + partnerId: partnerId as string, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? 
new Date(dateEnd as string) : undefined, + }; + + const repo = FinanceRepository(worldId); + + const [typeStats, partnerStats, summary] = await Promise.all([ + repo.aggregateByType(filters), + repo.aggregateByPartner(filters), + repo.getFinancialSummary(filters), + ]); + + sendResponse({ + res, + data: { + byType: typeStats, + byPartner: partnerStats, + summary, + }, + }); +}); + +export const getFinancialSummaryController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { partnerId, dateStart, dateEnd } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + partnerId: partnerId as string, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + }; + + const repo = FinanceRepository(worldId); + const summary = await repo.getFinancialSummary(filters); + + sendResponse({ res, data: summary }); +}); + +export const getTransactionsByTypeController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { partnerId, dateStart, dateEnd } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + partnerId: partnerId as string, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + }; + + const repo = FinanceRepository(worldId); + const stats = await repo.aggregateByType(filters); + + sendResponse({ res, data: stats }); +}); + +export const getTransactionsByPartnerController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { type, dateStart, dateEnd, limit } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + type: type as "payment_in" | "payment_out", + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + limit: limit ? 
parseInt(limit as string, 10) : undefined, + }; + + const repo = FinanceRepository(worldId); + const stats = await repo.aggregateByPartner(filters); + + sendResponse({ res, data: stats }); + }, +); diff --git a/packages/controlmart/src/controller/finance/ledger.controller.ts b/packages/controlmart/src/controller/finance/ledger.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..fc86eb96300a54fc76d2eedd8f5eea6ac9104725 --- /dev/null +++ b/packages/controlmart/src/controller/finance/ledger.controller.ts @@ -0,0 +1,110 @@ +import type { Request, Response } from "express"; + +import { CompanyLedgerRepository } from "../../repository/finance/ledger.repository"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const getLedgerByWorldIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + + const repo = CompanyLedgerRepository(worldId); + const ledger = await repo.get(); + + if (!ledger) + return sendResponse({ + res, + status: 404, + error: `Ledger for world ${worldId} not found`, + }); + + sendResponse({ res, data: ledger }); +}); + +export const updateLedgerController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const updateData = req.body; + + if (!worldId) + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + + const repo = CompanyLedgerRepository(worldId); + const updatedLedger = await repo.update(updateData); + + if (!updatedLedger) + return sendResponse({ + res, + status: 404, + error: `Ledger for world ${worldId} not found`, + }); + + sendResponse({ res, data: updatedLedger }); +}); + +export const upsertLedgerController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = CompanyLedgerRepository(worldId); + const ledger = await repo.ensure(data); + + sendResponse({ res, data: ledger, status: 201 }); +}); + +export const incrementLedgerBalancesController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { cashDelta, receivablesDelta, payablesDelta } = req.body; + + if (!worldId) + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + + if (cashDelta === undefined && receivablesDelta === undefined && payablesDelta === undefined) + return sendResponse({ + res, + status: 400, + error: "At least one delta (cashDelta, receivablesDelta, payablesDelta) is required", + }); + + const repo = CompanyLedgerRepository(worldId); + const updatedLedger = await repo.increment({ + cashDelta, + receivablesDelta, + payablesDelta, + }); + + if (!updatedLedger) + return sendResponse({ + res, + status: 404, + error: `Ledger for world ${worldId} not found`, + }); + + sendResponse({ res, data: updatedLedger }); + }, +); + +export const getLedgersSummaryController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = CompanyLedgerRepository(worldId); + const summary = await repo.summary(); + + sendResponse({ res, data: summary }); +}); diff --git a/packages/controlmart/src/controller/index.ts b/packages/controlmart/src/controller/index.ts new 
file mode 100644 index 0000000000000000000000000000000000000000..8b9f52e9703a8ae35c10e948d631ea93e736168e --- /dev/null +++ b/packages/controlmart/src/controller/index.ts @@ -0,0 +1,4 @@ +export * from "./logs.controller"; +export * from "./world.controller"; +export * from "./wms"; +export * from "./tms"; diff --git a/packages/controlmart/src/controller/logs.controller.ts b/packages/controlmart/src/controller/logs.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..3dabf0f6df1226bc87a230aad54d3606e8b825f0 --- /dev/null +++ b/packages/controlmart/src/controller/logs.controller.ts @@ -0,0 +1,49 @@ +import type { Request, Response } from "express"; + +import { WorldLogRepository } from "../repository"; +import { sendResponse, asyncHandler } from "../utils/http.util"; + +export const getLogsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { + serviceType, + level, + searchText, + dateStart, + dateEnd, + limit = 100, + cursor = null, + } = req.query; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + const repo = WorldLogRepository(worldId); + const logs = await repo.getLogs({ + serviceType: serviceType as string | undefined, + level: level as string | undefined, + searchText: searchText as string | undefined, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + limit: + typeof limit === "string" + ? parseInt(limit, 10) + : typeof limit === "number" + ? limit + : undefined, + cursor: cursor as string | null, + }); + sendResponse({ + res, + data: logs, + status: 200, + pagination: { + totalCount: logs.totalCount, + limit: logs.limit, + hasMore: logs.hasMore, + nextCursor: logs.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); +}); diff --git a/packages/controlmart/src/controller/od.controller.ts b/packages/controlmart/src/controller/od.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..05c2ded7accdb8373ba5a4f09131f01607a9e413 --- /dev/null +++ b/packages/controlmart/src/controller/od.controller.ts @@ -0,0 +1,726 @@ +import type { Request, Response } from "express"; +import Ajv from "ajv"; + +import { sendResponse, asyncHandler } from "../utils/http.util"; +import { getErrorMessage } from "../utils/error.util"; +import { ODRepository } from "../repository/od.repository"; +import { WorldRepository } from "../repository/world.repository"; +import { executeOperationalDescriptor } from "../operational-descriptor/executor.od"; +import { auditLogger } from "../services/audit-logger.service"; +import { OD_SCHEMA } from "../operational-descriptor/schema.od"; +import { + scheduleOD, + scheduleRecurringOD, + cancelScheduledOD, + listScheduledJobsForOD, + rescheduleOD, + initializeODScheduling, + getODWithSchedules, + deleteODSafely, + bulkScheduleODs, + validateODSchema, + pauseODSchedule, + resumeODSchedule, + pauseAllODSchedulesForWorld, + resumeAllODSchedulesForWorld, + getScheduleStatusForWorld, +} from "../operational-descriptor/schedule.od"; +import { createAppLogger } from "../utils/logger.util"; +import type { OperationalDescriptor } from "../types/od.type"; +import type { TOperationalDescriptorInput, TOperationalDescriptorModel } from "../models/od.model"; + +// Initialize OD scheduling on module load +initializeODScheduling(); + +const logger = createAppLogger({ service: "od-controller" }); +const validator = new 
Ajv({ allErrors: true }); + +// Add custom keyword for function validation +validator.addKeyword("function", { + keyword: "function", + validate: (schema: any, data: any) => { + return typeof data === "function"; + }, + errors: true, +}); + +export const createODController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { + schedule, + ...odData + }: TOperationalDescriptorInput & { + schedule?: { type: "once" | "recurring"; time?: string; interval?: string }; + } = req.body; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + if (!odData || Object.keys(odData).length === 0) { + return sendResponse({ + res, + status: 400, + error: "OD data is required", + }); + } + + // Validate OD data structure + if (odData.data) { + const validate = validator.compile(OD_SCHEMA); + const isValid = validate(odData.data); + + if (!isValid) { + const validationErrors = validate.errors + ?.map((e) => `${e.instancePath} ${e.message}`) + .join(", "); + + return sendResponse({ + res, + status: 400, + error: `Invalid OD schema: ${validationErrors}`, + }); + } + } + + // Get world reference + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + return sendResponse({ + res, + status: 404, + error: `World ${worldId} not found`, + }); + } + + const { name, description, type } = odData.data || {}; + + const repoData = { + ...odData, + name: odData.name || name, + description: odData.description || description, + odType: odData.odType || type || "standard", + persona: odData.persona || "", + }; + + const worldRef = { worldId }; + const od = await ODRepository.createOD(worldRef, repoData); + + // Handle scheduling if provided + if (schedule) { + try { + if (schedule.type === "once" && schedule.time) { + await scheduleOD(schedule.time, od, world); + logger.info(`Scheduled OD ${od.odId} for one-time execution at ${schedule.time}`); + } else if (schedule.type === "recurring" && schedule.interval) { + await scheduleRecurringOD(schedule.interval, od, world); + logger.info( + `Scheduled OD ${od.odId} for recurring execution with interval ${schedule.interval}`, + ); + } + } catch (scheduleError) { + logger.error({ error: scheduleError }, `Failed to schedule OD ${od.odId}`); + // Return 207 Multi-Status to indicate partial success (created but not scheduled) + return sendResponse({ + res, + status: 207, + data: { + od, + scheduleError: getErrorMessage(scheduleError), + message: "OD created but scheduling failed", + }, + }); + } + } + + sendResponse({ res, data: od, status: 201 }); +}); + +export const getODsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + const filters = { + odType: req.query.odType as TOperationalDescriptorModel["odType"], + name: req.query.name as string, + cursor: req.query.cursor as string, + limit: req.query.limit ? 
parseInt(req.query.limit as string, 10) : undefined, + }; + + const result = await ODRepository.getODs(worldId, filters); + + sendResponse({ + res, + data: result.items, + pagination: { + totalCount: result.totalCount, + limit: result.limit, + hasMore: result.hasMore, + nextCursor: result.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); +}); + +export const getODByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId } = req.params; + + if (!worldId || !odId) { + return sendResponse({ + res, + status: 400, + error: "worldId and odId are required", + }); + } + + const od = await ODRepository.getODById(odId, worldId); + + if (!od) { + return sendResponse({ + res, + status: 404, + error: `OD ${odId} not found`, + }); + } + + sendResponse({ res, data: od }); +}); + +export const updateODController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId } = req.params; + const updateData = req.body; + + if (!worldId || !odId) { + return sendResponse({ + res, + status: 400, + error: "worldId and odId are required", + }); + } + + // Validate OD data structure if provided + if (updateData.data) { + const validate = validator.compile(OD_SCHEMA); + const isValid = validate(updateData.data); + + if (!isValid) { + const validationErrors = validate.errors + ?.map((e) => `${e.instancePath} ${e.message}`) + .join(", "); + + return sendResponse({ + res, + status: 400, + error: `Invalid OD schema: ${validationErrors}`, + }); + } + } + + const updatedOD = await ODRepository.updateODById(odId, worldId, updateData); + + sendResponse({ + res, + data: updatedOD, + status: 200, + }); +}); + +export const deleteODController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId } = req.params; + + if (!worldId || !odId) { + return sendResponse({ + res, + status: 400, + error: "worldId and odId are required", + }); + } + + const result = await deleteODSafely(odId, worldId); + + sendResponse({ + res, + status: 200, + data: { + message: `OD ${odId} deleted successfully`, + cancelledSchedules: result.cancelledSchedules, + }, + }); +}); + +export const executeODController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId } = req.params; + const { context: userContext = {} } = req.body; + + if (!worldId || !odId) { + return sendResponse({ + res, + status: 400, + error: "worldId and odId are required", + }); + } + + // Get the OD + const od = await ODRepository.getODById(odId, worldId); + + if (!od) { + return sendResponse({ + res, + status: 404, + error: `OD ${odId} not found`, + }); + } + + // Get world + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + return sendResponse({ + res, + status: 404, + error: `World ${worldId} not found`, + }); + } + + // Execute the OD + const executionOptions = { + world, + tools: {}, // Add your tools here + logger: createAppLogger({ service: "od-execution" }), + auditLogger, + }; + + const executionResult = await executeOperationalDescriptor( + od.data as OperationalDescriptor, + executionOptions, + ); + + sendResponse({ + res, + data: executionResult, + status: 200, + }); +}); + +export const scheduleODController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId } = req.params; + const { type, time, interval, metadata = {} } = req.body; + + if (!worldId || !odId) { + return sendResponse({ + res, + status: 400, + error: "worldId and odId are required", + }); + } + + if (!type 
|| !["once", "recurring"].includes(type)) { + return sendResponse({ + res, + status: 400, + error: "type must be 'once' or 'recurring'", + }); + } + + if (type === "once" && !time) { + return sendResponse({ + res, + status: 400, + error: "time is required for one-time scheduling", + }); + } + + if (type === "recurring" && !interval) { + return sendResponse({ + res, + status: 400, + error: "interval is required for recurring scheduling", + }); + } + + // Get the OD + const od = await ODRepository.getODById(odId, worldId); + + if (!od) { + return sendResponse({ + res, + status: 404, + error: `OD ${odId} not found`, + }); + } + + // Get world + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + return sendResponse({ + res, + status: 404, + error: `World ${worldId} not found`, + }); + } + + let job; + if (type === "once") { + job = await scheduleOD(time, od, world, metadata); + } else { + job = await scheduleRecurringOD(interval, od, world, metadata); + } + + sendResponse({ + res, + data: { + jobId: job.attrs._id, + nextRunAt: job.attrs.nextRunAt, + type, + ...(type === "once" ? { time } : { interval }), + }, + status: 201, + }); +}); + +export const getODSchedulesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId } = req.params; + + if (!worldId || !odId) { + return sendResponse({ + res, + status: 400, + error: "worldId and odId are required", + }); + } + + const jobs = await listScheduledJobsForOD(odId, worldId); + + const schedules = jobs.map((job) => ({ + jobId: job.attrs._id, + nextRunAt: job.attrs.nextRunAt, + lastRunAt: job.attrs.lastRunAt, + interval: job.attrs.repeatInterval, + isRecurring: !!job.attrs.repeatInterval, + lastRunResult: (job.attrs as any).lastRunResult, + data: job.attrs.data, + })); + + sendResponse({ res, data: schedules }); +}); + +export const cancelODScheduleController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId, jobId } = req.params; + + if (!worldId || !odId || !jobId) { + return sendResponse({ + res, + status: 400, + error: "worldId, odId, and jobId are required", + }); + } + + const cancelledCount = await cancelScheduledOD(jobId); + + sendResponse({ + res, + data: { + message: `Cancelled ${cancelledCount} scheduled job(s)`, + cancelled: cancelledCount > 0, + }, + status: 200, + }); +}); + +export const rescheduleODController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId, jobId } = req.params; + const { newTime } = req.body; + + if (!worldId || !odId || !jobId) { + return sendResponse({ + res, + status: 400, + error: "worldId, odId, and jobId are required", + }); + } + + if (!newTime) { + return sendResponse({ + res, + status: 400, + error: "newTime is required", + }); + } + + const job = await rescheduleOD(jobId, newTime); + + if (!job) { + return sendResponse({ + res, + status: 404, + error: `Job ${jobId} not found`, + }); + } + + sendResponse({ + res, + data: { + jobId: job.attrs._id, + newScheduledTime: job.attrs.nextRunAt, + previousTime: req.body.previousTime, + }, + status: 200, + }); +}); + +export const validateODController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const odData: OperationalDescriptor = req.body; + + if (!worldId) { + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + } + + if (!odData || Object.keys(odData).length === 0) { + return sendResponse({ + res, + status: 400, + error: "OD data is required", + }); + } + + const 
validation = validateODSchema(odData); + + sendResponse({ + res, + data: validation, + status: validation.isValid ? 200 : 400, + }); +}); + +export const getODWithSchedulesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId } = req.params; + + if (!worldId || !odId) { + return sendResponse({ + res, + status: 400, + error: "worldId and odId are required", + }); + } + + const od = await getODWithSchedules(odId, worldId); + + if (!od) { + return sendResponse({ + res, + status: 404, + error: `OD ${odId} not found`, + }); + } + + sendResponse({ res, data: od }); +}); + +export const bulkScheduleODsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { schedules } = req.body; + + if (!worldId) { + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + } + + if (!schedules || !Array.isArray(schedules) || schedules.length === 0) { + return sendResponse({ + res, + status: 400, + error: "schedules array is required", + }); + } + + const result = await bulkScheduleODs(schedules, worldId); + + sendResponse({ + res, + data: result, + status: result.failed.length > 0 ? 207 : 200, // 207 Multi-Status if partial success + }); +}); +export const runOdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const odName = req.query.odName; + + // Validate worldId parameter + if (!worldId) { + return sendResponse({ + res, + status: 400, + error: "World ID is required", + }); + } + + // Validate odName parameter + if (!odName) { + return sendResponse({ + res, + status: 400, + error: "OD name is required", + }); + } + + // Validate odName is a supported workflow + if (typeof odName !== "string" || !["simple-edi", "simple-wms"].includes(odName)) { + return sendResponse({ + res, + status: 400, + error: "Invalid OD name. 
Supported values: simple-edi, simple-wms", + }); + } + + console.log(`Executing OD workflow '${odName}' in world '${worldId}'`); + + sendResponse({ + res, + status: 200, + data: null, + }); +}); + +export const pauseODScheduleController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId, jobId } = req.params; + + if (!worldId || !odId || !jobId) { + return sendResponse({ + res, + status: 400, + error: "worldId, odId, and jobId are required", + }); + } + + const job = await pauseODSchedule(jobId); + + if (!job) { + return sendResponse({ + res, + status: 404, + error: `Job ${jobId} not found`, + }); + } + + sendResponse({ + res, + data: { + message: "Schedule paused successfully", + jobId: job.attrs._id, + nextRunAt: job.attrs.nextRunAt, + disabled: (job.attrs as any).disabled, + }, + status: 200, + }); +}); + +export const resumeODScheduleController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, odId, jobId } = req.params; + + if (!worldId || !odId || !jobId) { + return sendResponse({ + res, + status: 400, + error: "worldId, odId, and jobId are required", + }); + } + + const job = await resumeODSchedule(jobId); + + if (!job) { + return sendResponse({ + res, + status: 404, + error: `Job ${jobId} not found`, + }); + } + + sendResponse({ + res, + data: { + message: "Schedule resumed successfully", + jobId: job.attrs._id, + nextRunAt: job.attrs.nextRunAt, + disabled: (job.attrs as any).disabled, + }, + status: 200, + }); +}); + +export const pauseWorldSchedulesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) { + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + } + + const numPaused = await pauseAllODSchedulesForWorld(worldId); + + sendResponse({ + res, + data: { + message: `Paused ${numPaused} schedules for world`, + count: numPaused, + }, + status: 200, + }); +}); + +export const resumeWorldSchedulesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) { + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + } + + const numResumed = await resumeAllODSchedulesForWorld(worldId); + + sendResponse({ + res, + data: { + message: `Resumed ${numResumed} schedules for world`, + count: numResumed, + }, + status: 200, + }); +}); + +export const getScheduleStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) { + return sendResponse({ + res, + status: 400, + error: "worldId is required", + }); + } + + const status = await getScheduleStatusForWorld(worldId); + + sendResponse({ + res, + data: { + status, + }, + status: 200, + }); +}); diff --git a/packages/controlmart/src/controller/persona.controller.ts b/packages/controlmart/src/controller/persona.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..86242ad94eef4ae4479963a05706ba4a3bdb01fa --- /dev/null +++ b/packages/controlmart/src/controller/persona.controller.ts @@ -0,0 +1,333 @@ +/** + * Persona Controller + * + * Handles HTTP requests for persona-related endpoints. 
+ */ + +import type { Request, Response } from 'express'; +import { personaRegistry } from '../services/persona-registry.service'; +import { sendResponse } from '../utils/http.util'; +import { getErrorMessage } from '../utils/error.util'; +import { capabilityCatalog } from '../services/capability-catalog.service'; +import { PersonaRepository } from '../repository/persona.repository'; +import { PersonaCreateSchema, PersonaUpdateSchema } from '../utils/validators/persona.validator'; +import { RepositoryError } from '../utils/error.util'; +import { ZodError } from 'zod'; +import { parseOffsetParams, applyOffsetPagination, buildOffsetMeta } from '../utils/pagination.util'; + +/** + * List all personas with optional filtering and pagination + * GET /personas?role=&department=&accessLevel=&tags=&page=&limit= + */ +const listPersonasController = async (req: Request, res: Response) => { + try { + const { role, department, accessLevel, tags } = req.query; + + // Parse pagination parameters + const pagination = parseOffsetParams(req.query); + + let allPersonas = personaRegistry.getAll(); + + // Apply filters if provided + if (role || department || accessLevel || tags) { + allPersonas = personaRegistry.filter({ + role: role as any, + department: department as any, + accessLevel: accessLevel as any, + tags: tags ? String(tags).split(',') : undefined, + }); + } + + // Apply pagination to filtered results + const total = allPersonas.length; + const { skip, limit } = applyOffsetPagination(pagination.page, pagination.limit); + const data = allPersonas.slice(skip, skip + limit); + const paginationMeta = buildOffsetMeta(total, pagination.page || 1, pagination.limit || 20); + + sendResponse({ + res, + status: 200, + data: { + count: data.length, + total, + personas: data, + pagination: paginationMeta, + }, + }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +/** + * Get single persona by ID + * GET /personas/:personaId + */ +const getPersonaController = async (req: Request, res: Response) => { + const { personaId } = req.params; + + try { + const persona = personaRegistry.getById(personaId); + + if (!persona) { + sendResponse({ + res, + status: 404, + error: `Persona '${personaId}' not found`, + }); + return; + } + + sendResponse({ res, status: 200, data: persona }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +/** + * Get capabilities for a persona + * GET /personas/:personaId/capabilities + */ +const getPersonaCapabilitiesController = async ( + req: Request, + res: Response +) => { + const { personaId } = req.params; + + try { + const persona = personaRegistry.getById(personaId); + + if (!persona) { + sendResponse({ + res, + status: 404, + error: `Persona '${personaId}' not found`, + }); + return; + } + + // Get full capability objects from catalog + const capabilities = persona.capabilityIds + .map((id) => capabilityCatalog.getById(id)) + .filter((cap) => cap !== null); + + sendResponse({ + res, + status: 200, + data: { + personaId: persona.id, + personaName: persona.name, + capabilityCount: capabilities.length, + capabilities, + }, + }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +/** + * Create a new persona + * POST /personas + */ +const createPersonaController = async (req: Request, res: Response) => { + try { + // Validate request body + const validated = PersonaCreateSchema.parse(req.body); + + // Create persona + const persona = await 
PersonaRepository.create(validated); + + // Reload registry cache + await personaRegistry.reload(); + + return sendResponse({ + res, + status: 201, + data: { + success: true, + message: "Persona created successfully", + data: persona, + }, + }); + } catch (error) { + if (error instanceof ZodError) { + return sendResponse({ + res, + status: 400, + error: "Validation failed", + data: { details: error.errors }, + }); + } + + if (error instanceof RepositoryError) { + const status = error.code === "VALIDATION_ERROR" ? 400 : 500; + return sendResponse({ + res, + status, + error: error.message, + }); + } + + return sendResponse({ + res, + status: 500, + error: "Failed to create persona", + data: { details: getErrorMessage(error) }, + }); + } +}; + +/** + * Update an existing persona + * PUT /personas/:personaId + */ +const updatePersonaController = async (req: Request, res: Response) => { + try { + const { personaId } = req.params; + + if (!personaId) { + return sendResponse({ + res, + status: 400, + error: "Persona ID is required", + }); + } + + // Validate request body + const validated = PersonaUpdateSchema.parse(req.body); + + // Check if persona exists + const existing = await PersonaRepository.findById(personaId); + if (!existing) { + return sendResponse({ + res, + status: 404, + error: `Persona '${personaId}' not found`, + }); + } + + // Update persona + const updated = await PersonaRepository.update(personaId, validated); + + // Reload registry cache + await personaRegistry.reload(); + + return sendResponse({ + res, + status: 200, + data: { + success: true, + message: "Persona updated successfully", + data: updated, + }, + }); + } catch (error) { + if (error instanceof ZodError) { + return sendResponse({ + res, + status: 400, + error: "Validation failed", + data: { details: error.errors }, + }); + } + + if (error instanceof RepositoryError) { + const status = error.code === "NOT_FOUND_ERROR" ? 404 : + error.code === "VALIDATION_ERROR" ? 400 : 500; + return sendResponse({ + res, + status, + error: error.message, + }); + } + + return sendResponse({ + res, + status: 500, + error: "Failed to update persona", + data: { details: getErrorMessage(error) }, + }); + } +}; + +/** + * Delete a persona + * DELETE /personas/:personaId + */ +const deletePersonaController = async (req: Request, res: Response) => { + try { + const { personaId } = req.params; + + if (!personaId) { + return sendResponse({ + res, + status: 400, + error: "Persona ID is required", + }); + } + + // Check if persona exists + const existing = await PersonaRepository.findById(personaId); + if (!existing) { + return sendResponse({ + res, + status: 404, + error: `Persona '${personaId}' not found`, + }); + } + + // Delete persona + const deleted = await PersonaRepository.delete(personaId); + + if (!deleted) { + return sendResponse({ + res, + status: 500, + error: "Failed to delete persona", + }); + } + + // Reload registry cache + await personaRegistry.reload(); + + return sendResponse({ + res, + status: 200, + data: { + success: true, + message: `Persona '${personaId}' deleted successfully`, + data: { id: personaId, deleted: true }, + }, + }); + } catch (error) { + if (error instanceof RepositoryError) { + const status = error.code === "NOT_FOUND_ERROR" ? 404 : + error.code === "VALIDATION_ERROR" ? 
400 : 500; + return sendResponse({ + res, + status, + error: error.message, + }); + } + + return sendResponse({ + res, + status: 500, + error: "Failed to delete persona", + data: { details: getErrorMessage(error) }, + }); + } +}; + +/** + * Export all persona controllers + */ +export const PersonaController = { + listPersonasController, + getPersonaController, + getPersonaCapabilitiesController, + createPersonaController, + updatePersonaController, + deletePersonaController, +}; diff --git a/packages/controlmart/src/controller/tickets.controller.ts b/packages/controlmart/src/controller/tickets.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..bdc3dc0fb34c1b0c36e113b2d96987ac7367ac85 --- /dev/null +++ b/packages/controlmart/src/controller/tickets.controller.ts @@ -0,0 +1,206 @@ +import type { Request, Response } from "express"; + +import { TicketRepository } from "../repository/tickets.repository"; +import { sendResponse, asyncHandler } from "../utils/http.util"; +import type { TWorldItsmTicketInput } from "../models/tickets.model"; + +export const createTicketController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const ticketData: TWorldItsmTicketInput = req.body; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + if (!ticketData.title || !ticketData.description || !ticketData.requester) { + return sendResponse({ + res, + status: 400, + error: "title, description, and requester are required", + }); + } + + const repo = TicketRepository(worldId); + const ticket = await repo.createTicket(ticketData); + sendResponse({ + res, + data: ticket, + status: 201, + }); +}); + +export const getTicketController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, ticketId } = req.params; + + if (!worldId || !ticketId) { + return sendResponse({ + res, + status: 400, + error: "worldId and ticketId are required", + }); + } + + const repo = TicketRepository(worldId); + const ticket = await repo.getTicketById(ticketId); + if (!ticket) { + return sendResponse({ res, status: 404, error: "Ticket not found" }); + } + sendResponse({ + res, + data: ticket, + status: 200, + }); +}); + +export const getTicketsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { + status, + priority, + department, + assignedTo, + impact, + urgency, + dateEnd, + dateStart, + limit = 100, + cursor = null, + } = req.query; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + const filters: any = {}; + + if (status) filters.status = status as string; + if (priority) filters.priority = priority as string; + if (department) filters.department = department as string; + if (assignedTo) filters.assignedTo = assignedTo as string; + if (impact) filters.impact = impact as string; + if (urgency) filters.urgency = urgency as string; + if (dateStart) filters.dateStart = new Date(dateStart as string); + if (dateEnd) filters.dateEnd = new Date(dateEnd as string); + + filters.limit = + typeof limit === "string" ? parseInt(limit, 10) : typeof limit === "number" ? 
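`createPersonaController` validates the body with `PersonaCreateSchema` and reloads the registry cache after every write. The schema itself is not in this hunk, so the payload below is inferred from the fields the list/filter code reads (`name`, `role`, `department`, `accessLevel`, `tags`, `capabilityIds`); treat every field name and the `/personas` path as assumptions:

```ts
// Hypothetical persona-creation payload; field names are guesses based on the
// registry filters above, not on PersonaCreateSchema itself.
const newPersona = {
  name: "Inbound Receiving Clerk",
  role: "warehouse-associate",
  department: "receiving",
  accessLevel: "standard",
  tags: ["wms", "inbound"],
  capabilityIds: ["wms.receive-trailer", "wms.cycle-count"],
};

async function createPersona(baseUrl: string) {
  const res = await fetch(`${baseUrl}/personas`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(newPersona),
  });
  // A 400 here means PersonaCreateSchema rejected the payload; the Zod error
  // details ride along in the response body per the ZodError branch above.
  return res.json();
}
```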
limit : 100; + filters.cursor = cursor as string | null; + + const repo = TicketRepository(worldId); + const tickets = await repo.getTickets(filters); + sendResponse({ + res, + data: tickets, + status: 200, + pagination: { + totalCount: tickets.totalCount, + limit: tickets.limit, + hasMore: tickets.hasMore, + nextCursor: tickets.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); +}); + +export const updateTicketController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, ticketId } = req.params; + const updateData = req.body; + + if (!worldId || !ticketId) { + return sendResponse({ + res, + status: 400, + error: "worldId and ticketId are required", + }); + } + + const repo = TicketRepository(worldId); + const updatedTicket = await repo.updateTicket(ticketId, updateData); + if (!updatedTicket) { + return sendResponse({ res, status: 404, error: "Ticket not found" }); + } + sendResponse({ + res, + data: updatedTicket, + status: 200, + }); +}); + +export const updateTicketStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, ticketId } = req.params; + const { status } = req.body; + + if (!worldId || !ticketId) { + return sendResponse({ + res, + status: 400, + error: "worldId and ticketId are required", + }); + } + + if (!status) { + return sendResponse({ res, status: 400, error: "status is required" }); + } + + const repo = TicketRepository(worldId); + const updatedTicket = await repo.updateTicketStatus(ticketId, status); + if (!updatedTicket) { + return sendResponse({ res, status: 404, error: "Ticket not found" }); + } + sendResponse({ + res, + data: updatedTicket, + status: 200, + }); +}); + +export const updateTicketWorkNotesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, ticketId } = req.params; + const { workNotes } = req.body; + + if (!worldId || !ticketId) { + return sendResponse({ + res, + status: 400, + error: "worldId and ticketId are required", + }); + } + + if (!workNotes) { + return sendResponse({ res, status: 400, error: "workNotes is required" }); + } + + const repo = TicketRepository(worldId); + const updatedTicket = await repo.updateTicketWorkNotes(ticketId, workNotes); + if (!updatedTicket) { + return sendResponse({ res, status: 404, error: "Ticket not found" }); + } + sendResponse({ + res, + data: updatedTicket, + status: 200, + }); +}); + +export const deleteTicketController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, ticketId } = req.params; + + if (!worldId || !ticketId) { + return sendResponse({ + res, + status: 400, + error: "worldId and ticketId are required", + }); + } + + const repo = TicketRepository(worldId); + const deletedTicket = await repo.deleteTicket(ticketId); + if (!deletedTicket) { + return sendResponse({ res, status: 404, error: "Ticket not found" }); + } + sendResponse({ + res, + data: { message: "Ticket deleted successfully" }, + status: 200, + }); +}); diff --git a/packages/controlmart/src/controller/tms/carrier.controller.ts b/packages/controlmart/src/controller/tms/carrier.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..f39d0177cec55eb8281a0aa9564b3037f23b723b --- /dev/null +++ b/packages/controlmart/src/controller/tms/carrier.controller.ts @@ -0,0 +1,326 @@ +import type { Request, Response } from "express"; + +import { TmsCarrierRepository } from "../../repository"; +import type { TCarrierModel } from "../../models/tms/carrier.tms.model"; +import { 
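`getTicketsController` exposes cursor pagination: the response carries `pagination.{ totalCount, limit, hasMore, nextCursor }`, and the next page is requested by echoing `nextCursor` back as the `cursor` query param. A sketch of a client loop, assuming a `/worlds/:worldId/tickets` path and that the repository result exposes its rows under `items`:

```ts
// Drains all matching tickets page by page; path and `items` field are assumptions.
async function fetchAllOpenTickets(baseUrl: string, worldId: string) {
  const all: unknown[] = [];
  let cursor: string | null = null;
  do {
    const qs = new URLSearchParams({ status: "open", limit: "100" });
    if (cursor) qs.set("cursor", cursor);
    const res = await fetch(`${baseUrl}/worlds/${worldId}/tickets?${qs}`);
    const body = await res.json();
    all.push(...(body.data?.items ?? []));
    // Stop when the controller reports no further pages.
    cursor = body.pagination?.hasMore ? body.pagination.nextCursor : null;
  } while (cursor);
  return all;
}
```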
sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createTMSCarrierController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = TmsCarrierRepository(worldId); + const carrier = await repo.createCarrier(data); + sendResponse({ + res, + data: { ...carrier, id: getIdFromMongoObject(carrier) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create carrier: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSCarrierByIdController = async (req: Request, res: Response) => { + const { worldId, carrierId } = req.params; + + if (!worldId || !carrierId) + return sendResponse({ + res, + status: 400, + error: "worldId and carrierId are required", + }); + + try { + const repo = TmsCarrierRepository(worldId); + const carrier = await repo.getCarrierById(carrierId); + if (!carrier) + return sendResponse({ + res, + status: 404, + error: `Carrier ${carrierId} not found`, + }); + + sendResponse({ + res, + data: { ...carrier, id: getIdFromMongoObject(carrier) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get carrier: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSCarrierByCodeController = async (req: Request, res: Response) => { + const { worldId, carrierCode } = req.params; + + if (!worldId || !carrierCode) + return sendResponse({ + res, + status: 400, + error: "worldId and carrierCode are required", + }); + + try { + const repo = TmsCarrierRepository(worldId); + const carrier = await repo.getCarrierByCode(carrierCode); + if (!carrier) + return sendResponse({ + res, + status: 404, + error: `Carrier with code ${carrierCode} not found`, + }); + + sendResponse({ + res, + data: { ...carrier, id: getIdFromMongoObject(carrier) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get carrier by code: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSActiveCarriersController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const filters = { + carrierType: req.query.carrierType as string | undefined, + serviceRegion: req.query.serviceRegion as string | undefined, + smartWayCertified: req.query.smartWayCertified + ? req.query.smartWayCertified === "true" + : undefined, + limit: req.query.limit ? 
Number(req.query.limit) : undefined, + cursor: req.query.cursor as string | undefined, + }; + + try { + const repo = TmsCarrierRepository(worldId); + const carriers = await repo.getActiveCarriers(filters); + sendResponse({ + res, + data: carriers.items.map((carrier: TCarrierModel) => ({ + ...carrier, + id: getIdFromMongoObject(carrier), + })), + pagination: { + totalCount: carriers.totalCount, + limit: carriers.limit, + hasMore: carriers.hasMore, + nextCursor: carriers.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get active carriers: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateTMSCarrierStatusController = async (req: Request, res: Response) => { + const { worldId, carrierId } = req.params; + const { status } = req.body as { status: string }; + + if (!worldId || !carrierId) + return sendResponse({ + res, + status: 400, + error: "worldId and carrierId are required", + }); + + if (!status) + return sendResponse({ + res, + status: 400, + error: "Status is required", + }); + + try { + const repo = TmsCarrierRepository(worldId); + const carrier = await repo.updateCarrierStatus(carrierId, status); + if (!carrier) + return sendResponse({ + res, + status: 404, + error: `Carrier ${carrierId} not found`, + }); + + sendResponse({ + res, + data: { ...carrier, id: getIdFromMongoObject(carrier) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update carrier status: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateTMSCarrierPerformanceController = async (req: Request, res: Response) => { + const { worldId, carrierId } = req.params; + const performanceData = req.body as { + onTimeDeliveryRate?: number; + damageClaimRate?: number; + averageTransitTime?: number; + totalShipmentsCompleted?: number; + }; + + if (!worldId || !carrierId) + return sendResponse({ + res, + status: 400, + error: "worldId and carrierId are required", + }); + + if (!performanceData || Object.keys(performanceData).length === 0) + return sendResponse({ + res, + status: 400, + error: "Performance data is required", + }); + + try { + const repo = TmsCarrierRepository(worldId); + const carrier = await repo.updateCarrierPerformance(carrierId, performanceData); + if (!carrier) + return sendResponse({ + res, + status: 404, + error: `Carrier ${carrierId} not found`, + }); + + sendResponse({ + res, + data: { ...carrier, id: getIdFromMongoObject(carrier) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update carrier performance: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSCarrierMetricsController = async (req: Request, res: Response) => { + const { worldId, carrierId } = req.params; + + if (!worldId || !carrierId) + return sendResponse({ + res, + status: 400, + error: "worldId and carrierId are required", + }); + + try { + const repo = TmsCarrierRepository(worldId); + const metrics = await repo.getCarrierMetrics(carrierId); + sendResponse({ + res, + data: metrics, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get carrier metrics: ${getErrorMessage(error)}`, + }); + } +}; + +export const searchTMSCarriersController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const searchTerm = req.query.searchTerm as string; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!searchTerm) + return 
sendResponse({ + res, + status: 400, + error: "searchTerm query parameter is required", + }); + + const filters = { + carrierType: req.query.carrierType as string | undefined, + status: req.query.status as string | undefined, + serviceRegion: req.query.serviceRegion as string | undefined, + }; + + try { + const repo = TmsCarrierRepository(worldId); + const carriers = await repo.searchCarriers(searchTerm, filters); + sendResponse({ + res, + data: carriers.carriers.map((carrier: TCarrierModel) => ({ + ...carrier, + id: getIdFromMongoObject(carrier), + })), + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to search carriers: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSCarriersByPerformanceController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const criteria = { + minOnTimeRate: req.query.minOnTimeRate ? Number(req.query.minOnTimeRate) : undefined, + maxDamageRate: req.query.maxDamageRate ? Number(req.query.maxDamageRate) : undefined, + maxTransitTime: req.query.maxTransitTime ? Number(req.query.maxTransitTime) : undefined, + carrierType: req.query.carrierType as string | undefined, + }; + + try { + const repo = TmsCarrierRepository(worldId); + const carriers = await repo.getCarriersByPerformance(criteria); + sendResponse({ + res, + data: carriers.carriers.map((carrier: TCarrierModel) => ({ + ...carrier, + id: getIdFromMongoObject(carrier), + })), + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get carriers by performance: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/tms/inbound_trailer.controller.ts b/packages/controlmart/src/controller/tms/inbound_trailer.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..412842ca2154b905963c37ab647ef07cdda78815 --- /dev/null +++ b/packages/controlmart/src/controller/tms/inbound_trailer.controller.ts @@ -0,0 +1,504 @@ +import type { Request, Response } from "express"; + +import { TmsInboundTrailerRepository } from "../../repository"; +import type { TInboundTrailerModel } from "../../models/tms/inbound_trailer.tms.model"; +import { sendResponse } from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createTMSInboundTrailerController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.createInboundTrailer(data); + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create inbound trailer: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSInboundTrailerByIdController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const 
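The two carrier read paths above differ in intent: `searchTMSCarriersController` needs a `searchTerm` plus optional type/status/region filters, while `getTMSCarriersByPerformanceController` filters on numeric thresholds. A hedged sketch of both calls, with assumed route paths:

```ts
// Query parameter names match the controllers above; the paths are assumptions.
async function findReliableCarriers(baseUrl: string, worldId: string) {
  const search = new URLSearchParams({ searchTerm: "express", carrierType: "LTL" });
  const byName = await fetch(`${baseUrl}/worlds/${worldId}/tms/carriers/search?${search}`);

  const perf = new URLSearchParams({ minOnTimeRate: "0.95", maxDamageRate: "0.01" });
  const byPerformance = await fetch(`${baseUrl}/worlds/${worldId}/tms/carriers/by-performance?${perf}`);

  return { byName: await byName.json(), byPerformance: await byPerformance.json() };
}
```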
trailer = await repo.getTrailerById(trailerId); + if (!trailer) + return sendResponse({ + res, + status: 404, + error: `Inbound trailer ${trailerId} not found`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get inbound trailer: ${getErrorMessage(error)}`, + }); + } +}; + +export const scheduleTMSTrailerAppointmentController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + const appointmentData = req.body as { + appointmentId?: string; + scheduledArrival: Date; + scheduledDeparture?: Date; + dockDoor?: string; + dcId: string; + facilityName?: string; + }; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + if (!appointmentData || !appointmentData.scheduledArrival || !appointmentData.dcId) + return sendResponse({ + res, + status: 400, + error: "Appointment data with scheduledArrival and dcId are required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.scheduleTrailerAppointment(trailerId, appointmentData); + if (!trailer) + return sendResponse({ + res, + status: 404, + error: `Trailer ${trailerId} not found or not in valid status for scheduling`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to schedule trailer appointment: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateTMSTrailerStatusController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + const updateData = req.body as { + status: string; + actualArrival?: Date; + actualDeparture?: Date; + estimatedArrival?: Date; + dockDoor?: string; + }; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + if (!updateData || !updateData.status) + return sendResponse({ + res, + status: 400, + error: "Status is required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.updateTrailerStatus(trailerId, updateData.status, { + actualArrival: updateData.actualArrival, + actualDeparture: updateData.actualDeparture, + estimatedArrival: updateData.estimatedArrival, + dockDoor: updateData.dockDoor, + }); + if (!trailer) + return sendResponse({ + res, + status: 404, + error: `Trailer ${trailerId} not found`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update trailer status: ${getErrorMessage(error)}`, + }); + } +}; + +export const checkInTMSTrailerController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + const checkInData = req.body as { + actualArrival: Date; + driverName?: string; + driverPhone?: string; + sealNumber?: string; + dockDoor?: string; + }; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + if (!checkInData || !checkInData.actualArrival) + return sendResponse({ + res, + status: 400, + error: "Check-in data with actualArrival is required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.checkInTrailer(trailerId, checkInData); + if 
(!trailer) + return sendResponse({ + res, + status: 404, + error: `Trailer ${trailerId} not found or not in valid status for check-in`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to check in trailer: ${getErrorMessage(error)}`, + }); + } +}; + +export const startTMSTrailerUnloadingController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.startUnloading(trailerId); + if (!trailer) + return sendResponse({ + res, + status: 404, + error: `Trailer ${trailerId} not found or not in valid status for unloading`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to start trailer unloading: ${getErrorMessage(error)}`, + }); + } +}; + +export const completeTMSTrailerUnloadingController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + const completionData = req.body as { + completionTime: Date; + actualPallets?: number; + }; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + if (!completionData || !completionData.completionTime) + return sendResponse({ + res, + status: 400, + error: "Completion data with completionTime is required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.completeUnloading(trailerId, completionData); + if (!trailer) + return sendResponse({ + res, + status: 404, + error: `Trailer ${trailerId} not found or not in valid status for completion`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to complete trailer unloading: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSInboundTrailersByStatusController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const statusParam = req.query.status as string; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!statusParam) + return sendResponse({ + res, + status: 400, + error: "status query parameter is required", + }); + + const statuses = statusParam.split(","); + const filters = { + carrierId: req.query.carrierId as string | undefined, + dcId: req.query.dcId as string | undefined, + dateStart: req.query.from ? new Date(req.query.from as string) : undefined, + dateEnd: req.query.to ? 
new Date(req.query.to as string) : undefined, + }; + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailers = await repo.getTrailersByStatus(statuses, filters); + sendResponse({ + res, + data: trailers.items.map((trailer: TInboundTrailerModel) => ({ + ...trailer, + id: getIdFromMongoObject(trailer), + })), + pagination: { + totalCount: trailers.totalCount, + limit: trailers.limit, + hasMore: trailers.hasMore, + nextCursor: trailers.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get inbound trailers: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSTrailersByAppointmentDateController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const dcId = req.query.dcId as string; + const appointmentDate = req.query.appointmentDate + ? new Date(req.query.appointmentDate as string) + : new Date(); + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!dcId) + return sendResponse({ + res, + status: 400, + error: "dcId query parameter is required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailers = await repo.getTrailersByAppointmentDate(dcId, appointmentDate); + sendResponse({ + res, + data: trailers.items.map((trailer: TInboundTrailerModel) => ({ + ...trailer, + id: getIdFromMongoObject(trailer), + })), + pagination: { + totalCount: trailers.totalCount, + limit: trailers.limit, + hasMore: trailers.hasMore, + nextCursor: trailers.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get trailers by appointment date: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSAvailableDockDoorsController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const dcId = req.query.dcId as string; + const startTime = req.query.startTime ? new Date(req.query.startTime as string) : undefined; + const endTime = req.query.endTime ? 
new Date(req.query.endTime as string) : undefined; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!dcId || !startTime || !endTime) + return sendResponse({ + res, + status: 400, + error: "dcId, startTime, and endTime query parameters are required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const dockDoors = await repo.getAvailableDockDoors(dcId, { + startTime, + endTime, + }); + sendResponse({ + res, + data: dockDoors, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get available dock doors: ${getErrorMessage(error)}`, + }); + } +}; + +export const addTMSTrailerDelayController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + const delayData = req.body as { + delayType: string; + reason: string; + reportedAt: Date; + estimatedDelay: number; + }; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + if (!delayData || !delayData.delayType || !delayData.reason || !delayData.reportedAt) + return sendResponse({ + res, + status: 400, + error: "Delay data with delayType, reason, and reportedAt are required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.addDelay(trailerId, delayData); + if (!trailer) + return sendResponse({ + res, + status: 404, + error: `Trailer ${trailerId} not found`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to add trailer delay: ${getErrorMessage(error)}`, + }); + } +}; + +export const assignTMSTrailerToDockController = async (req: Request, res: Response) => { + const { worldId, trailerId } = req.params; + const { dockDoor } = req.body as { dockDoor: string }; + + if (!worldId || !trailerId) + return sendResponse({ + res, + status: 400, + error: "worldId and trailerId are required", + }); + + if (!dockDoor) + return sendResponse({ + res, + status: 400, + error: "dockDoor is required", + }); + + try { + const repo = TmsInboundTrailerRepository(worldId); + const trailer = await repo.assignToDock(trailerId, dockDoor); + if (!trailer) + return sendResponse({ + res, + status: 404, + error: `Trailer ${trailerId} not found`, + }); + + sendResponse({ + res, + data: { ...trailer, id: getIdFromMongoObject(trailer) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to assign trailer to dock: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/tms/index.ts b/packages/controlmart/src/controller/tms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..f3873bfefe3dbd53a6074fa759b8eab13ebded4d --- /dev/null +++ b/packages/controlmart/src/controller/tms/index.ts @@ -0,0 +1,3 @@ +export * from "./shipment.controller"; +export * from "./carrier.controller"; +export * from "./inbound_trailer.controller"; diff --git a/packages/controlmart/src/controller/tms/shipment.controller.ts b/packages/controlmart/src/controller/tms/shipment.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..65eb9d5db76fba9d49904027638f8a7e3317e6a8 --- /dev/null +++ b/packages/controlmart/src/controller/tms/shipment.controller.ts @@ -0,0 +1,519 @@ +import type { Request, Response } from "express"; + +import { TmsShipmentRepository } from "../../repository"; +import { sendResponse } 
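Taken together, the inbound-trailer handlers model a lifecycle: schedule an appointment (requires `scheduledArrival` and `dcId`), check in (requires `actualArrival`), start unloading, then complete unloading (requires `completionTime`). A happy-path sketch under assumed route paths:

```ts
// Illustrative lifecycle walk-through; every path segment below is an assumption,
// only the required body fields come from the controllers above.
async function runTrailerLifecycle(baseUrl: string, worldId: string, trailerId: string) {
  const post = (path: string, body?: unknown) =>
    fetch(`${baseUrl}/worlds/${worldId}${path}`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: body ? JSON.stringify(body) : undefined,
    }).then((r) => r.json());

  await post(`/tms/inbound-trailers/${trailerId}/appointment`, {
    scheduledArrival: new Date().toISOString(), // required by the controller
    dcId: "DC-001",                             // required by the controller
  });
  await post(`/tms/inbound-trailers/${trailerId}/check-in`, {
    actualArrival: new Date().toISOString(),
  });
  await post(`/tms/inbound-trailers/${trailerId}/unloading/start`);
  await post(`/tms/inbound-trailers/${trailerId}/unloading/complete`, {
    completionTime: new Date().toISOString(),
  });
}
```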
from "../../utils/http.util"; +import { getIdFromMongoObject } from "../../utils/mongo.util"; +import { getErrorMessage } from "../../utils/error.util"; + +export const createTMSShipmentController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!data || Object.keys(data).length === 0) + return sendResponse({ + res, + status: 400, + error: "Request body is required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.createShipment(data); + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create shipment: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSShipmentByIdController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.getShipmentWithEvents(shipmentId); + if (!shipment) + return sendResponse({ + res, + status: 404, + error: `Shipment ${shipmentId} not found`, + }); + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get shipment: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSShipmentsByStatusController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const statusParam = req.query.status as string; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + if (!statusParam) + return sendResponse({ + res, + status: 400, + error: "status query parameter is required", + }); + + const statuses = statusParam.split(","); + const filters = { + carrierId: req.query.carrierId as string | undefined, + dateStart: req.query.from ? new Date(req.query.from as string) : undefined, + dateEnd: req.query.to ? new Date(req.query.to as string) : undefined, + shipmentType: req.query.shipmentType as string | undefined, + limit: req.query.limit ? Number(req.query.limit) : undefined, + cursor: req.query.cursor as string | undefined, + }; + + try { + const repo = TmsShipmentRepository(worldId); + const shipments = await repo.getShipmentsByStatus(statuses, filters); + sendResponse({ + res, + data: shipments.items.map((shipment) => ({ + ...shipment, + id: getIdFromMongoObject(shipment), + })), + pagination: { + totalCount: shipments.totalCount, + limit: shipments.limit, + hasMore: shipments.hasMore, + nextCursor: shipments.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get shipments: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSInTransitShipmentsController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const { cursor, limit } = req.query; + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipments = await repo.getInTransitShipments({ + cursor: cursor as string | undefined, + limit: limit ? 
Number(limit) : undefined, + }); + sendResponse({ + res, + data: shipments.items.map((shipment) => ({ + ...shipment, + id: getIdFromMongoObject(shipment), + })), + pagination: { + totalCount: shipments.totalCount, + limit: shipments.limit, + hasMore: shipments.hasMore, + nextCursor: shipments.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get in-transit shipments: ${getErrorMessage(error)}`, + }); + } +}; + +export const getTMSShipmentsByCarrierController = async (req: Request, res: Response) => { + const { worldId, carrierId } = req.params; + + if (!worldId || !carrierId) + return sendResponse({ + res, + status: 400, + error: "worldId and carrierId are required", + }); + + const dateStart = req.query.from ? new Date(req.query.from as string) : undefined; + const dateEnd = req.query.to ? new Date(req.query.to as string) : undefined; + const { cursor, limit } = req.query; + + try { + const repo = TmsShipmentRepository(worldId); + const shipments = await repo.getShipmentsByCarrier(carrierId, { + dateStart, + dateEnd, + cursor: cursor as string | undefined, + limit: limit ? Number(limit) : undefined, + }); + sendResponse({ + res, + data: shipments.items.map((shipment) => ({ + ...shipment, + id: getIdFromMongoObject(shipment), + })), + pagination: { + totalCount: shipments.totalCount, + limit: shipments.limit, + hasMore: shipments.hasMore, + nextCursor: shipments.nextCursor || null, + previousCursor: req.query.cursor as string | null, + }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to get shipments by carrier: ${getErrorMessage(error)}`, + }); + } +}; + +export const tenderTMSShipmentController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const carrierInfo = req.body as { + carrierId: string; + carrierName: string; + carrierCode: string; + scacCode: string; + }; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!carrierInfo || !carrierInfo.carrierId) + return sendResponse({ + res, + status: 400, + error: "Carrier information is required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.tenderShipment(shipmentId, carrierInfo); + if (!shipment) + return sendResponse({ + res, + status: 404, + error: `Shipment ${shipmentId} not found or not in PLANNED status`, + }); + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to tender shipment: ${getErrorMessage(error)}`, + }); + } +}; + +export const acceptTMSShipmentController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const acceptanceData = req.body as { + proNumber?: string; + trackingNumber?: string; + estimatedPickupDate?: Date; + }; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.acceptShipment(shipmentId, acceptanceData); + if (!shipment) + return sendResponse({ + res, + status: 404, + error: `Shipment ${shipmentId} not found or not in TENDERED status`, + }); + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ 
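`tenderTMSShipmentController` and `acceptTMSShipmentController` encode an ordering: tendering is only valid from PLANNED and acceptance only from TENDERED, which is why both return 404 with a status hint when the transition is illegal. A sketch of the two-step handoff, with assumed paths and carrier values:

```ts
// Tender then accept a shipment; paths and carrier identifiers are illustrative.
async function tenderAndAccept(baseUrl: string, worldId: string, shipmentId: string) {
  const post = (path: string, body: unknown) =>
    fetch(`${baseUrl}/worlds/${worldId}/tms/shipments/${shipmentId}${path}`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(body),
    }).then((r) => r.json());

  // Only valid while the shipment is still PLANNED.
  await post("/tender", {
    carrierId: "CAR-001",
    carrierName: "Acme Freight",
    carrierCode: "ACME",
    scacCode: "ACMF",
  });
  // Only valid once the shipment is TENDERED.
  await post("/accept", {
    proNumber: "PRO-77881",
    estimatedPickupDate: new Date().toISOString(),
  });
}
```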
+ res, + status: 500, + error: `Failed to accept shipment: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateTMSShipmentLocationController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const locationData = req.body as { + latitude: number; + longitude: number; + city?: string; + state?: string; + timestamp: Date; + source?: "EDI" | "MANUAL" | "GPS" | "CARRIER_PORTAL"; + }; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!locationData || !locationData.latitude || !locationData.longitude) + return sendResponse({ + res, + status: 400, + error: "Latitude and longitude are required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.updateShipmentLocation(shipmentId, { + ...locationData, + timestamp: locationData.timestamp || new Date(), + }); + if (!shipment) + return sendResponse({ + res, + status: 404, + error: `Shipment ${shipmentId} not found`, + }); + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update shipment location: ${getErrorMessage(error)}`, + }); + } +}; + +export const updateTMSShipmentStatusController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const updateData = req.body as { + status: string; + timestamp?: Date; + location?: string; + note?: string; + source?: string; + }; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!updateData || !updateData.status) + return sendResponse({ + res, + status: 400, + error: "Status is required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.updateShipmentStatus(shipmentId, updateData.status, { + timestamp: updateData.timestamp, + location: updateData.location, + note: updateData.note, + source: updateData.source, + }); + if (!shipment) + return sendResponse({ + res, + status: 404, + error: `Shipment ${shipmentId} not found`, + }); + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to update shipment status: ${getErrorMessage(error)}`, + }); + } +}; + +export const processTMSEdi214UpdateController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const ediData = req.body as { + status: string; + locationCode?: string; + city?: string; + state?: string; + timestamp: Date; + equipmentId?: string; + estimatedDeliveryDate?: Date; + rawEdiData: any; + }; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!ediData || !ediData.status || !ediData.rawEdiData) + return sendResponse({ + res, + status: 400, + error: "EDI data with status and rawEdiData are required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.processEdi214Update(shipmentId, { + ...ediData, + timestamp: ediData.timestamp || new Date(), + }); + if (!shipment) + return sendResponse({ + res, + status: 404, + error: `Shipment ${shipmentId} not found`, + }); + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 
500, + error: `Failed to process EDI 214 update: ${getErrorMessage(error)}`, + }); + } +}; + +export const addTMSShipmentDelayController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const delayData = req.body as { + delayType: string; + reason: string; + startTime: Date; + estimatedDelay: number; + endTime?: Date; + }; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!delayData || !delayData.delayType || !delayData.reason) + return sendResponse({ + res, + status: 400, + error: "Delay type and reason are required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const shipment = await repo.addDelay(shipmentId, delayData); + if (!shipment) + return sendResponse({ + res, + status: 404, + error: `Shipment ${shipmentId} not found`, + }); + + sendResponse({ + res, + data: { ...shipment, id: getIdFromMongoObject(shipment) }, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to add shipment delay: ${getErrorMessage(error)}`, + }); + } +}; + +export const createTMSShipmentStatusEventController = async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const eventData = req.body; + + if (!worldId || !shipmentId) + return sendResponse({ + res, + status: 400, + error: "worldId and shipmentId are required", + }); + + if (!eventData || Object.keys(eventData).length === 0) + return sendResponse({ + res, + status: 400, + error: "Event data is required", + }); + + try { + const repo = TmsShipmentRepository(worldId); + const event = await repo.createStatusEvent(shipmentId, eventData); + sendResponse({ + res, + data: { ...event, id: getIdFromMongoObject(event) }, + status: 201, + }); + } catch (error) { + sendResponse({ + res, + status: 500, + error: `Failed to create status event: ${getErrorMessage(error)}`, + }); + } +}; diff --git a/packages/controlmart/src/controller/verification.controller.ts b/packages/controlmart/src/controller/verification.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..b0f648e32ada4f59d73f618b50320179e5ce1857 --- /dev/null +++ b/packages/controlmart/src/controller/verification.controller.ts @@ -0,0 +1,119 @@ +import type { Request, Response } from "express"; +import { TicketRepository } from "../repository/tickets.repository"; +import { mapToVerificationTicket } from "../verification/utils/ticket-mapper.util"; +import { verifyTicket } from "../verification/engine"; +import { verifierRegistry } from "../verification/registry"; +import { createAppLogger } from "../utils/logger.util"; +import { z } from "zod"; +import { World } from "../models/world.model"; + +const logger = createAppLogger({ service: "verification-controller" }); + +const ParamsSchema = z.object({ + worldId: z.string(), + ticketId: z.string(), +}); + +const EntityParamsSchema = z.object({ + worldId: z.string(), +}); + +const EntityBodySchema = z.object({ + odId: z.string(), + entityId: z.string().optional(), + entityType: z.string().default("ORDER"), + metadata: z.record(z.string(), z.any()).optional(), +}); + +export const runVerification = async (req: Request, res: Response) => { + try { + const { worldId, ticketId } = ParamsSchema.parse(req.params); + + const world = await World.findById(worldId).lean(); + if (!world) { + return res.status(404).json({ error: "World not found" }); + } + const worldLayout = world.layout + + if (!worldLayout) { + return 
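`processTMSEdi214UpdateController` only hard-requires `status` and `rawEdiData`, and falls back to `new Date()` when no timestamp is supplied. An illustrative payload, with an assumed endpoint path:

```ts
// Example EDI 214 status update; only `status` and `rawEdiData` are required by
// the handler above, everything else here is illustrative.
async function postEdi214(baseUrl: string, worldId: string, shipmentId: string) {
  const update = {
    status: "IN_TRANSIT",
    city: "Memphis",
    state: "TN",
    timestamp: new Date().toISOString(),
    rawEdiData: { transactionSet: "214", segments: ["B10*...", "AT7*..."] },
  };
  return fetch(`${baseUrl}/worlds/${worldId}/tms/shipments/${shipmentId}/edi-214`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(update),
  }).then((r) => r.json());
}
```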
res.status(400).json({ error: "World layout not found" }); + } + + const ticketRepo = TicketRepository(worldId); + const dbTicket = await ticketRepo.getTicketById(ticketId); + + if (!dbTicket) { + return res.status(404).json({ error: "Ticket not found" }); + } + + const verificationTicket = mapToVerificationTicket(dbTicket); + logger.info({ ticketId, odId: verificationTicket.odId, layout: worldLayout }, "Starting verification for ticket"); + + const result = await verifyTicket(verificationTicket, worldLayout, verifierRegistry, logger); + + return res.status(200).json(result); + + } catch (error: any) { + logger.error({ error: String(error), stack: error.stack }, "Verification failed"); + return res.status(500).json({ + error: "Verification execution failed", + details: error.message + }); + } +}; + +export const runEntityVerification = async (req: Request, res: Response) => { + try { + const { worldId } = EntityParamsSchema.parse(req.params); + const { odId, entityId, entityType, metadata } = EntityBodySchema.parse(req.body); + + const world = await World.findById(worldId).lean(); + if (!world) { + return res.status(404).json({ error: "World not found" }); + } + const worldLayout = world.layout; + + if (!worldLayout) { + return res.status(400).json({ error: "World layout not found" }); + } + + const affectedEntities: any[] = []; + if (entityId) { + affectedEntities.push({ + type: entityType, + id: entityId, + metadata: { source: "manual-verification" } + }); + } + + const verificationTicket: any = { + id: `virtual-${Date.now()}`, + worldId: worldId, + odId: odId, + odRunId: "manual-verify", + failedStepId: "n/a", + failureType: "MANUAL_VERIFICATION", + status: "new", + createdAt: new Date().toISOString(), + affectedEntities, + metadata: { + isVirtual: true, + triggeredBy: "user-agent", + ...(metadata || {}) + } + }; + + logger.info({ odId, entityId, layout: worldLayout }, "Starting on-demand verification for entity"); + + const result = await verifyTicket(verificationTicket, worldLayout, verifierRegistry, logger); + + return res.status(200).json(result); + + } catch (error: any) { + logger.error({ error: String(error), stack: error.stack }, "Entity verification failed"); + return res.status(500).json({ + error: "Verification execution failed", + details: error.message + }); + } +}; diff --git a/packages/controlmart/src/controller/wms/bin.controller.ts b/packages/controlmart/src/controller/wms/bin.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..33d90f8d59dc3af67c3281592f97896ab2dfcd23 --- /dev/null +++ b/packages/controlmart/src/controller/wms/bin.controller.ts @@ -0,0 +1,143 @@ +import type { Request, Response } from "express"; +import { WMSBinRepository } from "../../repository/wms"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createBinController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) { + return sendResponse({ res, status: 400, error: "Request body is required" }); + } + + const repo = WMSBinRepository(worldId); + const bin = await repo.createBin(data); + sendResponse({ res, data: bin, status: 201 }); +}); + +export const getBinByCodeController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, binCode } = req.params; + const { warehouseId } = req.query; + + if (!worldId || 
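`runEntityVerification` builds a synthetic "virtual" ticket around the entity and pushes it through the same `verifyTicket` engine as real tickets. The body contract comes from `EntityBodySchema`: `odId` is required, `entityType` defaults to "ORDER", and `metadata` is merged into the virtual ticket. A request sketch with an assumed path:

```ts
// On-demand entity verification request; field names come from EntityBodySchema,
// the route path is an assumption.
const verifyRequest = {
  odId: "simple-wms",
  entityId: "ORD-12345",
  entityType: "ORDER",
  metadata: { reason: "spot-check after chaos run" },
};

async function verifyEntity(baseUrl: string, worldId: string) {
  const res = await fetch(`${baseUrl}/worlds/${worldId}/verification/entity`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(verifyRequest),
  });
  // Resolves to the verifyTicket(...) result for the virtual ticket.
  return res.json();
}
```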
!binCode) { + return sendResponse({ res, status: 400, error: "worldId and binCode are required" }); + } + + const repo = WMSBinRepository(worldId); + const bin = await repo.getBinByCode(binCode, warehouseId as string); + + if (!bin) { + return sendResponse({ res, status: 404, error: `Bin ${binCode} not found` }); + } + + sendResponse({ res, data: bin }); +}); + +export const getBinsByZoneController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, zoneId } = req.params; + const { status, binType, locationType, abcClassification } = req.query; + + if (!worldId || !zoneId) { + return sendResponse({ res, status: 400, error: "worldId and zoneId are required" }); + } + + const statusArray = typeof status === "string" ? [status] : (status as string[]); + const binTypeArray = typeof binType === "string" ? [binType] : (binType as string[]); + const locationTypeArray = + typeof locationType === "string" ? [locationType] : (locationType as string[]); + const abcClassArray = + typeof abcClassification === "string" ? [abcClassification] : (abcClassification as string[]); + + const repo = WMSBinRepository(worldId); + const bins = await repo.getBinsByZone(zoneId, { + status: statusArray, + binType: binTypeArray, + locationType: locationTypeArray, + abcClassification: abcClassArray, + }); + + sendResponse({ res, data: bins }); +}); + +export const updateBinStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, binId } = req.params; + const { status, reason } = req.body; + + if (!worldId || !binId) { + return sendResponse({ res, status: 400, error: "worldId and binId are required" }); + } + if (!status) { + return sendResponse({ res, status: 400, error: "status is required" }); + } + + const repo = WMSBinRepository(worldId); + const updated = await repo.updateBinStatus(binId, status, reason); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Bin ${binId} not found` }); + } + + sendResponse({ res, data: updated }); +}); + +export const updateBinCapacityController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, binId } = req.params; + const capacity = req.body; + + if (!worldId || !binId) { + return sendResponse({ res, status: 400, error: "worldId and binId are required" }); + } + if (!capacity) { + return sendResponse({ res, status: 400, error: "capacity data is required" }); + } + + const repo = WMSBinRepository(worldId); + const result = await repo.updateBinCapacity(binId, capacity); + + sendResponse({ res, data: result }); +}); + +export const getAvailableBinsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { zoneIds, binType, minWeight, minVolume, minPallets, warehouseId } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const zoneIdsArray = typeof zoneIds === "string" ? [zoneIds] : (zoneIds as string[]); + const binTypeArray = typeof binType === "string" ? 
[binType] : (binType as string[]); + + const filters: any = {}; + if (warehouseId) filters.warehouseId = warehouseId; + if (zoneIdsArray) filters.zoneIds = zoneIdsArray; + if (binTypeArray) filters.binType = binTypeArray; + + if (minWeight || minVolume || minPallets) { + filters.minCapacity = {}; + if (minWeight) filters.minCapacity.weight = Number(minWeight); + if (minVolume) filters.minCapacity.volume = Number(minVolume); + if (minPallets) filters.minCapacity.pallets = Number(minPallets); + } + + const repo = WMSBinRepository(worldId); + const availableBins = await repo.getAvailableBins(filters); + + sendResponse({ res, data: availableBins }); +}); + +export const getBinUtilizationController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { zoneIds, warehouseId, dateStart, dateEnd } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const zoneIdsArray = typeof zoneIds === "string" ? [zoneIds] : (zoneIds as string[]); + + const repo = WMSBinRepository(worldId); + const utilization = await repo.getBinUtilization({ + zoneIds: zoneIdsArray, + warehouseId: warehouseId as string, + }); + + sendResponse({ res, data: utilization }); +}); diff --git a/packages/controlmart/src/controller/wms/cycle_count.controller.ts b/packages/controlmart/src/controller/wms/cycle_count.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..7fd455fd75aef575da466d5e4033726d1c813eef --- /dev/null +++ b/packages/controlmart/src/controller/wms/cycle_count.controller.ts @@ -0,0 +1,226 @@ +import type { Request, Response } from "express"; +import { WMSCycleCountRepository } from "../../repository/wms/cycle_count.wms.repository"; +import type { TCycleCountInput } from "../../models/wms/cycle_count.wms.model"; +import { asyncHandler } from "../../utils/http.util"; + +// Create cycle count +export const createCycleCountController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const cycleCountData: TCycleCountInput = req.body; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const cycleCount = await cycleCountRepo.createCycleCount(cycleCountData); + + res.status(201).json({ + success: true, + message: "Cycle count created successfully", + data: cycleCount, + }); +}); + +// Get cycle counts by status +export const getCycleCountsByStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { status, warehouseId, countType, dateStart, dateEnd, cursor, limit } = req.query; + + const statusArray = Array.isArray(status) + ? (status as string[]) + : typeof status === "string" + ? [status] + : []; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(countType && { + countType: Array.isArray(countType) ? 
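`getAvailableBinsController` folds `minWeight`, `minVolume`, and `minPallets` into a single `minCapacity` filter before hitting the repository. A sketch of the query, with an assumed path:

```ts
// Query bins with spare capacity; parameter names match the handler above,
// the route path is an assumption.
async function findBinsForPallets(baseUrl: string, worldId: string) {
  const qs = new URLSearchParams({
    warehouseId: "WH-01",
    binType: "PALLET",
    minWeight: "500",
    minPallets: "2",
  });
  const res = await fetch(`${baseUrl}/worlds/${worldId}/wms/bins/available?${qs}`);
  return res.json();
}
```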
(countType as string[]) : [countType as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(cursor && { cursor: cursor as string }), + ...(limit && { limit: parseInt(limit as string) }), + }; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const result = await cycleCountRepo.getCycleCountsByStatus(statusArray, filters); + + res.status(200).json({ + success: true, + message: "Cycle counts by status retrieved successfully", + data: result, + }); + }, +); + +// Get cycle count by ID +export const getCycleCountByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, cycleCountId } = req.params; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const cycleCount = await cycleCountRepo.getCycleCountById(cycleCountId as string); + + res.status(200).json({ + success: true, + message: "Cycle count retrieved successfully", + data: cycleCount, + }); +}); + +// Update cycle count status +export const updateCycleCountStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, cycleCountId } = req.params; + const { status, completedBy } = req.body; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const result = await cycleCountRepo.updateCycleCountStatus( + cycleCountId as string, + status, + completedBy, + ); + + res.status(200).json({ + success: true, + message: "Cycle count status updated successfully", + data: result, + }); + }, +); + +// Assign user to cycle count +export const assignUserToCycleCountController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, cycleCountId } = req.params; + const { userId, userName, assignedBins } = req.body; + + const assignment = { + userId, + userName, + assignedBins, + }; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const result = await cycleCountRepo.assignUserToCycleCount(cycleCountId as string, assignment); + + res.status(200).json({ + success: true, + message: "User assigned to cycle count successfully", + data: result, + }); + }, +); + +// Add count to result +export const addCountToResultController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, cycleCountId } = req.params; + const { + binId, + productId, + sku, + expectedQuantity, + actualQuantity, + variance, + countedBy, + countedAt, + notes, + } = req.body; + + const countResult = { + binId, + productId, + sku, + expectedQuantity, + actualQuantity, + variance, + countedBy, + countedAt: new Date(countedAt), + ...(notes && { notes }), + }; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const result = await cycleCountRepo.addCountToResult(cycleCountId as string, countResult); + + res.status(200).json({ + success: true, + message: "Count result added successfully", + data: result, + }); +}); + +// Get cycle counts by warehouse +export const getCycleCountsByWarehouseController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { status, countType, dateStart, dateEnd } = req.query; + + const filters = { + ...(status && { + status: Array.isArray(status) ? (status as string[]) : [status as string], + }), + ...(countType && { + countType: Array.isArray(countType) ? 
(countType as string[]) : [countType as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + }; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const cycleCounts = await cycleCountRepo.getCycleCountsByWarehouse( + warehouseId as string, + filters, + ); + + res.status(200).json({ + success: true, + message: "Cycle counts by warehouse retrieved successfully", + data: cycleCounts, + }); + }, +); + +// Get cycle count variance report +export const getCycleCountVarianceReportController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dateStart, dateEnd, countType } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(countType && { + countType: Array.isArray(countType) ? (countType as string[]) : [countType as string], + }), + }; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const report = await cycleCountRepo.getCycleCountVarianceReport(filters); + + res.status(200).json({ + success: true, + message: "Cycle count variance report retrieved successfully", + data: report, + }); + }, +); + +// Get scheduled cycle counts +export const getScheduledCycleCountsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { from, to, warehouseId } = req.query; + + const cycleCountRepo = WMSCycleCountRepository(worldId as string); + const scheduledCounts = await cycleCountRepo.getScheduledCycleCounts( + new Date(from as string), + new Date(to as string), + warehouseId as string, + ); + + res.status(200).json({ + success: true, + message: "Scheduled cycle counts retrieved successfully", + data: scheduledCounts, + }); + }, +); diff --git a/packages/controlmart/src/controller/wms/daily_metrics.controller.ts b/packages/controlmart/src/controller/wms/daily_metrics.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..a4edffbd1e7eee5396c285807b1d025706a6c797 --- /dev/null +++ b/packages/controlmart/src/controller/wms/daily_metrics.controller.ts @@ -0,0 +1,138 @@ +import type { Request, Response } from "express"; +import { WMSDailyMetricsRepository } from "../../repository/wms/daily_metrics.wms.repository"; +import type { TDailyMetricsInput } from "../../models/wms/daily_metrics.wms.model"; +import { asyncHandler } from "../../utils/http.util"; + +// Create daily metrics +export const createDailyMetricsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const metricsData: TDailyMetricsInput = req.body; + + const metricsRepo = WMSDailyMetricsRepository(worldId as string); + const metrics = await metricsRepo.createDailyMetrics(metricsData); + + res.status(201).json({ + success: true, + message: "Daily metrics created successfully", + data: metrics, + }); +}); + +// Get daily metrics by date range +export const getDailyMetricsByDateRangeController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { dateStart, dateEnd, shift, zoneId } = req.query; + + const filters = { + ...(shift && { shift: shift as string }), + ...(zoneId && { zoneId: zoneId as string }), + }; + + const metricsRepo = WMSDailyMetricsRepository(worldId as string); + const 
metrics = await metricsRepo.getDailyMetricsByDateRange( + warehouseId as string, + new Date(dateStart as string), + new Date(dateEnd as string), + filters, + ); + + res.status(200).json({ + success: true, + message: "Daily metrics by date range retrieved successfully", + data: metrics, + }); + }, +); + +// Get daily metrics by ID +export const getDailyMetricsByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, metricId } = req.params; + + const metricsRepo = WMSDailyMetricsRepository(worldId as string); + const metrics = await metricsRepo.getDailyMetricsById(metricId as string); + + res.status(200).json({ + success: true, + message: "Daily metrics retrieved successfully", + data: metrics, + }); +}); + +// Update daily metrics +export const updateDailyMetricsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, metricId } = req.params; + const updateData: Partial<TDailyMetricsInput> = req.body; + + const metricsRepo = WMSDailyMetricsRepository(worldId as string); + const result = await metricsRepo.updateDailyMetrics(metricId as string, updateData); + + res.status(200).json({ + success: true, + message: "Daily metrics updated successfully", + data: result, + }); +}); + +// Get metrics summary +export const getMetricsSummaryController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dateStart, dateEnd, zoneId } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(zoneId && { zoneId: zoneId as string }), + }; + + const metricsRepo = WMSDailyMetricsRepository(worldId as string); + const summary = await metricsRepo.getMetricsSummary(filters); + + res.status(200).json({ + success: true, + message: "Metrics summary retrieved successfully", + data: summary, + }); +}); + +// Get performance trends +export const getPerformanceTrendsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { metricType, dateStart, dateEnd } = req.query; + + const metricsRepo = WMSDailyMetricsRepository(worldId as string); + const trends = await metricsRepo.getPerformanceTrends( + warehouseId as string, + metricType as "inbound" | "picking" | "packing" | "inventory", + new Date(dateStart as string), + new Date(dateEnd as string), + ); + + res.status(200).json({ + success: true, + message: "Performance trends retrieved successfully", + data: trends, + }); +}); + +// Get zone performance comparison +export const getZonePerformanceComparisonController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { dateStart, dateEnd } = req.query; + + const metricsRepo = WMSDailyMetricsRepository(worldId as string); + const comparison = await metricsRepo.getZonePerformanceComparison( + warehouseId as string, + new Date(dateStart as string), + new Date(dateEnd as string), + ); + + res.status(200).json({ + success: true, + message: "Zone performance comparison retrieved successfully", + data: comparison, + }); + }, +); diff --git a/packages/controlmart/src/controller/wms/dc.controller.ts b/packages/controlmart/src/controller/wms/dc.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..fd26201db2ccd9c59a41c01a8630ec328e1268fb --- /dev/null +++
b/packages/controlmart/src/controller/wms/dc.controller.ts @@ -0,0 +1,118 @@ +import type { Request, Response } from "express"; +import { WMSDistributionCenterRepository } from "../../repository/wms"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createDistributionCenterController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) { + return sendResponse({ res, status: 400, error: "Request body is required" }); + } + + const repo = WMSDistributionCenterRepository(worldId); + const distributionCenter = await repo.createDistributionCenter(data); + sendResponse({ res, data: distributionCenter, status: 201 }); + }, +); + +export const getDistributionCenterByIdController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, dcId } = req.params; + + if (!worldId || !dcId) { + return sendResponse({ res, status: 400, error: "worldId and dcId are required" }); + } + + const repo = WMSDistributionCenterRepository(worldId); + const distributionCenter = await repo.getDistributionCenterById(dcId); + + if (!distributionCenter) { + return sendResponse({ res, status: 404, error: `Distribution Center ${dcId} not found` }); + } + + sendResponse({ res, data: distributionCenter }); + }, +); + +export const getDistributionCentersByStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { status, warehouseId, dcType } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const statusArray = typeof status === "string" ? [status] : (status as string[]) || []; + const dcTypeArray = typeof dcType === "string" ? 
[dcType] : (dcType as string[]); + + const repo = WMSDistributionCenterRepository(worldId); + const distributionCenters = await repo.getDistributionCentersByStatus(statusArray, { + warehouseId: warehouseId as string, + dcType: dcTypeArray, + }); + + sendResponse({ res, data: distributionCenters }); + }, +); + +export const updateDistributionCenterStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, dcId } = req.params; + const { status, reason } = req.body; + + if (!worldId || !dcId) { + return sendResponse({ res, status: 400, error: "worldId and dcId are required" }); + } + if (!status) { + return sendResponse({ res, status: 400, error: "status is required" }); + } + + const repo = WMSDistributionCenterRepository(worldId); + const updated = await repo.updateOperationalStatus(dcId, status, reason); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Distribution Center ${dcId} not found` }); + } + + sendResponse({ res, data: updated }); + }, +); + +export const getDistributionCenterCapacityController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, dcId } = req.params; + + if (!worldId || !dcId) { + return sendResponse({ res, status: 400, error: "worldId and dcId are required" }); + } + + const repo = WMSDistributionCenterRepository(worldId); + const capacity = await repo.getDistributionCenterCapacity(dcId); + + sendResponse({ res, data: capacity }); + }, +); + +export const getAllDistributionCentersController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dcType, operationalStatus } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const dcTypeArray = typeof dcType === "string" ? [dcType] : (dcType as string[]); + const statusArray = + typeof operationalStatus === "string" ? 
[operationalStatus] : (operationalStatus as string[]); + + const repo = WMSDistributionCenterRepository(worldId); + const distributionCenters = await repo.getAllDistributionCenters({ + warehouseId: warehouseId as string, + dcType: dcTypeArray, + operationalStatus: statusArray, + }); + + sendResponse({ res, data: distributionCenters }); + }, +); diff --git a/packages/controlmart/src/controller/wms/dock_door.controller.ts b/packages/controlmart/src/controller/wms/dock_door.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..91fded85c140f2a57186a91c49d7a58d850601ec --- /dev/null +++ b/packages/controlmart/src/controller/wms/dock_door.controller.ts @@ -0,0 +1,193 @@ +import type { Request, Response } from "express"; +import { WMSDockDoorRepository } from "../../repository/wms"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createDockDoorController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) { + return sendResponse({ res, status: 400, error: "Request body is required" }); + } + + const repo = WMSDockDoorRepository(worldId); + const dockDoor = await repo.createDockDoor(data); + sendResponse({ res, data: dockDoor, status: 201 }); +}); + +export const getDockDoorByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, dockDoorId } = req.params; + + if (!worldId || !dockDoorId) { + return sendResponse({ res, status: 400, error: "worldId and dockDoorId are required" }); + } + + const repo = WMSDockDoorRepository(worldId); + const dockDoor = await repo.getDockDoorById(dockDoorId); + + if (!dockDoor) { + return sendResponse({ res, status: 404, error: `Dock Door ${dockDoorId} not found` }); + } + + sendResponse({ res, data: dockDoor }); +}); + +export const getDockDoorsByWarehouseController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { doorType, status, zoneId } = req.query; + + if (!worldId || !warehouseId) { + return sendResponse({ res, status: 400, error: "worldId and warehouseId are required" }); + } + + const doorTypeArray = typeof doorType === "string" ? [doorType] : (doorType as string[]); + const statusArray = typeof status === "string" ? 
[status] : (status as string[]); + + const repo = WMSDockDoorRepository(worldId); + const dockDoors = await repo.getDockDoorsByWarehouse(warehouseId, { + doorType: doorTypeArray, + status: statusArray, + zoneId: zoneId as string, + }); + + sendResponse({ res, data: dockDoors }); + }, +); + +export const updateDockDoorStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, dockDoorId } = req.params; + const { status, reason } = req.body; + + if (!worldId || !dockDoorId) { + return sendResponse({ res, status: 400, error: "worldId and dockDoorId are required" }); + } + if (!status) { + return sendResponse({ res, status: 400, error: "status is required" }); + } + + const repo = WMSDockDoorRepository(worldId); + const updated = await repo.updateDockDoorStatus(dockDoorId, status, reason); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Dock Door ${dockDoorId} not found` }); + } + + sendResponse({ res, data: updated }); +}); + +export const assignAppointmentToDoorController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, dockDoorId } = req.params; + const appointment = req.body; + + if (!worldId || !dockDoorId) { + return sendResponse({ res, status: 400, error: "worldId and dockDoorId are required" }); + } + if (!appointment || !appointment.appointmentId) { + return sendResponse({ + res, + status: 400, + error: "appointment data with appointmentId is required", + }); + } + + const repo = WMSDockDoorRepository(worldId); + const result = await repo.assignAppointmentToDoor(dockDoorId, appointment); + + sendResponse({ res, data: result }); + }, +); + +export const clearAppointmentFromDoorController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, dockDoorId } = req.params; + const { completionNotes } = req.body; + + if (!worldId || !dockDoorId) { + return sendResponse({ res, status: 400, error: "worldId and dockDoorId are required" }); + } + + const repo = WMSDockDoorRepository(worldId); + const result = await repo.clearAppointmentFromDoor(dockDoorId, completionNotes); + + sendResponse({ res, data: result }); + }, +); + +export const getAvailableDockDoorsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { doorType, zoneId, maxTrailerLength, levelingDock, restraintSystem, startTime, endTime } = + req.query; + + if (!worldId || !warehouseId || !doorType) { + return sendResponse({ + res, + status: 400, + error: "worldId, warehouseId, and doorType are required", + }); + } + + const filters: any = {}; + if (zoneId) filters.zoneId = zoneId; + if (maxTrailerLength || levelingDock || restraintSystem) { + filters.capabilities = {}; + if (maxTrailerLength) filters.capabilities.maxTrailerLength = Number(maxTrailerLength); + if (levelingDock) filters.capabilities.levelingDock = levelingDock === "true"; + if (restraintSystem) filters.capabilities.restraintSystem = restraintSystem === "true"; + } + if (startTime && endTime) { + filters.timeSlot = { + start: new Date(startTime as string), + end: new Date(endTime as string), + }; + } + + const repo = WMSDockDoorRepository(worldId); + const availableDoors = await repo.getAvailableDockDoors(warehouseId, doorType as string, filters); + + sendResponse({ res, data: availableDoors }); +}); + +export const getDockDoorUtilizationController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { doorType, dateStart, dateEnd } = 
req.query; + + if (!worldId || !warehouseId) { + return sendResponse({ res, status: 400, error: "worldId and warehouseId are required" }); + } + + const doorTypeArray = typeof doorType === "string" ? [doorType] : (doorType as string[]); + + const repo = WMSDockDoorRepository(worldId); + const utilization = await repo.getDockDoorUtilization(warehouseId, { + doorType: doorTypeArray, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + }); + + sendResponse({ res, data: utilization }); + }, +); + +export const getDockDoorScheduleController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, dockDoorId } = req.params; + const { from, to } = req.query; + + if (!worldId || !dockDoorId) { + return sendResponse({ res, status: 400, error: "worldId and dockDoorId are required" }); + } + if (!from || !to) { + return sendResponse({ res, status: 400, error: "from and to date parameters are required" }); + } + + const repo = WMSDockDoorRepository(worldId); + const schedule = await repo.getDockDoorSchedule(dockDoorId, { + from: new Date(from as string), + to: new Date(to as string), + }); + + sendResponse({ res, data: schedule }); +}); diff --git a/packages/controlmart/src/controller/wms/inbound_order.controller.ts b/packages/controlmart/src/controller/wms/inbound_order.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..7d5a0d8da65e3b8e09d66e3ff100438f9ebef728 --- /dev/null +++ b/packages/controlmart/src/controller/wms/inbound_order.controller.ts @@ -0,0 +1,219 @@ +import type { Request, Response } from "express"; +import { WMSInboundOrderRepository } from "../../repository/wms"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createInboundOrderController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) { + return sendResponse({ res, status: 400, error: "Request body is required" }); + } + + const repo = WMSInboundOrderRepository(worldId); + const order = await repo.createInboundOrder(data); + sendResponse({ res, data: order, status: 201 }); +}); + +export const getInboundOrdersByStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { status, warehouseId, vendorId, dateStart, dateEnd, priority } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const statusArray = typeof status === "string" ? [status] : (status as string[]) || []; + const priorityArray = typeof priority === "string" ? [priority] : (priority as string[]); + + const repo = WMSInboundOrderRepository(worldId); + const orders = await repo.getInboundOrdersByStatus(statusArray, { + warehouseId: warehouseId as string, + vendorId: vendorId as string, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? 
new Date(dateEnd as string) : undefined, + priority: priorityArray, + }); + + sendResponse({ res, data: orders }); + }, +); + +export const getInboundOrderByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, inboundOrderId } = req.params; + + if (!worldId || !inboundOrderId) { + return sendResponse({ res, status: 400, error: "worldId and inboundOrderId are required" }); + } + + const repo = WMSInboundOrderRepository(worldId); + const order = await repo.getInboundOrderById(inboundOrderId); + + if (!order) { + return sendResponse({ res, status: 404, error: `Inbound Order ${inboundOrderId} not found` }); + } + + sendResponse({ res, data: order }); +}); + +export const getInboundOrderByPoNumberController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, poNumber } = req.params; + + if (!worldId || !poNumber) { + return sendResponse({ res, status: 400, error: "worldId and poNumber are required" }); + } + + const repo = WMSInboundOrderRepository(worldId); + const order = await repo.getInboundOrderByPoNumber(poNumber); + + if (!order) { + return sendResponse({ + res, + status: 404, + error: `Inbound Order with PO ${poNumber} not found`, + }); + } + + sendResponse({ res, data: order }); + }, +); + +export const updateOrderStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, inboundOrderId } = req.params; + const { status, statusDate } = req.body; + + if (!worldId || !inboundOrderId) { + return sendResponse({ res, status: 400, error: "worldId and inboundOrderId are required" }); + } + if (!status) { + return sendResponse({ res, status: 400, error: "status is required" }); + } + + const repo = WMSInboundOrderRepository(worldId); + const updated = await repo.updateOrderStatus( + inboundOrderId, + status, + statusDate ? new Date(statusDate) : undefined, + ); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Inbound Order ${inboundOrderId} not found` }); + } + + sendResponse({ res, data: updated }); +}); + +export const updateReceivingProgressController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, inboundOrderId } = req.params; + const { lineNumber, receivedQuantity, lotNumber, expirationDate } = req.body; + + if (!worldId || !inboundOrderId) { + return sendResponse({ res, status: 400, error: "worldId and inboundOrderId are required" }); + } + if (lineNumber === undefined || receivedQuantity === undefined) { + return sendResponse({ + res, + status: 400, + error: "lineNumber and receivedQuantity are required", + }); + } + + const repo = WMSInboundOrderRepository(worldId); + const result = await repo.updateReceivingProgress( + inboundOrderId, + lineNumber, + receivedQuantity, + lotNumber, + expirationDate ? new Date(expirationDate) : undefined, + ); + + sendResponse({ res, data: result }); + }, +); + +export const getOrdersExpectedTodayController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { targetDate } = req.query; + + if (!worldId || !warehouseId) { + return sendResponse({ res, status: 400, error: "worldId and warehouseId are required" }); + } + + const repo = WMSInboundOrderRepository(worldId); + const orders = await repo.getOrdersExpectedToday( + warehouseId, + targetDate ? 
new Date(targetDate as string) : undefined, + ); + + sendResponse({ res, data: orders }); + }, +); + +export const getReceivingMetricsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dateStart, dateEnd, vendorId } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = WMSInboundOrderRepository(worldId); + const metrics = await repo.getReceivingMetrics({ + warehouseId: warehouseId as string, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + vendorId: vendorId as string, + }); + + sendResponse({ res, data: metrics }); +}); + +export const getOrdersByVendorController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, vendorId } = req.params; + const { warehouseId, status, dateStart, dateEnd } = req.query; + + if (!worldId || !vendorId) { + return sendResponse({ res, status: 400, error: "worldId and vendorId are required" }); + } + + const statusArray = typeof status === "string" ? [status] : (status as string[]); + + const repo = WMSInboundOrderRepository(worldId); + const orders = await repo.getOrdersByVendor(vendorId, { + warehouseId: warehouseId as string, + status: statusArray, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + }); + + sendResponse({ res, data: orders }); +}); + +/** + * Patch Inbound Order Controller + * + * Generic PATCH endpoint for inbound orders. + * Allows updating any allowed field in a single request. + */ +export const patchInboundOrderController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, inboundOrderId } = req.params; + const updates = req.body; + + if (!worldId || !inboundOrderId) { + return sendResponse({ res, status: 400, error: "worldId and inboundOrderId are required" }); + } + + if (!updates || Object.keys(updates).length === 0) { + return sendResponse({ res, status: 400, error: "No updates provided" }); + } + + const repo = WMSInboundOrderRepository(worldId); + const result = await repo.patch(inboundOrderId, updates); + + if (!result) { + return sendResponse({ res, status: 404, error: `Inbound Order ${inboundOrderId} not found` }); + } + + sendResponse({ res, data: result, message: "Inbound order updated successfully" }); +}); diff --git a/packages/controlmart/src/controller/wms/inbound_receiving_transaction.controller.ts b/packages/controlmart/src/controller/wms/inbound_receiving_transaction.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..762b0849f57550e8ef6540b14dedb09e64e192ae --- /dev/null +++ b/packages/controlmart/src/controller/wms/inbound_receiving_transaction.controller.ts @@ -0,0 +1,146 @@ +import type { Request, Response } from "express"; +import { WMSInboundReceivingTransactionRepository } from "../../repository/wms/inbound_receiving_transaction.wms.repository"; +import type { TReceivingTransactionInput } from "../../models/wms/inbound_receiving_transaction.wms.model"; +import { asyncHandler } from "../../utils/http.util"; + +// Create receiving transaction +export const createReceivingTransactionController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const transactionData: TReceivingTransactionInput = req.body; + + const receivingRepo = WMSInboundReceivingTransactionRepository(worldId as string); + const 
transaction = await receivingRepo.createReceivingTransaction(transactionData); + + res.status(201).json({ + success: true, + message: "Receiving transaction created successfully", + data: transaction, + }); + }, +); + +// Get transaction by ID +export const getTransactionByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + + const receivingRepo = WMSInboundReceivingTransactionRepository(worldId as string); + const transaction = await receivingRepo.getTransactionById(transactionId as string); + + res.status(200).json({ + success: true, + message: "Receiving transaction retrieved successfully", + data: transaction, + }); +}); + +// Get all transactions +export const getAllTransactionsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, inboundOrderId, status, userId, dateStart, dateEnd, cursor, limit } = + req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(inboundOrderId && { inboundOrderId: inboundOrderId as string }), + ...(status && { + status: Array.isArray(status) ? (status as string[]) : [status as string], + }), + ...(userId && { userId: userId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(cursor && { cursor: cursor as string }), + ...(limit && { limit: parseInt(limit as string) }), + }; + + const receivingRepo = WMSInboundReceivingTransactionRepository(worldId as string); + const result = await receivingRepo.getAllTransactions(filters); + + res.status(200).json({ + success: true, + message: "All receiving transactions retrieved successfully", + data: result, + }); +}); + +// Update transaction status +export const updateTransactionStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + const { status, notes } = req.body; + + const receivingRepo = WMSInboundReceivingTransactionRepository(worldId as string); + const result = await receivingRepo.updateTransactionStatus( + transactionId as string, + status, + notes, + ); + + res.status(200).json({ + success: true, + message: "Transaction status updated successfully", + data: result, + }); + }, +); + +// Add item to transaction +export const addItemToTransactionController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, transactionId } = req.params; + const { + sku, + productName, + expectedQuantity, + receivedQuantity, + unitOfMeasure, + lotNumber, + serialNumbers, + condition, + location, + } = req.body; + + const item = { + sku, + productName, + expectedQuantity, + receivedQuantity, + unitOfMeasure, + ...(lotNumber && { lotNumber }), + ...(serialNumbers && { serialNumbers }), + ...(condition && { condition }), + ...(location && { location }), + }; + + const receivingRepo = WMSInboundReceivingTransactionRepository(worldId as string); + const result = await receivingRepo.addItemToTransaction(transactionId as string, item); + + res.status(200).json({ + success: true, + message: "Item added to transaction successfully", + data: result, + }); +}); + +// Get receiving metrics +export const getInboundReceivingMetricsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, userId, dateStart, dateEnd } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string 
}), + ...(userId && { userId: userId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + }; + + const receivingRepo = WMSInboundReceivingTransactionRepository(worldId as string); + const metrics = await receivingRepo.getReceivingMetrics(filters); + + res.status(200).json({ + success: true, + message: "Receiving metrics retrieved successfully", + data: metrics, + }); + }, +); diff --git a/packages/controlmart/src/controller/wms/index.ts b/packages/controlmart/src/controller/wms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..f2dbabe1a7e96a686256afdea201121bfce446a2 --- /dev/null +++ b/packages/controlmart/src/controller/wms/index.ts @@ -0,0 +1,17 @@ +export * from "./bin.controller"; +export * from "./cycle_count.controller"; +export * from "./daily_metrics.controller"; +export * from "./dc.controller"; +export * from "./dock_door.controller"; +export * from "./inbound_order.controller"; +export * from "./inbound_receiving_transaction.controller"; +export * from "./inventory_transaction.controller"; +export * from "./outbound_order.controller"; +export * from "./outbound_shipment.controller"; +export * from "./replenishment.controller"; +export * from "./task.controller"; +export * from "./warehouse.controller"; +export * from "./zone.controller"; + +// Operations Dashboard (aggregated views for UI) +export * from "./operations_dashboard.controller"; diff --git a/packages/controlmart/src/controller/wms/inventory_transaction.controller.ts b/packages/controlmart/src/controller/wms/inventory_transaction.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..e61ae554627b2a3e907a61ae5b07ec330a6a5923 --- /dev/null +++ b/packages/controlmart/src/controller/wms/inventory_transaction.controller.ts @@ -0,0 +1,182 @@ +import type { Request, Response } from "express"; +import { WMSInventoryTransactionRepository } from "../../repository/wms/inventory_transaction.wms.repository"; +import type { TInventoryTransactionInput } from "../../models/wms/inventory_transaction.wms.model"; +import { asyncHandler } from "../../utils/http.util"; + +// Create inventory transaction +export const createInventoryTransactionController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const transactionData: TInventoryTransactionInput = req.body; + + const inventoryRepo = WMSInventoryTransactionRepository(worldId as string); + const transaction = await inventoryRepo.createInventoryTransaction(transactionData); + + res.status(201).json({ + success: true, + message: "Inventory transaction created successfully", + data: transaction, + }); + }, +); + +// Get transactions by product +export const getTransactionsByProductController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, productId } = req.params; + const { warehouseId, transactionType, dateStart, dateEnd, binId, cursor, limit } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(transactionType && { + transactionType: Array.isArray(transactionType) + ? 
(transactionType as string[]) + : [transactionType as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(binId && { binId: binId as string }), + ...(cursor && { cursor: cursor as string }), + ...(limit && { limit: parseInt(limit as string) }), + }; + + const inventoryRepo = WMSInventoryTransactionRepository(worldId as string); + const result = await inventoryRepo.getTransactionsByProduct(productId as string, filters); + + res.status(200).json({ + success: true, + message: "Transactions by product retrieved successfully", + data: result, + }); + }, +); + +// Get transactions by bin +export const getTransactionsByBinController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, binId } = req.params; + const { warehouseId, transactionType, dateStart, dateEnd, productId } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(transactionType && { + transactionType: Array.isArray(transactionType) + ? (transactionType as string[]) + : [transactionType as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(productId && { productId: productId as string }), + }; + + const inventoryRepo = WMSInventoryTransactionRepository(worldId as string); + const transactions = await inventoryRepo.getTransactionsByBin(binId as string, filters); + + res.status(200).json({ + success: true, + message: "Transactions by bin retrieved successfully", + data: transactions, + }); +}); + +// Get transactions by reference +export const getTransactionsByReferenceController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, referenceType, referenceId } = req.params; + + const inventoryRepo = WMSInventoryTransactionRepository(worldId as string); + const transactions = await inventoryRepo.getTransactionsByReference( + referenceType as string, + referenceId as string, + ); + + res.status(200).json({ + success: true, + message: "Transactions by reference retrieved successfully", + data: transactions, + }); + }, +); + +// Get inventory movement report +export const getInventoryMovementReportController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, productIds, transactionType, dateStart, dateEnd } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(productIds && { + productIds: Array.isArray(productIds) ? (productIds as string[]) : [productIds as string], + }), + ...(transactionType && { + transactionType: Array.isArray(transactionType) + ? 
(transactionType as string[]) + : [transactionType as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + }; + + const inventoryRepo = WMSInventoryTransactionRepository(worldId as string); + const report = await inventoryRepo.getInventoryMovementReport(filters); + + res.status(200).json({ + success: true, + message: "Inventory movement report retrieved successfully", + data: report, + }); + }, +); + +// Get transaction history +export const getTransactionHistoryController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, productId, binId, transactionType, dateStart, dateEnd, limit } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(productId && { productId: productId as string }), + ...(binId && { binId: binId as string }), + ...(transactionType && { + transactionType: Array.isArray(transactionType) + ? (transactionType as string[]) + : [transactionType as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(limit && { limit: parseInt(limit as string) }), + }; + + const inventoryRepo = WMSInventoryTransactionRepository(worldId as string); + const history = await inventoryRepo.getTransactionHistory(filters); + + res.status(200).json({ + success: true, + message: "Transaction history retrieved successfully", + data: history, + }); +}); + +// Get inventory adjustments +export const getInventoryAdjustmentsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dateStart, dateEnd, userId } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(userId && { userId: userId as string }), + }; + + const inventoryRepo = WMSInventoryTransactionRepository(worldId as string); + const adjustments = await inventoryRepo.getInventoryAdjustments(filters); + + res.status(200).json({ + success: true, + message: "Inventory adjustments retrieved successfully", + data: adjustments, + }); + }, +); diff --git a/packages/controlmart/src/controller/wms/operations_dashboard.controller.ts b/packages/controlmart/src/controller/wms/operations_dashboard.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..ca1f82d334c9a4d5b71f6ccbbdf915a9a499a5f8 --- /dev/null +++ b/packages/controlmart/src/controller/wms/operations_dashboard.controller.ts @@ -0,0 +1,881 @@ +import type { Request, Response } from "express"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; +import { Inventory, InboundOrder, OutboundOrder, Task } from "../../models/wms"; +import { buildPatchUpdate } from "../../utils/patch.util"; +import { PurchaseOrder } from "../../models/erp/orders.erp.model"; +import { EdiTransaction } from "../../models/edi/transactions.model"; +import { FinanceTransaction } from "../../models/finance/finance.model"; + +/** + * Operations Dashboard Controller + * + * Provides aggregated metrics for the WMS Command Center dashboard. + * This is a single endpoint that returns all KPIs needed for the UI. 
+ */ + +export type TOperationsDashboardResponse = { + inventory: { + totalItems: number; + byStatus: Record<string, number>; + lowStockAlerts: number; + expiringAlerts: number; + }; + receiving: { + total: number; + pending: number; + expected: number; + inTransit: number; + receiving: number; + received: number; + dueToday: number; + }; + fulfillment: { + total: number; + active: number; + created: number; + released: number; + allocated: number; + picking: number; + picked: number; + packing: number; + packed: number; + shipped: number; + rushOrders: number; + }; + tasks: { + total: number; + pending: number; + inProgress: number; + completedToday: number; + byType: Record<string, number>; + }; +}; + +export const getOperationsDashboardController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + // Run all aggregations in parallel for performance + const [inventoryStats, receivingStats, fulfillmentStats, taskStats] = await Promise.all([ + // Inventory aggregation + aggregateInventoryStats(worldId), + // Receiving (Inbound Orders) aggregation + aggregateReceivingStats(worldId), + // Fulfillment (Outbound Orders) aggregation + aggregateFulfillmentStats(worldId), + // Tasks aggregation + aggregateTaskStats(worldId), + ]); + + const dashboard: TOperationsDashboardResponse = { + inventory: inventoryStats, + receiving: receivingStats, + fulfillment: fulfillmentStats, + tasks: taskStats, + }; + + sendResponse({ res, data: dashboard }); + }, +); + +async function aggregateInventoryStats(worldId: string) { + const sevenDaysFromNow = new Date(); + sevenDaysFromNow.setDate(sevenDaysFromNow.getDate() + 7); + + const result = await Inventory.aggregate([ + { $match: { "worldRef.worldId": worldId } }, + { + $group: { + _id: null, + totalItems: { $sum: "$quantityOnHand" }, + statusCounts: { $push: "$inventoryStatus" }, + // Count items expiring within 7 days + expiringItems: { + $sum: { + $cond: [ + { + $and: [ + { $ne: ["$expirationDate", null] }, + { $lte: ["$expirationDate", sevenDaysFromNow] }, + { $gt: ["$expirationDate", new Date()] }, + ], + }, + 1, + 0, + ], + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalItems: 0, + byStatus: {}, + lowStockAlerts: 0, + expiringAlerts: 0, + }; + } + + const data = result[0]; + + // Process status counts + const byStatus: Record<string, number> = {}; + data.statusCounts.forEach((status: string) => { + byStatus[status] = (byStatus[status] || 0) + 1; + }); + + return { + totalItems: data.totalItems || 0, + byStatus, + lowStockAlerts: 0, // TODO: Implement when safety stock levels are defined + expiringAlerts: data.expiringItems || 0, + }; +} + +async function aggregateReceivingStats(worldId: string) { + const today = new Date(); + const startOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate()); + const endOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate() + 1); + + const result = await InboundOrder.aggregate([ + { $match: { "worldRef.worldId": worldId } }, + { + $group: { + _id: null, + total: { $sum: 1 }, + expected: { + $sum: { $cond: [{ $eq: ["$orderStatus", "EXPECTED"] }, 1, 0] }, + }, + inTransit: { + $sum: { $cond: [{ $eq: ["$orderStatus", "IN_TRANSIT"] }, 1, 0] }, + }, + receiving: { + $sum: { $cond: [{ $eq: ["$orderStatus", "RECEIVING"] }, 1, 0] }, + }, + received: { + $sum: { $cond: [{ $eq: ["$orderStatus", "RECEIVED"] }, 1, 0] }, + }, + // Count orders expected today + dueToday: { + $sum: {
+ $cond: [ + { + $and: [ + { $gte: ["$dates.expectedArrival", startOfDay] }, + { $lt: ["$dates.expectedArrival", endOfDay] }, + { $in: ["$orderStatus", ["EXPECTED", "IN_TRANSIT", "RECEIVING"]] }, + ], + }, + 1, + 0, + ], + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 0, + pending: 0, + expected: 0, + inTransit: 0, + receiving: 0, + received: 0, + dueToday: 0, + }; + } + + const data = result[0]; + const expected = data.expected || 0; + const inTransit = data.inTransit || 0; + const receivingCount = data.receiving || 0; + + return { + total: data.total || 0, + pending: expected + inTransit + receivingCount, + expected, + inTransit, + receiving: receivingCount, + received: data.received || 0, + dueToday: data.dueToday || 0, + }; +} + +async function aggregateFulfillmentStats(worldId: string) { + const result = await OutboundOrder.aggregate([ + { $match: { "worldRef.worldId": worldId } }, + { + $group: { + _id: null, + total: { $sum: 1 }, + created: { + $sum: { $cond: [{ $eq: ["$orderStatus", "CREATED"] }, 1, 0] }, + }, + released: { + $sum: { $cond: [{ $eq: ["$orderStatus", "RELEASED"] }, 1, 0] }, + }, + allocated: { + $sum: { $cond: [{ $eq: ["$orderStatus", "ALLOCATED"] }, 1, 0] }, + }, + picking: { + $sum: { $cond: [{ $eq: ["$orderStatus", "PICKING"] }, 1, 0] }, + }, + picked: { + $sum: { $cond: [{ $eq: ["$orderStatus", "PICKED"] }, 1, 0] }, + }, + packing: { + $sum: { $cond: [{ $eq: ["$orderStatus", "PACKING"] }, 1, 0] }, + }, + packed: { + $sum: { $cond: [{ $eq: ["$orderStatus", "PACKED"] }, 1, 0] }, + }, + shipped: { + $sum: { $cond: [{ $eq: ["$orderStatus", "SHIPPED"] }, 1, 0] }, + }, + // Count RUSH priority orders that are not yet shipped + rushOrders: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$orderPriority", "RUSH"] }, + { $not: { $in: ["$orderStatus", ["SHIPPED", "COMPLETED", "CANCELLED"]] } }, + ], + }, + 1, + 0, + ], + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 0, + active: 0, + created: 0, + released: 0, + allocated: 0, + picking: 0, + picked: 0, + packing: 0, + packed: 0, + shipped: 0, + rushOrders: 0, + }; + } + + const data = result[0]; + const released = data.released || 0; + const allocated = data.allocated || 0; + const picking = data.picking || 0; + const picked = data.picked || 0; + const packing = data.packing || 0; + const packed = data.packed || 0; + + return { + total: data.total || 0, + active: released + allocated + picking + picked + packing + packed, + created: data.created || 0, + released, + allocated, + picking, + picked, + packing, + packed, + shipped: data.shipped || 0, + rushOrders: data.rushOrders || 0, + }; +} + +async function aggregateTaskStats(worldId: string) { + const today = new Date(); + const startOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate()); + + const result = await Task.aggregate([ + { $match: { "worldRef.worldId": worldId } }, + { + $group: { + _id: null, + total: { $sum: 1 }, + // Pending = CREATED or RELEASED + pending: { + $sum: { + $cond: [{ $in: ["$taskStatus", ["CREATED", "RELEASED", "ASSIGNED"]] }, 1, 0], + }, + }, + inProgress: { + $sum: { $cond: [{ $eq: ["$taskStatus", "IN_PROGRESS"] }, 1, 0] }, + }, + // Completed today + completedToday: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$taskStatus", "COMPLETED"] }, + { $gte: ["$timing.completedAt", startOfDay] }, + ], + }, + 1, + 0, + ], + }, + }, + // Collect all task types for counting + taskTypes: { $push: "$taskType" }, + }, + }, + ]); + + if (result.length === 0) { + return { + total: 
0, + pending: 0, + inProgress: 0, + completedToday: 0, + byType: {}, + }; + } + + const data = result[0]; + + // Process task type counts + const byType: Record<string, number> = {}; + data.taskTypes.forEach((type: string) => { + if (type) { + byType[type] = (byType[type] || 0) + 1; + } + }); + + return { + total: data.total || 0, + pending: data.pending || 0, + inProgress: data.inProgress || 0, + completedToday: data.completedToday || 0, + byType, + }; +} + +/** + * Inventory List Controller + * + * Returns a paginated list of inventory items with filtering options. + * Used by the WMS Inventory List View (Level 3). + */ +export type TInventoryListItem = { + inventoryId: string; + sku: string; + productName: string; + warehouseId: string; + binId: string; + quantityOnHand: number; + quantityAllocated: number; + quantityAvailable: number; + inventoryStatus: string; + lotNumber?: string; + expirationDate?: Date; + lastMovementAt?: Date; +}; + +export type TInventoryListResponse = { + items: TInventoryListItem[]; + pagination: { + total: number; + limit: number; + offset: number; + hasMore: boolean; + }; +}; + +export const getInventoryListController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { status, warehouseId, expiringSoon, lowStock, search, limit = "50", offset = "0" } = req.query; + + if (!worldId) { + return sendResponse({ res, status: 400, error: "worldId is required" }); + } + + const limitNum = Math.min(parseInt(limit as string, 10) || 50, 100); + const offsetNum = parseInt(offset as string, 10) || 0; + + // Build query + const query: Record<string, any> = { + "worldRef.worldId": worldId, + }; + + // Filter by status + if (status) { + const statusArray = typeof status === "string" ? [status] : (status as string[]); + query.inventoryStatus = { $in: statusArray }; + } + + // Filter by warehouse + if (warehouseId) { + query.warehouseId = warehouseId; + } + + // Filter by expiring soon (within 7 days) + if (expiringSoon === "true") { + const sevenDaysFromNow = new Date(); + sevenDaysFromNow.setDate(sevenDaysFromNow.getDate() + 7); + query.expirationDate = { + $ne: null, + $lte: sevenDaysFromNow, + $gt: new Date(), + }; + } + + // Search by SKU or product name + if (search) { + query.$or = [ + { sku: { $regex: search, $options: "i" } }, + { productName: { $regex: search, $options: "i" } }, + ]; + } + + // Get total count and items in parallel + const [totalCount, items] = await Promise.all([ + Inventory.countDocuments(query), + Inventory.find(query) + .sort({ lastMovementAt: -1, sku: 1 }) + .skip(offsetNum) + .limit(limitNum) + .lean(), + ]); + + const response: TInventoryListResponse = { + items: items.map((item: any) => ({ + inventoryId: item.inventoryId, + sku: item.sku, + productName: item.productName || "", + warehouseId: item.warehouseId, + binId: item.binId, + quantityOnHand: item.quantityOnHand || 0, + quantityAllocated: item.quantityAllocated || 0, + quantityAvailable: item.quantityAvailable ??
((item.quantityOnHand - item.quantityAllocated) || 0), + inventoryStatus: item.inventoryStatus || "AVAILABLE", + lotNumber: item.lotNumber, + expirationDate: item.expirationDate, + lastMovementAt: item.lastMovementAt, + })), + pagination: { + total: totalCount, + limit: limitNum, + offset: offsetNum, + hasMore: offsetNum + items.length < totalCount, + }, + }; + + sendResponse({ res, data: response }); +}); + +/** + * Inbound Order Relations Controller + * + * Returns cross-service related data for an inbound order: + * - ERP Order (by poNumber) + * - EDI Documents (856 ASN, etc.) + * - Finance Transactions (payment_out) + */ +export type TInboundOrderRelationsResponse = { + erpOrder?: { + orderId: string; + status: string; + totalAmount?: number; + customerId?: string; + partnerId?: string; + poType?: string; + orderDate?: Date; + }; + ediDocuments: Array<{ + transactionId: string; + docType: string; + status: string; + direction: string; + timestamp?: Date; + businessDocumentNumber?: string; + }>; + financeTransaction?: { + transactionId: string; + type: string; + amount: number; + status?: string; + processedAt?: Date; + }; +}; + +export const getInboundOrderRelationsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + + if (!worldId || !orderId) { + return sendResponse({ + res, + status: 400, + error: "worldId and orderId are required", + }); + } + + // First get the inbound order to find the PO number and vendor + const inboundOrder = await InboundOrder.findOne({ + "worldRef.worldId": worldId, + inboundOrderId: orderId, + }).lean(); + + if (!inboundOrder) { + return sendResponse({ + res, + status: 404, + error: `Inbound order ${orderId} not found`, + }); + } + + const poNumber = (inboundOrder as any).poNumber; + const vendorId = (inboundOrder as any).vendor?.vendorId; + + // Fetch all related data in parallel + const [erpOrder, ediDocs, financeTransactions] = await Promise.all([ + // ERP Order lookup by orderId (which should match poNumber) + poNumber + ? PurchaseOrder.findOne({ + "worldRef.worldId": worldId, + orderId: poNumber, + }).lean() + : null, + + // EDI Documents - look for 856 (ASN) or documents with matching businessDocumentNumber + poNumber + ? EdiTransaction.find({ + "worldRef.worldId": worldId, + $or: [ + { businessDocumentNumber: poNumber }, + { "payload.orderId": poNumber }, + { partnerId: vendorId, docType: "856" }, + ], + }) + .sort({ timestamp: -1 }) + .limit(10) + .lean() + : [], + + // Finance transactions - look for payment_out with sourceId matching PO + poNumber + ? FinanceTransaction.find({ + "worldRef.worldId": worldId, + sourceId: poNumber, + type: "payment_out", + }) + .sort({ processedAt: -1 }) + .limit(5) + .lean() + : [], + ]); + + const response: TInboundOrderRelationsResponse = { + erpOrder: erpOrder + ? { + orderId: (erpOrder as any).orderId, + status: (erpOrder as any).status, + totalAmount: (erpOrder as any).totalAmount, + customerId: (erpOrder as any).customerId, + partnerId: (erpOrder as any).partnerId, + poType: (erpOrder as any).poType, + orderDate: (erpOrder as any).orderDate, + } + : undefined, + ediDocuments: (ediDocs as any[]).map((doc) => ({ + transactionId: doc.transactionId, + docType: doc.docType, + status: doc.status, + direction: doc.direction, + timestamp: doc.timestamp, + businessDocumentNumber: doc.businessDocumentNumber, + })), + financeTransaction: financeTransactions.length > 0 + ? 
{ + transactionId: (financeTransactions[0] as any).transactionId, + type: (financeTransactions[0] as any).type, + amount: (financeTransactions[0] as any).amount, + status: (financeTransactions[0] as any).status, + processedAt: (financeTransactions[0] as any).processedAt, + } + : undefined, + }; + + sendResponse({ res, data: response }); + }, +); + +/** + * Outbound Order Relations Controller + * + * Returns cross-service related data for an outbound order: + * - ERP Order (sales order) + * - EDI Documents (850 PO, 810 Invoice, etc.) + * - Finance Transactions (payment_in) + */ +export type TOutboundOrderRelationsResponse = { + erpOrder?: { + orderId: string; + status: string; + totalAmount?: number; + customerId?: string; + partnerId?: string; + poType?: string; + orderDate?: Date; + }; + ediDocuments: Array<{ + transactionId: string; + docType: string; + status: string; + direction: string; + timestamp?: Date; + businessDocumentNumber?: string; + }>; + financeTransaction?: { + transactionId: string; + type: string; + amount: number; + status?: string; + processedAt?: Date; + }; +}; + +export const getOutboundOrderRelationsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + + if (!worldId || !orderId) { + return sendResponse({ + res, + status: 400, + error: "worldId and orderId are required", + }); + } + + // First get the outbound order to find the order number and customer + const outboundOrder = await OutboundOrder.findOne({ + "worldRef.worldId": worldId, + orderId: orderId, + }).lean(); + + if (!outboundOrder) { + return sendResponse({ + res, + status: 404, + error: `Outbound order ${orderId} not found`, + }); + } + + const orderNumber = (outboundOrder as any).orderNumber; + const customerId = (outboundOrder as any).customer?.customerId; + const erpOrderRef = (outboundOrder as any).sourceOrderId || orderNumber; + + // Fetch all related data in parallel + const [erpOrder, ediDocs, financeTransactions] = await Promise.all([ + // ERP Order lookup + erpOrderRef + ? PurchaseOrder.findOne({ + "worldRef.worldId": worldId, + orderId: erpOrderRef, + }).lean() + : null, + + // EDI Documents - look for 850 (PO) or 810 (Invoice) + orderNumber + ? EdiTransaction.find({ + "worldRef.worldId": worldId, + $or: [ + { businessDocumentNumber: orderNumber }, + { "payload.orderId": orderNumber }, + { customerId, docType: { $in: ["850", "810"] } }, + ], + }) + .sort({ timestamp: -1 }) + .limit(10) + .lean() + : [], + + // Finance transactions - look for payment_in with sourceId matching order + orderNumber + ? FinanceTransaction.find({ + "worldRef.worldId": worldId, + sourceId: { $in: [orderNumber, erpOrderRef] }, + type: "payment_in", + }) + .sort({ processedAt: -1 }) + .limit(5) + .lean() + : [], + ]); + + const response: TOutboundOrderRelationsResponse = { + erpOrder: erpOrder + ? { + orderId: (erpOrder as any).orderId, + status: (erpOrder as any).status, + totalAmount: (erpOrder as any).totalAmount, + customerId: (erpOrder as any).customerId, + partnerId: (erpOrder as any).partnerId, + poType: (erpOrder as any).poType, + orderDate: (erpOrder as any).orderDate, + } + : undefined, + ediDocuments: (ediDocs as any[]).map((doc) => ({ + transactionId: doc.transactionId, + docType: doc.docType, + status: doc.status, + direction: doc.direction, + timestamp: doc.timestamp, + businessDocumentNumber: doc.businessDocumentNumber, + })), + financeTransaction: financeTransactions.length > 0 + ? 
{ + transactionId: (financeTransactions[0] as any).transactionId, + type: (financeTransactions[0] as any).type, + amount: (financeTransactions[0] as any).amount, + status: (financeTransactions[0] as any).status, + processedAt: (financeTransactions[0] as any).processedAt, + } + : undefined, + }; + + sendResponse({ res, data: response }); + }, +); + +/** + * Update Inventory Status Controller + * + * Updates the status of an inventory item (AVAILABLE, HOLD, QUARANTINE, etc.) + */ +export const updateInventoryStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, inventoryId } = req.params; + const { inventoryStatus } = req.body; + + if (!worldId || !inventoryId) { + return sendResponse({ + res, + status: 400, + error: "worldId and inventoryId are required", + }); + } + + if (!inventoryStatus) { + return sendResponse({ + res, + status: 400, + error: "inventoryStatus is required in request body", + }); + } + + const validStatuses = ["AVAILABLE", "HOLD", "QUARANTINE", "ALLOCATED", "EXPIRED"]; + if (!validStatuses.includes(inventoryStatus)) { + return sendResponse({ + res, + status: 400, + error: `Invalid status. Must be one of: ${validStatuses.join(", ")}`, + }); + } + + const updated = await Inventory.findOneAndUpdate( + { + "worldRef.worldId": worldId, + inventoryId: inventoryId, + }, + { + $set: { + inventoryStatus: inventoryStatus, + updatedAt: new Date(), + }, + }, + { new: true } + ).lean(); + + if (!updated) { + return sendResponse({ + res, + status: 404, + error: `Inventory item ${inventoryId} not found`, + }); + } + + sendResponse({ res, data: updated }); + }, +); + +/** + * Patch Inventory Controller + * + * Generic PATCH endpoint for inventory items. + * Allows updating any allowed field in a single request. + */ +export const patchInventoryController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, inventoryId } = req.params; + const updates = req.body; + + if (!worldId || !inventoryId) { + return sendResponse({ + res, + status: 400, + error: "worldId and inventoryId are required", + }); + } + + if (!updates || Object.keys(updates).length === 0) { + return sendResponse({ + res, + status: 400, + error: "No updates provided", + }); + } + + // Allowed fields for inventory patching + const allowedFields = ["inventoryStatus", "lotNumber", "expirationDate", "binId"]; + const { $set, fieldsUpdated } = buildPatchUpdate(updates, allowedFields); + + if (fieldsUpdated.length === 0) { + return sendResponse({ + res, + status: 400, + error: `No valid fields to update. 
Allowed fields: ${allowedFields.join(", ")}`, + }); + } + + const updated = await Inventory.findOneAndUpdate( + { + "worldRef.worldId": worldId, + inventoryId: inventoryId, + }, + { $set }, + { new: true } + ).lean(); + + if (!updated) { + return sendResponse({ + res, + status: 404, + error: `Inventory item ${inventoryId} not found`, + }); + } + + sendResponse({ + res, + data: updated, + message: `Inventory updated: ${fieldsUpdated.join(", ")}`, + }); + }, +); diff --git a/packages/controlmart/src/controller/wms/outbound_order.controller.ts b/packages/controlmart/src/controller/wms/outbound_order.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..71781303eceb37b3c863d07a4db35346c138ec14 --- /dev/null +++ b/packages/controlmart/src/controller/wms/outbound_order.controller.ts @@ -0,0 +1,281 @@ +import type { Request, Response } from "express"; +import { WMSOutboundOrderRepository } from "../../repository/wms/outbound_order.wms.repository"; +import type { TOutboundOrderInput } from "../../models/wms/outbound_order.wms.model"; +import { asyncHandler, sendResponse } from "../../utils/http.util"; + +// Create a new outbound order +export const createOutboundOrderController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const orderData: TOutboundOrderInput = req.body; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const order = await outboundOrderRepo.createOutboundOrder(orderData); + + res.status(201).json({ + success: true, + message: "Outbound order created successfully", + data: order, + }); +}); + +// Get outbound orders by status +export const getOutboundOrdersByStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { status, warehouseId, customerId, orderType, dateStart, dateEnd, priority } = req.query; + + const statusArray = Array.isArray(status) + ? (status as string[]) + : typeof status === "string" + ? [status] + : []; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(customerId && { customerId: customerId as string }), + ...(orderType && { + orderType: Array.isArray(orderType) ? (orderType as string[]) : [orderType as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(priority && { + priority: Array.isArray(priority) ? 
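/*
 * Illustrative only, not part of the diff: buildPatchUpdate (imported elsewhere) is
 * assumed to be a small whitelist helper, roughly:
 *
 *   const buildPatchUpdate = (updates: Record<string, unknown>, allowed: string[]) => {
 *     const $set: Record<string, unknown> = {};
 *     const fieldsUpdated: string[] = [];
 *     for (const key of allowed) {
 *       if (updates[key] !== undefined) {
 *         $set[key] = updates[key];
 *         fieldsUpdated.push(key);
 *       }
 *     }
 *     return { $set, fieldsUpdated };
 *   };
 *
 * which is why patchInventoryController can reject bodies that contain no allowed field.
 */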
(priority as string[]) : [priority as string], + }), + }; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const orders = await outboundOrderRepo.getOutboundOrdersByStatus(statusArray, filters); + + res.status(200).json({ + success: true, + message: "Outbound orders retrieved successfully", + data: orders, + }); + }, +); + +// Get outbound order by ID +export const getOutboundOrderByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const order = await outboundOrderRepo.getOutboundOrderById(orderId as string); + + res.status(200).json({ + success: true, + message: "Outbound order retrieved successfully", + data: order, + }); +}); + +// Get outbound order by number +export const getOutboundOrderByNumberController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, orderNumber } = req.params; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const order = await outboundOrderRepo.getOutboundOrderByNumber(orderNumber as string); + + res.status(200).json({ + success: true, + message: "Outbound order retrieved successfully", + data: order, + }); + }, +); + +// Update order status +export const updateOutboundOrderStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + const { status, statusDate } = req.body; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const result = await outboundOrderRepo.updateOrderStatus( + orderId as string, + status, + statusDate ? new Date(statusDate) : undefined, + ); + + res.status(200).json({ + success: true, + message: "Order status updated successfully", + data: result, + }); + }, +); + +// Update order priority +export const updateOutboundOrderPriorityController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + const { priority } = req.body; + + if (!priority) { + return res.status(400).json({ + success: false, + error: "priority is required in request body", + }); + } + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const result = await outboundOrderRepo.updateOrderPriority(orderId as string, priority); + + if (!result) { + return res.status(404).json({ + success: false, + error: `Order ${orderId} not found`, + }); + } + + res.status(200).json({ + success: true, + message: "Order priority updated successfully", + data: result, + }); + }, +); + +// Allocate order line +export const allocateOrderLineController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + const { lineNumber, allocatedQuantity, allocationDetails } = req.body; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const result = await outboundOrderRepo.allocateOrderLine({ + orderId: orderId as string, + lineNumber, + allocatedQuantity, + allocationDetails, + }); + + res.status(200).json({ + success: true, + message: "Order line allocated successfully", + data: result, + }); +}); + +// Update picking progress +export const updatePickingProgressController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + const { lineNumber, pickedQuantity } = req.body; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const result = await 
outboundOrderRepo.updatePickingProgress({ + orderId: orderId as string, + lineNumber, + pickedQuantity, + }); + + res.status(200).json({ + success: true, + message: "Picking progress updated successfully", + data: result, + }); +}); + +// Get orders ready for picking +export const getOrdersReadyForPickingController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { priority, orderType, customerId } = req.query; + + const filters = { + ...(priority && { + priority: Array.isArray(priority) ? (priority as string[]) : [priority as string], + }), + ...(orderType && { + orderType: Array.isArray(orderType) ? (orderType as string[]) : [orderType as string], + }), + ...(customerId && { customerId: customerId as string }), + }; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const orders = await outboundOrderRepo.getOrdersReadyForPicking(warehouseId as string, filters); + + res.status(200).json({ + success: true, + message: "Orders ready for picking retrieved successfully", + data: orders, + }); + }, +); + +// Get order fulfillment metrics +export const getOrderFulfillmentMetricsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dateStart, dateEnd, orderType } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(orderType && { + orderType: Array.isArray(orderType) ? (orderType as string[]) : [orderType as string], + }), + }; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const metrics = await outboundOrderRepo.getOrderFulfillmentMetrics(filters); + + res.status(200).json({ + success: true, + message: "Order fulfillment metrics retrieved successfully", + data: metrics, + }); + }, +); + +// Get orders by customer +export const getOrdersByCustomerController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, customerId } = req.params; + const { warehouseId, status, dateStart, dateEnd } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(status && { + status: Array.isArray(status) ? (status as string[]) : [status as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + }; + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId as string); + const orders = await outboundOrderRepo.getOrdersByCustomer(customerId as string, filters); + + res.status(200).json({ + success: true, + message: "Orders by customer retrieved successfully", + data: orders, + }); +}); + +/** + * Patch Outbound Order Controller + * + * Generic PATCH endpoint for outbound orders. + * Allows updating any allowed field in a single request. 
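 *
 * Illustrative example (field names assumed; the allowed fields are enforced by the
 * repository's patch(), which is not shown in this file):
 *
 *   PATCH body: { "priority": "HIGH" }
 *   -> delegates to WMSOutboundOrderRepository(worldId).patch(orderId, updates)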
+ */ +export const patchOutboundOrderController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, orderId } = req.params; + const updates = req.body; + + if (!worldId || !orderId) { + return sendResponse({ res, status: 400, error: "worldId and orderId are required" }); + } + + if (!updates || Object.keys(updates).length === 0) { + return sendResponse({ res, status: 400, error: "No updates provided" }); + } + + const outboundOrderRepo = WMSOutboundOrderRepository(worldId); + const result = await outboundOrderRepo.patch(orderId, updates); + + if (!result) { + return sendResponse({ res, status: 404, error: `Outbound Order ${orderId} not found` }); + } + + sendResponse({ res, data: result, status: 200 }); +}); diff --git a/packages/controlmart/src/controller/wms/outbound_shipment.controller.ts b/packages/controlmart/src/controller/wms/outbound_shipment.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..749a964d1487408c56d90a76734fac9aec18c89b --- /dev/null +++ b/packages/controlmart/src/controller/wms/outbound_shipment.controller.ts @@ -0,0 +1,194 @@ +import type { Request, Response } from "express"; +import { WMSOutboundShipmentRepository } from "../../repository/wms/outbound_shipment.wms.repository"; +import type { TShipmentInput } from "../../models/wms/outbound_shipment.wms.model"; +import { asyncHandler } from "../../utils/http.util"; + +// Create a new shipment +export const createShipmentController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const shipmentData: TShipmentInput = req.body; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const shipment = await shipmentRepo.createShipment(shipmentData); + + res.status(201).json({ + success: true, + message: "Shipment created successfully", + data: shipment, + }); +}); + +// Get shipments by status +export const getShipmentsByStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { status, warehouseId, carrierId, serviceLevel, dateStart, dateEnd } = req.query; + + const statusArray = Array.isArray(status) + ? (status as string[]) + : typeof status === "string" + ? 
[status] + : []; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(carrierId && { carrierId: carrierId as string }), + ...(serviceLevel && { serviceLevel: serviceLevel as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + }; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const shipments = await shipmentRepo.getShipmentsByStatus(statusArray, filters); + + res.status(200).json({ + success: true, + message: "Shipments retrieved successfully", + data: shipments, + }); +}); + +// Get shipment by ID +export const getShipmentByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const shipment = await shipmentRepo.getShipmentById(shipmentId as string); + + res.status(200).json({ + success: true, + message: "Shipment retrieved successfully", + data: shipment, + }); +}); + +// Update shipment status +export const updateShipmentStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const { status, statusDate, trackingNumber } = req.body; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const result = await shipmentRepo.updateShipmentStatus( + shipmentId as string, + status, + statusDate ? new Date(statusDate) : undefined, + trackingNumber, + ); + + res.status(200).json({ + success: true, + message: "Shipment status updated successfully", + data: result, + }); +}); + +// Get shipments by warehouse +export const getShipmentsByWarehouseController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { status, dateStart, dateEnd, carrier } = req.query; + + const filters = { + ...(status && { + status: Array.isArray(status) ? 
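/*
 * Illustrative aside, not part of the diff: the "string or string[]" query-param
 * normalization repeated across these controllers could be captured by one helper,
 * sketched here only to document the pattern:
 *
 *   const toArray = (v: unknown): string[] | undefined =>
 *     v === undefined ? undefined : Array.isArray(v) ? v.map(String) : [String(v)];
 */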
(status as string[]) : [status as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(carrier && { carrier: carrier as string }), + }; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const shipments = await shipmentRepo.getShipmentsByWarehouse(warehouseId as string, filters); + + res.status(200).json({ + success: true, + message: "Shipments by warehouse retrieved successfully", + data: shipments, + }); + }, +); + +// Add tracking event +export const addTrackingEventController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, shipmentId } = req.params; + const { eventType, eventDate, location, description, carrierEventCode } = req.body; + + const event = { + eventType, + eventDate: new Date(eventDate), + location, + description, + ...(carrierEventCode && { carrierEventCode }), + }; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const result = await shipmentRepo.addTrackingEvent(shipmentId as string, event); + + res.status(200).json({ + success: true, + message: "Tracking event added successfully", + data: result, + }); +}); + +// Get shipment metrics +export const getShipmentMetricsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dateStart, dateEnd, carrier } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(carrier && { + carrier: Array.isArray(carrier) ? (carrier as string[]) : [carrier as string], + }), + }; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const metrics = await shipmentRepo.getShipmentMetrics(filters); + + res.status(200).json({ + success: true, + message: "Shipment metrics retrieved successfully", + data: metrics, + }); +}); + +// Get shipments ready to ship +export const getShipmentsReadyToShipController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { carrier, serviceLevel, priorityOrders } = req.query; + + const filters = { + ...(carrier && { carrier: carrier as string }), + ...(serviceLevel && { serviceLevel: serviceLevel as string }), + ...(priorityOrders && { priorityOrders: priorityOrders === "true" }), + }; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const shipments = await shipmentRepo.getShipmentsReadyToShip(warehouseId as string, filters); + + res.status(200).json({ + success: true, + message: "Shipments ready to ship retrieved successfully", + data: shipments, + }); + }, +); + +// Get shipments by tracking number +export const getShipmentsByTrackingNumberController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, trackingNumber } = req.params; + + const shipmentRepo = WMSOutboundShipmentRepository(worldId as string); + const shipments = await shipmentRepo.getShipmentsByTrackingNumber(trackingNumber as string); + + res.status(200).json({ + success: true, + message: "Shipments by tracking number retrieved successfully", + data: shipments, + }); + }, +); diff --git a/packages/controlmart/src/controller/wms/replenishment.controller.ts b/packages/controlmart/src/controller/wms/replenishment.controller.ts new file mode 100644 index 
0000000000000000000000000000000000000000..f5f6a53f4549e29184144b9637e6b25509c7667f --- /dev/null +++ b/packages/controlmart/src/controller/wms/replenishment.controller.ts @@ -0,0 +1,211 @@ +import type { Request, Response } from "express"; +import { WMSReplenishmentRepository } from "../../repository/wms/replenishment.wms.repository"; +import type { TReplenishmentInput } from "../../models/wms/replenishment.wms.model"; +import { asyncHandler } from "../../utils/http.util"; + +// Create replenishment +export const createReplenishmentController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const replenishmentData: TReplenishmentInput = req.body; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const replenishment = await replenishmentRepo.createReplenishment(replenishmentData); + + res.status(201).json({ + success: true, + message: "Replenishment created successfully", + data: replenishment, + }); +}); + +// Get replenishments by status +export const getReplenishmentsByStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { status, warehouseId, productId, replenishmentType, priority } = req.query; + + const statusArray = Array.isArray(status) + ? (status as string[]) + : typeof status === "string" + ? [status] + : []; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(productId && { productId: productId as string }), + ...(replenishmentType && { + replenishmentType: Array.isArray(replenishmentType) + ? (replenishmentType as string[]) + : [replenishmentType as string], + }), + ...(priority && { priority: parseInt(priority as string) }), + }; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const replenishments = await replenishmentRepo.getReplenishmentsByStatus(statusArray, filters); + + res.status(200).json({ + success: true, + message: "Replenishments by status retrieved successfully", + data: replenishments, + }); + }, +); + +// Get replenishment by ID +export const getReplenishmentByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, replenishmentId } = req.params; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const replenishment = await replenishmentRepo.getReplenishmentById(replenishmentId as string); + + res.status(200).json({ + success: true, + message: "Replenishment retrieved successfully", + data: replenishment, + }); +}); + +// Update replenishment status +export const updateReplenishmentStatusController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, replenishmentId } = req.params; + const { status, taskId } = req.body; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const result = await replenishmentRepo.updateReplenishmentStatus( + replenishmentId as string, + status, + taskId, + ); + + res.status(200).json({ + success: true, + message: "Replenishment status updated successfully", + data: result, + }); + }, +); + +// Get replenishments by product +export const getReplenishmentsByProductController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, productId } = req.params; + const { warehouseId, status, dateStart, dateEnd } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(status && { + status: Array.isArray(status) ? 
(status as string[]) : [status as string], + }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + }; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const replenishments = await replenishmentRepo.getReplenishmentsByProduct( + productId as string, + filters, + ); + + res.status(200).json({ + success: true, + message: "Replenishments by product retrieved successfully", + data: replenishments, + }); + }, +); + +// Get replenishments by bin +export const getReplenishmentsByBinController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, binId } = req.params; + const { type, warehouseId, status } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(status && { + status: Array.isArray(status) ? (status as string[]) : [status as string], + }), + }; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const replenishments = await replenishmentRepo.getReplenishmentsByBin( + binId as string, + type as "source" | "destination", + filters, + ); + + res.status(200).json({ + success: true, + message: "Replenishments by bin retrieved successfully", + data: replenishments, + }); + }, +); + +// Approve replenishment +export const approveReplenishmentController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, replenishmentId } = req.params; + const { approvedBy, approvedQuantity } = req.body; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const result = await replenishmentRepo.approveReplenishment( + replenishmentId as string, + approvedBy, + approvedQuantity, + ); + + res.status(200).json({ + success: true, + message: "Replenishment approved successfully", + data: result, + }); +}); + +// Get replenishment metrics +export const getReplenishmentMetricsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseId, dateStart, dateEnd, replenishmentType } = req.query; + + const filters = { + ...(warehouseId && { warehouseId: warehouseId as string }), + ...(dateStart && { dateStart: new Date(dateStart as string) }), + ...(dateEnd && { dateEnd: new Date(dateEnd as string) }), + ...(replenishmentType && { + replenishmentType: Array.isArray(replenishmentType) + ? 
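/*
 * Illustrative note, not part of the diff: getReplenishmentsByBinController forwards
 * `type` with a bare cast to "source" | "destination", so a request is assumed to look
 * like GET .../bins/:binId/replenishments?type=source (exact route defined elsewhere);
 * any other value would reach the repository unvalidated.
 */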
(replenishmentType as string[]) + : [replenishmentType as string], + }), + }; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const metrics = await replenishmentRepo.getReplenishmentMetrics(filters); + + res.status(200).json({ + success: true, + message: "Replenishment metrics retrieved successfully", + data: metrics, + }); + }, +); + +// Cancel replenishment +export const cancelReplenishmentController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, replenishmentId } = req.params; + const { reason, cancelledBy } = req.body; + + const replenishmentRepo = WMSReplenishmentRepository(worldId as string); + const result = await replenishmentRepo.cancelReplenishment( + replenishmentId as string, + reason, + cancelledBy, + ); + + res.status(200).json({ + success: true, + message: "Replenishment cancelled successfully", + data: result, + }); +}); diff --git a/packages/controlmart/src/controller/wms/task.controller.ts b/packages/controlmart/src/controller/wms/task.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..0f87ad9d0f849d0e711a2f4715cc03ba0f56634b --- /dev/null +++ b/packages/controlmart/src/controller/wms/task.controller.ts @@ -0,0 +1,227 @@ +import type { Request, Response } from "express"; +import { WMSTaskRepository } from "../../repository/wms"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createTaskController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) { + return sendResponse({ res, status: 400, error: "Request body is required" }); + } + + const repo = WMSTaskRepository(worldId); + const task = await repo.createTask(data); + sendResponse({ res, data: task, status: 201 }); +}); + +export const getTaskLogsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { taskIds, taskTypes, userIds, status, dateStart, dateEnd, zoneId } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const taskIdsArray = typeof taskIds === "string" ? [taskIds] : (taskIds as string[]); + const taskTypesArray = typeof taskTypes === "string" ? [taskTypes] : (taskTypes as string[]); + const userIdsArray = typeof userIds === "string" ? [userIds] : (userIds as string[]); + const statusArray = typeof status === "string" ? [status] : (status as string[]); + + const repo = WMSTaskRepository(worldId); + const logs = await repo.getTaskLogs({ + taskIds: taskIdsArray, + taskTypes: taskTypesArray, + userIds: userIdsArray, + status: statusArray, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + zoneId: zoneId as string, + }); + + sendResponse({ res, data: logs }); +}); + +export const getTasksByUserController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, userId } = req.params; + const { status } = req.query; + + if (!worldId || !userId) { + return sendResponse({ res, status: 400, error: "worldId and userId are required" }); + } + + const statusArray = typeof status === "string" ? 
[status] : (status as string[]); + + const repo = WMSTaskRepository(worldId); + const tasks = await repo.getTasksByUser(userId, statusArray); + + sendResponse({ res, data: tasks }); +}); + +export const getTaskTimestampsController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { taskId, taskTypes, userIds, dateStart, dateEnd, includeHistorical } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const taskTypesArray = typeof taskTypes === "string" ? [taskTypes] : (taskTypes as string[]); + const userIdsArray = typeof userIds === "string" ? [userIds] : (userIds as string[]); + + const repo = WMSTaskRepository(worldId); + const timestamps = await repo.getTaskTimestamps({ + taskId: taskId as string, + taskTypes: taskTypesArray, + userIds: userIdsArray, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? new Date(dateEnd as string) : undefined, + includeHistorical: includeHistorical === "true", + }); + + sendResponse({ res, data: timestamps }); +}); + +export const updateTaskStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, taskId } = req.params; + const { status, timestamp, userId } = req.body; + + if (!worldId || !taskId) { + return sendResponse({ res, status: 400, error: "worldId and taskId are required" }); + } + if (!status) { + return sendResponse({ res, status: 400, error: "status is required" }); + } + + const repo = WMSTaskRepository(worldId); + const updated = await repo.updateTaskStatus( + taskId, + status, + timestamp ? new Date(timestamp) : undefined, + userId, + ); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Task ${taskId} not found` }); + } + + sendResponse({ res, data: updated }); +}); + +export const getTaskPerformanceMetricsController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId } = req.params; + const { taskTypes, userIds, dateStart, dateEnd } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const taskTypesArray = typeof taskTypes === "string" ? [taskTypes] : (taskTypes as string[]); + const userIdsArray = typeof userIds === "string" ? [userIds] : (userIds as string[]); + + const repo = WMSTaskRepository(worldId); + const metrics = await repo.getTaskPerformanceMetrics({ + taskTypes: taskTypesArray, + userIds: userIdsArray, + dateStart: dateStart ? new Date(dateStart as string) : undefined, + dateEnd: dateEnd ? 
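/*
 * Illustrative example, not part of the diff (route and verb assumed; only the body
 * fields are taken from the controller above):
 *
 *   // update a task's status, optionally recording who did it and when
 *   body: { "status": "COMPLETED", "timestamp": "2024-01-01T12:00:00Z", "userId": "user-1" }
 *   -> WMSTaskRepository(worldId).updateTaskStatus(taskId, status, new Date(timestamp), userId)
 */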
new Date(dateEnd as string) : undefined, + }); + + sendResponse({ res, data: metrics }); + }, +); + +export const getActiveTasksController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { zoneId } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const repo = WMSTaskRepository(worldId); + const tasks = await repo.getActiveTasks(zoneId as string); + + sendResponse({ res, data: tasks }); +}); + +export const addTaskScanController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, taskId } = req.params; + const scanData = req.body; + + if (!worldId || !taskId) { + return sendResponse({ res, status: 400, error: "worldId and taskId are required" }); + } + if (!scanData || !scanData.scanType) { + return sendResponse({ res, status: 400, error: "scan data with scanType is required" }); + } + + const repo = WMSTaskRepository(worldId); + const result = await repo.addTaskScan(taskId, scanData); + + sendResponse({ res, data: result }); +}); + +export const assignTaskController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, taskId } = req.params; + const { userId, userName } = req.body; + + if (!worldId || !taskId) { + return sendResponse({ res, status: 400, error: "worldId and taskId are required" }); + } + if (!userId || !userName) { + return sendResponse({ res, status: 400, error: "userId and userName are required" }); + } + + const repo = WMSTaskRepository(worldId); + const result = await repo.assignTask(taskId, { userId, userName }); + + if (!result) { + return sendResponse({ res, status: 404, error: `Task ${taskId} not found` }); + } + + sendResponse({ res, data: result }); +}); + +export const updateTaskPriorityController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, taskId } = req.params; + const { priority } = req.body; + + if (!worldId || !taskId) { + return sendResponse({ res, status: 400, error: "worldId and taskId are required" }); + } + if (typeof priority !== "number") { + return sendResponse({ res, status: 400, error: "priority must be a number" }); + } + + const repo = WMSTaskRepository(worldId); + const result = await repo.updateTaskPriority(taskId, priority); + + if (!result) { + return sendResponse({ res, status: 404, error: `Task ${taskId} not found` }); + } + + sendResponse({ res, data: result }); +}); + +/** + * Patch Task Controller + * + * Generic PATCH endpoint for tasks. + * Allows updating any allowed field in a single request. 
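 *
 * Illustrative example (field names assumed; the allowed fields are enforced by the
 * repository's patch(), which is not shown here):
 *
 *   PATCH body: { "priority": 5, "status": "IN_PROGRESS" }
 *   -> delegates to WMSTaskRepository(worldId).patch(taskId, updates)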
+ */ +export const patchTaskController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, taskId } = req.params; + const updates = req.body; + + if (!worldId || !taskId) { + return sendResponse({ res, status: 400, error: "worldId and taskId are required" }); + } + + if (!updates || Object.keys(updates).length === 0) { + return sendResponse({ res, status: 400, error: "No updates provided" }); + } + + const repo = WMSTaskRepository(worldId); + const result = await repo.patch(taskId, updates); + + if (!result) { + return sendResponse({ res, status: 404, error: `Task ${taskId} not found` }); + } + + sendResponse({ res, data: result, message: "Task updated successfully" }); +}); diff --git a/packages/controlmart/src/controller/wms/warehouse.controller.ts b/packages/controlmart/src/controller/wms/warehouse.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..7cca4c239d4a47c34680765af7ce8f1e00cba7cc --- /dev/null +++ b/packages/controlmart/src/controller/wms/warehouse.controller.ts @@ -0,0 +1,208 @@ +import type { Request, Response } from "express"; +import { WMSWarehouseRepository } from "../../repository/wms"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createWarehouseController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) { + return sendResponse({ res, status: 400, error: "Request body is required" }); + } + + const repo = WMSWarehouseRepository(worldId); + const warehouse = await repo.createWarehouse(data); + sendResponse({ res, data: warehouse, status: 201 }); +}); + +export const getAllWarehousesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseType, status, cursor, limit } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const warehouseTypeArray = + typeof warehouseType === "string" ? [warehouseType] : (warehouseType as string[]); + const statusArray = typeof status === "string" ? [status] : (status as string[]); + + const repo = WMSWarehouseRepository(worldId); + const warehouses = await repo.getAllWarehouses({ + warehouseType: warehouseTypeArray?.length ? warehouseTypeArray : undefined, + status: statusArray?.length ? statusArray : undefined, + cursor: cursor as string, + limit: limit ? 
parseInt(limit as string) : undefined, + }); + + sendResponse({ res, data: warehouses }); +}); + +export const getWarehouseByCodeController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseCode } = req.params; + + if (!worldId || !warehouseCode) { + return sendResponse({ + res, + status: 400, + error: "worldId and warehouseCode are required", + }); + } + + const repo = WMSWarehouseRepository(worldId); + const warehouse = await repo.getWarehouseByCode(warehouseCode); + + if (!warehouse) { + return sendResponse({ res, status: 404, error: `Warehouse ${warehouseCode} not found` }); + } + + sendResponse({ res, data: warehouse }); +}); + +export const getWarehouseByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + + if (!worldId || !warehouseId) { + return sendResponse({ res, status: 400, error: "worldId and warehouseId are required" }); + } + + const repo = WMSWarehouseRepository(worldId); + const warehouse = await repo.getWarehouseById(warehouseId); + + if (!warehouse) { + return sendResponse({ res, status: 404, error: `Warehouse ${warehouseId} not found` }); + } + + sendResponse({ res, data: warehouse }); +}); + +export const updateWarehouseController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const updateData = req.body; + + if (!worldId || !warehouseId) { + return sendResponse({ res, status: 400, error: "worldId and warehouseId are required" }); + } + if (!updateData || Object.keys(updateData).length === 0) { + return sendResponse({ res, status: 400, error: "Update data is required" }); + } + + const repo = WMSWarehouseRepository(worldId); + const updated = await repo.updateWarehouse(warehouseId, updateData); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Warehouse ${warehouseId} not found` }); + } + + sendResponse({ res, data: updated }); +}); + +export const getWarehousesByTypeController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseType } = req.params; + const { status, cursor, limit } = req.query; + + if (!worldId || !warehouseType) { + return sendResponse({ res, status: 400, error: "worldId and warehouseType are required" }); + } + + const statusArray = typeof status === "string" ? [status] : (status as string[]); + + const repo = WMSWarehouseRepository(worldId); + const warehouses = await repo.getWarehousesByType(warehouseType, { + status: statusArray?.length ? statusArray : undefined, + cursor: cursor as string, + limit: limit ? parseInt(limit as string) : undefined, + }); + + sendResponse({ res, data: warehouses }); +}); + +export const getActiveWarehousesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { warehouseType, cursor, limit } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const warehouseTypeArray = + typeof warehouseType === "string" ? [warehouseType] : (warehouseType as string[]); + + const repo = WMSWarehouseRepository(worldId); + const warehouses = await repo.getActiveWarehouses({ + warehouseType: warehouseTypeArray?.length ? warehouseTypeArray : undefined, + cursor: cursor as string, + limit: limit ? 
parseInt(limit as string) : undefined, + }); + + sendResponse({ res, data: warehouses }); +}); + +export const updateWarehouseStatusController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { status } = req.body; + + if (!worldId || !warehouseId) { + return sendResponse({ res, status: 400, error: "worldId and warehouseId are required" }); + } + if (!status || !["ACTIVE", "DISABLED", "ARCHIVED"].includes(status)) { + return sendResponse({ + res, + status: 400, + error: "Valid status is required (ACTIVE, DISABLED, or ARCHIVED)", + }); + } + + const repo = WMSWarehouseRepository(worldId); + const updated = await repo.updateWarehouseStatus(warehouseId, status); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Warehouse ${warehouseId} not found` }); + } + + sendResponse({ res, data: updated }); +}); + +export const searchWarehousesController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const { q: searchTerm, warehouseType, status, limit } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!searchTerm) { + return sendResponse({ res, status: 400, error: "Search term (q) is required" }); + } + + const warehouseTypeArray = + typeof warehouseType === "string" ? [warehouseType] : (warehouseType as string[]); + const statusArray = typeof status === "string" ? [status] : (status as string[]); + + const repo = WMSWarehouseRepository(worldId); + const warehouses = await repo.searchWarehouses(searchTerm as string, { + warehouseType: warehouseTypeArray?.length ? warehouseTypeArray : undefined, + status: statusArray?.length ? statusArray : undefined, + limit: limit ? parseInt(limit as string) : undefined, + }); + + sendResponse({ res, data: warehouses }); +}); + +export const getWarehousesByTimezoneController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, timezone } = req.params; + const { warehouseType, status, limit } = req.query; + + if (!worldId || !timezone) { + return sendResponse({ res, status: 400, error: "worldId and timezone are required" }); + } + + const warehouseTypeArray = + typeof warehouseType === "string" ? [warehouseType] : (warehouseType as string[]); + const statusArray = typeof status === "string" ? [status] : (status as string[]); + + const repo = WMSWarehouseRepository(worldId); + const warehouses = await repo.getWarehousesByTimezone(timezone, { + warehouseType: warehouseTypeArray?.length ? warehouseTypeArray : undefined, + status: statusArray?.length ? statusArray : undefined, + limit: limit ? 
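/*
 * Illustrative note, not part of the diff: the warehouse listing controllers accept
 * cursor and limit, which suggests cursor-based paging. Assuming the repository returns
 * a cursor for the next page, a client would page roughly like:
 *
 *   GET .../warehouses?limit=50            // first page
 *   GET .../warehouses?limit=50&cursor=... // next page, using the cursor returned by
 *                                          // the previous response
 */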
parseInt(limit as string) : undefined, + }); + + sendResponse({ res, data: warehouses }); + }, +); diff --git a/packages/controlmart/src/controller/wms/zone.controller.ts b/packages/controlmart/src/controller/wms/zone.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..ad0885be9927e65d3213409a9b2cd233f0cc9720 --- /dev/null +++ b/packages/controlmart/src/controller/wms/zone.controller.ts @@ -0,0 +1,163 @@ +import type { Request, Response } from "express"; +import { WMSZoneRepository } from "../../repository/wms"; +import { sendResponse, asyncHandler } from "../../utils/http.util"; + +export const createZoneController = asyncHandler(async (req: Request, res: Response) => { + const { worldId } = req.params; + const data = req.body; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + if (!data || Object.keys(data).length === 0) { + return sendResponse({ res, status: 400, error: "Request body is required" }); + } + + const repo = WMSZoneRepository(worldId); + const zone = await repo.createZone(data); + sendResponse({ res, data: zone, status: 201 }); +}); + +export const getZonesByWarehouseController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseId } = req.params; + const { zoneType, temperatureControlled } = req.query; + + if (!worldId || !warehouseId) { + return sendResponse({ res, status: 400, error: "worldId and warehouseId are required" }); + } + + const zoneTypeArray = typeof zoneType === "string" ? [zoneType] : (zoneType as string[]); + + const repo = WMSZoneRepository(worldId); + const zones = await repo.getZonesByWarehouse(warehouseId, { + zoneType: zoneTypeArray, + temperatureControlled: temperatureControlled === "true", + }); + + sendResponse({ res, data: zones }); +}); + +export const getZoneByCodeController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, warehouseId, zoneCode } = req.params; + + if (!worldId || !warehouseId || !zoneCode) { + return sendResponse({ + res, + status: 400, + error: "worldId, warehouseId, and zoneCode are required", + }); + } + + const repo = WMSZoneRepository(worldId); + const zone = await repo.getZoneByCode(warehouseId, zoneCode); + + if (!zone) { + return sendResponse({ res, status: 404, error: `Zone ${zoneCode} not found` }); + } + + sendResponse({ res, data: zone }); +}); + +export const getZoneByIdController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, zoneId } = req.params; + + if (!worldId || !zoneId) { + return sendResponse({ res, status: 400, error: "worldId and zoneId are required" }); + } + + const repo = WMSZoneRepository(worldId); + const zone = await repo.getZoneById(zoneId); + + if (!zone) { + return sendResponse({ res, status: 404, error: `Zone ${zoneId} not found` }); + } + + sendResponse({ res, data: zone }); +}); + +export const updateZoneController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, zoneId } = req.params; + const updateData = req.body; + + if (!worldId || !zoneId) { + return sendResponse({ res, status: 400, error: "worldId and zoneId are required" }); + } + if (!updateData || Object.keys(updateData).length === 0) { + return sendResponse({ res, status: 400, error: "Update data is required" }); + } + + const repo = WMSZoneRepository(worldId); + const updated = await repo.updateZone(zoneId, updateData); + + if (!updated) { + return sendResponse({ res, status: 404, error: `Zone ${zoneId} not found` }); + } + + sendResponse({ res, data: 
updated }); +}); + +export const getZonesByTypeController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, zoneType } = req.params; + const { warehouseId } = req.query; + + if (!worldId || !zoneType) { + return sendResponse({ res, status: 400, error: "worldId and zoneType are required" }); + } + + const repo = WMSZoneRepository(worldId); + const zones = await repo.getZonesByType(zoneType, warehouseId as string); + + sendResponse({ res, data: zones }); +}); + +export const addAisleToZoneController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, zoneId } = req.params; + const aisleData = req.body; + + if (!worldId || !zoneId) { + return sendResponse({ res, status: 400, error: "worldId and zoneId are required" }); + } + if (!aisleData || !aisleData.aisleId) { + return sendResponse({ res, status: 400, error: "aisle data with aisleId is required" }); + } + + const repo = WMSZoneRepository(worldId); + const result = await repo.addAisleToZone(zoneId, aisleData); + + sendResponse({ res, data: result }); +}); + +export const removeAisleFromZoneController = asyncHandler(async (req: Request, res: Response) => { + const { worldId, zoneId, aisleId } = req.params; + + if (!worldId || !zoneId || !aisleId) { + return sendResponse({ res, status: 400, error: "worldId, zoneId, and aisleId are required" }); + } + + const repo = WMSZoneRepository(worldId); + const result = await repo.removeAisleFromZone(zoneId, aisleId); + + sendResponse({ res, data: result }); +}); + +export const getZoneCapacityUtilizationController = asyncHandler( + async (req: Request, res: Response) => { + const { worldId, zoneId } = req.params; + const { warehouseId, zoneIds, zoneType } = req.query; + + if (!worldId) return sendResponse({ res, status: 400, error: "worldId is required" }); + + const zoneIdsArray = typeof zoneIds === "string" ? [zoneIds] : (zoneIds as string[]) || []; + if (zoneId) { + zoneIdsArray.push(zoneId); + } + const zoneTypeArray = typeof zoneType === "string" ? 
[zoneType] : (zoneType as string[]); + + const repo = WMSZoneRepository(worldId); + const utilization = await repo.getZoneCapacityUtilization({ + warehouseId: warehouseId as string, + zoneIds: zoneIdsArray, + zoneType: zoneTypeArray, + }); + + sendResponse({ res, data: utilization }); + }, +); diff --git a/packages/controlmart/src/controller/world.controller.ts b/packages/controlmart/src/controller/world.controller.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f5091416e2af7f043416b5df4ea33e48aabc2a7 --- /dev/null +++ b/packages/controlmart/src/controller/world.controller.ts @@ -0,0 +1,658 @@ +import type { Request, Response } from "express"; + +import { WorldRepository, CompanyRepository, ProductRepository } from "../repository"; +import type { TWorldInput } from "../models/world.model"; +import { getIdFromMongoObject } from "../utils/mongo.util"; +import { sendResponse } from "../utils/http.util"; +import { getErrorMessage } from "../utils/error.util"; +// Research branch imports +import { worlds } from "../worlds"; +import { clearWorldData } from "../services/mongo.service"; +// od-arch imports +import { capabilitySamplingService } from "../services/capability-sampling.service"; +import { capabilityCatalog } from "../services/capability-catalog.service"; +import { ChaosConfigRegistry } from "../services/chaos-config.registry"; +import { knowledgeGraph } from "../services/knowledge-graph.service"; +import type { ChaosPolicy } from "../types/od.type"; +import type { ValidationWarning } from "../types/knowledge-graph.type"; +import { parseOffsetParams } from "../utils/pagination.util"; + +const _seedWorld = async (worldId: string, layout: string | undefined, realHoursPerSimDay: number, onProgress?: (data: any) => void) => { + let seedResult = null; + if (layout && worlds[layout]) { + const worldConfig = worlds[layout]; + + if (worldConfig.dataSeeder) { + if (onProgress) { + onProgress({ step: "seeding_data", layout: worldConfig.layoutName }); + } + + // Execute the data seeder from the registry + seedResult = await worldConfig.dataSeeder(1_000_000, worldId); + } + + if (worldConfig.odSeeder) { + if (onProgress) { + onProgress({ step: "seeding_ods", layout: worldConfig.layoutName }); + } + // Execute the OD seeder from the registry + await worldConfig.odSeeder(worldId, realHoursPerSimDay); + } + } + return seedResult; +}; + +const createWorldController = async (req: Request, res: Response) => { + const stream = req.query.stream === "true"; + + if (stream) { + res.setHeader("Content-Type", "text/event-stream"); + res.setHeader("Cache-Control", "no-cache"); + res.setHeader("Connection", "keep-alive"); + res.setHeader("X-Stream-Mode", "true"); + res.setHeader("Access-Control-Allow-Origin", "*"); + res.setHeader("Access-Control-Allow-Headers", "Cache-Control"); + + res.flushHeaders(); + res.write("event: connected\n"); + res.write('data: {"message":"Stream connected"}\n\n'); + if (res.flush) { + res.flush(); + } + + req.on("close", () => { + console.log("Client disconnected from stream"); + }); + } + + try { + const worldData: TWorldInput = req.body; + const { layout } = worldData; + + if (!layout) { + if (stream) { + sendResponse({ res, stream, event: "error", error: "Missing required field: layout" }); + return; + } + return sendResponse({ res, status: 400, error: "Missing required field: layout" }); + } + + if (stream) { + console.log("Starting world creation with streaming..."); + sendResponse({ + res, + stream, + event: "progress", + data: { step: "creating_world" 
}, + }); + } + + // od-arch: Capability sampling support + let capabilityIds: string[] = []; + let samplingMetadata: { type: string; count: number } | null = null; + let validationWarnings: ValidationWarning[] = []; + + if (worldData.samplingStrategy) { + try { + if (stream) + sendResponse({ + res, + stream, + event: "progress", + data: { step: "sampling_capabilities" }, + }); + + // Apply sampling strategy with optional persona filtering + capabilityIds = capabilitySamplingService.applySamplingStrategy( + worldData.samplingStrategy, + worldData.personas + ); + + samplingMetadata = { + type: worldData.samplingStrategy.type, + count: capabilityIds.length, + }; + + worldData.capabilityIds = capabilityIds; + + if (stream) + sendResponse({ + res, + stream, + event: "progress", + data: { + step: "capabilities_sampled", + count: capabilityIds.length, + type: worldData.samplingStrategy.type, + }, + }); + + // Validate sampled capabilities + if (capabilityIds.length > 0) { + for (const capId of capabilityIds) { + const capability = capabilityCatalog.getById(capId); + if (capability) { + const validation = knowledgeGraph.validateOD(capability.odId); + if (!validation.isExecutable || validation.warnings.length > 0) { + validationWarnings.push(...validation.warnings); + } + } + } + } + } catch (samplingError) { + const samplingMsg = getErrorMessage(samplingError); + if (!res.headersSent) { + if (stream) { + sendResponse({ + res, + stream, + event: "error", + error: `Sampling failed: ${samplingMsg}`, + }); + res.end(); + } else { + res.status(400).json({ + success: false, + error: `Sampling failed: ${samplingMsg}`, + }); + } + } + return; + } + } + + // Assign all personas from capability definitions if not already specified + if (!worldData.personas?.allowedPersonas?.length) { + const allCapabilities = capabilityCatalog.getAll(); + const allPersonaIds = [...new Set( + allCapabilities.flatMap(cap => cap.tags?.personas || []) + )]; + worldData.personas = { + ...worldData.personas, + allowedPersonas: allPersonaIds, + }; + } + + const newWorld = await WorldRepository.createWorld(worldData); + const worldId = getIdFromMongoObject(newWorld); + + // Register chaos config in registry if present (MORPH-309 fix) + if (newWorld.chaos) { + ChaosConfigRegistry.setWorldChaosConfiguration(worldId, newWorld.chaos); + } + + if (stream) + sendResponse({ + res, + stream, + event: "progress", + data: { step: "world_created", worldId }, + }); + + const realHoursPerSimDay = req.body.realHoursPerSimDay ? Number(req.body.realHoursPerSimDay) : 2; + const seedResult = await _seedWorld(worldId, layout, realHoursPerSimDay, stream ? 
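/*
 * Illustrative client sketch, not part of the diff: when ?stream=true the controller
 * above emits SSE-style events ("connected", "progress", "complete", "error"). Assuming
 * world creation is mounted as a POST route (path shown here is hypothetical), EventSource
 * cannot be used because it only issues GETs, so a client would read the response body
 * directly, e.g.:
 *
 *   const res = await fetch("/worlds?stream=true", {
 *     method: "POST",
 *     headers: { "Content-Type": "application/json" },
 *     body: JSON.stringify(worldInput),
 *   });
 *   const reader = res.body!.getReader();
 *   const decoder = new TextDecoder();
 *   // accumulate decoded chunks and split on "\n\n" to recover individual events
 */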
(data) => sendResponse({ res, stream, event: "progress", data }) : undefined); + + const companyResult = await CompanyRepository(worldId).getAllCompanies(); + const companies = companyResult.items; + const mainCompany = companies.find((c) => c.isMpcCompany); + const npcCompanies = companies.filter((c) => !c.isMpcCompany); + + const productResult = await ProductRepository(worldId).getAllProducts(); + const products = productResult.items; + const productsForMpc = products; + + const result = { + world: newWorld, + mainCompany, + npcCompanies, + productsForMpc, + seedResult, + ...(samplingMetadata && { + capabilities: { + samplingType: samplingMetadata.type, + count: samplingMetadata.count, + ids: capabilityIds, + validationWarnings, + }, + }), + }; + + if (stream) { + sendResponse({ res, stream, event: "complete", data: result }); + res.end(); + } else { + res.status(200).json(result); + } + } catch (err) { + const msg = getErrorMessage(err); + if (!res.headersSent) { + if (stream) { + sendResponse({ res, stream, event: "error", error: msg }); + res.end(); + } else { + res.status(500).json({ success: false, error: msg }); + } + } + } +}; + +const resetWorldController = async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + + await clearWorldData(worldId); + + const realHoursPerSimDay = world.realHoursPerSimDay || 2; + + const seedResult = await _seedWorld(worldId, world.layout || "perishable-food-manufacturer", realHoursPerSimDay); + + sendResponse({ res, status: 200, data: { success: true, message: "World reset and re-seeded", seedResult } }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +const getWorldController = async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + sendResponse({ res, status: 200, data: world }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +const listWorldsController = async (req: Request, res: Response) => { + try { + // Parse pagination and filter parameters (od-arch style) + const pagination = parseOffsetParams(req.query); + const { is_default, mpcCompany, search, cursor, limit } = req.query; + + // Build filters + const filters: any = {}; + if (typeof is_default === 'string') { + filters.is_default = is_default === 'true'; + } + if (typeof mpcCompany === 'string') { + filters.mpcCompany = mpcCompany; + } + if (typeof search === 'string') { + filters.search = search; + } + // Research branch cursor support + if (typeof cursor === 'string') { + filters.cursor = cursor; + } + if (typeof limit === 'string') { + filters.limit = parseInt(limit, 10); + } + + // Get worlds with pagination + const result = await WorldRepository.getAllWorlds( + Object.keys(filters).length > 0 ? 
filters : undefined, + pagination + ); + + // Handle both pagination styles in response + if ('items' in result) { + // Cursor-based pagination response (research branch) + sendResponse({ + res, + status: 200, + data: result.items, + pagination: { + nextCursor: result.nextCursor ?? null, + previousCursor: req.query.cursor as string | null, + totalCount: result.totalCount, + hasMore: result.hasMore, + limit: result.limit, + }, + }); + } else { + // Offset-based pagination response (od-arch) + sendResponse({ + res, + status: 200, + data: { + count: result.data.length, + total: result.pagination?.total, + worlds: result.data, + pagination: result.pagination, + }, + }); + } + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +const deleteWorldController = async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const deleted = await WorldRepository.deleteWorld(worldId); + if (!deleted) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + sendResponse({ res, status: 200, data: { success: true } }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +const updateWorldController = async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const updated = await WorldRepository.updateWorld(worldId, req.body); + if (!updated) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + sendResponse({ res, status: 200, data: updated }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +// Research branch: Layout controllers +const getWorldLayoutsController = async (req: Request, res: Response) => { + try { + const layouts = Object.values(worlds).map((w) => ({ + id: w.id, + name: w.layoutName, + description: w.description, + shortDescription: w.shortDescription, + })); + sendResponse({ res, status: 200, data: layouts }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +const getWorldLayoutByIdController = async (req: Request, res: Response) => { + const { layoutId } = req.params; + + if (!layoutId) { + sendResponse({ res, status: 400, error: "Layout ID is required" }); + return; + } + + try { + const layout = worlds[layoutId]; + if (!layout) { + sendResponse({ res, status: 404, error: "Layout not found" }); + return; + } + sendResponse({ + res, + status: 200, + data: { + id: layout.id, + name: layout.layoutName, + description: layout.description, + shortDescription: layout.shortDescription, + docs: layout.docs ? 
layout.docs() : null, + }, + }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +// Research branch: Act controller (proxy for internal API calls) +const actController = async (req: Request, res: Response) => { + try { + const { method = "GET", path, body, params, query } = req.body; + + if (!path) { + return sendResponse({ res, status: 400, error: "Path is required" }); + } + + const port = process.env.PORT || 3000; + const baseUrl = `http://localhost:${port}`; + + let finalPath = path; + if (params) { + Object.entries(params).forEach(([key, value]) => { + finalPath = finalPath.replace(`:${key}`, value); + }); + } + + const url = new URL(finalPath, baseUrl); + if (query) { + Object.entries(query).forEach(([key, value]) => { + url.searchParams.append(key, String(value)); + }); + } + + const options: RequestInit = { + method, + headers: { + "Content-Type": "application/json", + ...(req.headers.authorization ? { authorization: req.headers.authorization } : {}), + }, + }; + + if (body && ["POST", "PUT", "PATCH"].includes(method.toUpperCase())) { + options.body = JSON.stringify(body); + } + + const response = await fetch(url.toString(), options); + + let responseData; + const contentType = response.headers.get("content-type"); + if (contentType && contentType.includes("application/json")) { + responseData = await response.json(); + } else { + responseData = await response.text(); + } + + res.status(response.status).json(responseData); + + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +// od-arch: Capability controllers +const getWorldCapabilitiesController = async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + + const capabilityIds = world.capabilityIds || []; + + const capabilities = capabilityIds.length > 0 + ? 
capabilityIds.map(id => capabilityCatalog.getById(id)).filter(Boolean) + : []; + + sendResponse({ + res, + status: 200, + data: { + worldId: getIdFromMongoObject(world), + worldName: world.name, + samplingStrategy: world.samplingStrategy || { type: 'all' }, + count: capabilityIds.length, + capabilityIds, + capabilities, + }, + }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +// od-arch: Chaos controllers +const getWorldChaosController = async (req: Request, res: Response) => { + const { worldId } = req.params; + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + + const chaosConfig = world.chaos || { + processChaosEnabled: false, + infraChaosEnabled: false, + }; + + sendResponse({ + res, + status: 200, + data: { + worldId: getIdFromMongoObject(world), + worldName: world.name, + chaos: chaosConfig, + }, + }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +const updateWorldChaosController = async (req: Request, res: Response) => { + const { worldId } = req.params; + const chaosConfig = req.body; // Allow implicit typing or cast to TWorldChaosConfig if imported + + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + + const updated = await WorldRepository.updateWorld(worldId, { + chaos: chaosConfig, + }); + + if (!updated) { + sendResponse({ res, status: 500, error: "Failed to update world chaos" }); + return; + } + + // Use the new registry method + ChaosConfigRegistry.setWorldChaosConfiguration(worldId, chaosConfig); + + sendResponse({ + res, + status: 200, + data: { + worldId, + chaos: chaosConfig, + message: "World chaos configuration updated successfully", + }, + }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +const deleteWorldChaosController = async (req: Request, res: Response) => { + const { worldId } = req.params; + + if (!worldId) { + sendResponse({ res, status: 400, error: "worldId is required" }); + return; + } + + try { + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + sendResponse({ res, status: 404, error: "World not found" }); + return; + } + + const updated = await WorldRepository.updateWorld(worldId, { + chaos: undefined, + }); + + if (!updated) { + sendResponse({ res, status: 500, error: "Failed to delete world chaos" }); + return; + } + + ChaosConfigRegistry.setWorldChaosConfiguration(worldId, { + processChaosEnabled: false, + infraChaosEnabled: false, + }); + + sendResponse({ + res, + status: 200, + data: { + worldId, + message: "World chaos configuration deleted successfully", + }, + }); + } catch (err) { + sendResponse({ res, status: 500, error: getErrorMessage(err) }); + } +}; + +export const WorldController = { + createWorldController, + getWorldController, + listWorldsController, + updateWorldController, + deleteWorldController, + // Research branch controllers + resetWorldController, + getWorldLayoutsController, + getWorldLayoutByIdController, + actController, + // od-arch controllers + getWorldCapabilitiesController, + getWorldChaosController, + 
updateWorldChaosController, + deleteWorldChaosController, +}; diff --git a/packages/controlmart/src/docs/base.docs.ts b/packages/controlmart/src/docs/base.docs.ts new file mode 100644 index 0000000000000000000000000000000000000000..e03b465c95d47f329c89e4404dd53bbfa8502f21 --- /dev/null +++ b/packages/controlmart/src/docs/base.docs.ts @@ -0,0 +1,350 @@ +export const openApiBase = { + openapi: "3.1.0", + info: { + title: "Morpheus ControlMart API", + version: "1.0.0", + description: ` +# Morpheus ControlMart API + +A comprehensive enterprise-grade API for managing EDI transactions, ERP operations, warehouse management, transportation logistics, and operational monitoring in the Morpheus ecosystem. + +## Core Capabilities + +### **Business Operations** +- **World Management**: Multi-tenant isolated business environments with complete data segregation +- **EDI Transaction Processing**: Full Electronic Data Interchange lifecycle with validation, transformation, and routing +- **ERP Management**: Complete enterprise resource planning including products, orders, invoices, shipments, and payments +- **Warehouse Management (WMS)**: Comprehensive warehouse operations including inventory, tasks, equipment, labor management, and fulfillment +- **Transportation Management (TMS)**: End-to-end transportation logistics with carrier integration, route optimization, and shipment tracking + +### **System Operations** +- **Comprehensive Logging**: Dual-layer logging system with operational monitoring and audit trails for compliance +- **Real-time Streaming**: Server-sent events (SSE) for long-running operations and live updates +- **Advanced Filtering**: Sophisticated query capabilities with pagination, sorting, and search across all endpoints +- **Data Integration**: Seamless integration between EDI, ERP, WMS, and TMS systems with automated workflows + +## Response Structure + +All API endpoints follow a consistent response format designed for reliability and comprehensive error handling: + +### **Standard JSON Response** +\`\`\`json +{ + "success": boolean, + "status": number, + "data": any | null, + "meta": { + "event": "message", + "timestamp": "2024-01-15T08:00:00.000Z", + ...additionalMetadata + }, + "pagination": { // Only for paginated endpoints + "limit": number, + "previousCursor": string | null, + "nextCursor": string | null, + "totalCount": number, + "hasMore": boolean + } +} +\`\`\` + +### **Error Response** +\`\`\`json +{ + "success": false, + "status": number, + "error": "Error message description", + "meta": { + "event": "message", + "timestamp": "2024-01-15T08:00:00.000Z" + } +} +\`\`\` + +### **Server-Sent Events (SSE) Response** +For streaming endpoints, responses are delivered as SSE: +\`\`\` +event: progress +data: {"success": true, "status": 200, "data": {...}, "meta": {"event": "progress", "timestamp": "..."}} + +event: complete +data: {"success": true, "status": 200, "data": {...}, "meta": {"event": "complete", "timestamp": "..."}} +\`\`\` + +## Authentication + +This API currently operates without authentication for development purposes. Production deployments should implement proper authentication mechanisms. + +## Rate Limiting + +No rate limiting is currently enforced. Production deployments should implement appropriate rate limiting based on operational requirements. 
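### **Response Envelope (TypeScript Sketch)**

For client code, the envelope documented above can be summarized as a rough TypeScript type. This is a sketch derived only from the response structure shown in this section, not a published SDK type, and field optionality may vary per endpoint:

\`\`\`ts
// Rough client-side typing of the standard response envelope described above.
// Derived from this documentation only; not an official SDK export.
interface ApiMeta {
  event: "message" | "progress" | "complete" | "error";
  timestamp: string;              // ISO 8601
  [key: string]: unknown;         // additional metadata
}

interface ApiPagination {
  limit: number;
  previousCursor: string | null;
  nextCursor: string | null;
  totalCount: number;
  hasMore: boolean;
}

interface ApiResponse<T> {
  success: boolean;
  status: number;
  data?: T | null;                // present on success
  error?: string;                 // present on error responses
  meta: ApiMeta;
  pagination?: ApiPagination;     // only for paginated endpoints
}
\`\`\`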
+ +## Pagination + +Paginated endpoints use cursor-based pagination for optimal performance: +- **cursor**: Use the \`nextCursor\` from previous response for next page +- **limit**: Maximum results per page (default: 50, max: 100) +- **totalCount**: Total number of records available +- **hasMore**: Indicates if additional pages are available + `, + contact: { + name: "Skyfall Team", + email: "engineering@skyfall.ai", + }, + license: { + name: "(c) Skyfall AI", + url: "https://skyfall.ai", + }, + }, + servers: [ + { + url: "http://localhost:8282", + description: "Development server", + }, + { + url: "https://morpheus.corp.skyfall.ai/", + description: "Production server", + }, + ], + tags: [ + { + name: "Health", + description: "System health and status endpoints", + }, + { + name: "World", + description: "World management operations - create and manage isolated business environments", + }, + { + name: "EDI", + description: + "EDI transaction management - full lifecycle of Electronic Data Interchange operations", + }, + { + name: "Logs", + description: ` +**Comprehensive Logging and Audit System** + +The logging system provides two distinct but complementary types of logging within the Morpheus ecosystem: + +## 1. Operational Logs (World Logs) +**Endpoint**: \`/{worldId}/logs\` + +Real-time operational monitoring and system behavior tracking: +- **Multi-Service Coverage**: Tracks activities across EDI, ERP, AS2, translation, validation, gateway, and infrastructure services +- **Granular Filtering**: Filter by service type, log level, entities, transactions, and time ranges +- **Full-Text Search**: Search within log messages for specific content +- **Real-Time Monitoring**: Immediate capture and retrieval of system events + +**Service Types**: EDI, ERP, AS2, translator, validator, gateway, infra, other +**Log Levels**: trace, debug, info, warn, error, fatal + +## 2. Audit Logs (Data Change Audit) +**Endpoint**: \`/{worldId}/audit-logs\` + +Comprehensive data change tracking for compliance and security: +- **Complete Change History**: Track all create, update, and delete operations on database records +- **Before/After Snapshots**: See exactly what changed in each modification +- **Model-Specific Filtering**: Filter by specific data models (EdiTransaction, WMSTask, etc.) 
+- **Document Tracking**: Follow all changes to specific documents using their IDs +- **Compliance Ready**: Meets regulatory requirements for data change tracking + +**Common Use Cases:** +- **Operational**: Debugging, performance monitoring, error tracking, real-time system health +- **Audit**: Compliance reporting, data investigation, security monitoring, change impact analysis + `, + }, + { + name: "WMS", + description: + "Warehouse Management System - comprehensive warehouse operations including inventory, tasks, equipment, labor, and shipments", + }, + { + name: "TMS", + description: + "Transportation Management System - comprehensive transportation operations including shipments, carriers, and trailer management", + }, + { + name: "ERP", + description: + "Enterprise Resource Planning - comprehensive business operations including companies, products, orders, invoices, shipments, and payments", + }, + { + name: "Finance", + description: + "Finance transaction management - comprehensive financial tracking, accounting operations, and business intelligence for cash flow analysis", + }, + { + name: "Ledger", + description: ` +**Company Ledger Management System** + +Central financial position management with comprehensive balance tracking and analytics: + +**Core Features**: +- **Financial Position Tracking**: Complete cash, receivables, and payables management +- **Auto-Calculated Net Position**: Automatic calculation of net financial position (cash + receivables - payables) +- **World-Scoped Ledgers**: One ledger per world environment with unique constraint enforcement +- **Delta-Based Operations**: Precise incremental balance adjustments with atomic operations + +**Key Capabilities**: +- **Upsert Operations**: Intelligent create-or-update functionality for ledger management +- **Increment/Decrement**: Delta-based balance adjustments for transaction processing +- **Analytics Summary**: Comprehensive financial metrics and business intelligence +- **Integration Support**: Full API support for external accounting system integration + +**Use Cases**: +- **Financial Dashboards**: Real-time financial position for executive reporting +- **Cash Flow Management**: Track and manage company cash flow and liquidity +- **Accounts Integration**: Synchronize with AR/AP systems for complete financial picture +- **Business Intelligence**: Financial analytics and performance monitoring + `, + }, + { + name: "Business Rules", + description: + "Business rule management - create, configure, and manage business rules for data validation, transformation, and automation across domains", + }, + { + name: "OD", + description: ` +**Operational Descriptor (OD) System** + +The Operational Descriptor (OD) system is a powerful workflow orchestration engine designed to automate complex, multi-service business processes within the Morpheus ecosystem. It enables the definition, execution, scheduling, and monitoring of operational workflows using a declarative JSON-based schema. 
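To make the shape of a definition concrete, the sketch below is purely illustrative: every field name in it is hypothetical, and the authoritative structure is the OD schema itself (see the capabilities and components described below).

\`\`\`ts
// Purely illustrative OD sketch. All field names here are hypothetical;
// consult the actual OD schema/type definitions for the real structure.
const exampleOd = {
  name: "invoice-validation-flow",                            // hypothetical workflow id
  steps: [
    { type: "mcp-tool", tool: "validateEdiDocument" },        // MCP tool call step
    { type: "script", code: "return input.total > 0;" },      // inline script step
    { type: "mapping", from: "ediInvoice", to: "erpInvoice" } // mapping step
  ],
  retryPolicy: { maxAttempts: 3 },                            // error-handling policy
  schedule: { cron: "0 * * * *" },                            // optional recurring schedule
};
\`\`\`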
+ +## Core Capabilities + +### **Workflow Orchestration** +- **Declarative Definitions**: Define complex workflows using JSON schemas +- **Multi-Step Execution**: Chain multiple steps including MCP tool calls, scripts, and mapping operations +- **Conditional Logic**: Implement branching logic and conditional execution paths +- **Error Handling**: Robust error handling with retry policies and compensation steps + +### **Scheduling & Automation** +- **Flexible Scheduling**: Schedule ODs to run once or on a recurring interval (CRON-like) +- **Background Jobs**: Execute long-running processes in the background +- **Bulk Operations**: Schedule multiple ODs in a single operation + +### **Integration & Extensibility** +- **MCP Integration**: Seamlessly integrate with Model Context Protocol (MCP) tools +- **Scripting Support**: Execute custom JavaScript/TypeScript logic within steps +- **Chaos Engineering**: Built-in chaos testing capabilities to simulate failures and ensure resilience + +## Key Components + +- **Operational Descriptor**: The blueprint defining the workflow, including steps, inputs, and policies. +- **Job**: An instance of a scheduled or executing OD. +- **Executor**: The engine responsible for running the OD steps and managing state. + +## Common Use Cases +- **EDI Processing**: Automate the validation, transformation, and routing of EDI documents. +- **Warehouse Operations**: Orchestrate complex WMS tasks like inventory replenishment and cycle counting. +- **Data Synchronization**: Keep data in sync across ERP, WMS, and TMS systems. +- **System Maintenance**: Schedule routine maintenance tasks and health checks. + `, + }, + { + name: "ITSM Tickets", + description: ` +**IT Service Management (ITSM) Ticketing System** + +Comprehensive ticketing system for managing incidents, service requests, problems, and change requests within a world environment. + +## Key Features + +- **Ticket Types**: Support for incidents, service requests, problems, and change requests +- **Priority & Impact Management**: Configurable priority, impact, and urgency levels +- **Status Workflow**: Complete lifecycle management from new to closed +- **Work Notes**: Internal and public work notes with timestamp tracking +- **Assignment Management**: Assign tickets to specific users or teams +- **Attachment Support**: File attachments for additional context +- **Audit Trail**: Complete history of all ticket changes and updates + +## Ticket Types +- **incident**: Unplanned interruption to service +- **service_request**: Request for something to be provided +- **problem**: Root cause of one or more incidents +- **change**: Addition, modification or removal of service + +## Status Values +- **new**: Newly created ticket +- **open**: Ticket has been opened and acknowledged +- **in_progress**: Work is actively being performed +- **on_hold**: Ticket is temporarily suspended +- **resolved**: Issue has been resolved but awaiting confirmation +- **closed**: Ticket is completed and closed + +## Priority Levels +- **low**: Can be addressed during normal business hours +- **medium**: Should be addressed promptly +- **high**: Requires urgent attention +- **critical**: Requires immediate attention + +This system is designed to be generated and managed by the world simulation system for realistic ITSM scenarios. 
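For quick reference, the enumerations above can be written as TypeScript unions. This is a sketch based only on the values listed in this description, not an import from the actual model definitions:

\`\`\`ts
// Sketch of the ITSM enumerations listed above; derived from this description,
// not from the real model code.
type TicketType = "incident" | "service_request" | "problem" | "change";
type TicketStatus = "new" | "open" | "in_progress" | "on_hold" | "resolved" | "closed";
type TicketPriority = "low" | "medium" | "high" | "critical";
\`\`\`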
+`, + }, + { + name: "Knowledge Graph", + description: ` +**Knowledge Graph System** + +The Knowledge Graph provides a comprehensive visualization of the relationships between personas, capabilities, operational descriptors (ODs), tools, services, and entities within a world environment. + +## Graph Structure + +The knowledge graph consists of **nodes** and **edges** that represent entities and their relationships: + +### Node Types +- **PERSONA**: Roles/actors (Store Manager, Warehouse Worker) +- **CAPABILITY**: Business functions (Order Fulfillment, Inventory Check) +- **OD**: Operational Descriptors (workflow definitions) +- **TOOL**: API operations (createOrder, allocateInventory) +- **SERVICE**: System boundaries (ERP, WMS, TMS, EDI) +- **ENTITY**: Data objects (Order, Product, Inventory) + +### Edge Types (Relationships) +- **can_perform**: Persona → Capability (which capabilities a persona can execute) +- **implemented_by**: Capability → OD (how a capability is implemented) +- **uses**: OD → Tool (which tools an OD invokes) +- **exposed_by**: Tool → Service (which service provides a tool) +- **produces**: Tool → Entity (entities created by a tool) +- **requires**: Tool → Entity (entities needed as input) +- **modifies**: Tool → Entity (entities mutated by a tool) + +## Filtering + +When a world has assigned capabilities, the graph is automatically filtered using bidirectional BFS to show only the relevant connected subgraph. This includes all upstream (personas, capabilities) and downstream (tools, services, entities) nodes connected to the world's ODs. + +## Use Cases +- **Capability Discovery**: Understand what capabilities are available and how they're connected +- **Impact Analysis**: See what entities are affected by running a specific OD +- **Dependency Mapping**: Identify tool and service dependencies +- **Lineage Tracking**: Trace entity creation and modification paths +`, + }, + { + name: "Chaos", + description: ` +**Chaos Engineering System** + +The Chaos Engineering system enables controlled fault injection for testing system resilience. It allows configuration of failure scenarios through presets and policies. + +## Key Features + +- **Presets**: Pre-configured chaos policies for common failure scenarios +- **World-Level Policies**: Apply chaos rules to specific world environments +- **Capability Overrides**: Fine-grained control over chaos behavior for specific capabilities +- **OD Overrides**: Target specific Operational Descriptors with custom failure modes + +## Use Cases +- **Resilience Testing**: Verify system behavior under failure conditions +- **Integration Testing**: Test error handling across service boundaries +- **Training**: Simulate production-like failures in safe environments +`, + }, + ], + security: [ + // No security currently implemented + // Future: { "ApiKeyAuth": [] } or { "BearerAuth": [] } + ], +}; diff --git a/packages/controlmart/src/docs/index.ts b/packages/controlmart/src/docs/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..447c244916ff82ad864d4047386d4d83440b0c94 --- /dev/null +++ b/packages/controlmart/src/docs/index.ts @@ -0,0 +1,30 @@ +/** + * Main OpenAPI specification assembly + * + * This module combines all the modular documentation components into + * the final OpenAPI specification used by Swagger and Scalar UI. 
+ */ +import { openApiBase } from './base.docs'; +import { allSchemas } from './schemas'; +import { allPaths } from './paths'; + +/** + * The complete OpenAPI specification for the Morpheus ControlMart API. + * + * Structure: + * - Base info, servers, tags from base.docs.ts + * - Paths from paths/index.ts (including WMS submodules) + * - Schemas under components.schemas for proper OpenAPI 3.0 compliance + */ +export const openApiSpec = { + ...openApiBase, + paths: allPaths, + components: { + schemas: allSchemas, + }, +}; + +// Re-export for convenience +export { allSchemas } from './schemas'; +export { allPaths } from './paths'; +export { openApiBase } from './base.docs'; diff --git a/packages/controlmart/src/docs/paths/chaos.paths.ts b/packages/controlmart/src/docs/paths/chaos.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..2a41f8bc7b8f17c399066ff3277df90dea62da15 --- /dev/null +++ b/packages/controlmart/src/docs/paths/chaos.paths.ts @@ -0,0 +1,270 @@ +export const chaosPaths = { + "/chaos/presets": { + get: { + tags: [ + "Chaos" + ], + summary: "List all chaos presets", + description: "Retrieve a list of all available chaos presets with their metadata.", + operationId: "listChaosPresets", + responses: { + "200": { + description: "Successfully retrieved chaos presets", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + count: { + type: "integer", + description: "Number of presets returned" + }, + data: { + type: "array", + items: { + type: "object", + properties: { + id: { + type: "string", + description: "Unique preset identifier" + }, + name: { + type: "string", + description: "Human-readable preset name" + }, + description: { + type: "string", + description: "Description of what the preset does" + } + } + } + } + } + } + } + } + }, + "500": { + description: "Internal server error", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + error: { + type: "string" + } + } + } + } + } + } + } + } + }, + "/chaos/presets/{id}": { + get: { + tags: [ + "Chaos" + ], + summary: "Get a specific chaos preset", + description: "Retrieve detailed information about a specific chaos preset by its ID.", + operationId: "getChaosPreset", + parameters: [ + { + name: "id", + in: "path", + required: true, + description: "The unique identifier of the chaos preset", + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved chaos preset", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + description: "The complete chaos policy configuration" + } + } + } + } + } + }, + "400": { + description: "Bad request - Preset ID is required", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + error: { + type: "string", + example: "Preset ID is required" + } + } + } + } + } + }, + "404": { + description: "Not found - Chaos preset does not exist", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + error: { + type: "string", + example: "Chaos preset not found: preset-id" + } + } + } + } + } + }, 
+ "500": { + description: "Internal server error", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + error: { + type: "string" + } + } + } + } + } + } + } + } + }, + "/chaos/status": { + get: { + tags: [ + "Chaos" + ], + summary: "Get chaos system status", + description: "Retrieve the current status of the chaos engineering system, including whether it is enabled and statistics about configured policies.", + operationId: "getChaosStatus", + responses: { + "200": { + description: "Successfully retrieved chaos status", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + enabled: { + type: "boolean", + description: "Whether chaos engineering is currently enabled" + }, + activePreset: { + type: "string", + nullable: true, + description: "The currently active chaos preset (from CHAOS_PRESET env var), or null if none" + }, + stats: { + type: "object", + description: "Statistics about chaos configuration", + properties: { + presetCount: { + type: "integer", + description: "Number of available presets" + }, + worldPolicyCount: { + type: "integer", + description: "Number of world-level policies configured" + }, + capabilityOverrideCount: { + type: "integer", + description: "Number of capability-level overrides configured" + }, + odOverrideCount: { + type: "integer", + description: "Number of OD-level overrides configured" + } + } + } + } + } + } + } + } + } + }, + "500": { + description: "Internal server error", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + error: { + type: "string" + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/docs.paths.ts b/packages/controlmart/src/docs/paths/docs.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..c13df28186332f8b5c47156983ebd937efe035eb --- /dev/null +++ b/packages/controlmart/src/docs/paths/docs.paths.ts @@ -0,0 +1,152 @@ +export const docsPaths = { + "/docs/mesh": { + get: { + tags: [ + "System" + ], + summary: "Detailed Service Mesh Documentation", + description: "\n### Retrieve Formatted Documentation\n\nProgrammatically access detailed, human-readable documentation for any service in the mesh. This is useful for building dynamic help systems or exploring the API capabilities programmatically.\n\n#### Parameters Guide\n\n- **service**: The top-level domain (e.g., `wms`, `tms`, `erp`, `audit`, `capability`).\n- **action** (Sub-Service): The specific functional area or resource within the service.\n - Example: In `wms`, actions include `inbound-order`, `inventory`, `cycle-count`.\n - Leave empty to see all actions for a service.\n- **method**: Filter by HTTP method to find specific operations (e.g., `post` for creation).\n\n#### Usage Examples\n\n**1. Get all documentation for WMS:**\n`GET /docs/mesh?service=wms`\n\n**2. Get documentation for WMS Inbound Orders (Action/Sub-Service):**\n`GET /docs/mesh?service=wms&action=inbound-order`\n\n**3. Get only the creation endpoint (POST) for Inbound Orders:**\n`GET /docs/mesh?service=wms&action=inbound-order&method=post`\n\n**4. 
Get clean docs without example payloads (for compact view):**\n`GET /docs/mesh?service=wms&action=inbound-order&includeExamples=false`\n ", + parameters: [ + { + name: "service", + in: "query", + required: true, + schema: { + type: "string", + example: "wms" + }, + description: "Service identifier. This is the top-level grouping (e.g. 'wms', 'erp', 'tms', 'audit', 'capability', 'knowledge-graph')." + }, + { + name: "action", + in: "query", + required: false, + schema: { + type: "string", + example: "inbound-order" + }, + description: "Sub-service or specific resource domain. E.g., for 'wms', valid actions include 'inbound-order', 'outbound-order', 'inventory', 'cycle-count'." + }, + { + name: "method", + in: "query", + required: false, + schema: { + type: "string", + enum: [ + "get", + "post", + "put", + "delete", + "patch" + ], + example: "post" + }, + description: "Filter by HTTP method to narrow down to specific operations." + }, + { + name: "includeExamples", + in: "query", + required: false, + schema: { + type: "boolean", + default: true + }, + description: "Include full JSON example payloads in the formatted output. Set to false for a more concise view." + } + ], + responses: { + "200": { + description: "Successful response containing formatted documentation strings.", + content: { + "application/json": { + schema: { + type: "object", + properties: { + service: { + type: "string", + example: "wms" + }, + filters: { + type: "object", + properties: { + action: { + type: "string" + }, + method: { + type: "string" + } + } + }, + count: { + type: "number", + example: 1 + }, + endpoints: { + type: "array", + items: { + type: "object", + properties: { + path: { + type: "string", + example: "/{worldId}/wms/inbound-orders" + }, + method: { + type: "string", + example: "post" + }, + summary: { + type: "string", + example: "Create new inbound order" + }, + formatted: { + type: "string", + description: "The full multi-line formatted documentation string." 
+ } + } + } + } + } + } + } + } + } + } + } + }, + "/docs/swagger.json": { + get: { + tags: [ + "System" + ], + summary: "Get OpenAPI Specification", + description: "Retrieve the full OpenAPI (Swagger) specification for the API.", + responses: { + "200": { + description: "OpenAPI JSON Specification", + content: { + "application/json": { + schema: { + type: "object" + } + } + } + } + } + } + }, + "/docs": { + get: { + tags: [ + "System" + ], + summary: "API Reference UI", + description: "Interactive API documentation UI.", + responses: { + "200": { + description: "HTML content" + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/edi.paths.ts b/packages/controlmart/src/docs/paths/edi.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..a04c029c46c126c909c976979e8b532a3e6f7b56 --- /dev/null +++ b/packages/controlmart/src/docs/paths/edi.paths.ts @@ -0,0 +1,1559 @@ +export const ediPaths = { + "/{worldId}/edi": { + get: { + tags: [ + "EDI" + ], + summary: "List EDI transactions with advanced filtering", + description: "\n## List EDI Transactions\n\nRetrieve a paginated list of EDI transactions with comprehensive filtering capabilities.\n\n### Features\n- **Multi-dimensional Filtering**: Partner, customer, document type, direction, status\n- **Advanced Date Filtering**: Precise timestamp-based queries \n- **Cursor-based Pagination**: High-performance pagination for large datasets\n- **Real-time Status Tracking**: Monitor transaction processing states\n- **Business Document Correlation**: Track related transactions through flow IDs\n\n### EDI Document Types Supported\n- **850**: Purchase Orders\n- **855**: Purchase Order Acknowledgments\n- **856**: Advanced Ship Notices \n- **810**: Invoices\n- **820**: Payment Orders\n- **997**: Functional Acknowledgments\n- **999**: Implementation Acknowledgments\n\n### Transaction Processing States\n- **RECEIVED**: Successfully received and parsed\n- **QUEUED**: Validated and queued for processing \n- **PROCESSING**: Currently being processed\n- **DELIVERED**: Successfully processed and delivered\n- **ERRORED**: Processing failed\n- **ARCHIVED**: Completed transactions\n ", + operationId: "listEdiTransactions", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter by trading partner identifier", + schema: { + type: "string", + example: "PARTNER_WALMART_001" + } + }, + { + name: "customerId", + in: "query", + required: false, + description: "Filter by customer identifier", + schema: { + type: "string", + example: "CUSTOMER_AMAZON_123" + } + }, + { + name: "docType", + in: "query", + required: false, + description: "Filter by EDI document type", + schema: { + type: "string", + enum: [ + "850", + "855", + "856", + "810", + "820", + "997", + "999" + ], + example: "810" + } + }, + { + name: "direction", + in: "query", + required: false, + description: "Filter by transaction direction", + schema: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND" + ], + example: "INBOUND" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by processing status", + schema: { + type: "string", + enum: [ + "RECEIVED", + "QUEUED", + "PROCESSING", + "DELIVERED", + "ERRORED", + "ARCHIVED" + ], + example: "ERRORED" + } + }, + { + name: "flowId", + in: "query", + required: 
false, + description: "Filter by business flow identifier", + schema: { + type: "string", + example: "FLOW_PO_2024_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter from this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-15T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter until this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-15T23:59:59.999Z" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor from previous page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439015" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of transactions to return (max: 20)", + schema: { + type: "integer", + minimum: 1, + maximum: 20, + default: 20, + example: 10 + } + } + ], + responses: { + "200": { + description: "Successfully retrieved EDI transactions", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/EdiTransaction" + }, + description: "Array of EDI transaction objects" + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 156 + }, + limit: { + type: "integer", + example: 10 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439020" + }, + previousCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439015" + } + } + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + } + } + } + }, + "400": { + description: "Bad Request - Invalid parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + }, + post: { + tags: [ + "EDI" + ], + summary: "Create a new EDI transaction", + description: "\n## Create EDI Transaction\n\nSubmit a new EDI transaction for processing with automatic parsing and validation.\n\n### Features\n- **Automatic Parsing**: Extract business metadata from raw EDI\n- **Idempotency Protection**: Duplicate detection and prevention\n- **Business Rule Validation**: EDI standards and business requirements\n- **Dollar Value Extraction**: Automatic monetary value calculation\n- **Control Number Tracking**: Full X12 control number hierarchy\n ", + operationId: "createEdiTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + partnerId: { + type: "string", + description: "Trading partner identifier", + example: "PARTNER_WALMART_001" + }, + docType: { + type: "string", + enum: [ + "850", + "855", + "856", + "810", + "820", + "997", + "999" + ], + example: "810" + }, + direction: { 
+ type: "string", + enum: [ + "INBOUND", + "OUTBOUND" + ], + example: "OUTBOUND" + }, + rawEdi: { + type: "string", + description: "Raw EDI document text", + example: "ISA*00* *00* *ZZ*SENDER..." + } + }, + required: [ + "partnerId", + "docType", + "direction" + ] + } + } + } + }, + responses: { + "201": { + description: "EDI transaction created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + $ref: "#/components/schemas/EdiTransaction" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + }, + "/{worldId}/edi/deprecated": { + get: { + tags: [ + "EDI" + ], + summary: "[DEPRECATED] List EDI transactions with page-based pagination", + deprecated: true, + description: "\n## ⚠️ DEPRECATED - List EDI Transactions (Page-Based Pagination)\n\n> **WARNING**: This endpoint is deprecated and will be removed in a future version. \n> Please migrate to `GET /{worldId}/edi` which uses cursor-based pagination for better performance and scalability.\n\n### Deprecation Notice\n\nThis endpoint uses **page-based pagination** (`page` and `pageSize` parameters) which has significant performance limitations:\n\n**Why This Endpoint Is Deprecated:**\n- ❌ **Poor Performance**: Page-based pagination requires database to skip records, causing slow queries on large datasets\n- ❌ **Inconsistent Results**: Data changes between page requests can cause missing or duplicate records\n- ❌ **Memory Intensive**: Higher memory consumption for offset-based queries\n- ❌ **Scalability Issues**: Performance degrades linearly with page number\n\n**Migration Path:**\nUse `GET /{worldId}/edi` instead, which provides:\n- ✅ **Cursor-Based Pagination**: Uses `cursor` parameter for efficient, consistent pagination\n- ✅ **Better Performance**: Constant-time pagination regardless of dataset size\n- ✅ **Consistent Results**: Stable pagination even with concurrent data changes\n- ✅ **Lower Memory Usage**: Optimized database queries\n\n### Migration Example\n\n**Old (Deprecated):**\n```\nGET /{worldId}/edi/deprecated?page=2&pageSize=10\n```\n\n**New (Recommended):**\n```\nGET /{worldId}/edi?limit=10&cursor={nextCursor_from_previous_response}\n```\n\n### Features (Same as Main Endpoint)\n\n- **Multi-dimensional Filtering**: Partner, customer, document type, direction, status\n- **Advanced Date Filtering**: Precise timestamp-based queries \n- **Real-time Status Tracking**: Monitor transaction processing states\n- **Business Document Correlation**: Track related transactions through flow IDs\n\n### EDI Document Types Supported\n- **850**: Purchase Orders\n- **855**: Purchase Order Acknowledgments\n- **856**: Advanced Ship Notices \n- **810**: Invoices\n- **820**: Payment Orders\n- **997**: Functional Acknowledgments\n- **999**: Implementation Acknowledgments\n\n### Transaction Processing States\n- **RECEIVED**: Successfully received and parsed\n- **QUEUED**: Validated and queued for processing \n- **PROCESSING**: Currently being processed\n- **DELIVERED**: Successfully processed and delivered\n- **ERRORED**: Processing failed\n- **ARCHIVED**: Completed 
transactions\n\n### Implementation Details\n\nThis endpoint uses the `getEdiTransactionsByPageNumber` repository method which:\n- Uses MongoDB `.skip()` and `.limit()` for pagination\n- Calculates skip offset as: `(page - 1) * pageSize`\n- Sorts by `createdAt` in descending order (newest first)\n- Returns total count via separate `countDocuments()` query\n- Does NOT support cursor-based navigation\n ", + operationId: "listEdiTransactionsDeprecated", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter by trading partner identifier", + schema: { + type: "string", + example: "PARTNER_WALMART_001" + } + }, + { + name: "customerId", + in: "query", + required: false, + description: "Filter by customer identifier", + schema: { + type: "string", + example: "CUSTOMER_AMAZON_123" + } + }, + { + name: "docType", + in: "query", + required: false, + description: "Filter by EDI document type", + schema: { + type: "string", + enum: [ + "850", + "855", + "856", + "810", + "820", + "997", + "999" + ], + example: "810" + } + }, + { + name: "direction", + in: "query", + required: false, + description: "Filter by transaction direction", + schema: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND" + ], + example: "INBOUND" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by processing status", + schema: { + type: "string", + enum: [ + "RECEIVED", + "QUEUED", + "PROCESSING", + "DELIVERED", + "ERRORED", + "ARCHIVED" + ], + example: "ERRORED" + } + }, + { + name: "flowId", + in: "query", + required: false, + description: "Filter by business flow identifier", + schema: { + type: "string", + example: "FLOW_PO_2024_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter from this date (ISO 8601). Note: Uses createdAt field, not timestamp.", + schema: { + type: "string", + format: "date-time", + example: "2024-01-15T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter until this date (ISO 8601). Note: Uses createdAt field, not timestamp.", + schema: { + type: "string", + format: "date-time", + example: "2024-01-15T23:59:59.999Z" + } + }, + { + name: "page", + in: "query", + required: false, + deprecated: true, + description: "[DEPRECATED] Page number for pagination (1-indexed). Use cursor-based pagination in the main endpoint instead.", + schema: { + type: "integer", + minimum: 1, + default: 1, + example: 2 + } + }, + { + name: "pageSize", + in: "query", + required: false, + deprecated: true, + description: "[DEPRECATED] Number of records per page. 
Use 'limit' parameter in the main endpoint instead.", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 10, + example: 10 + } + } + ], + responses: { + "200": { + description: "Successfully retrieved EDI transactions (using deprecated page-based pagination)", + headers: { + Deprecation: { + description: "Indicates this endpoint is deprecated", + schema: { + type: "string", + example: "true" + } + }, + Sunset: { + description: "Expected sunset date for this endpoint", + schema: { + type: "string", + format: "date", + example: "2025-12-31" + } + } + }, + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/EdiTransaction" + }, + description: "Array of EDI transaction objects for the requested page" + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 156, + description: "Total number of records matching the filter criteria" + }, + limit: { + type: "integer", + example: 10, + description: "Number of records per page (same as pageSize parameter)" + }, + hasMore: { + type: "boolean", + example: true, + description: "Indicates if there are more records available (always true if items returned)" + }, + nextCursor: { + type: "null", + example: null, + description: "Always null - cursor pagination not supported in this deprecated endpoint" + }, + previousCursor: { + type: "null", + example: null, + description: "Always null - cursor pagination not supported in this deprecated endpoint" + } + }, + description: "Pagination metadata. Note: nextCursor and previousCursor are always null in this deprecated endpoint." + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + }, + deprecationWarning: { + type: "string", + example: "This endpoint is deprecated. Please migrate to GET /{worldId}/edi with cursor-based pagination." + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + examples: { + deprecatedPaginationExample: { + summary: "Page-based pagination response (deprecated)", + value: { + success: true, + status: 200, + data: [ + { + _id: "507f1f77bcf86cd799439011", + worldRef: { + worldId: "550e8400-e29b-41d4-a716-446655440000" + }, + transactionId: "edi_txn_001", + partnerId: "PARTNER_WALMART_001", + customerId: "CUSTOMER_AMAZON_123", + docType: "810", + direction: "OUTBOUND", + status: "DELIVERED", + timestamp: "2024-01-15T10:25:30.123Z", + createdAt: "2024-01-15T10:25:30.123Z", + updatedAt: "2024-01-15T10:26:45.789Z" + } + ], + pagination: { + totalCount: 156, + limit: 10, + hasMore: true, + nextCursor: null, + previousCursor: null + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z", + deprecationWarning: "This endpoint is deprecated. Please migrate to GET /{worldId}/edi with cursor-based pagination." 
+ } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + }, + "/{worldId}/edi/{transactionId}": { + get: { + tags: [ + "EDI" + ], + summary: "Get specific EDI transaction", + operationId: "getEdiTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "transactionId", + in: "path", + required: true, + schema: { + type: "string", + example: "edi_edi_2024_001" + } + } + ], + responses: { + "200": { + description: "Transaction retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/EdiTransaction" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time" + } + } + } + } + } + } + } + }, + "404": { + description: "Transaction not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + }, + patch: { + tags: [ + "EDI" + ], + summary: "Update EDI transaction", + operationId: "updateEdiTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "transactionId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + requestBody: { + content: { + "application/json": { + schema: { + type: "object", + properties: { + businessDocumentNumber: { + type: "string" + }, + flowId: { + type: "string" + }, + payload: { + type: "object" + } + } + } + } + } + }, + responses: { + "200": { + description: "Transaction updated", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean" + }, + status: { + type: "integer" + }, + data: { + $ref: "#/components/schemas/EdiTransaction" + } + } + } + } + } + } + } + }, + delete: { + tags: [ + "EDI" + ], + summary: "Delete EDI transaction", + operationId: "deleteEdiTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "transactionId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Transaction deleted", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + deleted: { + type: "boolean", + example: true + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/edi/{transactionId}/status": { + patch: { + tags: [ + "EDI" + ], + summary: "Update transaction status", + operationId: "updateEdiTransactionStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "transactionId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + 
properties: { + status: { + type: "string", + enum: [ + "RECEIVED", + "QUEUED", + "PROCESSING", + "DELIVERED", + "ERRORED", + "ARCHIVED" + ], + example: "ERRORED" + }, + errorReason: { + type: "string" + }, + errorDetails: { + type: "object" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Status updated", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean" + }, + status: { + type: "integer" + }, + data: { + $ref: "#/components/schemas/EdiTransaction" + } + } + } + } + } + } + } + } + }, + "/{worldId}/edi/{transactionId}/requeue": { + post: { + tags: [ + "EDI" + ], + summary: "Requeue failed transaction", + operationId: "requeueEdiTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "transactionId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Transaction requeued", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean" + }, + status: { + type: "integer" + }, + data: { + $ref: "#/components/schemas/EdiTransaction" + } + } + } + } + } + } + } + } + }, + "/{worldId}/edi/statistics/invoice": { + get: { + tags: [ + "EDI" + ], + summary: "Get invoice processing statistics", + operationId: "getInvoiceStatistics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "dateStart", + in: "query", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + schema: { + type: "string", + format: "date-time" + } + } + ], + responses: { + "200": { + description: "Statistics retrieved", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean" + }, + status: { + type: "integer" + }, + data: { + type: "object", + properties: { + invoiceStatistics: { + type: "object", + properties: { + total: { + type: "integer" + }, + errored: { + type: "integer" + }, + rejectionRate: { + type: "number" + }, + totalExposureDollar: { + type: "number" + }, + firstPassAcceptanceRate: { + type: "number" + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/edi/statistics/errors": { + get: { + tags: [ + "EDI" + ], + summary: "Get error statistics by type or partner", + operationId: "getEdiErrorStatistics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "aggregationType", + in: "query", + required: true, + schema: { + type: "string", + enum: [ + "by-doctype", + "by-partners" + ], + example: "by-doctype" + } + }, + { + name: "dateStart", + in: "query", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + schema: { + type: "string", + format: "date-time" + } + } + ], + responses: { + "200": { + description: "Error statistics retrieved", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean" + }, + status: { + type: "integer" + }, + data: { + type: "object", + properties: { + ediErrorStats: { + type: "array", + items: { + type: "object", + properties: { + docType: { + type: "string" + }, + partnerId: { + type: "string" + }, + count: { + type: "integer" + } + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/edi/statistics/amount": { + get: { + tags: 
[ + "EDI" + ], + summary: "Get EDI dollar amount exposure statistics", + description: ` +## Get EDI Dollar Amount Exposure Statistics + +Retrieve aggregated dollar value statistics for EDI transactions, grouped by trading partner or document type. + +### Features +- **Partner Analysis**: View dollar exposure by trading partner +- **Document Type Analysis**: View dollar exposure by EDI document type +- **Date Filtering**: Filter statistics within a date range +- **Top Results**: Returns up to 100 aggregated results + +### Use Cases +- Financial exposure monitoring per partner +- Revenue analysis by document type +- Risk assessment for trading relationships +- Budget planning and forecasting + `, + operationId: "getEdiDollarAmountStatistics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "aggregationType", + in: "query", + required: true, + description: "How to aggregate the dollar amount statistics", + schema: { + type: "string", + enum: [ + "by-partners", + "by-document-type" + ], + example: "by-partners" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter from this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-01T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter until this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-12-31T23:59:59.999Z" + } + } + ], + responses: { + "200": { + description: "Dollar amount statistics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + description: "Response varies based on aggregationType parameter", + oneOf: [ + { + type: "object", + properties: { + ediDollarAmountExposureByPartners: { + type: "array", + description: "Dollar exposure aggregated by trading partner (when aggregationType=by-partners)", + items: { + type: "object", + properties: { + partnerId: { + type: "string", + description: "Trading partner identifier", + example: "PARTNER_WALMART_001" + }, + totalDollarAmount: { + type: "number", + description: "Total dollar value of EDI transactions for this partner", + example: 125000.50 + }, + transactionCount: { + type: "integer", + description: "Number of transactions for this partner", + example: 45 + } + } + } + } + } + }, + { + type: "object", + properties: { + ediDollarAmountExposureByDocumentType: { + type: "array", + description: "Dollar exposure aggregated by document type (when aggregationType=by-document-type)", + items: { + type: "object", + properties: { + docType: { + type: "string", + description: "EDI document type", + example: "810" + }, + totalDollarAmount: { + type: "number", + description: "Total dollar value of EDI transactions for this document type", + example: 250000.75 + }, + transactionCount: { + type: "integer", + description: "Number of transactions for this document type", + example: 120 + } + } + } + } + } + } + ] + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + examples: { + byPartners: { + summary: "Dollar exposure 
by partners", + value: { + success: true, + status: 200, + data: { + ediDollarAmountExposureByPartners: [ + { + partnerId: "PARTNER_WALMART_001", + totalDollarAmount: 125000.50, + transactionCount: 45 + }, + { + partnerId: "PARTNER_TARGET_002", + totalDollarAmount: 87500.25, + transactionCount: 32 + } + ] + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + byDocumentType: { + summary: "Dollar exposure by document type", + value: { + success: true, + status: 200, + data: { + ediDollarAmountExposureByDocumentType: [ + { + docType: "810", + totalDollarAmount: 250000.75, + transactionCount: 120 + }, + { + docType: "850", + totalDollarAmount: 180000.00, + transactionCount: 85 + } + ] + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid or missing aggregationType", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}/edi214": { + post: { + tags: [ + "TMS" + ], + summary: "Process EDI 214 status update", + description: "\n## Process EDI 214 Transportation Status Update\n\nProcess an incoming EDI 214 Transportation Status message to update shipment status and location.\n\n### Features\n- **EDI 214 Processing**: Parse and process standard EDI 214 messages\n- **Status Mapping**: Map EDI status codes to internal shipment statuses\n- **Location Updates**: Extract and update location information from EDI\n- **ETA Updates**: Update estimated delivery dates based on EDI data\n- **Raw Data Preservation**: Store complete raw EDI message for audit\n\n### EDI 214 Message Types\n- **Departure**: Shipment departed from location\n- **Arrival**: Shipment arrived at location \n- **In-Transit**: Shipment status update during transit\n- **Delivery**: Final delivery confirmation\n- **Exception**: Delays or problems during transit\n\n### Data Processing\n- Validates EDI message structure\n- Maps EDI status codes to shipment statuses\n- Updates location and ETA information\n- Creates corresponding status events\n- Preserves raw EDI for compliance\n ", + operationId: "processTMSEdi214Update", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP-2024-001234" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + description: "Mapped shipment status from EDI", + example: "IN_TRANSIT" + }, + locationCode: { + type: "string", + description: "EDI location code", + example: "MEM" + }, + city: { + type: "string", + description: "Location city from EDI", + example: "Memphis" + }, + state: { + type: "string", + description: "Location state from EDI", + example: "TN" + }, + timestamp: { + type: "string", + format: "date-time", + description: "EDI message timestamp", + example: "2024-11-26T14:30:00.000Z" + }, + equipmentId: { + type: "string", + description: "Truck or container identifier", + example: "TRK12345" + }, + estimatedDeliveryDate: { + type: "string", + format: "date-time", + description: "Updated estimated delivery date", + example: "2024-11-29T17:00:00.000Z" + }, + 
rawEdiData: { + type: "object", + description: "Complete raw EDI 214 message data", + example: { + ISA: "ISA*00* *00* *ZZ*CARRIER *ZZ*SHIPPER *241126*1430*U*00401*000000001*0*T*>", + GS: "GS*QM*CARRIER*SHIPPER*20241126*1430*1*X*004010", + ST: "ST*214*0001", + B10: "B10*SHIP-2024-001234*PRO123456789", + segments: "..." + } + } + }, + required: [ + "status", + "rawEdiData" + ] + } + } + } + }, + responses: { + "200": { + description: "EDI 214 processed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSShipment" + } + } + } + } + } + }, + "400": { + description: "Invalid EDI 214 data", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + status: { + type: "integer", + example: 400 + }, + error: { + type: "string", + example: "EDI data with status and rawEdiData are required" + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/erp.paths.ts b/packages/controlmart/src/docs/paths/erp.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..413386fea2539e41da029741c68b75d2d230207a --- /dev/null +++ b/packages/controlmart/src/docs/paths/erp.paths.ts @@ -0,0 +1,6323 @@ +export const erpPaths = { + "/{worldId}/erp/companies": { + post: { + tags: [ + "ERP" + ], + summary: "Create new ERP company", + description: "\nCreate a new ERP company record with comprehensive business information and operational configuration.\n\n**Core Features**:\n- **Company Registration**: Complete business entity setup with legal and operational details\n- **Auto-Generated IDs**: Automatic companyId generation via generateIdByService\n- **MPC Management**: Support for Main Player Company (MPC) designation with automatic exclusivity\n- **Multi-Address Support**: Billing, shipping, and remit-to address configuration\n- **Financial Configuration**: Credit limits, payment terms, and currency management\n\n**Use Cases**:\n- **Partner Onboarding**: Register new customers, suppliers, and business partners\n- **ERP Integration**: Create company records for ERP system synchronization\n- **B2B Network Setup**: Establish business relationships with comprehensive company data\n- **Financial Management**: Configure credit terms and payment relationships\n ", + operationId: "createERPCompany", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "name" + ], + properties: { + isMpcCompany: { + type: "boolean", + description: "Main Player Company designation (exclusive within world)", + default: false, + example: false + }, + companyId: { + type: "string", + description: "Optional custom company identifier (auto-generated if not provided)", + example: "COMP_ACME001" + }, + externalReference: { + type: "string", + description: "External system reference identifier", + example: "EXT_REF_12345" + }, + name: { + type: "string", + description: "Company name (required)", + example: "Acme Corporation" + }, + legalName: { + type: "string", + description: "Legal business name", + example: "Acme Corporation LLC" + }, + duns: { + type: 
"string", + description: "DUNS (Data Universal Numbering System) number", + example: "123456789" + }, + taxId: { + type: "string", + description: "Tax identification number", + example: "TAX123456789" + }, + taxRegistrationNumbers: { + type: "array", + description: "Country-specific tax registration numbers", + items: { + type: "object", + properties: { + country: { + type: "string", + example: "USA" + }, + number: { + type: "string", + example: "REG123456789" + } + } + } + }, + currency: { + type: "string", + description: "Primary operating currency", + default: "USD", + example: "USD" + }, + paymentTerms: { + type: "string", + description: "Payment terms and conditions", + example: "NET30" + }, + creditLimit: { + type: "number", + description: "Credit limit amount", + example: 100000 + }, + creditHold: { + type: "boolean", + description: "Credit hold status", + default: false, + example: false + }, + billingAddress: { + $ref: "#/components/schemas/Address" + }, + shippingAddress: { + $ref: "#/components/schemas/Address" + }, + remitTo: { + $ref: "#/components/schemas/Address" + }, + primaryContact: { + type: "object", + description: "Primary contact information", + properties: { + name: { + type: "string", + example: "John Smith" + }, + email: { + type: "string", + example: "john.smith@acme.com" + }, + phone: { + type: "string", + example: "+1-555-123-4567" + } + } + }, + salesOrg: { + type: "string", + description: "Sales organization code", + example: "US_EAST" + }, + priceList: { + type: "string", + description: "Price list identifier", + example: "STANDARD_RETAIL" + }, + glAccount: { + type: "string", + description: "General ledger account", + example: "1200-AR-TRADE" + }, + customerClass: { + type: "string", + description: "Customer classification", + example: "PREMIUM" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "PROSPECT", + "BLOCKED" + ], + description: "Company operational status", + default: "ACTIVE", + example: "ACTIVE" + }, + companyType: { + type: "string", + enum: [ + "CUSTOMER", + "SUPPLIER", + "PARTNER", + "INTERNAL" + ], + description: "Company relationship type", + default: "CUSTOMER", + example: "CUSTOMER" + }, + customFields: { + type: "object", + description: "Additional custom fields", + additionalProperties: true, + example: { + erpSource: "SAP", + regionCode: "US" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Company created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Company created successfully" + }, + data: { + $ref: "#/components/schemas/ERPCompany" + } + } + } + } + } + }, + "400": { + description: "Bad Request - validation error or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Company ID and name are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "ERP" + ], + summary: "Get all ERP companies", + description: "\nRetrieve all ERP companies with comprehensive filtering and pagination capabilities.\n\n**Core Features**:\n- **Advanced Filtering**: Filter by status, type, currency, and MPC designation\n- **Text Search**: Search by company name or DUNS number\n- **Paginated Results**: Cursor-based 
pagination for optimal performance\n- **Complete Business Data**: Returns full company profiles with addresses and contacts\n\n**Use Cases**:\n- **Partner Management**: View complete business partner network\n- **ERP Synchronization**: Bulk operations and system integration\n- **Financial Analysis**: Filter companies by currency and credit status\n- **Business Intelligence**: Comprehensive company data for reporting\n ", + operationId: "getAllERPCompanies", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by company status", + schema: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "PROSPECT", + "BLOCKED" + ], + example: "ACTIVE" + } + }, + { + name: "companyType", + in: "query", + required: false, + description: "Filter by company relationship type", + schema: { + type: "string", + enum: [ + "CUSTOMER", + "SUPPLIER", + "PARTNER", + "INTERNAL" + ], + example: "CUSTOMER" + } + }, + { + name: "currency", + in: "query", + required: false, + description: "Filter by operating currency", + schema: { + type: "string", + example: "USD" + } + }, + { + name: "isMpcCompany", + in: "query", + required: false, + description: "Filter by MPC (Main Player Company) status", + schema: { + type: "boolean", + example: false + } + }, + { + name: "search", + in: "query", + required: false, + description: "Search by company name or DUNS number", + schema: { + type: "string", + example: "Acme" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Companies retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Companies retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/ERPCompany" + } + }, + pagination: { + type: "object", + properties: { + limit: { + type: "integer", + example: 50 + }, + previousCursor: { + type: "string", + example: null + }, + totalCount: { + type: "integer", + example: 25 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + example: null + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/erp/companies/mpc": { + get: { + tags: [ + "ERP" + ], + summary: "Get Main Player Company (MPC)", + description: "\nRetrieve the designated Main Player Company (MPC) for the world environment.\n\n**Core Features**:\n- **MPC Identification**: Get the single MPC designated for the world\n- **Exclusive Access**: Returns the company marked with isMpcCompany=true\n- **World Scoping**: Ensures world-specific MPC isolation\n- **Complete Profile**: Returns full company data including addresses and contacts\n\n**Use Cases**:\n- **System Configuration**: Identify the primary company for world operations\n- **Business Rules**: Access MPC data for business logic and workflows\n- **Financial 
Operations**: Use MPC information for internal financial processes\n- **Integration Points**: Primary company reference for external system integration\n\n**Important**: Only one MPC can exist per world environment.\n ", + operationId: "getMpcERPCompany", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "MPC company retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "MPC company retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPCompany" + } + } + } + } + } + }, + "404": { + description: "Not Found - MPC company does not exist in this world", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "MPC company not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/companies/random": { + get: { + tags: [ + "ERP" + ], + summary: "Get random ERP company", + description: "\nRetrieve a random company from the world environment, with optional type filtering.\n\n**Core Features**:\n- **Random Selection**: Algorithmically random company selection\n- **Type Filtering**: Choose between MPC and NPC (Non-Player Company) types\n- **Test Data Support**: Ideal for testing and demonstration purposes\n- **World Scoping**: Random selection within specific world environment\n\n**Use Cases**:\n- **Testing & Development**: Generate random test data for development\n- **Demo Scenarios**: Create realistic demo scenarios with random companies\n- **Load Testing**: Use random company data for performance testing\n- **Data Sampling**: Statistical sampling of company data for analysis\n ", + operationId: "getRandomERPCompany", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "type", + in: "query", + required: false, + description: "Company type filter for random selection", + schema: { + type: "string", + enum: [ + "npc", + "mpc" + ], + default: "npc", + example: "npc" + } + } + ], + responses: { + "200": { + description: "Random company retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Random company retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPCompany" + } + } + } + } + } + }, + "404": { + description: "Not Found - no companies available for random selection", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "No npc companies found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/companies/bulk": { + post: { + tags: [ + "ERP" + ], + summary: "Bulk upsert ERP companies", + description: "\nBulk create or update multiple ERP companies in a single operation for 
efficient data management.\n\n**Core Features**:\n- **Bulk Operations**: Process multiple companies simultaneously\n- **Upsert Logic**: Create new or update existing companies based on companyId\n- **Performance Optimized**: Efficient bulk database operations\n- **Atomic Processing**: All operations succeed or fail together\n\n**Use Cases**:\n- **Data Migration**: Migrate company data from external systems\n- **ERP Synchronization**: Sync companies from ERP systems in bulk\n- **Initial Setup**: Bootstrap company data for new world environments\n- **Batch Updates**: Update multiple company records simultaneously\n ", + operationId: "bulkUpsertERPCompanies", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "companies" + ], + properties: { + companies: { + type: "array", + description: "Array of company objects to upsert", + items: { + type: "object", + description: "Company data (same as create company schema)", + properties: { + companyId: { + type: "string", + description: "Company identifier for upsert matching", + example: "COMP_ACME001" + }, + name: { + type: "string", + description: "Company name", + example: "Acme Corporation" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "PROSPECT", + "BLOCKED" + ], + example: "ACTIVE" + } + } + }, + minItems: 1, + example: [ + { + companyId: "COMP_ACME001", + name: "Acme Corporation", + status: "ACTIVE", + companyType: "CUSTOMER" + }, + { + companyId: "COMP_BETA002", + name: "Beta Industries", + status: "ACTIVE", + companyType: "SUPPLIER" + } + ] + } + } + } + } + } + }, + responses: { + "200": { + description: "Bulk upsert completed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Bulk upsert completed successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "2 companies processed successfully" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - invalid or missing companies data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Companies array is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/companies/{companyId}": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP company by ID", + description: "\nRetrieve specific ERP company by unique company identifier for detailed business information access.\n\n**Core Features**:\n- **Direct Access**: Get company by unique companyId\n- **Complete Profile**: Returns full company data including addresses and financial terms\n- **Fast Lookup**: Optimized query using indexed companyId field\n- **Business Intelligence**: Access comprehensive company business data\n\n**Use Cases**:\n- **Company Details**: Get complete company information for business operations\n- **Reference Resolution**: Resolve company references from orders and transactions\n- **Partner Management**: Access detailed partner information for relationship management\n- **Integration Support**: 
Direct API access for external system integration\n\n**⚠️ Minor Field Naming Note**: Model uses 'duns' field but route parameter is 'dunsNumber' for clarity.\n ", + operationId: "getERPCompanyById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "companyId", + in: "path", + required: true, + description: "Company unique identifier", + schema: { + type: "string", + example: "COMP_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Company retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Company retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPCompany" + } + } + } + } + } + }, + "404": { + description: "Not Found - company does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Company not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "ERP" + ], + summary: "Update ERP company", + description: "\nUpdate ERP company information with partial data for business relationship maintenance.\n\n**Core Features**:\n- **Partial Updates**: Update specific company fields without replacing entire record\n- **MPC Management**: Handle Main Player Company designation with automatic exclusivity\n- **Business Configuration**: Modify financial terms, addresses, and operational settings\n- **Validation**: Ensures data consistency and business rule compliance\n\n**Use Cases**:\n- **Profile Updates**: Update company contact information and addresses\n- **Financial Changes**: Modify credit limits, payment terms, and currency settings\n- **Status Management**: Change company status for operational control\n- **Relationship Updates**: Update company type and business relationship classification\n ", + operationId: "updateERPCompany", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "companyId", + in: "path", + required: true, + description: "Company unique identifier", + schema: { + type: "string", + example: "COMP_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + name: { + type: "string", + description: "Updated company name", + example: "Acme Corporation Enhanced" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "PROSPECT", + "BLOCKED" + ], + description: "Updated company status", + example: "INACTIVE" + }, + creditLimit: { + type: "number", + description: "Updated credit limit", + example: 150000 + }, + creditHold: { + type: "boolean", + description: "Updated credit hold status", + example: true + }, + primaryContact: { + type: "object", + description: "Updated primary contact information", + properties: { + name: { + type: "string", + example: "Jane Smith" + }, + email: { + type: "string", + example: "jane.smith@acme.com" + }, + phone: { + type: "string", + example: "+1-555-987-6543" + } + } + }, + 
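For readers wiring the company update endpoint into a client, here is a minimal sketch of a partial update against `PUT /{worldId}/erp/companies/{companyId}`. The helper name, base URL, and field values are illustrative assumptions; per the spec, only the fields being changed need to be sent.

```ts
// Hypothetical usage sketch for PUT /{worldId}/erp/companies/{companyId}.
// Only the fields being changed are included; all other company data is left untouched.
const BASE_URL = "http://localhost:7860"; // placeholder

async function updateCompanyCredit(worldId: string, companyId: string): Promise<unknown> {
  const res = await fetch(`${BASE_URL}/${worldId}/erp/companies/${companyId}`, {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      creditLimit: 150000, // raise the credit limit
      creditHold: false,   // release any credit hold
      status: "ACTIVE",
    }),
  });
  if (!res.ok) throw new Error(`Company update failed with status ${res.status}`);
  return res.json();
}
```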
billingAddress: { + $ref: "#/components/schemas/Address" + }, + customFields: { + type: "object", + description: "Updated custom fields", + additionalProperties: true + } + } + } + } + } + }, + responses: { + "200": { + description: "Company updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Company updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPCompany" + } + } + } + } + } + }, + "404": { + description: "Not Found - company does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Company not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + delete: { + tags: [ + "ERP" + ], + summary: "Delete ERP company", + description: "\nDelete ERP company record from the system for data cleanup and relationship management.\n\n**Core Features**:\n- **Complete Removal**: Permanently delete company record from database\n- **World Scoping**: Ensures deletion only within specified world environment\n- **Business Safety**: Validate deletion constraints before removal\n- **Audit Trail**: Deletion tracked through audit plugin\n\n**Use Cases**:\n- **Data Cleanup**: Remove obsolete or duplicate company records\n- **Relationship Termination**: Delete companies when business relationships end\n- **System Maintenance**: Clean up test or invalid company data\n- **Compliance**: Remove company data per data retention policies\n\n**Important**: Ensure no active business transactions reference this company before deletion.\n ", + operationId: "deleteERPCompany", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "companyId", + in: "path", + required: true, + description: "Company unique identifier", + schema: { + type: "string", + example: "COMP_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Company deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Company deleted successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "Company deleted successfully" + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - company does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Company not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/companies/duns/{dunsNumber}": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP company by DUNS number", + description: "\nRetrieve ERP company by DUNS (Data Universal Numbering System) number for business verification and integration.\n\n**Core Features**:\n- **DUNS Lookup**: Direct company access via standardized DUNS identifier\n- **Business Verification**: Validate company identity using DUNS number\n- **Integration Support**: External system integration 
using DUNS as key\n- **Credit Bureau Integration**: Support for credit bureau and financial service lookups\n\n**Use Cases**:\n- **Business Verification**: Verify company legitimacy using DUNS number\n- **Credit Checks**: Integrate with credit bureaus using DUNS identifier\n- **ERP Integration**: Lookup companies during ERP system data exchange\n- **Partner Validation**: Validate business partners using standard DUNS identifier\n\n**⚠️ Field Naming Note**: Model stores as 'duns' field, route parameter uses 'dunsNumber' for clarity.\n ", + operationId: "getERPCompanyByDunsNumber", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "dunsNumber", + in: "path", + required: true, + description: "DUNS (Data Universal Numbering System) number", + schema: { + type: "string", + example: "123456789" + } + } + ], + responses: { + "200": { + description: "Company retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Company retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPCompany" + } + } + } + } + } + }, + "404": { + description: "Not Found - company does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Company not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/products": { + post: { + tags: [ + "ERP" + ], + summary: "Create new ERP product", + description: "\nCreate a new ERP product record with comprehensive product information and operational configuration.\n\n**Core Features**:\n- **Product Registration**: Complete product setup with identification, pricing, and inventory details\n- **Auto-Generated IDs**: Automatic productId generation via generateIdByService\n- **Multi-Standard Support**: UPC and EAN barcode support for retail integration\n- **Inventory Management**: Configurable inventory tracking with dimensions and weight\n- **Pricing Configuration**: Currency-based pricing with cost tracking\n\n**Use Cases**:\n- **Product Catalog Setup**: Create comprehensive product catalogs for ERP integration\n- **Inventory Management**: Register products for warehouse and inventory tracking\n- **Pricing Management**: Set up product pricing for sales and financial operations\n- **Retail Integration**: Support retail operations with barcode and SKU management\n ", + operationId: "createERPProduct", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "name" + ], + properties: { + productId: { + type: "string", + description: "Optional custom product identifier (auto-generated if not provided)", + example: "PROD_WIDGET_001" + }, + upc: { + type: "string", + description: "Universal Product Code for retail identification", + example: "123456789012" + }, + ean: { + type: "string", + description: "European Article Number for international identification", + 
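As a companion to the product creation schema being defined here, the sketch below shows one way a client might call `POST /{worldId}/erp/products`. The base URL and sample values are assumptions; only `name` is required by the schema above.

```ts
// Hypothetical usage sketch for POST /{worldId}/erp/products.
// "name" is the only required field; the rest are optional illustrative values.
const BASE_URL = "http://localhost:7860"; // placeholder

async function createProduct(worldId: string): Promise<unknown> {
  const res = await fetch(`${BASE_URL}/${worldId}/erp/products`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      name: "Premium Widget",                    // required
      upc: "123456789012",
      unitOfMeasure: "EA",
      price: { currency: "USD", amount: 99.99 },
      cost: { currency: "USD", amount: 45.5 },
      inventoryTracking: true,
    }),
  });
  if (!res.ok) throw new Error(`Product creation failed with status ${res.status}`);
  return res.json();
}
```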
example: "1234567890123" + }, + name: { + type: "string", + description: "Product name (required)", + example: "Premium Widget" + }, + description: { + type: "string", + description: "Detailed product description", + example: "High-quality premium widget with enhanced features" + }, + commodityCode: { + type: "string", + description: "Commodity classification code for trade and customs", + example: "8421.21.0000" + }, + taxClassification: { + type: "string", + description: "Tax classification for accounting and compliance", + example: "TAXABLE_GOODS" + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure for product quantification", + default: "EA", + example: "EA" + }, + weight: { + type: "object", + description: "Product weight specification", + properties: { + value: { + type: "number", + example: 2.5 + }, + unit: { + type: "string", + example: "LB" + } + } + }, + dimensions: { + type: "object", + description: "Product dimensions for shipping and storage", + properties: { + length: { + type: "number", + example: 12.5 + }, + width: { + type: "number", + example: 8 + }, + height: { + type: "number", + example: 3.5 + }, + unit: { + type: "string", + example: "IN" + } + } + }, + inventoryTracking: { + type: "boolean", + description: "Enable inventory tracking for this product", + default: true, + example: true + }, + price: { + type: "object", + description: "Product selling price", + properties: { + currency: { + type: "string", + example: "USD" + }, + amount: { + type: "number", + example: 99.99 + } + } + }, + cost: { + type: "object", + description: "Product cost basis for margin calculations", + properties: { + currency: { + type: "string", + example: "USD" + }, + amount: { + type: "number", + example: 45.5 + } + } + }, + leadTimeDays: { + type: "number", + description: "Lead time in days for procurement or manufacturing", + example: 14 + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "DISCONTINUED" + ], + description: "Product lifecycle status", + default: "ACTIVE", + example: "ACTIVE" + }, + customFields: { + type: "object", + description: "Additional custom fields for product-specific data", + additionalProperties: true, + example: { + brand: "Premium Brand", + category: "Electronics" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Product created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Product created successfully" + }, + data: { + $ref: "#/components/schemas/ERPProduct" + } + } + } + } + } + }, + "400": { + description: "Bad Request - invalid product data or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "ERP" + ], + summary: "Get all ERP products", + description: "\nRetrieve all ERP products with comprehensive filtering and pagination capabilities.\n\n**Core Features**:\n- **Advanced Filtering**: Filter by status, inventory tracking, price range, and search text\n- **Text Search**: Search by product name and description\n- **Price Range Filtering**: Filter products within specific price ranges\n- **Paginated Results**: Cursor-based pagination for optimal 
performance\n- **Complete Product Data**: Returns full product profiles with pricing and specifications\n\n**Use Cases**:\n- **Product Catalog Management**: Browse complete product catalogs\n- **Inventory Planning**: Filter products by inventory tracking settings\n- **Pricing Analysis**: Filter products by price ranges for pricing strategies\n- **Search Operations**: Find products by name or description for quick lookup\n ", + operationId: "getAllERPProducts", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by product status", + schema: { + type: "string", + enum: [ + "ACTIVE", + "DISCONTINUED" + ], + example: "ACTIVE" + } + }, + { + name: "inventoryTracking", + in: "query", + required: false, + description: "Filter by inventory tracking enabled status", + schema: { + type: "boolean", + example: true + } + }, + { + name: "minPrice", + in: "query", + required: false, + description: "Minimum price filter", + schema: { + type: "number", + example: 10 + } + }, + { + name: "maxPrice", + in: "query", + required: false, + description: "Maximum price filter", + schema: { + type: "number", + example: 500 + } + }, + { + name: "searchText", + in: "query", + required: false, + description: "Search by product name or description", + schema: { + type: "string", + example: "widget" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Products retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Products retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/ERPProduct" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/erp/products/random": { + get: { + tags: [ + "ERP" + ], + summary: "Get random ERP product", + description: "\nRetrieve a random product from the world environment for testing and demonstration purposes.\n\n**Core Features**:\n- **Random Selection**: Algorithmically random product selection\n- **World Scoping**: Random selection within specific world environment\n- **Test Data Support**: Ideal for testing and demonstration scenarios\n- **Complete Product Data**: Returns full product profile with all specifications\n\n**Use Cases**:\n- **Testing & Development**: Generate random test data for development\n- **Demo Scenarios**: Create realistic demo scenarios with random products\n- **Load Testing**: Use random product data for performance testing\n- **Data Sampling**: Statistical sampling of product data for analysis\n ", + operationId: "getRandomERPProduct", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Random product 
retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Random product retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPProduct" + } + } + } + } + } + }, + "404": { + description: "Not Found - no products available for random selection", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "No products found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/products/bulk": { + post: { + tags: [ + "ERP" + ], + summary: "Bulk upsert ERP products", + description: "\nBulk create or update multiple ERP products in a single operation for efficient product catalog management.\n\n**Core Features**:\n- **Bulk Operations**: Process multiple products simultaneously\n- **Upsert Logic**: Create new or update existing products based on productId\n- **Performance Optimized**: Efficient bulk database operations\n- **Atomic Processing**: All operations succeed or fail together\n\n**Use Cases**:\n- **Data Migration**: Migrate product catalogs from external systems\n- **ERP Synchronization**: Sync product data from ERP systems in bulk\n- **Initial Setup**: Bootstrap product catalogs for new world environments\n- **Batch Updates**: Update multiple product records simultaneously\n ", + operationId: "bulkUpsertERPProducts", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "products" + ], + properties: { + products: { + type: "array", + description: "Array of product objects to upsert", + items: { + type: "object", + description: "Product data (same as create product schema)", + properties: { + productId: { + type: "string", + description: "Product identifier for upsert matching", + example: "PROD_WIDGET_001" + }, + name: { + type: "string", + description: "Product name", + example: "Premium Widget" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "DISCONTINUED" + ], + example: "ACTIVE" + } + } + }, + minItems: 1, + example: [ + { + productId: "PROD_WIDGET_001", + name: "Premium Widget", + status: "ACTIVE", + price: { + currency: "USD", + amount: 99.99 + } + }, + { + productId: "PROD_GADGET_002", + name: "Smart Gadget", + status: "ACTIVE", + price: { + currency: "USD", + amount: 149.99 + } + } + ] + } + } + } + } + } + }, + responses: { + "200": { + description: "Bulk upsert completed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Bulk upsert completed successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "2 products processed successfully" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - invalid or missing products data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: 
"Products array is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/products/{productId}": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP product by ID", + description: "\nRetrieve specific ERP product by product identifier for detailed product information access.\n\n**Core Features**:\n- **Direct Access**: Get product by unique productId identifier \n- **Complete Profile**: Returns full product data including pricing and specifications\n- **Fast Lookup**: Optimized query using indexed productId field\n- **Product Intelligence**: Access comprehensive product data for business operations\n\n**Use Cases**:\n- **Product Details**: Get complete product information for catalog operations\n- **Inventory Reference**: Resolve product references from inventory and orders\n- **Pricing Lookup**: Access current product pricing for sales operations\n- **Integration Support**: Direct API access for external system integration\n ", + operationId: "getERPProductById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "productId", + in: "path", + required: true, + description: "Unique product identifier", + schema: { + type: "string", + example: "PROD_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Product retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Product retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPProduct" + } + } + } + } + } + }, + "404": { + description: "Not Found - product does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Product not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "ERP" + ], + summary: "Update ERP product", + description: "\nUpdate ERP product information with partial data for product catalog maintenance.\n\n**Core Features**:\n- **Partial Updates**: Update specific product fields without replacing entire record\n- **Pricing Management**: Modify pricing and cost information\n- **Inventory Configuration**: Update inventory tracking and specifications\n- **Validation**: Ensures data consistency and business rule compliance\n\n**Use Cases**:\n- **Price Updates**: Modify product pricing and cost information\n- **Specification Changes**: Update product dimensions, weight, and descriptions\n- **Status Management**: Change product status for lifecycle management\n- **Catalog Maintenance**: Update product information for catalog management\n ", + operationId: "updateERPProduct", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "productId", + in: "path", + required: true, + description: "Unique product identifier", + schema: { + type: "string", + example: "PROD_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + 
properties: { + name: { + type: "string", + description: "Updated product name", + example: "Premium Widget Enhanced" + }, + description: { + type: "string", + description: "Updated product description", + example: "Enhanced premium widget with new features" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "DISCONTINUED" + ], + description: "Updated product status", + example: "DISCONTINUED" + }, + price: { + type: "object", + description: "Updated product selling price", + properties: { + currency: { + type: "string", + example: "USD" + }, + amount: { + type: "number", + example: 119.99 + } + } + }, + cost: { + type: "object", + description: "Updated product cost", + properties: { + currency: { + type: "string", + example: "USD" + }, + amount: { + type: "number", + example: 52.5 + } + } + }, + inventoryTracking: { + type: "boolean", + description: "Updated inventory tracking setting", + example: false + }, + leadTimeDays: { + type: "number", + description: "Updated lead time", + example: 21 + }, + customFields: { + type: "object", + description: "Updated custom fields", + additionalProperties: true + } + } + } + } + } + }, + responses: { + "200": { + description: "Product updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Product updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPProduct" + } + } + } + } + } + }, + "404": { + description: "Not Found - product does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Product not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + delete: { + tags: [ + "ERP" + ], + summary: "Delete ERP product", + description: "\nDelete ERP product record from the system for catalog cleanup and product lifecycle management.\n\n**Core Features**:\n- **Complete Removal**: Permanently delete product record from database\n- **World Scoping**: Ensures deletion only within specified world environment\n- **Business Safety**: Validate deletion constraints before removal\n- **Audit Trail**: Deletion tracked through audit plugin\n\n**Use Cases**:\n- **Catalog Cleanup**: Remove obsolete or duplicate product records\n- **Product Lifecycle**: Delete products that are no longer offered\n- **System Maintenance**: Clean up test or invalid product data\n- **Compliance**: Remove product data per data retention policies\n\n**Important**: Ensure no active transactions reference this product before deletion.\n ", + operationId: "deleteERPProduct", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "productId", + in: "path", + required: true, + description: "Unique product identifier", + schema: { + type: "string", + example: "PROD_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Product deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Product deleted successfully" + }, + data: { + type: 
"object", + properties: { + message: { + type: "string", + example: "Product deleted successfully" + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - product does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Product not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/orders": { + post: { + tags: [ + "ERP" + ], + summary: "Create new ERP purchase order", + description: "\nCreate a new ERP purchase order with comprehensive order information and line item details.\n\n**Core Features**:\n- **Purchase Order Creation**: Complete B2B order setup with customer/partner relationships\n- **Auto-Generated IDs**: Automatic orderId generation via generateIdByService\n- **Multi-Line Support**: Support for multiple line items with detailed pricing and scheduling\n- **Financial Calculations**: Automatic subtotal, tax, and total calculations\n- **Status Workflow**: Complete order lifecycle management from received to completed\n\n**Use Cases**:\n- **B2B Commerce**: Create purchase orders for customer and partner transactions\n- **Supply Chain Management**: Order products and materials from suppliers\n- **Financial Management**: Track order values and payment obligations\n- **Inventory Planning**: Generate purchase orders for inventory replenishment\n ", + operationId: "createERPOrder", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "customerId", + "orderDate", + "lines" + ], + properties: { + orderId: { + type: "string", + description: "Optional custom order identifier (auto-generated if not provided)", + example: "ORDER_507f1f77bcf86cd799439012" + }, + poType: { + type: "string", + enum: [ + "STANDARD", + "BLANKET", + "CONTRACT", + "DROP_SHIP" + ], + description: "Purchase order type for business logic", + default: "STANDARD", + example: "STANDARD" + }, + customerId: { + type: "string", + description: "Customer identifier (required)", + example: "CUST_507f1f77bcf86cd799439013" + }, + partnerId: { + type: "string", + description: "Partner identifier for B2B relationships", + example: "PARTNER_507f1f77bcf86cd799439014" + }, + orderDate: { + type: "string", + format: "date", + description: "Order placement date (required)", + example: "2024-01-15" + }, + requestedDate: { + type: "string", + format: "date", + description: "Requested delivery date", + example: "2024-01-25" + }, + dueDate: { + type: "string", + format: "date", + description: "Due date for order completion", + example: "2024-01-30" + }, + buyer: { + type: "object", + description: "Buyer information", + properties: { + id: { + type: "string", + example: "BUYER001" + }, + name: { + type: "string", + example: "John Smith" + } + } + }, + currency: { + type: "string", + description: "Order currency", + default: "USD", + example: "USD" + }, + subtotal: { + type: "number", + description: "Order subtotal before taxes and fees", + example: 1250 + }, + discounts: { + type: "array", + description: "Order-level discounts", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Volume discount" + }, + amount: { + type: "number", 
+ example: 50 + } + } + } + }, + totalAmount: { + type: "number", + description: "Total order amount including taxes and fees", + example: 1335 + }, + status: { + type: "string", + enum: [ + "RECEIVED", + "ACKED", + "IN_PROGRESS", + "PARTIALLY_SHIPPED", + "COMPLETED", + "CANCELLED" + ], + description: "Order processing status", + default: "RECEIVED", + example: "RECEIVED" + }, + lines: { + type: "array", + description: "Order line items (required)", + minItems: 1, + items: { + type: "object", + required: [ + "lineNumber", + "sku", + "quantityOrdered" + ], + properties: { + lineNumber: { + type: "number", + description: "Line item sequence number", + example: 1 + }, + sku: { + type: "string", + description: "Product SKU identifier", + example: "PROD_WIDGET_001" + }, + description: { + type: "string", + description: "Line item description", + example: "Premium Widget - Blue" + }, + quantityOrdered: { + type: "number", + description: "Quantity ordered", + example: 10 + }, + unitPrice: { + type: "number", + description: "Unit price per item", + example: 99.99 + }, + lineTotal: { + type: "number", + description: "Total line amount", + example: 999.9 + }, + promisedDate: { + type: "string", + format: "date", + description: "Promised delivery date for this line", + example: "2024-01-20" + } + } + } + }, + notes: { + type: "string", + description: "Order notes and special instructions", + example: "Please deliver to dock 3" + }, + customFields: { + type: "object", + description: "Additional order-specific fields", + additionalProperties: true, + example: { + salesRep: "JOHN_DOE", + priority: "HIGH" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Order created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Order created successfully" + }, + data: { + $ref: "#/components/schemas/ERPOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - invalid order data or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "ERP" + ], + summary: "Get all ERP orders", + description: "\nRetrieve all ERP orders with comprehensive filtering and pagination capabilities.\n\n**Core Features**:\n- **Advanced Filtering**: Filter by status, customer, partner, order date ranges\n- **Date Range Support**: Filter orders by order date and requested date ranges\n- **Customer/Partner Filtering**: Search orders by business relationships\n- **Paginated Results**: Cursor-based pagination for optimal performance\n- **Complete Order Data**: Returns full order profiles with line items\n\n**Use Cases**:\n- **Order Management**: Browse and manage customer orders\n- **Financial Reporting**: Filter orders by date ranges for financial analysis\n- **Customer Service**: Search orders by customer for support inquiries\n- **Business Intelligence**: Analyze order patterns and trends\n ", + operationId: "getAllERPOrders", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: 
"status", + in: "query", + required: false, + description: "Filter by order status", + schema: { + type: "string", + enum: [ + "RECEIVED", + "ACKED", + "IN_PROGRESS", + "PARTIALLY_SHIPPED", + "COMPLETED", + "CANCELLED" + ], + example: "IN_PROGRESS" + } + }, + { + name: "customerId", + in: "query", + required: false, + description: "Filter by customer identifier", + schema: { + type: "string", + example: "CUST_507f1f77bcf86cd799439013" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter by partner identifier", + schema: { + type: "string", + example: "PARTNER_507f1f77bcf86cd799439014" + } + }, + { + name: "orderId", + in: "query", + required: false, + description: "Filter by specific order identifier", + schema: { + type: "string", + example: "ORDER_507f1f77bcf86cd799439012" + } + }, + { + name: "orderdateStart", + in: "query", + required: false, + description: "Filter orders from this order date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "orderdateEnd", + in: "query", + required: false, + description: "Filter orders to this order date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + }, + { + name: "requesteddateStart", + in: "query", + required: false, + description: "Filter orders from this requested date", + schema: { + type: "string", + format: "date", + example: "2024-01-15" + } + }, + { + name: "requesteddateEnd", + in: "query", + required: false, + description: "Filter orders to this requested date", + schema: { + type: "string", + format: "date", + example: "2024-02-15" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Orders retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Orders retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/ERPOrder" + } + }, + pagination: { + type: "object", + properties: { + limit: { + type: "integer", + example: 50 + }, + previousCursor: { + type: "string", + example: null + }, + totalCount: { + type: "integer", + example: 75 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + example: "507f1f77bcf86cd799439020" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/erp/orders/{orderId}": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP order by ID", + description: "\nRetrieve specific ERP order by order identifier for detailed order information access.\n\n**Core Features**:\n- **Direct Access**: Get order by unique orderId identifier\n- **Complete Profile**: Returns full order data including line items and financial details\n- **Fast Lookup**: Optimized query using indexed orderId field\n- **Business Intelligence**: Access comprehensive order data for operations\n\n**Use Cases**:\n- **Order Details**: Get complete order information for business operations\n- **Customer Service**: Resolve order inquiries using order ID references\n- **Financial 
Operations**: Access order details for financial processing\n- **Integration Support**: Direct API access for external system integration\n ", + operationId: "getERPOrderById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Unique order identifier", + schema: { + type: "string", + example: "ORDER_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Order retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Order retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPOrder" + } + } + } + } + } + }, + "404": { + description: "Not Found - order does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Order not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "ERP" + ], + summary: "Update ERP order", + description: "\nUpdate ERP order information with partial data for order management and lifecycle processing.\n\n**Core Features**:\n- **Partial Updates**: Update specific order fields without replacing entire record\n- **Line Item Management**: Modify order line items and quantities\n- **Financial Updates**: Update pricing, discounts, and total calculations\n- **Status Management**: Control order workflow and processing state\n\n**Use Cases**:\n- **Order Changes**: Modify order details per customer requests\n- **Quantity Adjustments**: Update line item quantities and pricing\n- **Status Updates**: Manage order processing workflow\n- **Financial Corrections**: Adjust pricing and discount information\n ", + operationId: "updateERPOrder", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Unique order identifier", + schema: { + type: "string", + example: "ORDER_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + requestedDate: { + type: "string", + format: "date", + description: "Updated requested delivery date", + example: "2024-02-01" + }, + dueDate: { + type: "string", + format: "date", + description: "Updated due date", + example: "2024-02-05" + }, + subtotal: { + type: "number", + description: "Updated subtotal amount", + example: 1350 + }, + totalAmount: { + type: "number", + description: "Updated total amount", + example: 1450 + }, + status: { + type: "string", + enum: [ + "RECEIVED", + "ACKED", + "IN_PROGRESS", + "PARTIALLY_SHIPPED", + "COMPLETED", + "CANCELLED" + ], + description: "Updated order status", + example: "IN_PROGRESS" + }, + lines: { + type: "array", + description: "Updated order line items", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + example: 1 + }, + quantityOrdered: { + type: "number", + example: 15 + }, + unitPrice: { + type: "number", + example: 
89.99 + }, + lineTotal: { + type: "number", + example: 1349.85 + } + } + } + }, + notes: { + type: "string", + description: "Updated order notes", + example: "Delivery instructions updated" + }, + customFields: { + type: "object", + description: "Updated custom fields", + additionalProperties: true + } + } + } + } + } + }, + responses: { + "200": { + description: "Order updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Order updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPOrder" + } + } + } + } + } + }, + "404": { + description: "Not Found - order does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Order not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + delete: { + tags: [ + "ERP" + ], + summary: "Delete ERP order", + description: "\nDelete ERP order record from the system for order cleanup and lifecycle management.\n\n**Core Features**:\n- **Complete Removal**: Permanently delete order record from database\n- **World Scoping**: Ensures deletion only within specified world environment\n- **Business Safety**: Validate deletion constraints before removal\n- **Audit Trail**: Deletion tracked through audit plugin\n\n**Use Cases**:\n- **Order Cleanup**: Remove cancelled or obsolete orders\n- **Data Management**: Clean up test or duplicate order data\n- **System Maintenance**: Remove invalid order records\n- **Compliance**: Order deletion per data retention policies\n\n**Important**: Ensure order is in appropriate status for deletion before removing.\n ", + operationId: "deleteERPOrder", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Unique order identifier", + schema: { + type: "string", + example: "ORDER_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Order deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Order deleted successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "Order deleted successfully" + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - order does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Order not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/orders/{orderId}/status": { + put: { + tags: [ + "ERP" + ], + summary: "Update ERP order status", + description: "\nUpdate ERP order status for order lifecycle and workflow management.\n\n**Core Features**:\n- **Status Workflow**: Manage order progression through business states\n- **Dedicated Endpoint**: Specialized endpoint for status-only updates \n- **Business Logic**: Enforce business rules for status 
transitions\n- **Audit Tracking**: Complete audit trail of status changes\n\n**Use Cases**:\n- **Order Processing**: Move orders through fulfillment workflow\n- **Status Updates**: Update order status from external systems\n- **Workflow Management**: Control order processing state\n- **Integration Support**: Status updates from ERP and warehouse systems\n\n**Status Values**:\n- **RECEIVED**: Initial order received\n- **ACKED**: Order acknowledged \n- **IN_PROGRESS**: Order being processed\n- **PARTIALLY_SHIPPED**: Partial fulfillment\n- **COMPLETED**: Order fully completed\n- **CANCELLED**: Order cancelled\n ", + operationId: "updateERPOrderStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Unique order identifier", + schema: { + type: "string", + example: "ORDER_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "RECEIVED", + "ACKED", + "IN_PROGRESS", + "PARTIALLY_SHIPPED", + "COMPLETED", + "CANCELLED" + ], + description: "New order status", + example: "IN_PROGRESS" + } + } + } + } + } + }, + responses: { + "200": { + description: "Order status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Order status updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Status is required", + meta: { event: "order_status_update_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Order not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Order not found", + meta: { event: "order_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/invoices": { + post: { + tags: [ + "ERP" + ], + summary: "Create new ERP invoice", + description: "\nCreate a new ERP invoice with comprehensive billing information and line item details.\n\n**Core Features**:\n- **Invoice Generation**: Complete invoice setup with customer/partner relationships\n- **Auto-Generated IDs**: Automatic invoiceId generation via generateIdByService\n- **Multi-Line Support**: Support for multiple line items with detailed pricing and taxes\n- **Financial Calculations**: Automatic subtotal, tax, allowance, and charge calculations\n- **Multiple Types**: Support for standard, credit, debit, and correction invoices\n\n**Use Cases**:\n- **Billing Operations**: Generate invoices for customer transactions\n- **Financial Management**: Track invoice values and payment obligations\n- **Tax Compliance**: Generate invoices with proper tax calculations\n- **Credit Management**: Issue credit and debit memos for adjustments\n ", + operationId: "createERPInvoice", + parameters: [ + { 
+ name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "customerId", + "issueDate", + "totalAmount", + "lines" + ], + properties: { + invoiceId: { + type: "string", + description: "Optional custom invoice identifier (auto-generated if not provided)", + example: "INV_507f1f77bcf86cd799439012" + }, + invoiceType: { + type: "string", + enum: [ + "STANDARD", + "CREDIT", + "DEBIT", + "CORRECTION" + ], + description: "Invoice type for business processing", + default: "STANDARD", + example: "STANDARD" + }, + poNumber: { + type: "string", + description: "Related purchase order number", + example: "ORDER_507f1f77bcf86cd799439013" + }, + customerId: { + type: "string", + description: "Customer identifier (required)", + example: "CUST_507f1f77bcf86cd799439014" + }, + partnerId: { + type: "string", + description: "Partner identifier for B2B relationships", + example: "PARTNER_507f1f77bcf86cd799439015" + }, + billTo: { + $ref: "#/components/schemas/Address" + }, + remitTo: { + $ref: "#/components/schemas/Address" + }, + issueDate: { + type: "string", + format: "date", + description: "Invoice issue date (required)", + example: "2024-01-15" + }, + dueDate: { + type: "string", + format: "date", + description: "Payment due date", + example: "2024-02-15" + }, + currency: { + type: "string", + description: "Invoice currency", + default: "USD", + example: "USD" + }, + subtotal: { + type: "number", + description: "Invoice subtotal before adjustments", + example: 1000 + }, + allowances: { + type: "array", + description: "Invoice allowances and discounts", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Early payment discount" + }, + amount: { + type: "number", + example: 25 + } + } + } + }, + charges: { + type: "array", + description: "Additional charges and fees", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Shipping charge" + }, + amount: { + type: "number", + example: 15 + } + } + } + }, + totalAmount: { + type: "number", + description: "Total invoice amount including taxes and adjustments (required)", + example: 1080 + }, + balanceDue: { + type: "number", + description: "Outstanding balance due", + example: 1080 + }, + status: { + type: "string", + enum: [ + "DRAFT", + "SENT", + "VALIDATED", + "REJECTED", + "PAID", + "PARTIALLY_PAID" + ], + description: "Invoice processing status", + default: "DRAFT", + example: "DRAFT" + }, + lines: { + type: "array", + description: "Invoice line items (required)", + minItems: 1, + items: { + type: "object", + required: [ + "lineNumber", + "sku", + "quantity", + "unitPrice" + ], + properties: { + lineNumber: { + type: "number", + description: "Line item sequence number", + example: 1 + }, + sku: { + type: "string", + description: "Product SKU identifier", + example: "PROD_WIDGET_001" + }, + description: { + type: "string", + description: "Line item description", + example: "Premium Widget - Blue" + }, + quantity: { + type: "number", + description: "Invoiced quantity", + example: 10 + }, + unitPrice: { + type: "number", + description: "Unit price per item", + example: 99.99 + }, + lineAmount: { + type: "number", + description: "Total line amount", + example: 999.9 + } + } + } + }, + paymentTerms: { + type: "string", + description: 
"Payment terms and conditions", + example: "NET30" + }, + customFields: { + type: "object", + description: "Additional invoice-specific fields", + additionalProperties: true, + example: { + salesRep: "JANE_DOE", + region: "NORTHEAST" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Invoice created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Invoice created successfully" + }, + data: { + $ref: "#/components/schemas/ERPInvoice" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid invoice data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "invoice_creation_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "ERP" + ], + summary: "Get all ERP invoices", + description: "\nRetrieve all ERP invoices with comprehensive filtering and pagination capabilities.\n\n**Core Features**:\n- **Advanced Filtering**: Filter by status, customer, partner, and date ranges\n- **Date Range Support**: Filter invoices by issue date for financial reporting\n- **Customer/Partner Filtering**: Search invoices by business relationships\n- **Paginated Results**: Cursor-based pagination for optimal performance\n- **Financial Data**: Returns complete invoice data with line items and totals\n\n**Use Cases**:\n- **Accounts Receivable**: Manage outstanding invoices and payments\n- **Financial Reporting**: Filter invoices by date ranges for financial analysis\n- **Customer Service**: Search invoices by customer for support inquiries\n- **Business Intelligence**: Analyze invoice patterns and revenue trends\n ", + operationId: "getAllERPInvoices", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by invoice status", + schema: { + type: "string", + enum: [ + "DRAFT", + "SENT", + "VALIDATED", + "REJECTED", + "PAID", + "PARTIALLY_PAID" + ], + example: "SENT" + } + }, + { + name: "customerId", + in: "query", + required: false, + description: "Filter by customer identifier", + schema: { + type: "string", + example: "CUST_507f1f77bcf86cd799439014" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter by partner identifier", + schema: { + type: "string", + example: "PARTNER_507f1f77bcf86cd799439015" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter invoices from this issue date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter invoices to this issue date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + 
maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Invoices retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Invoices retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/ERPInvoice" + } + }, + pagination: { + type: "object", + properties: { + limit: { + type: "integer", + example: 50 + }, + previousCursor: { + type: "string", + example: null + }, + totalCount: { + type: "integer", + example: 89 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + example: "507f1f77bcf86cd799439025" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/erp/invoices/{invoiceId}": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP invoice by ID", + description: "\nRetrieve specific ERP invoice by invoice identifier for detailed billing information access.\n\n**Core Features**:\n- **Direct Access**: Get invoice by unique invoiceId identifier\n- **Complete Profile**: Returns full invoice data including line items and financial details\n- **Fast Lookup**: Optimized query using indexed invoiceId field\n- **Financial Intelligence**: Access comprehensive invoice data for accounting operations\n\n**Use Cases**:\n- **Invoice Details**: Get complete invoice information for billing operations\n- **Customer Service**: Resolve billing inquiries using invoice ID references\n- **Accounting Operations**: Access invoice details for financial processing\n- **Integration Support**: Direct API access for external accounting system integration\n ", + operationId: "getERPInvoiceById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "invoiceId", + in: "path", + required: true, + description: "Unique invoice identifier", + schema: { + type: "string", + example: "INV_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Invoice retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Invoice retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPInvoice" + } + } + } + } + } + }, + "404": { + description: "Not Found - invoice does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Invoice not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "ERP" + ], + summary: "Update ERP invoice", + description: "\nUpdate ERP invoice information with partial data for billing and financial management.\n\n**Core Features**:\n- **Partial Updates**: Update specific invoice fields without replacing entire record\n- **Line Item Management**: Modify invoice line items and pricing\n- **Financial Updates**: Update amounts, allowances, and charges\n- **Status Management**: Control invoice workflow and processing state\n\n**Use Cases**:\n- **Invoice Corrections**: Modify invoice details per customer or business 
requirements\n- **Amount Adjustments**: Update line item quantities, pricing, and totals\n- **Status Updates**: Manage invoice processing workflow\n- **Financial Corrections**: Adjust allowances, charges, and tax information\n ", + operationId: "updateERPInvoice", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "invoiceId", + in: "path", + required: true, + description: "Unique invoice identifier", + schema: { + type: "string", + example: "INV_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + dueDate: { + type: "string", + format: "date", + description: "Updated payment due date", + example: "2024-03-01" + }, + subtotal: { + type: "number", + description: "Updated subtotal amount", + example: 1100 + }, + totalAmount: { + type: "number", + description: "Updated total amount", + example: 1188 + }, + balanceDue: { + type: "number", + description: "Updated balance due", + example: 1188 + }, + status: { + type: "string", + enum: [ + "DRAFT", + "SENT", + "VALIDATED", + "REJECTED", + "PAID", + "PARTIALLY_PAID" + ], + description: "Updated invoice status", + example: "SENT" + }, + allowances: { + type: "array", + description: "Updated allowances and discounts", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Volume discount" + }, + amount: { + type: "number", + example: 50 + } + } + } + }, + charges: { + type: "array", + description: "Updated additional charges", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Express shipping" + }, + amount: { + type: "number", + example: 25 + } + } + } + }, + lines: { + type: "array", + description: "Updated invoice line items", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + example: 1 + }, + quantity: { + type: "number", + example: 12 + }, + unitPrice: { + type: "number", + example: 89.99 + }, + lineAmount: { + type: "number", + example: 1079.88 + } + } + } + }, + customFields: { + type: "object", + description: "Updated custom fields", + additionalProperties: true + } + } + } + } + } + }, + responses: { + "200": { + description: "Invoice updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Invoice updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPInvoice" + } + } + } + } + } + }, + "404": { + description: "Not Found - invoice does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Invoice not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + delete: { + tags: [ + "ERP" + ], + summary: "Delete ERP invoice", + description: "\nDelete ERP invoice record from the system for billing cleanup and financial management.\n\n**Core Features**:\n- **Complete Removal**: Permanently delete invoice record from database\n- **World Scoping**: Ensures deletion only within specified world environment\n- **Business Safety**: Validate deletion constraints before removal\n- **Audit Trail**: Deletion 
tracked through audit plugin\n\n**Use Cases**:\n- **Invoice Cleanup**: Remove cancelled or duplicate invoices\n- **Data Management**: Clean up test or erroneous invoice data\n- **System Maintenance**: Remove invalid invoice records\n- **Compliance**: Invoice deletion per data retention policies\n\n**Important**: Ensure invoice is in appropriate status for deletion and no payments are applied.\n ", + operationId: "deleteERPInvoice", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "invoiceId", + in: "path", + required: true, + description: "Unique invoice identifier", + schema: { + type: "string", + example: "INV_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Invoice deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Invoice deleted successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "Invoice deleted successfully" + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - Invoice not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Invoice not found", + meta: { event: "invoice_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/invoices/{invoiceId}/status": { + put: { + tags: [ + "ERP" + ], + summary: "Update ERP invoice status", + description: "\nUpdate ERP invoice status for billing lifecycle and workflow management.\n\n**Core Features**:\n- **Status Workflow**: Manage invoice progression through billing states\n- **Dedicated Endpoint**: Specialized endpoint for status-only updates\n- **Business Logic**: Enforce business rules for status transitions\n- **Audit Tracking**: Complete audit trail of status changes\n\n**Use Cases**:\n- **Billing Processing**: Move invoices through billing workflow\n- **Payment Processing**: Update status when payments are received\n- **Workflow Management**: Control invoice processing state\n- **Integration Support**: Status updates from payment and accounting systems\n\n**Status Values**:\n- **DRAFT**: Invoice in draft state\n- **SENT**: Invoice sent to customer\n- **VALIDATED**: Invoice validated and approved\n- **REJECTED**: Invoice rejected or disputed\n- **PAID**: Invoice fully paid\n- **PARTIALLY_PAID**: Invoice partially paid\n ", + operationId: "updateERPInvoiceStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "invoiceId", + in: "path", + required: true, + description: "Unique invoice identifier", + schema: { + type: "string", + example: "INV_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "DRAFT", + "SENT", + "VALIDATED", + "REJECTED", + "PAID", + "PARTIALLY_PAID" + ], + description: "New invoice status", + example: "SENT" + } + } + } + } + } + }, + responses: { + "200": { 
+ description: "Invoice status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Invoice status updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPInvoice" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Status is required", + meta: { event: "invoice_status_update_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Invoice not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Invoice not found", + meta: { event: "invoice_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments": { + post: { + tags: [ + "ERP" + ], + summary: "Create new ERP shipment", + description: "\nCreate a new ERP shipment with comprehensive logistics and tracking information.\n\n**Core Features**:\n- **Shipment Creation**: Complete shipment setup with carrier and destination details\n- **Auto-Generated IDs**: Automatic shipmentId generation via generateIdByService\n- **Multi-Line Support**: Support for multiple shipment line items with detailed product information\n- **Tracking Integration**: Built-in carrier tracking and status management\n- **Event Logging**: Comprehensive event and document management\n\n**Use Cases**:\n- **Logistics Management**: Create shipments for order fulfillment and distribution\n- **Carrier Integration**: Setup shipments with carrier information and tracking\n- **Supply Chain**: Track product movements between locations\n- **Customer Service**: Provide shipment visibility and status updates\n ", + operationId: "createERPShipment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "toAddress", + "lines" + ], + properties: { + shipmentId: { + type: "string", + description: "Optional custom shipment identifier (auto-generated if not provided)", + example: "SHIP_507f1f77bcf86cd799439012" + }, + poNumber: { + type: "string", + description: "Related purchase order number", + example: "ORDER_507f1f77bcf86cd799439013" + }, + carrier: { + type: "object", + description: "Carrier information", + properties: { + name: { + type: "string", + example: "FedEx" + }, + scac: { + type: "string", + example: "FDXE" + }, + mode: { + type: "string", + example: "Ground" + } + } + }, + trackingNumber: { + type: "string", + description: "Carrier tracking number", + example: "1Z999AA1234567890" + }, + shipDate: { + type: "string", + format: "date", + description: "Shipment date", + example: "2024-01-15" + }, + estimatedArrival: { + type: "string", + format: "date", + description: "Estimated arrival date", + example: "2024-01-17" + }, + status: { + type: "string", + enum: [ + "CREATED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ], + description: "Shipment status", + default: "CREATED", + example: "CREATED" + 
}, + fromAddress: { + $ref: "#/components/schemas/Address" + }, + toAddress: { + $ref: "#/components/schemas/Address" + }, + packaging: { + type: "object", + description: "Packaging information", + properties: { + palletCount: { + type: "number", + example: 2 + }, + totalPackages: { + type: "number", + example: 10 + }, + packagingType: { + type: "string", + example: "BOX" + } + } + }, + lines: { + type: "array", + description: "Shipment line items (required)", + minItems: 1, + items: { + type: "object", + required: [ + "lineNumber", + "sku", + "quantityShipped" + ], + properties: { + lineNumber: { + type: "number", + description: "Line item sequence number", + example: 1 + }, + sku: { + type: "string", + description: "Product SKU identifier", + example: "PROD_WIDGET_001" + }, + quantityShipped: { + type: "number", + description: "Quantity being shipped", + example: 10 + }, + quantityOrdered: { + type: "number", + description: "Original quantity ordered", + example: 15 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure", + example: "EA" + }, + lotNumber: { + type: "string", + description: "Lot or batch number", + example: "LOT_2024_001" + }, + serialNumbers: { + type: "array", + description: "Serial numbers for tracked items", + items: { + type: "string" + }, + example: [ + "SN001", + "SN002" + ] + }, + palletId: { + type: "string", + description: "Pallet identifier", + example: "PALLET_001" + }, + packageCount: { + type: "number", + description: "Number of packages", + example: 2 + }, + weight: { + type: "number", + description: "Total weight for line", + example: 25.5 + } + } + } + }, + flowId: { + type: "string", + description: "Business flow identifier", + example: "FLOW_OUTBOUND_001" + }, + customFields: { + type: "object", + description: "Additional shipment-specific fields", + additionalProperties: true, + example: { + expedited: true, + specialHandling: "FRAGILE" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Shipment created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Shipment created successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid shipment data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "shipment_creation_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "ERP" + ], + summary: "Get all ERP shipments", + description: "\nRetrieve all ERP shipments with comprehensive filtering and pagination capabilities.\n\n**Core Features**:\n- **Advanced Filtering**: Filter by status, carrier, shipment date ranges\n- **Product Filtering**: Search shipments by SKU within line items\n- **Date Range Support**: Filter shipments by ship date for logistics analysis\n- **Paginated Results**: Cursor-based pagination for optimal performance\n- **Complete Shipment Data**: Returns full shipment profiles with line items and tracking\n\n**Use Cases**:\n- **Logistics Management**: Browse and manage shipment operations\n- **Carrier Performance**: Filter shipments by carrier for performance analysis\n- **Customer Service**: Search shipments for customer inquiries\n- 
**Business Intelligence**: Analyze shipment patterns and logistics trends\n\n**🔴 Critical Filter Bug**: Repository filters by 'productId' but shipment model has 'sku' fields in line items!\n ", + operationId: "getAllERPShipments", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by shipment status", + schema: { + type: "string", + enum: [ + "CREATED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ], + example: "IN_TRANSIT" + } + }, + { + name: "productId", + in: "query", + required: false, + description: "⚠️ BROKEN FILTER: Repository attempts to filter by 'productId' but shipment model only has 'sku' fields in line items", + schema: { + type: "string", + example: "PROD_WIDGET_001" + } + }, + { + name: "carrierName", + in: "query", + required: false, + description: "Filter by carrier name (partial match)", + schema: { + type: "string", + example: "FedEx" + } + }, + { + name: "shipmentId", + in: "query", + required: false, + description: "Filter by specific shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter shipments from this ship date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter shipments to this ship date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Shipments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipments retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/ERPShipment" + } + }, + pagination: { + type: "object", + properties: { + limit: { + type: "integer", + example: 50 + }, + previousCursor: { + type: "string", + example: null + }, + totalCount: { + type: "integer", + example: 125 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + example: "507f1f77bcf86cd799439030" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments/bulk": { + post: { + tags: [ + "ERP" + ], + summary: "Bulk upsert ERP shipments", + description: "\nPerform bulk upsert operations on multiple ERP shipments for efficient data management.\n\n**Core Features**:\n- **Bulk Operations**: Process multiple shipments in a single transaction\n- **Upsert Logic**: Insert new shipments or update existing ones\n- **Performance Optimized**: Efficient bulk processing for large datasets\n- **Atomic Operations**: Ensures data consistency across bulk operations\n\n**Use Cases**:\n- **Data Migration**: Import large shipment datasets from external systems\n- **EDI 
Processing**: Bulk processing of EDI 856 shipment notices\n- **System Integration**: Synchronize shipments from multiple sources\n- **Performance**: High-volume shipment processing operations\n ", + operationId: "bulkUpsertERPShipments", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "shipments" + ], + properties: { + shipments: { + type: "array", + description: "Array of shipments to process", + minItems: 1, + maxItems: 1000, + items: { + type: "object", + properties: { + shipmentId: { + type: "string", + example: "SHIP_001" + }, + poNumber: { + type: "string", + example: "PO_001" + }, + carrier: { + type: "object", + properties: { + name: { + type: "string", + example: "UPS" + }, + scac: { + type: "string", + example: "UPGF" + } + } + }, + trackingNumber: { + type: "string", + example: "1Z999AA123456" + }, + status: { + type: "string", + enum: [ + "CREATED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ] + }, + toAddress: { + $ref: "#/components/schemas/Address" + }, + lines: { + type: "array", + items: { + type: "object", + properties: { + sku: { + type: "string" + }, + quantityShipped: { + type: "number" + } + } + } + } + } + } + } + } + } + } + } + }, + responses: { + "200": { + description: "Bulk operation completed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipments processed successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "150 shipments processed successfully" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid bulk data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Shipments array is required", + meta: { event: "bulk_shipment_creation_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments/{shipmentId}": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP shipment by ID", + description: "\nRetrieve specific ERP shipment by shipment ID for detailed logistics information access.\n\n**Core Features**:\n- **Direct Access**: Get shipment by unique shipment identifier\n- **Complete Profile**: Returns full shipment data including line items, tracking, and events\n- **Fast Lookup**: Optimized query using indexed shipmentId field\n- **Logistics Intelligence**: Access comprehensive shipment data for operations\n\n**Use Cases**:\n- **Shipment Details**: Get complete shipment information for logistics operations\n- **Customer Service**: Resolve shipping inquiries using shipment ID references\n- **Tracking Operations**: Access shipment details for tracking and updates\n- **Integration Support**: Direct API access for external logistics system integration\n ", + operationId: "getERPShipmentById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, 
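Because the bulk upsert endpoint above caps each request at 1000 shipments (`maxItems: 1000`), a client feeding it from an EDI 856 batch would typically chunk its input, as in this illustrative sketch (placeholder `BASE_URL`, no auth assumed):

```ts
// Hypothetical sketch: bulk-upsert shipments in chunks that respect maxItems: 1000.
const BASE_URL = "http://localhost:3000"; // placeholder

async function bulkUpsertShipments(worldId: string, shipments: unknown[]) {
  for (let i = 0; i < shipments.length; i += 1000) {
    const chunk = shipments.slice(i, i + 1000); // stay within the schema limit
    const res = await fetch(`${BASE_URL}/${worldId}/erp/shipments/bulk`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ shipments: chunk }),
    });
    const body = await res.json();
    if (!body.success) throw new Error(body.error ?? "Bulk upsert failed");
  }
}
```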
+ description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Shipment retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + } + } + } + } + } + }, + "404": { + description: "Not Found - shipment does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "ERP" + ], + summary: "Update ERP shipment", + description: "\nUpdate ERP shipment information with partial data for logistics and tracking management.\n\n**Core Features**:\n- **Partial Updates**: Update specific shipment fields without replacing entire record\n- **Logistics Management**: Modify carrier information, dates, and routing details\n- **Status Updates**: Update shipment status and tracking information\n- **Line Item Updates**: Modify shipment line items and quantities\n\n**Use Cases**:\n- **Shipment Changes**: Modify shipment details per logistics requirements\n- **Carrier Updates**: Update carrier information and tracking numbers\n- **Route Changes**: Modify addresses and delivery information\n- **Status Management**: Update shipment processing state\n\n**Important**: For status-only updates, use the dedicated status endpoint for better performance.\n ", + operationId: "updateERPShipment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + trackingNumber: { + type: "string", + description: "Updated tracking number", + example: "1Z999AA9876543210" + }, + estimatedArrival: { + type: "string", + format: "date", + description: "Updated estimated arrival date", + example: "2024-01-20" + }, + actualArrival: { + type: "string", + format: "date", + description: "Actual arrival date", + example: "2024-01-19" + }, + status: { + type: "string", + enum: [ + "CREATED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ], + description: "Updated shipment status", + example: "IN_TRANSIT" + }, + carrier: { + type: "object", + description: "Updated carrier information", + properties: { + name: { + type: "string", + example: "UPS" + }, + scac: { + type: "string", + example: "UPGF" + }, + mode: { + type: "string", + example: "Ground" + } + } + }, + packaging: { + type: "object", + description: "Updated packaging information", + properties: { + palletCount: { + type: "number", + example: 3 + }, + totalPackages: { + type: "number", + example: 12 + }, + packagingType: { + type: "string", + example: "PALLET" + } + } + }, + customFields: { + type: "object", + description: "Updated custom fields", + additionalProperties: true + } + } + } + } + } + }, + 
responses: { + "200": { + description: "Shipment updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + } + } + } + } + } + }, + "404": { + description: "Not Found - shipment does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + delete: { + tags: [ + "ERP" + ], + summary: "Delete ERP shipment", + description: "\nDelete ERP shipment record from the system for logistics cleanup and lifecycle management.\n\n**Core Features**:\n- **Complete Removal**: Permanently delete shipment record from database\n- **World Scoping**: Ensures deletion only within specified world environment\n- **Business Safety**: Validate deletion constraints before removal\n- **Audit Trail**: Deletion tracked through audit plugin\n\n**Use Cases**:\n- **Shipment Cleanup**: Remove cancelled or obsolete shipments\n- **Data Management**: Clean up test or duplicate shipment data\n- **System Maintenance**: Remove invalid shipment records\n- **Compliance**: Shipment deletion per data retention policies\n\n**Important**: Ensure shipment is in appropriate status for deletion before removing.\n ", + operationId: "deleteERPShipment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Shipment deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment deleted successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "Shipment deleted successfully" + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "shipment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments/{shipmentId}/status": { + put: { + tags: [ + "ERP" + ], + summary: "Update ERP shipment status", + description: "\nUpdate ERP shipment status for logistics lifecycle and workflow management.\n\n**Core Features**:\n- **Status Workflow**: Manage shipment progression through logistics states\n- **Dedicated Endpoint**: Specialized endpoint for status-only updates\n- **Business Logic**: Enforce business rules for status transitions\n- **Audit Tracking**: Complete audit trail of status changes\n\n**Use Cases**:\n- **Logistics Processing**: Move shipments through fulfillment workflow\n- **Carrier Updates**: Update status when carriers provide tracking 
updates\n- **Workflow Management**: Control shipment processing state\n- **Integration Support**: Status updates from carrier and logistics systems\n\n**Status Values**:\n- **CREATED**: Initial shipment created\n- **IN_TRANSIT**: Shipment in transit to destination\n- **DELIVERED**: Shipment successfully delivered\n- **EXCEPTION**: Shipment has delivery exception\n ", + operationId: "updateERPShipmentStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "CREATED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ], + description: "New shipment status", + example: "IN_TRANSIT" + } + } + } + } + } + }, + responses: { + "200": { + description: "Shipment status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment status updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Status is required", + meta: { event: "shipment_status_update_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "shipment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments/{shipmentId}/tracking": { + put: { + tags: [ + "ERP" + ], + summary: "Update ERP shipment tracking details", + description: "\nUpdate ERP shipment carrier and tracking information for enhanced logistics visibility.\n\n**Core Features**:\n- **Tracking Management**: Update carrier and tracking number information\n- **Carrier Integration**: Support for multiple carrier integrations\n- **Real-time Updates**: Update tracking details as information becomes available\n- **Visibility Enhancement**: Improve shipment tracking for all stakeholders\n\n**Use Cases**:\n- **Carrier Integration**: Update tracking when shipments are picked up by carriers\n- **Label Generation**: Add tracking numbers when shipping labels are generated\n- **Customer Service**: Provide tracking updates for customer inquiries\n- **Logistics Operations**: Maintain accurate tracking information for operations\n ", + operationId: "updateERPShipmentTracking", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + 
schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "carrier" + ], + properties: { + carrier: { + type: "object", + description: "Carrier information (required)", + required: [ + "name" + ], + properties: { + name: { + type: "string", + description: "Carrier name", + example: "FedEx" + }, + scac: { + type: "string", + description: "Standard Carrier Alpha Code", + example: "FDXE" + }, + mode: { + type: "string", + description: "Transportation mode", + example: "Ground" + } + } + }, + trackingNumber: { + type: "string", + description: "Carrier tracking number", + example: "1Z999AA1234567890" + } + } + } + } + } + }, + responses: { + "200": { + description: "Tracking details updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Tracking details updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid tracking data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Carrier information is required", + meta: { event: "shipment_tracking_update_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "shipment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments/{shipmentId}/events": { + post: { + tags: [ + "ERP" + ], + summary: "Add ERP shipment event", + description: "\nAdd tracking event to ERP shipment for comprehensive logistics monitoring and audit trail.\n\n**Core Features**:\n- **Event Tracking**: Add timestamped events for shipment monitoring\n- **Location Updates**: Track shipment movement through various locations\n- **Status Events**: Record status changes with detailed information\n- **Audit Trail**: Maintain complete event history for shipments\n\n**Use Cases**:\n- **Carrier Integration**: Add events from carrier tracking systems\n- **Logistics Monitoring**: Record shipment progress through distribution network\n- **Exception Management**: Log delivery exceptions and resolution actions\n- **Customer Service**: Provide detailed shipment tracking information\n ", + operationId: "addERPShipmentEvent", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "ts", + "location", + "status" + ], + properties: { + ts: { + type: "string", + format: "date-time", + description: "Event timestamp (required)", + example: "2024-01-15T14:30:00Z" + }, + location: { + type: "string", + 
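Per the tracking schema above, only `carrier.name` is required; `scac`, `mode`, and `trackingNumber` are optional extras. A hedged sketch of attaching tracking once a label exists (`BASE_URL` is a placeholder):

```ts
// Hypothetical sketch: record carrier and tracking number on a shipment.
const BASE_URL = "http://localhost:3000"; // placeholder

async function setTracking(worldId: string, shipmentId: string, trackingNumber: string) {
  const res = await fetch(`${BASE_URL}/${worldId}/erp/shipments/${shipmentId}/tracking`, {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      carrier: { name: "FedEx", scac: "FDXE", mode: "Ground" }, // only name is required
      trackingNumber,
    }),
  });
  return (await res.json()).data; // updated ERPShipment
}
```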
description: "Event location (required)", + example: "Memphis, TN Hub" + }, + status: { + type: "string", + description: "Event status description (required)", + example: "Package scanned at facility" + }, + note: { + type: "string", + description: "Optional event note", + example: "Package processed through automated sorting facility" + } + } + } + } + } + }, + responses: { + "200": { + description: "Event added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Event added successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid event data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Event with ts, location, and status are required", + meta: { event: "shipment_event_add_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "shipment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments/{shipmentId}/documents": { + post: { + tags: [ + "ERP" + ], + summary: "Add ERP shipment document", + description: "\nAdd document URL to ERP shipment for comprehensive documentation and compliance tracking.\n\n**Core Features**:\n- **Document Management**: Attach documents to shipments for reference\n- **URL Storage**: Store document URLs for bills of lading, labels, and certificates\n- **Compliance Support**: Maintain required shipping documentation\n- **Integration Ready**: Support for document management system integration\n\n**Use Cases**:\n- **Shipping Labels**: Attach shipping label URLs for carrier pickup\n- **Bills of Lading**: Store BOL documents for freight shipments\n- **Customs Documentation**: Attach customs forms for international shipments\n- **Compliance**: Maintain required shipping and regulatory documentation\n ", + operationId: "addERPShipmentDocument", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "documentUrl" + ], + properties: { + documentUrl: { + type: "string", + format: "url", + description: "URL to the document (required)", + example: "https://storage.example.com/documents/bill-of-lading-12345.pdf" + } + } + } + } + } + }, + responses: { + "200": { + description: "Document added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Document added successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + 
} + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid document data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "documentUrl is required", + meta: { event: "shipment_document_add_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "shipment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/shipments/{shipmentId}/lines": { + put: { + tags: [ + "ERP" + ], + summary: "Update ERP shipment lines", + description: "\nUpdate ERP shipment line items for comprehensive shipment content management.\n\n**Core Features**:\n- **Line Item Management**: Complete replacement of shipment line items\n- **Product Tracking**: Update SKU, quantities, and product information\n- **Serialization Support**: Manage serial numbers and lot tracking\n- **Packaging Details**: Update packaging and weight information\n\n**Use Cases**:\n- **Shipment Adjustments**: Modify quantities due to allocation changes\n- **Product Updates**: Update SKU information or product details\n- **Serialization**: Add or update serial numbers for tracked items\n- **Packaging Changes**: Modify packaging information for shipping requirements\n ", + operationId: "updateERPShipmentLines", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "lines" + ], + properties: { + lines: { + type: "array", + description: "Complete array of shipment line items (replaces existing)", + minItems: 1, + items: { + type: "object", + required: [ + "lineNumber", + "sku", + "quantityShipped" + ], + properties: { + lineNumber: { + type: "number", + description: "Line item sequence number", + example: 1 + }, + sku: { + type: "string", + description: "Product SKU identifier", + example: "PROD_WIDGET_001" + }, + quantityShipped: { + type: "number", + description: "Quantity being shipped", + example: 8 + }, + quantityOrdered: { + type: "number", + description: "Original quantity ordered", + example: 10 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure", + example: "EA" + }, + lotNumber: { + type: "string", + description: "Lot or batch number", + example: "LOT_2024_002" + }, + serialNumbers: { + type: "array", + description: "Serial numbers for tracked items", + items: { + type: "string" + }, + example: [ + "SN003", + "SN004", + "SN005" + ] + }, + palletId: { + type: "string", + description: "Pallet identifier", + example: "PALLET_002" + }, + packageCount: { + type: "number", + description: "Number of packages", + example: 1 + }, + weight: { + type: "number", + description: "Total weight for line", + example: 20.4 + }, + customFields: { + type: "object", + description: "Line-specific custom fields", + additionalProperties: true + } + } + } + } + } + 
} + } + } + }, + responses: { + "200": { + description: "Shipment lines updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment lines updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid lines data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Lines array is required", + meta: { event: "shipment_lines_update_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "shipment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/payments": { + post: { + tags: [ + "ERP" + ], + summary: "Create new ERP payment", + description: "\nCreate a new ERP payment with comprehensive financial and allocation information.\n\n**Core Features**:\n- **Payment Processing**: Complete payment setup with customer/partner relationships\n- **Auto-Generated IDs**: Automatic paymentId generation via generateIdByService\n- **Multi-Method Support**: Support for ACH, Wire, Check, Credit Card, and other payment methods\n- **Allocation Management**: Advanced payment allocation to invoices with discount tracking\n- **Bank Integration**: Comprehensive bank details and remittance information\n\n**Use Cases**:\n- **Accounts Receivable**: Process customer payments for outstanding invoices\n- **Financial Management**: Track payment values and allocation details\n- **Cash Application**: Apply payments to specific invoices with discount tracking\n- **Bank Reconciliation**: Manage bank details and payment method information\n ", + operationId: "createERPPayment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "customerId", + "paymentDate", + "totalAmount" + ], + properties: { + paymentId: { + type: "string", + description: "Optional custom payment identifier (auto-generated if not provided)", + example: "PAY_507f1f77bcf86cd799439012" + }, + remittanceId: { + type: "string", + description: "Remittance advice identifier", + example: "REM_507f1f77bcf86cd799439013" + }, + customerId: { + type: "string", + description: "Customer identifier (required)", + example: "CUST_507f1f77bcf86cd799439014" + }, + partnerId: { + type: "string", + description: "Partner identifier for B2B relationships", + example: "PARTNER_507f1f77bcf86cd799439015" + }, + paymentDate: { + type: "string", + format: "date", + description: "Payment date (required)", + example: "2024-01-15" + }, + currency: { + type: "string", + description: "Payment currency", + default: "USD", + example: "USD" + }, + totalAmount: { + type: "number", + description: "Total payment amount (required)", + example: 1500 + }, + method: { + type: "string", + enum: [ + "ACH", + "WIRE", + 
"CHECK", + "CREDIT_CARD", + "OTHER" + ], + description: "Payment method", + default: "ACH", + example: "ACH" + }, + bankDetails: { + type: "object", + description: "Banking information for payment processing", + properties: { + bankName: { + type: "string", + example: "Wells Fargo" + }, + accountNumber: { + type: "string", + example: "****1234" + }, + routingNumber: { + type: "string", + example: "121000248" + }, + swift: { + type: "string", + example: "WFBIUS6S" + } + } + }, + allocations: { + type: "array", + description: "Payment allocations to invoices", + items: { + type: "object", + required: [ + "invoiceNumber", + "appliedAmount" + ], + properties: { + invoiceNumber: { + type: "string", + description: "Invoice number for allocation", + example: "INV_507f1f77bcf86cd799439016" + }, + appliedAmount: { + type: "number", + description: "Amount applied to invoice", + example: 750 + }, + discountTaken: { + type: "number", + description: "Early payment discount taken", + example: 15 + }, + unappliedAmount: { + type: "number", + description: "Unapplied amount remaining", + example: 0 + }, + allocationMethod: { + type: "string", + description: "Method used for allocation", + example: "FIFO" + } + } + } + }, + status: { + type: "string", + enum: [ + "RECEIVED", + "APPLIED", + "UNMATCHED", + "REVERSAL" + ], + description: "Payment processing status", + default: "RECEIVED", + example: "RECEIVED" + }, + referenceNumbers: { + type: "array", + description: "Payment reference numbers", + items: { + type: "string" + }, + example: [ + "REF001", + "CHECK12345" + ] + }, + notes: { + type: "string", + description: "Payment notes and additional information", + example: "Customer payment for invoices INV_001 and INV_002" + }, + flowId: { + type: "string", + description: "Business flow identifier", + example: "FLOW_AR_PROCESSING" + }, + customFields: { + type: "object", + description: "Additional payment-specific fields", + additionalProperties: true, + example: { + processingFee: 5, + customerReference: "CUST_PAY_001" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Payment created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Payment created successfully" + }, + data: { + $ref: "#/components/schemas/ERPPayment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid payment data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "payment_creation_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "ERP" + ], + summary: "Get all ERP payments", + description: "\nRetrieve all ERP payments with comprehensive filtering and pagination capabilities.\n\n**Core Features**:\n- **Advanced Filtering**: Filter by status, customer, partner, and payment date ranges\n- **Date Range Support**: Filter payments by payment date for financial reporting\n- **Customer/Partner Filtering**: Search payments by business relationships\n- **Paginated Results**: Cursor-based pagination for optimal performance\n- **Complete Financial Data**: Returns full payment profiles with allocation details\n\n**Use Cases**:\n- **Accounts Receivable**: Browse and manage customer payments\n- **Financial Reporting**: 
Filter payments by date ranges for cash flow analysis\n- **Customer Service**: Search payments by customer for account inquiries\n- **Business Intelligence**: Analyze payment patterns and cash collection trends\n\n**🔴 Critical Filter Bug**: Repository filters by 'productId' using non-existent 'appliedProducts.productId' field!\n ", + operationId: "getAllERPPayments", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by payment status", + schema: { + type: "string", + enum: [ + "RECEIVED", + "APPLIED", + "UNMATCHED", + "REVERSAL" + ], + example: "APPLIED" + } + }, + { + name: "customerId", + in: "query", + required: false, + description: "Filter by customer identifier", + schema: { + type: "string", + example: "CUST_507f1f77bcf86cd799439014" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter by partner identifier", + schema: { + type: "string", + example: "PARTNER_507f1f77bcf86cd799439015" + } + }, + { + name: "productId", + in: "query", + required: false, + description: "⚠️ BROKEN FILTER: Repository attempts to query 'appliedProducts.productId' but payment model has no appliedProducts field", + schema: { + type: "string", + example: "PROD_WIDGET_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter payments from this payment date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter payments to this payment date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Payments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Payments retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/ERPPayment" + } + }, + pagination: { + type: "object", + properties: { + limit: { + type: "integer", + example: 50 + }, + previousCursor: { + type: "string", + example: null + }, + totalCount: { + type: "integer", + example: 89 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + example: "507f1f77bcf86cd799439025" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/erp/payments/{paymentId}": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP payment by ID", + description: "\nRetrieve specific ERP payment by payment ID for detailed financial information access.\n\n**Core Features**:\n- **Direct Access**: Get payment by unique payment identifier\n- **Complete Profile**: Returns full payment data including allocation details and bank information\n- **Fast Lookup**: Optimized query using indexed paymentId field\n- **Financial Intelligence**: 
Access comprehensive payment data for accounting operations\n\n**Use Cases**:\n- **Payment Details**: Get complete payment information for financial operations\n- **Customer Service**: Resolve payment inquiries using payment ID references\n- **Accounting Operations**: Access payment details for ledger and reconciliation\n- **Integration Support**: Direct API access for external accounting system integration\n ", + operationId: "getERPPaymentById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "paymentId", + in: "path", + required: true, + description: "Unique payment identifier", + schema: { + type: "string", + example: "PAY_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Payment retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Payment retrieved successfully" + }, + data: { + $ref: "#/components/schemas/ERPPayment" + } + } + } + } + } + }, + "404": { + description: "Not Found - payment does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Payment not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "ERP" + ], + summary: "Update ERP payment", + description: "\nUpdate ERP payment information with partial data for financial and allocation management.\n\n**Core Features**:\n- **Partial Updates**: Update specific payment fields without replacing entire record\n- **Financial Management**: Modify payment amounts, methods, and bank details\n- **Allocation Updates**: Update payment allocations and invoice applications\n- **Status Management**: Control payment processing workflow\n\n**Use Cases**:\n- **Payment Corrections**: Modify payment details per accounting requirements\n- **Allocation Adjustments**: Update invoice allocations and discount information\n- **Bank Updates**: Modify bank details and payment method information\n- **Status Management**: Update payment processing state\n\n**Important**: For status-only updates, use the dedicated status endpoint for better performance.\n ", + operationId: "updateERPPayment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "paymentId", + in: "path", + required: true, + description: "Unique payment identifier", + schema: { + type: "string", + example: "PAY_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + totalAmount: { + type: "number", + description: "Updated payment amount", + example: 1650 + }, + method: { + type: "string", + enum: [ + "ACH", + "WIRE", + "CHECK", + "CREDIT_CARD", + "OTHER" + ], + description: "Updated payment method", + example: "WIRE" + }, + bankDetails: { + type: "object", + description: "Updated banking information", + properties: { + bankName: { + type: "string", + example: "Chase Bank" + }, + accountNumber: { + type: "string", + example: "****5678" + }, + 
routingNumber: { + type: "string", + example: "021000021" + }, + swift: { + type: "string", + example: "CHASUS33" + } + } + }, + status: { + type: "string", + enum: [ + "RECEIVED", + "APPLIED", + "UNMATCHED", + "REVERSAL" + ], + description: "Updated payment status", + example: "APPLIED" + }, + referenceNumbers: { + type: "array", + description: "Updated reference numbers", + items: { + type: "string" + }, + example: [ + "REF002", + "WIRE98765" + ] + }, + notes: { + type: "string", + description: "Updated payment notes", + example: "Payment updated per customer request" + }, + customFields: { + type: "object", + description: "Updated custom fields", + additionalProperties: true + } + } + } + } + } + }, + responses: { + "200": { + description: "Payment updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Payment updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPPayment" + } + } + } + } + } + }, + "404": { + description: "Not Found - payment does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Payment not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + delete: { + tags: [ + "ERP" + ], + summary: "Delete ERP payment", + description: "\nDelete ERP payment record from the system for financial cleanup and accounting management.\n\n**Core Features**:\n- **Complete Removal**: Permanently delete payment record from database\n- **World Scoping**: Ensures deletion only within specified world environment\n- **Business Safety**: Validate deletion constraints before removal\n- **Audit Trail**: Deletion tracked through audit plugin\n\n**Use Cases**:\n- **Payment Cleanup**: Remove erroneous or duplicate payments\n- **Data Management**: Clean up test or invalid payment data\n- **System Maintenance**: Remove obsolete payment records\n- **Compliance**: Payment deletion per data retention policies\n\n**Important**: Ensure payment is not allocated to invoices before deletion.\n ", + operationId: "deleteERPPayment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "paymentId", + in: "path", + required: true, + description: "Unique payment identifier", + schema: { + type: "string", + example: "PAY_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Payment deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Payment deleted successfully" + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "Payment deleted successfully" + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - Payment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Payment not found", + meta: { event: "payment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + 
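+// --- Editorial usage sketch (not part of the generated spec; `baseUrl` and the bare fetch wrapper are assumptions) ---
+// A client call against the payment-by-id endpoints documented above might look like this; the request
+// fields (method, status, totalAmount) and the { success, status, message, data } envelope come from the
+// schemas defined in this file:
+//
+//   const res = await fetch(`${baseUrl}/${worldId}/erp/payments/${paymentId}`, {
+//     method: "PUT",
+//     headers: { "Content-Type": "application/json" },
+//     body: JSON.stringify({ method: "WIRE", status: "APPLIED", totalAmount: 1650 })
+//   });
+//   const { success, data } = await res.json(); // standard envelope: { success, status, message, data }
+//
+// For status-only changes, the dedicated /status endpoint defined below is preferred, as noted in the
+// update description above.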
"/{worldId}/erp/payments/{paymentId}/status": { + put: { + tags: [ + "ERP" + ], + summary: "Update ERP payment status", + description: "\nUpdate ERP payment status for financial lifecycle and workflow management.\n\n**Core Features**:\n- **Status Workflow**: Manage payment progression through financial processing states\n- **Dedicated Endpoint**: Specialized endpoint for status-only updates\n- **Business Logic**: Enforce business rules for status transitions\n- **Audit Tracking**: Complete audit trail of status changes\n\n**Use Cases**:\n- **Payment Processing**: Move payments through accounting workflow\n- **Cash Application**: Update status when payments are applied to invoices\n- **Workflow Management**: Control payment processing state\n- **Integration Support**: Status updates from banking and accounting systems\n\n**Status Values**:\n- **RECEIVED**: Payment initially received\n- **APPLIED**: Payment applied to invoices\n- **UNMATCHED**: Payment without matching invoices\n- **REVERSAL**: Payment reversed or cancelled\n ", + operationId: "updateERPPaymentStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "paymentId", + in: "path", + required: true, + description: "Unique payment identifier", + schema: { + type: "string", + example: "PAY_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "RECEIVED", + "APPLIED", + "UNMATCHED", + "REVERSAL" + ], + description: "New payment status", + example: "APPLIED" + } + } + } + } + } + }, + responses: { + "200": { + description: "Payment status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Payment status updated successfully" + }, + data: { + $ref: "#/components/schemas/ERPPayment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Status is required", + meta: { event: "payment_status_update_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Payment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Payment not found", + meta: { event: "payment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/payments/{paymentId}/allocations": { + put: { + tags: [ + "ERP" + ], + summary: "Apply ERP payment allocations", + description: "\nApply payment allocations to invoices for comprehensive cash application and accounts receivable management.\n\n**Core Features**:\n- **Cash Application**: Apply payments to specific invoices with precise allocation tracking\n- **Discount Management**: Track early payment discounts and adjustments\n- **Multiple Allocations**: Support for splitting payments across multiple invoices\n- **Business Validation**: Ensure allocation amounts do not exceed payment totals\n- 
**Automatic Status Updates**: Updates payment status to APPLIED when allocations are applied\n\n**Use Cases**:\n- **Accounts Receivable**: Apply customer payments to outstanding invoices\n- **Cash Management**: Allocate payments across multiple invoices for customers\n- **Discount Processing**: Apply early payment discounts and calculate net amounts\n- **Financial Reconciliation**: Match payments with invoices for accurate accounting\n\n**Important**: Total allocation amounts cannot exceed the payment total amount.\n ", + operationId: "applyERPPaymentAllocations", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "paymentId", + in: "path", + required: true, + description: "Unique payment identifier", + schema: { + type: "string", + example: "PAY_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "allocations" + ], + properties: { + allocations: { + type: "array", + description: "Array of payment allocations to apply", + minItems: 1, + items: { + type: "object", + required: [ + "invoiceNumber", + "appliedAmount" + ], + properties: { + invoiceNumber: { + type: "string", + description: "Invoice number for allocation (required)", + example: "INV_507f1f77bcf86cd799439016" + }, + appliedAmount: { + type: "number", + description: "Amount to apply to invoice (required)", + minimum: 0.01, + example: 750 + }, + discountTaken: { + type: "number", + description: "Early payment discount amount", + minimum: 0, + example: 15 + }, + unappliedAmount: { + type: "number", + description: "Amount remaining unapplied", + minimum: 0, + example: 0 + }, + allocationMethod: { + type: "string", + description: "Method used for allocation", + enum: [ + "FIFO", + "LIFO", + "MANUAL", + "AUTOMATIC" + ], + example: "FIFO" + } + } + } + } + } + } + } + } + }, + responses: { + "200": { + description: "Payment allocations applied successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Payment allocations applied successfully" + }, + data: { + $ref: "#/components/schemas/ERPPayment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid allocation data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Applied amount exceeds payment total", + meta: { event: "payment_allocation_failed", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Payment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Payment not found", + meta: { event: "payment_not_found", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/erp/operations-dashboard": { + get: { + tags: [ + "ERP" + ], + summary: "Get ERP operations dashboard metrics", + description: ` +Retrieve aggregated metrics for the ERP Command Center dashboard. 
+ +**Core Features**: +- **Order Metrics**: Purchase orders and sales orders statistics by status, value, and recent activity +- **Invoice Metrics**: Total invoices, outstanding balance, overdue counts, and monthly paid +- **Company Metrics**: Customer and supplier counts by type and status +- **Product Metrics**: Active and discontinued product counts + +**Use Cases**: +- **Executive Dashboard**: High-level KPIs for operations monitoring +- **Performance Tracking**: Real-time visibility into order and invoice processing +- **Business Intelligence**: Aggregated data for reporting and analytics + `, + operationId: "getERPOperationsDashboard", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Dashboard metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/ERPOperationsDashboard" + } + } + } + } + } + }, + "400": { + description: "Bad Request - worldId is required", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "validation_error", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/finance.paths.ts b/packages/controlmart/src/docs/paths/finance.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..84dd1fcef67e1eb6af2a1c744a5481cf68e9d0a5 --- /dev/null +++ b/packages/controlmart/src/docs/paths/finance.paths.ts @@ -0,0 +1,1414 @@ +export const financePaths = { + "/{worldId}/finance/transactions": { + post: { + tags: [ + "Finance" + ], + summary: "Create new finance transaction", + description: "\nCreate a new finance transaction for comprehensive financial tracking and accounting operations.\n\n**Core Features**:\n- **Dual Direction**: Support for both incoming (payment_in) and outgoing (payment_out) transactions\n- **Source Tracking**: Link transactions to invoices, bills, manual entries, interest, or payments\n- **Partner Integration**: Associate transactions with business partners for relationship tracking\n- **Auto-Generated IDs**: Automatic transactionId generation via generateIdByService\n- **Flexible Metadata**: Store additional transaction-specific data in metadata field\n\n**Use Cases**:\n- **Accounts Receivable**: Record customer payments and receipts\n- **Accounts Payable**: Track vendor payments and disbursements\n- **Cash Management**: Monitor cash flow and transaction volumes\n- **Financial Integration**: Link with ERP invoices and payment systems\n ", + operationId: "createFinanceTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "type", + "amount", + "sourceType", + "sourceId" + ], + properties: { + transactionId: { + type: "string", + description: "Optional custom transaction identifier (auto-generated if not provided)", + example: 
"TRANS_507f1f77bcf86cd799439012" + }, + partnerId: { + type: "string", + description: "Partner identifier for transaction association", + example: "PARTNER_507f1f77bcf86cd799439013" + }, + type: { + type: "string", + enum: [ + "payment_in", + "payment_out" + ], + description: "Transaction direction (required)", + example: "payment_in" + }, + amount: { + type: "number", + description: "Transaction amount - must be greater than 0 (required)", + minimum: 0.01, + example: 1500 + }, + sourceType: { + type: "string", + enum: [ + "invoice", + "bill", + "manual", + "interest", + "payment" + ], + description: "Source document type (required)", + example: "invoice" + }, + sourceId: { + type: "string", + description: "Source document identifier (required)", + example: "INV_507f1f77bcf86cd799439014" + }, + metadata: { + type: "object", + description: "Additional transaction-specific data", + additionalProperties: true, + example: { + paymentMethod: "ACH", + bankReference: "REF123456", + customerReference: "CUST_PAY_001" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Finance transaction created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Transaction created successfully" + }, + data: { + $ref: "#/components/schemas/FinanceTransaction" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid transaction data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Missing required fields: type, amount, sourceType, and sourceId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "Finance" + ], + summary: "Get all finance transactions", + description: "\nRetrieve all finance transactions with comprehensive filtering capabilities for financial analysis and reporting.\n\n**Core Features**:\n- **Advanced Filtering**: Filter by partner, transaction type, source type, amount ranges, and date ranges\n- **Full-Text Search**: Search within transaction metadata for flexible querying\n- **Paginated Results**: Cursor-based pagination for efficient data retrieval\n- **Financial Analytics**: Support for business intelligence and reporting needs\n\n**Use Cases**:\n- **Financial Reporting**: Generate comprehensive transaction reports with filtering\n- **Cash Flow Analysis**: Track payment patterns and transaction volumes\n- **Partner Analytics**: Analyze transaction history by business partner\n- **Audit Trails**: Review transaction history for compliance and auditing\n ", + operationId: "getAllFinanceTransactions", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter by partner identifier", + schema: { + type: "string", + example: "PARTNER_507f1f77bcf86cd799439013" + } + }, + { + name: "type", + in: "query", + required: false, + description: "Filter by transaction direction", + schema: { + type: "string", + enum: [ + "payment_in", + "payment_out" + ], + example: "payment_in" + } + }, + { + name: "sourceType", + in: "query", + required: false, + description: "Filter 
by source document type", + schema: { + type: "string", + enum: [ + "invoice", + "bill", + "manual", + "interest", + "payment" + ], + example: "invoice" + } + }, + { + name: "sourceId", + in: "query", + required: false, + description: "Filter by source document identifier", + schema: { + type: "string", + example: "INV_507f1f77bcf86cd799439014" + } + }, + { + name: "amountMin", + in: "query", + required: false, + description: "Filter transactions with amount greater than or equal to this value", + schema: { + type: "number", + example: 100 + } + }, + { + name: "amountMax", + in: "query", + required: false, + description: "Filter transactions with amount less than or equal to this value", + schema: { + type: "number", + example: 5000 + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter transactions from this date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter transactions to this date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + }, + { + name: "search", + in: "query", + required: false, + description: "Full-text search within transaction data", + schema: { + type: "string", + example: "ACH payment" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439015" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 1000, + default: 20, + example: 50 + } + } + ], + responses: { + "200": { + description: "Finance transactions retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Transactions retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/FinanceTransaction" + } + }, + pagination: { + type: "object", + properties: { + limit: { + type: "integer", + example: 50 + }, + previousCursor: { + type: "string", + example: null + }, + totalCount: { + type: "integer", + example: 125 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + example: "507f1f77bcf86cd799439025" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/finance/transactions/bulk": { + post: { + tags: [ + "Finance" + ], + summary: "Bulk insert finance transactions", + description: "\nBulk insert multiple finance transactions for efficient batch processing and data migration.\n\n**Core Features**:\n- **Batch Processing**: Insert multiple transactions in a single operation for performance\n- **Validation**: Each transaction validated according to business rules\n- **Auto-Generated IDs**: Automatic transactionId generation for transactions without IDs\n- **Atomic Operation**: Entire batch processed as single database operation\n- **Detailed Results**: Returns count of successfully inserted transactions\n\n**Use Cases**:\n- **Data Migration**: Import financial data from external systems\n- **Batch Processing**: Process large volumes of transactions efficiently\n- **Integration**: Synchronize data from accounting systems\n- **Historical Import**: Load transaction history for new implementations\n ", + operationId: 
"bulkInsertFinanceTransactions", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "transactions" + ], + properties: { + transactions: { + type: "array", + description: "Array of finance transactions to insert", + minItems: 1, + items: { + type: "object", + required: [ + "type", + "amount", + "sourceType", + "sourceId" + ], + properties: { + transactionId: { + type: "string", + description: "Optional transaction identifier", + example: "TRANS_507f1f77bcf86cd799439012" + }, + partnerId: { + type: "string", + description: "Partner identifier", + example: "PARTNER_507f1f77bcf86cd799439013" + }, + type: { + type: "string", + enum: [ + "payment_in", + "payment_out" + ], + description: "Transaction direction", + example: "payment_in" + }, + amount: { + type: "number", + description: "Transaction amount", + minimum: 0.01, + example: 1500 + }, + sourceType: { + type: "string", + enum: [ + "invoice", + "bill", + "manual", + "interest", + "payment" + ], + description: "Source document type", + example: "invoice" + }, + sourceId: { + type: "string", + description: "Source document identifier", + example: "INV_507f1f77bcf86cd799439014" + }, + metadata: { + type: "object", + description: "Additional transaction data", + additionalProperties: true + } + } + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Transactions bulk inserted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Bulk insert completed successfully" + }, + data: { + type: "object", + properties: { + insertedCount: { + type: "integer", + example: 25 + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid transactions array provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "transactions array is required and cannot be empty", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/finance/transactions/{transactionId}": { + get: { + tags: [ + "Finance" + ], + summary: "Get finance transaction by ID", + description: "\nRetrieve specific finance transaction by transaction ID for detailed financial information access.\n\n**Core Features**:\n- **Direct Access**: Get transaction by unique transaction identifier\n- **Complete Profile**: Returns full transaction data including metadata and timestamps\n- **Fast Lookup**: Optimized query using indexed transactionId field\n- **Financial Intelligence**: Access comprehensive transaction data for accounting operations\n\n**Use Cases**:\n- **Transaction Details**: Get complete transaction information for financial operations\n- **Customer Service**: Resolve payment inquiries using transaction ID references\n- **Accounting Operations**: Access transaction details for ledger and reconciliation\n- **Integration Support**: Direct API access for external financial system integration\n ", + operationId: "getFinanceTransactionById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique 
identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "transactionId", + in: "path", + required: true, + description: "Unique transaction identifier", + schema: { + type: "string", + example: "TRANS_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Finance transaction retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Transaction retrieved successfully" + }, + data: { + $ref: "#/components/schemas/FinanceTransaction" + } + } + } + } + } + }, + "404": { + description: "Not Found - Finance transaction not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Finance Transaction TRANS_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + patch: { + tags: [ + "Finance" + ], + summary: "Update finance transaction", + description: "\nUpdate finance transaction information with partial data for financial corrections and adjustments.\n\n**Core Features**:\n- **Partial Updates**: Update specific transaction fields without replacing entire record\n- **Financial Management**: Modify transaction amounts, types, and source associations\n- **Metadata Updates**: Update transaction-specific metadata for enhanced tracking\n- **Validation**: Ensure updated data meets business rules and constraints\n\n**Use Cases**:\n- **Transaction Corrections**: Modify transaction details per accounting requirements\n- **Amount Adjustments**: Update transaction amounts for corrections or adjustments\n- **Source Updates**: Change source document associations for proper linking\n- **Metadata Enhancement**: Add or update transaction metadata for improved tracking\n\n**Important**: Core transaction type and source validation rules apply to updates.\n ", + operationId: "updateFinanceTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "transactionId", + in: "path", + required: true, + description: "Unique transaction identifier", + schema: { + type: "string", + example: "TRANS_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + partnerId: { + type: "string", + description: "Updated partner identifier", + example: "PARTNER_507f1f77bcf86cd799439015" + }, + type: { + type: "string", + enum: [ + "payment_in", + "payment_out" + ], + description: "Updated transaction direction", + example: "payment_out" + }, + amount: { + type: "number", + description: "Updated transaction amount", + minimum: 0.01, + example: 1650 + }, + sourceType: { + type: "string", + enum: [ + "invoice", + "bill", + "manual", + "interest", + "payment" + ], + description: "Updated source document type", + example: "bill" + }, + sourceId: { + type: "string", + description: "Updated source document identifier", + example: "BILL_507f1f77bcf86cd799439016" + }, + metadata: { + type: "object", + description: "Updated transaction metadata", + additionalProperties: true, + example: { + paymentMethod: 
"WIRE", + correctionReason: "Amount adjustment per accounting review" + } + } + } + } + } + } + }, + responses: { + "200": { + description: "Finance transaction updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Transaction updated successfully" + }, + data: { + $ref: "#/components/schemas/FinanceTransaction" + } + } + } + } + } + }, + "404": { + description: "Not Found - Finance transaction not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Finance Transaction TRANS_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + delete: { + tags: [ + "Finance" + ], + summary: "Delete finance transaction", + description: "\nDelete finance transaction record from the system for financial cleanup and data management.\n\n**Core Features**:\n- **Complete Removal**: Permanently delete transaction record from database\n- **World Scoping**: Ensures deletion only within specified world environment\n- **Business Safety**: Validate deletion constraints before removal\n- **Confirmation Response**: Clear confirmation of deletion success or failure\n\n**Use Cases**:\n- **Transaction Cleanup**: Remove erroneous or duplicate transactions\n- **Data Management**: Clean up test or invalid transaction data\n- **System Maintenance**: Remove obsolete transaction records\n- **Compliance**: Transaction deletion per data retention policies\n\n**Important**: Ensure transaction is not referenced by other financial records before deletion.\n ", + operationId: "deleteFinanceTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "transactionId", + in: "path", + required: true, + description: "Unique transaction identifier", + schema: { + type: "string", + example: "TRANS_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Finance transaction deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Transaction deleted successfully" + }, + data: { + type: "object", + properties: { + deleted: { + type: "boolean", + example: true + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - Finance transaction not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Finance Transaction TRANS_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/finance/transactions/source/{sourceType}/{sourceId}": { + get: { + tags: [ + "Finance" + ], + summary: "Get transactions by source document", + description: "\nRetrieve all finance transactions associated with a specific source document for comprehensive financial tracking.\n\n**Core Features**:\n- **Source Linking**: Get all transactions linked to specific source documents\n- **Multi-Source 
Support**: Support for invoices, bills, manual entries, interest, and payments\n- **Transaction History**: Complete financial history for source documents\n- **Paginated Results**: Efficient pagination for large transaction volumes\n\n**Use Cases**:\n- **Document Analysis**: See all financial transactions related to specific documents\n- **Payment Tracking**: Track all payments against invoices or bills\n- **Financial Reconciliation**: Match transactions with source documents\n- **Audit Trails**: Review complete financial activity for specific sources\n\n**Route Validation**: Both sourceType and sourceId are required path parameters.\n ", + operationId: "getTransactionsBySource", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "sourceType", + in: "path", + required: true, + description: "Source document type", + schema: { + type: "string", + enum: [ + "invoice", + "bill", + "manual", + "interest", + "payment" + ], + example: "invoice" + } + }, + { + name: "sourceId", + in: "path", + required: true, + description: "Source document identifier", + schema: { + type: "string", + example: "INV_507f1f77bcf86cd799439014" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results", + schema: { + type: "integer", + minimum: 1, + maximum: 1000, + default: 20, + example: 50 + } + } + ], + responses: { + "200": { + description: "Transactions retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Transactions retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/FinanceTransaction" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid source parameters provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId, sourceType, and sourceId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/finance/stats": { + get: { + tags: [ + "Finance" + ], + summary: "Get comprehensive finance statistics", + description: "\nRetrieve comprehensive financial statistics combining transaction analysis by type, partner, and overall summary.\n\n**Core Features**:\n- **Multi-Dimensional Analytics**: Combined statistics by type, partner, and overall summary\n- **Date Range Filtering**: Filter statistics by date ranges for period analysis\n- **Partner Filtering**: Focus statistics on specific business partners\n- **Complete Overview**: Single endpoint for comprehensive financial dashboard data\n\n**Use Cases**:\n- **Financial Dashboard**: Power comprehensive financial dashboards with single API call\n- **Executive Reporting**: Generate high-level financial summaries for management\n- **Business Intelligence**: Analyze financial patterns across multiple dimensions\n- **Performance Monitoring**: Track financial metrics and key performance indicators\n\n**Important**: This endpoint combines data from multiple aggregation functions for complete financial overview.\n ", + operationId: "getFinanceStats", + parameters: [ + { + name: "worldId", + in: "path", + required: 
true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter statistics by partner identifier", + schema: { + type: "string", + example: "PARTNER_507f1f77bcf86cd799439013" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter statistics from this date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter statistics to this date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + } + ], + responses: { + "200": { + description: "Finance statistics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Statistics retrieved successfully" + }, + data: { + type: "object", + properties: { + byType: { + type: "object", + description: "Transaction statistics grouped by type", + additionalProperties: { + type: "object", + properties: { + count: { + type: "number", + example: 45 + }, + totalAmount: { + type: "number", + example: 75000 + }, + avgAmount: { + type: "number", + description: "Average transaction amount for this type", + example: 1666.67 + } + } + }, + example: { + payment_in: { + count: 45, + totalAmount: 75000, + avgAmount: 1666.67 + }, + payment_out: { + count: 23, + totalAmount: 32000, + avgAmount: 1391.30 + } + } + }, + byPartner: { + type: "array", + description: "Transaction statistics grouped by partner", + items: { + type: "object", + properties: { + partnerId: { + type: "string", + example: "PARTNER_001" + }, + count: { + type: "number", + example: 12 + }, + totalAmount: { + type: "number", + example: 18500 + }, + avgAmount: { + type: "number", + example: 1541.67 + } + } + } + }, + summary: { + type: "object", + description: "Overall financial summary", + properties: { + totalIncoming: { + type: "number", + example: 75000 + }, + totalOutgoing: { + type: "number", + example: 32000 + }, + netBalance: { + type: "number", + example: 43000 + }, + transactionCount: { + type: "number", + example: 68 + }, + avgTransactionAmount: { + type: "number", + example: 1573.53 + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/finance/summary": { + get: { + tags: [ + "Finance" + ], + summary: "Get financial summary", + description: "\nRetrieve comprehensive financial summary with incoming, outgoing, and net balance calculations.\n\n**Core Features**:\n- **Financial Overview**: Complete financial position with incoming, outgoing, and net balance\n- **Transaction Metrics**: Transaction count and average transaction amounts\n- **Partner Filtering**: Focus summary on specific business partners\n- **Date Range Analysis**: Analyze financial position for specific time periods\n\n**Use Cases**:\n- **Financial Position**: Get current financial standing and cash flow position\n- **Management Reporting**: Generate executive summaries for financial review\n- **Cash Flow Analysis**: Understand cash inflows, outflows, and net position\n- **Performance Metrics**: Track average transaction values and volumes\n\n**Summary Calculations**:\n- **totalIncoming**: Sum of all payment_in transactions\n- **totalOutgoing**: Sum of all payment_out transactions\n- **netBalance**: 
Difference between incoming and outgoing (incoming - outgoing)\n- **avgTransactionAmount**: Average amount across all transactions\n ", + operationId: "getFinancialSummary", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter summary by partner identifier", + schema: { + type: "string", + example: "PARTNER_507f1f77bcf86cd799439013" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter summary from this date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter summary to this date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + } + ], + responses: { + "200": { + description: "Financial summary retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Financial summary retrieved successfully" + }, + data: { + type: "object", + properties: { + totalIncoming: { + type: "number", + description: "Total amount of incoming payments", + example: 75000 + }, + totalOutgoing: { + type: "number", + description: "Total amount of outgoing payments", + example: 32000 + }, + netBalance: { + type: "number", + description: "Net balance (incoming - outgoing)", + example: 43000 + }, + transactionCount: { + type: "number", + description: "Total number of transactions", + example: 68 + }, + avgTransactionAmount: { + type: "number", + description: "Average transaction amount", + example: 1573.53 + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/finance/stats/by-type": { + get: { + tags: [ + "Finance" + ], + summary: "Get transaction statistics by type", + description: "\nRetrieve transaction statistics aggregated by transaction type for financial analysis and reporting.\n\n**Core Features**:\n- **Type-Based Analytics**: Statistics grouped by payment_in and payment_out transaction types\n- **Volume and Value Metrics**: Count and total amount for each transaction type\n- **Date Range Filtering**: Analyze type-based patterns over specific time periods\n- **Partner Filtering**: Focus type analysis on specific business partners\n\n**Use Cases**:\n- **Cash Flow Analysis**: Understand incoming vs outgoing payment patterns\n- **Financial Planning**: Analyze transaction type distributions for budgeting\n- **Performance Metrics**: Track payment processing volumes by type\n- **Trend Analysis**: Identify patterns in payment types over time\n\n**Statistics Include**:\n- **count**: Number of transactions for each type\n- **totalAmount**: Total monetary value for each type\n ", + operationId: "getTransactionsByType", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "partnerId", + in: "query", + required: false, + description: "Filter statistics by partner identifier", + schema: { + type: "string", + example: "PARTNER_507f1f77bcf86cd799439013" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter statistics 
from this date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter statistics to this date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + } + ], + responses: { + "200": { + description: "Transaction type statistics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Transaction type statistics retrieved successfully" + }, + data: { + type: "object", + description: "Transaction statistics by type", + additionalProperties: { + type: "object", + properties: { + count: { + type: "number", + example: 45 + }, + totalAmount: { + type: "number", + example: 75000 + }, + avgAmount: { + type: "number", + description: "Average transaction amount for this type", + example: 1666.67 + } + } + }, + example: { + payment_in: { + count: 45, + totalAmount: 75000, + avgAmount: 1666.67 + }, + payment_out: { + count: 23, + totalAmount: 32000, + avgAmount: 1391.30 + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/finance/stats/by-partner": { + get: { + tags: [ + "Finance" + ], + summary: "Get transaction statistics by partner", + description: "\nRetrieve transaction statistics aggregated by business partner for relationship analysis and reporting.\n\n**Core Features**:\n- **Partner-Based Analytics**: Statistics grouped by business partner for relationship insights\n- **Comprehensive Metrics**: Count, total amount, and average amount per partner\n- **Transaction Type Filtering**: Focus analysis on specific transaction types (payment_in/payment_out)\n- **Date Range Analysis**: Analyze partner relationships over specific time periods\n- **Result Limiting**: Configurable result limits with safety caps to prevent abuse\n\n**Use Cases**:\n- **Partner Performance**: Analyze transaction volumes and values by business partner\n- **Relationship Management**: Understand financial relationships with key partners\n- **Credit Analysis**: Evaluate partner payment patterns and transaction history\n- **Business Intelligence**: Identify top partners by transaction volume and value\n\n**Statistics Include**:\n- **partnerId**: Business partner identifier\n- **count**: Number of transactions with the partner\n- **totalAmount**: Total transaction value with the partner\n- **avgAmount**: Average transaction amount with the partner\n ", + operationId: "getTransactionsByPartner", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "type", + in: "query", + required: false, + description: "Filter by transaction type", + schema: { + type: "string", + enum: [ + "payment_in", + "payment_out" + ], + example: "payment_in" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter statistics from this date", + schema: { + type: "string", + format: "date", + example: "2024-01-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter statistics to this date", + schema: { + type: "string", + format: "date", + example: "2024-01-31" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of partner results (capped at 500)", + 
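+ // Illustrative sketch (assumed handler behaviour, not taken from the route source):
+ // the "capped at 500" rule above is typically applied as
+ //   const effectiveLimit = Math.min(Number(query.limit ?? 100), 500);
+ // so GET /{worldId}/finance/stats/by-partner?limit=50&type=payment_in returns at most
+ // 50 partner rows, sorted by totalAmount descending as documented below.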
schema: { + type: "integer", + minimum: 1, + maximum: 500, + default: 100, + example: 50 + } + } + ], + responses: { + "200": { + description: "Partner transaction statistics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Partner transaction statistics retrieved successfully" + }, + data: { + type: "array", + description: "Transaction statistics by partner, sorted by total amount descending", + items: { + type: "object", + properties: { + partnerId: { + type: "string", + description: "Business partner identifier", + example: "PARTNER_001" + }, + count: { + type: "number", + description: "Number of transactions with this partner", + example: 12 + }, + totalAmount: { + type: "number", + description: "Total transaction amount with this partner", + example: 18500 + }, + avgAmount: { + type: "number", + description: "Average transaction amount with this partner", + example: 1541.67 + } + } + } + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/index.ts b/packages/controlmart/src/docs/paths/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..ebda255af93aad23c3a94e2bb02e0af266202ae1 --- /dev/null +++ b/packages/controlmart/src/docs/paths/index.ts @@ -0,0 +1,33 @@ + +import { docsPaths } from './docs.paths'; +import { logsPaths } from './logs.paths'; +import { ticketsPaths } from './tickets.paths'; +import { odPaths } from './od.paths'; +import { worldPaths } from './world.paths'; +import { ediPaths } from './edi.paths'; +import { tmsPaths } from './tms.paths'; +import { erpPaths } from './erp.paths'; +import { financePaths } from './finance.paths'; +import { ledgerPaths } from './ledger.paths'; +import { knowledgeGraphPaths } from './knowledgeGraph.paths'; +import { wmsPaths } from './wms'; +import { chaosPaths } from './chaos.paths'; + +import { verificationPaths } from './verification.paths'; + +export const allPaths = { + ...docsPaths, + ...logsPaths, + ...ticketsPaths, + ...odPaths, + ...worldPaths, + ...ediPaths, + ...tmsPaths, + ...erpPaths, + ...financePaths, + ...ledgerPaths, + ...knowledgeGraphPaths, + ...wmsPaths, + ...chaosPaths, + ...verificationPaths, +}; diff --git a/packages/controlmart/src/docs/paths/knowledgeGraph.paths.ts b/packages/controlmart/src/docs/paths/knowledgeGraph.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..57c636de6daf858be383f85c55036861b3eff52e --- /dev/null +++ b/packages/controlmart/src/docs/paths/knowledgeGraph.paths.ts @@ -0,0 +1,367 @@ +export const knowledgeGraphPaths = { + "/{worldId}/knowledge-graph": { + get: { + tags: [ + "Knowledge Graph" + ], + summary: "Get knowledge graph for world", + description: "\nRetrieve the knowledge graph filtered by the world's assigned capabilities. 
Uses bidirectional BFS to capture the complete connected subgraph.\n\n**Graph Structure**:\nThe knowledge graph represents relationships between different entity types in the system:\n\n- **PERSONA → CAPABILITY**: via `can_perform` edge\n- **CAPABILITY → OD**: via `implemented_by` edge\n- **OD → TOOL**: via `uses` edge\n- **TOOL → SERVICE**: via `exposed_by` edge\n- **TOOL → ENTITY**: via `produces`, `requires`, or `modifies` edges\n\n**Filtering Behavior**:\n- When the world has assigned capabilities, the graph is automatically filtered to show only relevant nodes\n- Uses bidirectional BFS starting from the world's ODs to find all connected nodes\n- Returns `filtered: true` and `seedODs` array when filtering is applied\n- Returns full graph with `filtered: false` and info message when no capabilities are assigned\n\n**Use Cases**:\n- **Visualization**: Render interactive 2D/3D knowledge graph views\n- **Capability Discovery**: Understand available capabilities and their connections\n- **Impact Analysis**: Identify what entities are affected by running an OD\n- **Dependency Mapping**: Find tool and service dependencies\n- **Lineage Tracking**: Trace entity creation and modification paths\n ", + operationId: "getKnowledgeGraph", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Knowledge graph retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + nodes: { + type: "array", + description: "Array of graph nodes", + items: { + type: "object", + properties: { + id: { + type: "string", + description: "Unique node identifier", + example: "od:shipment-tracking" + }, + type: { + type: "string", + enum: [ + "PERSONA", + "CAPABILITY", + "OD", + "TOOL", + "SERVICE", + "ENTITY" + ], + description: "Node type", + example: "OD" + }, + label: { + type: "string", + description: "Human-readable label", + example: "Shipment Tracking" + }, + metadata: { + type: "object", + description: "Optional metadata", + additionalProperties: true + } + }, + required: [ + "id", + "type", + "label" + ] + } + }, + edges: { + type: "array", + description: "Array of graph edges", + items: { + type: "object", + properties: { + from: { + type: "string", + description: "Source node ID", + example: "od:shipment-tracking" + }, + to: { + type: "string", + description: "Target node ID", + example: "tool:getShipmentStatus" + }, + type: { + type: "string", + enum: [ + "can_perform", + "implemented_by", + "uses", + "exposed_by", + "produces", + "requires", + "modifies" + ], + description: "Edge relationship type", + example: "uses" + }, + metadata: { + type: "object", + description: "Optional metadata", + additionalProperties: true + } + }, + required: [ + "from", + "to", + "type" + ] + } + }, + stats: { + type: "object", + description: "Graph statistics", + properties: { + totalNodes: { + type: "integer", + description: "Total number of nodes", + example: 42 + }, + totalEdges: { + type: "integer", + description: "Total number of edges", + example: 68 + }, + nodesByType: { + type: "object", + description: "Count of nodes by type", + properties: { + PERSONA: { + type: "integer", + example: 3 + }, + CAPABILITY: { + type: "integer", + example: 5 + }, + OD: { + type: "integer", + example: 
8 + }, + TOOL: { + type: "integer", + example: 15 + }, + SERVICE: { + type: "integer", + example: 4 + }, + ENTITY: { + type: "integer", + example: 7 + } + } + } + } + }, + filtered: { + type: "boolean", + description: "Whether the graph was filtered by capabilities", + example: true + }, + seedODs: { + type: "array", + description: "OD IDs used as seeds for filtering (only present when filtered)", + items: { + type: "string" + }, + example: [ + "shipment-tracking", + "inventory-check" + ] + }, + message: { + type: "string", + description: "Info message (only present when not filtered)", + example: "No capabilities assigned to this world. Showing full graph." + } + }, + required: [ + "nodes", + "edges", + "stats", + "filtered" + ] + } + } + }, + examples: { + filtered: { + summary: "Filtered graph (world has capabilities)", + value: { + success: true, + data: { + nodes: [ + { + id: "persona:warehouse-worker", + type: "PERSONA", + label: "Warehouse Worker" + }, + { + id: "cap:inventory-management", + type: "CAPABILITY", + label: "Inventory Management" + }, + { + id: "od:inventory-check", + type: "OD", + label: "Inventory Check" + }, + { + id: "tool:getInventoryLevels", + type: "TOOL", + label: "Get Inventory Levels" + }, + { + id: "service:wms", + type: "SERVICE", + label: "WMS" + }, + { + id: "entity:inventory", + type: "ENTITY", + label: "Inventory" + } + ], + edges: [ + { + from: "persona:warehouse-worker", + to: "cap:inventory-management", + type: "can_perform" + }, + { + from: "cap:inventory-management", + to: "od:inventory-check", + type: "implemented_by" + }, + { + from: "od:inventory-check", + to: "tool:getInventoryLevels", + type: "uses" + }, + { + from: "tool:getInventoryLevels", + to: "service:wms", + type: "exposed_by" + }, + { + from: "tool:getInventoryLevels", + to: "entity:inventory", + type: "requires" + } + ], + stats: { + totalNodes: 6, + totalEdges: 5, + nodesByType: { + PERSONA: 1, + CAPABILITY: 1, + OD: 1, + TOOL: 1, + SERVICE: 1, + ENTITY: 1 + } + }, + filtered: true, + seedODs: [ + "inventory-check" + ] + } + } + }, + unfiltered: { + summary: "Unfiltered graph (world has no capabilities)", + value: { + success: true, + data: { + nodes: [ + { + id: "persona:store-manager", + type: "PERSONA", + label: "Store Manager" + }, + { + id: "cap:order-fulfillment", + type: "CAPABILITY", + label: "Order Fulfillment" + } + ], + edges: [ + { + from: "persona:store-manager", + to: "cap:order-fulfillment", + type: "can_perform" + } + ], + stats: { + totalNodes: 2, + totalEdges: 1, + nodesByType: { + PERSONA: 1, + CAPABILITY: 1 + } + }, + filtered: false, + message: "No capabilities assigned to this world. Showing full graph." 
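+ // Illustrative consumer handling (assumed client code, not part of the API contract):
+ //   const { filtered, seedODs, message } = response.data;
+ //   if (filtered) console.log(`Filtered from seed ODs: ${seedODs.join(", ")}`);
+ //   else console.log(message); // "No capabilities assigned to this world. Showing full graph."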
+ } + } + } + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + error: { + type: "string", + example: "World not found" + } + } + } + } + } + }, + "500": { + description: "Internal server error", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + error: { + type: "string", + example: "Failed to generate knowledge graph" + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/ledger.paths.ts b/packages/controlmart/src/docs/paths/ledger.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..c7499ea876f7445ac0124819b19952fa227b1556 --- /dev/null +++ b/packages/controlmart/src/docs/paths/ledger.paths.ts @@ -0,0 +1,494 @@ +export const ledgerPaths = { + "/{worldId}/ledger": { + post: { + tags: [ + "Ledger" + ], + summary: "Upsert company ledger", + description: "\nCreate or update the company ledger for comprehensive financial position management.\n\n**Core Features**:\n- **Upsert Functionality**: Creates new ledger if not exists, updates if exists\n- **Auto-Calculated Net Position**: Automatically calculates netPosition = cash + totalReceivables - totalPayables\n- **World-Scoped**: One ledger per world environment with unique constraint\n- **Financial Integration**: Central hub for all financial position tracking\n\n**Use Cases**:\n- **Initial Setup**: Create company ledger for new financial tracking\n- **Financial Updates**: Update cash, receivables, and payables positions\n- **Balance Reconciliation**: Maintain accurate financial position records\n- **Integration Support**: Synchronize with external accounting systems\n\n**Important**: Net position is automatically calculated and cannot be manually set.\n ", + operationId: "upsertCompanyLedger", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + cash: { + type: "number", + description: "Company cash position", + default: 0, + example: 25000 + }, + totalReceivables: { + type: "number", + description: "Total accounts receivable", + default: 0, + example: 45000 + }, + totalPayables: { + type: "number", + description: "Total accounts payable", + default: 0, + example: 18000 + } + } + } + } + } + }, + responses: { + "201": { + description: "Company ledger upserted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Ledger upserted successfully" + }, + data: { + $ref: "#/components/schemas/CompanyLedger" + } + } + } + } + } + }, + "400": { + description: "Invalid ledger data provided", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + status: { + type: "integer", + example: 400 + }, + error: { + type: "string", + example: "worldId is required" + } + } + } + } + } + } + } + }, + get: { + tags: [ + "Ledger" + ], + summary: "Get company ledger by world ID", + description: "\nRetrieve the company ledger for 
the specified world environment.\n\n**Core Features**:\n- **World-Specific**: Get ledger for specific world environment\n- **Complete Position**: Returns cash, receivables, payables, and calculated net position\n- **Fast Lookup**: Optimized query using unique world ID index\n- **Financial Intelligence**: Access complete financial position for analysis\n\n**Use Cases**:\n- **Financial Dashboard**: Get current financial position for dashboards\n- **Balance Inquiry**: Check company financial position and cash flow\n- **Reporting**: Access ledger data for financial reporting and analysis\n- **Integration Support**: Provide financial position data to external systems\n\n**Response**: Returns null if no ledger exists for the world.\n ", + operationId: "getCompanyLedgerByWorldId", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Company ledger retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Ledger retrieved successfully" + }, + data: { + $ref: "#/components/schemas/CompanyLedger" + } + } + } + } + } + }, + "404": { + description: "Ledger not found for world", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + status: { + type: "integer", + example: 404 + }, + error: { + type: "string", + example: "Ledger for world 507f1f77bcf86cd799439011 not found" + } + } + } + } + } + } + } + }, + patch: { + tags: [ + "Ledger" + ], + summary: "Update company ledger", + description: "\nUpdate company ledger with partial data for financial position management.\n\n**Core Features**:\n- **Partial Updates**: Update specific ledger fields without replacing entire record\n- **Auto-Calculated Net Position**: Net position automatically recalculated after updates\n- **Financial Management**: Modify cash, receivables, and payables positions\n- **Validation**: Ensures updated data meets business rules and constraints\n\n**Use Cases**:\n- **Balance Adjustments**: Update financial positions per accounting operations\n- **Position Corrections**: Modify ledger balances for corrections or reconciliations\n- **Integration Updates**: Synchronize positions from external accounting systems\n- **Period-End Adjustments**: Update balances for month-end or year-end processes\n\n**Important**: Net position is recalculated automatically and cannot be manually updated.\n ", + operationId: "updateCompanyLedger", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + cash: { + type: "number", + description: "Updated cash position", + example: 28000 + }, + totalReceivables: { + type: "number", + description: "Updated accounts receivable total", + example: 42000 + }, + totalPayables: { + type: "number", + description: "Updated accounts payable total", + example: 20000 + } + } + } + } + } + }, + responses: { + "200": { + description: "Company ledger updated successfully", + 
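+ // Illustrative recalculation sketch (assumed service-side logic, mirroring the formula
+ // documented above): after any cash/receivables/payables update,
+ //   netPosition = cash + totalReceivables - totalPayables
+ // e.g. 28000 + 42000 - 20000 = 50000, which is why clients never set netPosition directly.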
content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Ledger updated successfully" + }, + data: { + $ref: "#/components/schemas/CompanyLedger" + } + } + } + } + } + }, + "404": { + description: "Ledger not found for world", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + status: { + type: "integer", + example: 404 + }, + error: { + type: "string", + example: "Ledger for world 507f1f77bcf86cd799439011 not found" + } + } + } + } + } + } + } + } + }, + "/{worldId}/ledger/increment": { + patch: { + tags: [ + "Ledger" + ], + summary: "Increment company ledger balances", + description: "\nIncrement or decrement company ledger balances using delta values for precise financial adjustments.\n\n**Core Features**:\n- **Delta-Based Updates**: Use delta values for precise incremental changes\n- **Multi-Balance Support**: Update cash, receivables, and payables in single operation\n- **Auto-Calculated Net Position**: Net position recalculated automatically after increments\n- **Atomic Operations**: All balance changes applied atomically for consistency\n\n**Use Cases**:\n- **Transaction Processing**: Apply financial transaction impacts to ledger\n- **Payment Processing**: Increment/decrement balances based on payment activity\n- **Adjustment Entries**: Apply accounting adjustments and corrections\n- **Integration Updates**: Process incremental changes from external systems\n\n**Delta Logic**:\n- **Positive Values**: Increase balances\n- **Negative Values**: Decrease balances\n- **At Least One Required**: Must provide at least one delta value\n\n**Important**: Net position automatically recalculated as: cash + totalReceivables - totalPayables\n ", + operationId: "incrementCompanyLedgerBalances", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + cashDelta: { + type: "number", + description: "Amount to add/subtract from cash (positive = increase, negative = decrease)", + example: 5000 + }, + receivablesDelta: { + type: "number", + description: "Amount to add/subtract from receivables (positive = increase, negative = decrease)", + example: -2000 + }, + payablesDelta: { + type: "number", + description: "Amount to add/subtract from payables (positive = increase, negative = decrease)", + example: 1500 + } + } + } + } + } + }, + responses: { + "200": { + description: "Ledger balances incremented successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Ledger balances incremented successfully" + }, + data: { + $ref: "#/components/schemas/CompanyLedger" + } + } + } + } + } + }, + "400": { + description: "Invalid delta values provided", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + status: { + type: "integer", + example: 400 + }, + error: { + type: "string", + example: "At least one delta (cashDelta, 
receivablesDelta, payablesDelta) is required" + } + } + } + } + } + }, + "404": { + description: "Ledger not found for world", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: false + }, + status: { + type: "integer", + example: 404 + }, + error: { + type: "string", + example: "Ledger for world 507f1f77bcf86cd799439011 not found" + } + } + } + } + } + } + } + } + }, + "/{worldId}/ledger/analytics/summary": { + get: { + tags: [ + "Ledger" + ], + summary: "Get company ledger analytics summary", + description: "\nRetrieve comprehensive analytics summary for company ledger with aggregated financial metrics.\n\n**Core Features**:\n- **Aggregated Metrics**: Total cash, receivables, payables, and net position across all ledgers\n- **Distribution Analytics**: Count of positive vs negative net position ledgers\n- **Financial Intelligence**: Key performance indicators for financial analysis\n- **Business Intelligence**: High-level financial overview for executive dashboards\n\n**Use Cases**:\n- **Executive Dashboards**: Provide high-level financial metrics for leadership\n- **Financial Reporting**: Generate summary reports for financial analysis\n- **Performance Monitoring**: Track overall financial health and position\n- **Business Intelligence**: Support data-driven financial decision making\n\n**Analytics Include**:\n- **totalCash**: Aggregate cash position\n- **totalReceivables**: Total outstanding receivables\n- **totalPayables**: Total outstanding payables\n- **totalNetPosition**: Overall net financial position\n- **ledgerCount**: Total number of ledgers\n- **positiveLedgers**: Count of ledgers with positive net position\n- **negativeLedgers**: Count of ledgers with negative net position\n ", + operationId: "getCompanyLedgersSummary", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Ledger analytics summary retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Ledger summary retrieved successfully" + }, + data: { + type: "object", + properties: { + totalCash: { + type: "number", + description: "Total cash across all ledgers", + example: 125000 + }, + totalReceivables: { + type: "number", + description: "Total receivables across all ledgers", + example: 285000 + }, + totalPayables: { + type: "number", + description: "Total payables across all ledgers", + example: 165000 + }, + totalNetPosition: { + type: "number", + description: "Total net position across all ledgers", + example: 245000 + }, + ledgerCount: { + type: "number", + description: "Total number of ledgers", + example: 1 + }, + positiveLedgers: { + type: "number", + description: "Number of ledgers with positive net position", + example: 1 + }, + negativeLedgers: { + type: "number", + description: "Number of ledgers with negative net position", + example: 0 + } + } + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/logs.paths.ts b/packages/controlmart/src/docs/paths/logs.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..113b8557917646170e78c6d9a6f94e78d0462cb7 --- /dev/null +++ 
b/packages/controlmart/src/docs/paths/logs.paths.ts @@ -0,0 +1,771 @@ +export const logsPaths = { + "/{worldId}/logs": { + get: { + tags: [ + "Logs" + ], + summary: "Get operational logs for a world", + description: "\n## Retrieve Operational Logs\n\nGet paginated operational logs for monitoring and debugging system activities within a specific world.\n\n### Features\n- **Multi-Service Coverage**: Tracks activities across EDI, ERP, AS2, translation, validation, gateway, and infrastructure services\n- **Advanced Filtering**: Filter by service type, log level, entities, transactions, and time ranges \n- **Full-Text Search**: Search within log messages for specific content\n- **Cursor-Based Pagination**: Efficient pagination for large log volumes\n- **Real-Time Monitoring**: Access to the most recent system events\n\n### Common Use Cases\n- Debugging integration issues between services\n- Monitoring system performance and health\n- Tracking transaction flows across services\n- Error analysis and troubleshooting\n- Real-time operational monitoring\n ", + operationId: "getLogs", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the business world/environment", + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "serviceType", + in: "query", + required: false, + description: "Filter logs by specific service type", + schema: { + type: "string", + enum: [ + "edi", + "erp", + "finance", + "manufacturing", + "od", + "knowledge-graph", + "persona", + "tickets", + "tms", + "wms", + "world", + "capability", + ], + example: "edi" + } + }, + { + name: "level", + in: "query", + required: false, + description: "Filter logs by severity level", + schema: { + type: "string", + enum: [ + "trace", + "debug", + "info", + "warn", + "error", + "fatal" + ], + example: "error" + } + }, + { + name: "searchText", + in: "query", + required: false, + description: "Full-text search within log messages (case-insensitive)", + schema: { + type: "string", + example: "purchase order validation" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter logs from this date/time (ISO 8601 format)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-15T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter logs until this date/time (ISO 8601 format)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-16T23:59:59.999Z" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of logs to return (max: 100)", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 100, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for retrieving next page of results", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved logs", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true, + description: "Indicates if the request was successful" + }, + status: { + type: "integer", + example: 200, + description: "HTTP status code" + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WorldLog" + }, + description: "Array of log entries matching the filter criteria" + }, + totalCount: 
{ + type: "integer", + example: 2847, + description: "Total number of logs matching the filter (not limited by pagination)" + }, + limit: { + type: "integer", + example: 50, + description: "Number of logs returned in this response" + }, + hasMore: { + type: "boolean", + example: true, + description: "Whether more logs are available beyond this page" + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439012", + description: "Cursor for retrieving the next page of results" + } + }, + required: [ + "items", + "totalCount", + "limit", + "hasMore" + ] + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 2847, + description: "Total number of logs matching the filter" + }, + limit: { + type: "integer", + example: 50, + description: "Maximum number of logs per page" + }, + hasMore: { + type: "boolean", + example: true, + description: "Whether additional pages are available" + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439012", + description: "Cursor for the next page" + }, + previousCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439010", + description: "Cursor for the previous page" + } + }, + required: [ + "totalCount", + "limit", + "hasMore", + "nextCursor", + "previousCursor" + ] + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message", + description: "Event type for streaming responses" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z", + description: "Response generation timestamp" + } + }, + required: [ + "event", + "timestamp" + ] + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + examples: { + successfulResponse: { + summary: "Successful logs retrieval", + value: { + success: true, + status: 200, + data: { + items: [ + { + _id: "507f1f77bcf86cd799439011", + worldRef: { + worldId: "550e8400-e29b-41d4-a716-446655440000" + }, + logId: "log_123456789", + timestamp: "2024-01-15T10:25:30.123Z", + serviceType: "edi", + level: "info", + msg: "EDI 850 Purchase Order processed successfully", + metadata: { + transactionId: "txn_789abc", + partnerId: "partner_456", + docType: "850", + direction: "IN", + processingTime: 1250 + } + }, + { + _id: "507f1f77bcf86cd799439012", + worldRef: { + worldId: "550e8400-e29b-41d4-a716-446655440000" + }, + logId: "log_123456790", + timestamp: "2024-01-15T10:24:15.456Z", + serviceType: "erp", + level: "warn", + msg: "Purchase order validation warning: quantity exceeds standard limits", + metadata: { + orderId: "PO-2024-001", + customerId: "customer_789", + warningType: "QUANTITY_LIMIT", + expectedMax: 1000, + actualQuantity: 1500 + } + } + ], + totalCount: 2847, + limit: 50, + hasMore: true, + nextCursor: "507f1f77bcf86cd799439013" + }, + pagination: { + totalCount: 2847, + limit: 50, + hasMore: true, + nextCursor: "507f1f77bcf86cd799439013", + previousCursor: null + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + filteredByError: { + summary: "Error-level logs only", + value: { + success: true, + status: 200, + data: { + items: [ + { + _id: "507f1f77bcf86cd799439020", + worldRef: { + worldId: "550e8400-e29b-41d4-a716-446655440000" + }, + logId: "log_err001", + timestamp: "2024-01-15T09:15:22.789Z", + serviceType: "edi", + level: "error", + msg: "EDI validation failed: missing required segment BEG", + metadata: { + transactionId: "txn_error123", + partnerId: 
"partner_999", + docType: "850", + errorCode: "MISSING_SEGMENT", + validationErrors: [ + "Missing BEG segment", + "Invalid date format in DTM segment" + ] + } + } + ], + totalCount: 23, + limit: 50, + hasMore: false, + nextCursor: null + }, + pagination: { + totalCount: 23, + limit: 50, + hasMore: false, + nextCursor: null, + previousCursor: null + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + examples: { + missingWorldId: { + summary: "Missing required worldId parameter", + value: { + success: false, + status: 400, + error: "worldId is required", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + invalidDateFormat: { + summary: "Invalid date format", + value: { + success: false, + status: 400, + error: "Invalid date format. Use ISO 8601 format (YYYY-MM-DDTHH:mm:ss.sssZ)", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "World not found with ID: 550e8400-e29b-41d4-a716-446655440000", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Database connection failed", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + } + }, + "/{worldId}/audit-logs": { + get: { + tags: [ + "Logs" + ], + summary: "Get audit logs for data changes in a world", + description: "\n## Retrieve Data Audit Logs\n\nGet comprehensive audit logs that track all data changes within a specific world environment.\n\n### Features\n- **Complete Change History**: Track all create, update, and delete operations on database records\n- **Before/After Snapshots**: See exactly what changed in each modification\n- **Model-Specific Filtering**: Filter by specific data models (EdiTransaction, WMSTask, etc.)\n- **Document Tracking**: Follow all changes to specific documents using their IDs\n- **Compliance Ready**: Meets regulatory requirements for data change tracking\n\n### Common Use Cases\n- Compliance reporting and regulatory audits\n- Data investigation and forensic analysis\n- Security monitoring for unauthorized changes\n- Change impact analysis and rollback planning\n- Debugging data inconsistencies\n- Tracking user actions and system modifications\n\n### Audit Log Structure\nEach audit log entry contains:\n- **model**: The type of data model that was changed\n- **documentId**: The unique identifier of the changed document\n- **changedBy**: Who made the change (user ID or system identifier)\n- **before**: Complete state of the document before the change\n- **after**: Complete state of the document after the change\n- **reason**: Optional reason or context for the change\n- **timestamps**: When the audit log entry was created\n ", + operationId: "getAuditLogs", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the business world/environment", + schema: { + type: "string", + example: 
"550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "model", + in: "query", + required: false, + description: "Filter audit logs by specific data model type", + schema: { + type: "string", + enum: [ + "EdiTransaction", + "Company", + "FinanceTransaction", + "ProductionRun", + "OperationalDescriptor", + "Persona", + "WorldItsmTicket", + "Carrier", + "TMSShipment", + "ERPShipment", + "InboundOrder", + "Task", + "World", + "Capability" + ], + example: "EdiTransaction" + } + }, + { + name: "documentId", + in: "query", + required: false, + description: "Filter audit logs by specific document identifier", + schema: { + type: "string", + example: "edi_txn_123456789" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter audit logs from this date/time (ISO 8601 format)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-15T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter audit logs until this date/time (ISO 8601 format)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-16T23:59:59.999Z" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved audit logs", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true, + description: "Indicates if the request was successful" + }, + status: { + type: "integer", + example: 200, + description: "HTTP status code" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/AuditLog" + }, + description: "Array of audit log entries matching the filter criteria" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message", + description: "Event type for streaming responses" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z", + description: "Response generation timestamp" + } + }, + required: [ + "event", + "timestamp" + ] + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + examples: { + successfulResponse: { + summary: "Successful audit logs retrieval", + value: { + success: true, + status: 200, + data: [ + { + _id: "507f1f77bcf86cd799439011", + model: "EdiTransaction", + documentId: "edi_txn_123456789", + changedBy: "user_john_doe", + before: { + status: "PENDING", + lastUpdated: "2024-01-15T09:00:00.000Z" + }, + after: { + status: "PROCESSED", + lastUpdated: "2024-01-15T10:25:30.123Z", + processedBy: "edi-processor-v2" + }, + reason: "Automated EDI processing completed", + createdAt: "2024-01-15T10:25:30.123Z", + updatedAt: "2024-01-15T10:25:30.123Z" + }, + { + _id: "507f1f77bcf86cd799439012", + model: "WMSTask", + documentId: "wms_task_987654321", + changedBy: "system_automation", + before: null, + after: { + taskId: "wms_task_987654321", + type: "PICK", + status: "ASSIGNED", + assignedTo: "worker_001", + priority: "HIGH", + worldRef: { + worldId: "550e8400-e29b-41d4-a716-446655440000" + } + }, + reason: "Task automatically created from inbound order", + createdAt: "2024-01-15T10:20:15.456Z", + updatedAt: "2024-01-15T10:20:15.456Z" + } + ], + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + filteredByModel: { + summary: "Audit logs filtered by model type", + value: { + success: true, + status: 200, + data: [ + { + _id: "507f1f77bcf86cd799439020", + model: "EdiTransaction", + documentId: "edi_txn_999888777", + changedBy: "user_jane_smith", + before: { + status: 
"FAILED", + errorMessage: "Invalid partner ID", + retryCount: 2 + }, + after: { + status: "PENDING", + errorMessage: null, + retryCount: 3, + partnerId: "CORRECTED_PARTNER_ID" + }, + reason: "Manual correction of partner ID after validation failure", + createdAt: "2024-01-15T09:45:22.789Z", + updatedAt: "2024-01-15T09:45:22.789Z" + } + ], + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + documentHistory: { + summary: "Complete history for a specific document", + value: { + success: true, + status: 200, + data: [ + { + _id: "507f1f77bcf86cd799439030", + model: "EdiTransaction", + documentId: "edi_txn_555444333", + changedBy: "edi-processor-v2", + before: { + status: "IN_PROGRESS", + stage: "VALIDATION" + }, + after: { + status: "PROCESSED", + stage: "COMPLETED", + validationResults: { + passed: true, + warnings: [] + } + }, + reason: "EDI validation and processing completed successfully", + createdAt: "2024-01-15T10:30:00.000Z", + updatedAt: "2024-01-15T10:30:00.000Z" + }, + { + _id: "507f1f77bcf86cd799439031", + model: "EdiTransaction", + documentId: "edi_txn_555444333", + changedBy: "edi-processor-v2", + before: { + status: "PENDING", + stage: "RECEIVED" + }, + after: { + status: "IN_PROGRESS", + stage: "VALIDATION", + processor: "edi-processor-v2" + }, + reason: "EDI processing started", + createdAt: "2024-01-15T10:25:00.000Z", + updatedAt: "2024-01-15T10:25:00.000Z" + } + ], + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + examples: { + missingWorldId: { + summary: "Missing required worldId parameter", + value: { + success: false, + status: 400, + error: "worldId is required", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "World not found with ID: 550e8400-e29b-41d4-a716-446655440000", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Database connection failed", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/od.paths.ts b/packages/controlmart/src/docs/paths/od.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..a2c09876304506865c858cd0f432aa3e3376662a --- /dev/null +++ b/packages/controlmart/src/docs/paths/od.paths.ts @@ -0,0 +1,1592 @@ +export const odPaths = { + "/{worldId}/od": { + post: { + tags: [ + "OD" + ], + summary: "Run a simple OD workflow (Demo)", + description: "Executes a predefined demo workflow (simple-edi or simple-wms). 
This is a legacy endpoint for demo purposes.", + operationId: "runOd", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "odName", + in: "query", + required: true, + schema: { + type: "string", + enum: [ + "simple-edi", + "simple-wms" + ] + } + } + ], + responses: { + "200": { + description: "Workflow executed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "null" + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors": { + post: { + tags: [ + "OD" + ], + summary: "Create a new Operational Descriptor", + description: "\n## Create Operational Descriptor\n\nCreate a new Operational Descriptor (OD) definition in the specified world. The OD defines a workflow with steps, inputs, and execution policies.\n\n### Features\n- **Schema Validation**: Validates the OD structure against the strict OD schema\n- **Immediate Scheduling**: Optionally schedule the OD for execution upon creation\n- **World Isolation**: ODs are scoped to a specific world environment\n\n### OD Structure\n- **id**: Unique identifier for the OD\n- **name**: Human-readable name\n- **type**: Type of OD (standard, background_job, workflow)\n- **steps**: Array of execution steps (MCP tools, scripts, etc.)\n- **runPolicy**: Configuration for execution behavior (idempotency, retries)\n ", + operationId: "createOD", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the business world/environment", + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + data: { + type: "object", + description: "The Operational Descriptor definition", + example: { + id: "edi-process-850", + name: "EDI 850 Processing", + version: "1.0.0", + type: "workflow", + steps: [ + { + id: "validate", + name: "Validate EDI", + type: "mcp", + service: "edi-validator", + tool: "validate_document", + input: { + type: "literal", + value: { + docType: "850" + } + } + } + ] + } + }, + schedule: { + type: "object", + description: "Optional scheduling configuration", + properties: { + type: { + type: "string", + enum: [ + "once", + "recurring" + ] + }, + time: { + type: "string", + format: "date-time", + description: "For 'once' type: Execution time" + }, + interval: { + type: "string", + description: "For 'recurring' type: Interval string (e.g., '1 day', '30 minutes')" + } + } + } + }, + required: [ + "data" + ] + } + } + } + }, + responses: { + "201": { + description: "OD created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + type: "object", + description: "The created OD" + } + } + } + } + } + }, + "207": { + description: "OD created but scheduling failed", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 207 + }, + data: { + type: "object", + properties: { + od: { + type: "object" + }, + scheduleError: { + type: "string" + }, + message: { + type: "string" + } + } + } + } + } + } + } + } + } + }, + get: { + tags: [ + 
"OD" + ], + summary: "List Operational Descriptors", + description: "\n## List Operational Descriptors\n\nRetrieve a paginated list of Operational Descriptors in the specified world.\n\n### Features\n- **Filtering**: Filter by OD type and name\n- **Pagination**: Cursor-based pagination for efficient retrieval\n ", + operationId: "getODs", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odType", + in: "query", + required: false, + schema: { + type: "string", + enum: [ + "standard", + "background_job", + "workflow" + ] + } + }, + { + name: "name", + in: "query", + required: false, + description: "Filter by OD name (regex search)", + schema: { + type: "string" + } + }, + { + name: "cursor", + in: "query", + required: false, + schema: { + type: "string" + } + }, + { + name: "limit", + in: "query", + required: false, + schema: { + type: "integer", + default: 50 + } + } + ], + responses: { + "200": { + description: "Successfully retrieved ODs", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + type: "object" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer" + }, + limit: { + type: "integer" + }, + hasMore: { + type: "boolean" + }, + nextCursor: { + type: "string", + nullable: true + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}": { + get: { + tags: [ + "OD" + ], + summary: "Get an Operational Descriptor by ID", + operationId: "getODById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved OD", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object" + } + } + } + } + } + }, + "404": { + description: "Not Found - Operational Descriptor does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "OD not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "OD" + ], + summary: "Update an Operational Descriptor", + operationId: "updateOD", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + data: { + type: "object", + description: "Updated OD definition" + } + } + } + } + } + }, + responses: { + "200": { + description: "OD updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object" + } + } + } + } + } + } + } + }, + delete: { + tags: [ + "OD" + ], + summary: "Delete an Operational Descriptor", + description: "Safely deletes an OD and cancels all associated scheduled jobs.", + 
operationId: "deleteOD", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "OD deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + message: { + type: "string" + }, + cancelledSchedules: { + type: "integer" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}/execute": { + post: { + tags: [ + "OD" + ], + summary: "Execute an Operational Descriptor", + description: "Triggers the immediate execution of an OD workflow.", + operationId: "executeOD", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + requestBody: { + content: { + "application/json": { + schema: { + type: "object", + properties: { + context: { + type: "object", + description: "Initial context/input for the execution", + example: { + trigger: "manual", + userId: "user_123" + } + } + } + } + } + } + }, + responses: { + "200": { + description: "Execution completed", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + description: "Execution result including status and step details" + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/validate": { + post: { + tags: [ + "OD" + ], + summary: "Validate an OD schema", + description: "Validates an Operational Descriptor definition against the schema without saving it.", + operationId: "validateOD", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + description: "The OD definition to validate" + } + } + } + }, + responses: { + "200": { + description: "Validation successful", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + isValid: { + type: "boolean" + }, + errors: { + type: "array", + items: { + type: "string" + } + }, + warnings: { + type: "array", + items: { + type: "string" + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - OD schema validation failed", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Validation failed: OD schema is invalid", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}/schedule": { + post: { + tags: [ + "OD" + ], + summary: "Schedule an Operational Descriptor", + description: "Schedule an OD for future execution, either once or recurring.", + operationId: "scheduleOD", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + } 
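+ // Illustrative request bodies (assumed payloads; field shapes come from the schema below):
+ //   one-off:   { "type": "once", "time": "2024-02-01T09:00:00.000Z" }
+ //   recurring: { "type": "recurring", "interval": "1 day" }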
+ ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + type: { + type: "string", + enum: [ + "once", + "recurring" + ], + description: "Type of schedule" + }, + time: { + type: "string", + format: "date-time", + description: "Required for 'once' type. ISO 8601 date string." + }, + interval: { + type: "string", + description: "Required for 'recurring' type. Human-readable interval (e.g., '1 day')." + }, + metadata: { + type: "object", + description: "Additional metadata to attach to the job" + } + }, + required: [ + "type" + ] + } + } + } + }, + responses: { + "201": { + description: "Schedule created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + type: "object", + properties: { + jobId: { + type: "string" + }, + nextRunAt: { + type: "string", + format: "date-time" + }, + type: { + type: "string" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}/schedules": { + get: { + tags: [ + "OD" + ], + summary: "Get schedules for an OD", + description: "Retrieve all active schedules for a specific Operational Descriptor.", + operationId: "getODSchedules", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Schedules retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + type: "object", + properties: { + jobId: { + type: "string" + }, + nextRunAt: { + type: "string", + format: "date-time" + }, + lastRunAt: { + type: "string", + format: "date-time" + }, + interval: { + type: "string" + }, + isRecurring: { + type: "boolean" + }, + lastRunResult: { + type: "object" + }, + data: { + type: "object" + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}/with-schedules": { + get: { + tags: [ + "OD" + ], + summary: "Get OD details with schedules", + description: "Retrieves the OD definition along with all its active schedules.", + operationId: "getODWithSchedules", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved OD details", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + description: "OD object with an additional 'schedules' array" + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}/schedules/{jobId}": { + put: { + tags: [ + "OD" + ], + summary: "Reschedule an OD job", + operationId: "rescheduleOD", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "jobId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + requestBody: { + required: true, + 
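+ // Illustrative payload (assumed value; the newTime field is defined in the schema below):
+ //   { "newTime": "2024-03-01T08:00:00.000Z" }  // moves the job to the new execution time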
content: { + "application/json": { + schema: { + type: "object", + properties: { + newTime: { + type: "string", + format: "date-time", + description: "New execution time" + } + }, + required: [ + "newTime" + ] + } + } + } + }, + responses: { + "200": { + description: "Rescheduled successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + jobId: { + type: "string" + }, + newScheduledTime: { + type: "string", + format: "date-time" + }, + previousTime: { + type: "string", + format: "date-time" + } + } + } + } + } + } + } + } + } + }, + delete: { + tags: [ + "OD" + ], + summary: "Cancel a scheduled OD job", + operationId: "cancelODSchedule", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "jobId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Schedule cancelled successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + message: { + type: "string" + }, + cancelled: { + type: "boolean" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}/schedules/{jobId}/pause": { + put: { + tags: [ + "OD" + ], + summary: "Pause a specific OD schedule", + description: "Pauses a specific scheduled execution of an OD. It will not run until resumed.", + operationId: "pauseODSchedule", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "jobId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Schedule paused successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + description: "Updated job details", + properties: { + message: { + type: "string" + }, + jobId: { + type: "string" + }, + nextRunAt: { + type: "string", + format: "date-time" + }, + disabled: { + type: "boolean", + example: true + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/descriptors/{odId}/schedules/{jobId}/resume": { + put: { + tags: [ + "OD" + ], + summary: "Resume a specific OD schedule", + description: "Resumes a paused scheduled execution of an OD.", + operationId: "resumeODSchedule", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + }, + { + name: "odId", + in: "path", + required: true, + schema: { + type: "string" + } + }, + { + name: "jobId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Schedule resumed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + description: "Updated job details", + 
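+                          // Editor's note (illustrative sketch, not part of the generated spec): the
+                          // per-job schedule operations documented above, as plain HTTP calls; the
+                          // newTime value is an example only.
+                          //   PUT    /{worldId}/od/descriptors/{odId}/schedules/{jobId}         { "newTime": "2024-02-01T09:00:00.000Z" }
+                          //   PUT    /{worldId}/od/descriptors/{odId}/schedules/{jobId}/pause   (no request body)
+                          //   PUT    /{worldId}/od/descriptors/{odId}/schedules/{jobId}/resume  (no request body)
+                          //   DELETE /{worldId}/od/descriptors/{odId}/schedules/{jobId}         (no request body)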
properties: { + message: { + type: "string" + }, + jobId: { + type: "string" + }, + nextRunAt: { + type: "string", + format: "date-time" + }, + disabled: { + type: "boolean", + example: false + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/bulk-schedule": { + post: { + tags: [ + "OD" + ], + summary: "Bulk schedule multiple ODs", + operationId: "bulkScheduleODs", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + schedules: { + type: "array", + items: { + type: "object", + properties: { + odId: { + type: "string" + }, + type: { + type: "string", + enum: [ + "once", + "recurring" + ] + }, + time: { + type: "string", + format: "date-time" + }, + interval: { + type: "string" + }, + metadata: { + type: "object" + } + }, + required: [ + "odId", + "type" + ] + } + } + }, + required: [ + "schedules" + ] + } + } + } + }, + responses: { + "200": { + description: "Bulk schedule completed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + total: { + type: "integer" + }, + successful: { + type: "integer" + }, + failed: { + type: "array" + }, + results: { + type: "array" + } + } + } + } + } + } + } + }, + "207": { + description: "Partial success - some schedules failed", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 207 + }, + data: { + type: "object", + properties: { + total: { + type: "integer" + }, + successful: { + type: "integer" + }, + failed: { + type: "array", + items: { + type: "object", + properties: { + odId: { + type: "string" + }, + error: { + type: "string" + } + } + } + }, + results: { + type: "array" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/schedules/pause": { + put: { + tags: [ + "OD" + ], + summary: "Pause all OD schedules in world", + description: "Pauses ALL scheduled OD executions within the specified world.", + operationId: "pauseWorldSchedules", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + + } + } + ], + responses: { + "200": { + description: "All schedules paused successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + description: "Result of bulk pause operation", + properties: { + message: { + type: "string" + }, + count: { + type: "integer", + example: 5 + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/schedules/resume": { + put: { + tags: [ + "OD" + ], + summary: "Resume all OD schedules for a world", + description: "Resumes all OD schedules associated with the specified world.", + operationId: "resumeWorldSchedules", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "All world schedules resumed", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + 
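+                          // Editor's note (illustrative sketch, not part of the generated spec): a
+                          // request matching the bulk-schedule schema above. Each entry requires only
+                          // "odId" and "type"; the odId values and times are made up.
+                          //   POST /{worldId}/od/bulk-schedule
+                          //   {
+                          //     "schedules": [
+                          //       { "odId": "od-001", "type": "once", "time": "2024-02-01T09:00:00.000Z" },
+                          //       { "odId": "od-002", "type": "recurring", "interval": "1 day" }
+                          //     ]
+                          //   }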
type: "object", + properties: { + message: { + type: "string" + }, + count: { + type: "integer" + } + } + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time" + } + } + } + }, + example: { + success: true, + status: 200, + data: { + message: "Resumed 5 schedules for world", + count: 5 + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + } + } + }, + "/{worldId}/od/schedules/status": { + get: { + tags: [ + "OD" + ], + summary: "Get world schedule status", + description: "Returns the collective status of all OD schedules for a world (operational, paused, or partial).", + operationId: "getWorldScheduleStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "World schedule status retrieved", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "operational", + "paused", + "partial" + ] + } + } + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time" + } + } + } + }, + example: { + success: true, + status: 200, + data: { + status: "operational" + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/tickets.paths.ts b/packages/controlmart/src/docs/paths/tickets.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..50a67a18de2c1a29f567ead08efd35609ef447db --- /dev/null +++ b/packages/controlmart/src/docs/paths/tickets.paths.ts @@ -0,0 +1,777 @@ +export const ticketsPaths = { + "/{worldId}/tickets": { + get: { + tags: [ + "ITSM Tickets" + ], + summary: "Get tickets for a world with filtering and pagination", + description: "\n## Retrieve ITSM Tickets\n\nGet paginated ITSM tickets for managing incidents, service requests, problems, and change requests within a specific world environment.\n\n### Features\n- **Multiple Ticket Types**: Support for incidents, service requests, problems, and change requests\n- **Advanced Filtering**: Filter by status, priority, impact, urgency, department, assignee, and date ranges\n- **Cursor-Based Pagination**: Efficient pagination for large ticket volumes\n\n### Ticket Types\n- **incident**: Unplanned interruption to service\n- **service_request**: Request for something to be provided \n- **problem**: Root cause of one or more incidents\n- **change**: Addition, modification or removal of service\n\n### Status Workflow\n- **new** → **open** → **in_progress** → **resolved** → **closed**\n- **on_hold** can be set from any active status\n ", + operationId: "getTickets", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the business world/environment", + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter tickets by status", + schema: { + type: "string", + enum: [ + "new", + "open", + "in_progress", + "on_hold", + "resolved", + "closed" + ], + example: "open" + } + }, + { + name: "priority", + in: 
"query", + required: false, + description: "Filter tickets by priority level", + schema: { + type: "string", + enum: [ + "low", + "medium", + "high", + "critical" + ], + example: "high" + } + }, + { + name: "impact", + in: "query", + required: false, + description: "Filter tickets by business impact level", + schema: { + type: "string", + enum: [ + "low", + "medium", + "high" + ], + example: "medium" + } + }, + { + name: "urgency", + in: "query", + required: false, + description: "Filter tickets by urgency level", + schema: { + type: "string", + enum: [ + "low", + "medium", + "high" + ], + example: "high" + } + }, + { + name: "department", + in: "query", + required: false, + description: "Filter tickets by department or category", + schema: { + type: "string", + example: "IT Support" + } + }, + { + name: "assignedTo", + in: "query", + required: false, + description: "Filter tickets by assigned user ID", + schema: { + type: "string", + example: "user_john_doe" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Filter tickets created from this date/time (ISO 8601 format)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-15T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Filter tickets created until this date/time (ISO 8601 format)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-16T23:59:59.999Z" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of tickets to return (max: 100)", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 100, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for retrieving next page of results", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved tickets", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/ItsmTicket" + } + }, + totalCount: { + type: "integer", + example: 156 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439012" + } + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 156 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439012" + }, + previousCursor: { + type: "string", + nullable: true, + example: null + } + } + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + } + } + } + }, + post: { + tags: [ + "ITSM Tickets" + ], + summary: "Create a new ITSM ticket", + operationId: "createTicket", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the business world/environment", + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + } + ], + requestBody: { + required: true, + 
content: { + "application/json": { + schema: { + type: "object", + properties: { + title: { + type: "string", + description: "Brief title describing the issue or request", + example: "Database connection timeout" + }, + description: { + type: "string", + description: "Detailed description of the issue", + example: "Users experiencing timeouts when accessing the customer portal" + }, + requester: { + type: "string", + description: "User ID of the person creating the ticket", + example: "507f1f77bcf86cd799439020" + }, + type: { + type: "string", + enum: [ + "incident", + "service_request", + "problem", + "change" + ], + default: "incident", + example: "incident" + }, + priority: { + type: "string", + enum: [ + "low", + "medium", + "high", + "critical" + ], + default: "medium", + example: "high" + }, + category: { + type: "string", + example: "Database" + }, + metadata: { + type: "object", + description: "Additional context and system metadata", + properties: { + odId: { type: "string" }, + odName: { type: "string" }, + runId: { type: "string" }, + failedStepId: { type: "string" }, + failureType: { type: "string" }, + contextSnapshots: { type: "array", items: { type: "object" } } + } + } + }, + required: [ + "title", + "description", + "requester" + ] + }, + examples: { + incident: { + summary: "Create an incident ticket", + value: { + title: "Database connection timeout", + description: "Users experiencing timeouts when accessing the customer portal. Database appears to be responding slowly.", + requester: "507f1f77bcf86cd799439020", + type: "incident", + priority: "high", + category: "Database" + } + } + } + } + } + }, + responses: { + "201": { + description: "Ticket created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + $ref: "#/components/schemas/ItsmTicket" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T11:30:00.123Z" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/tickets/{ticketId}": { + get: { + tags: [ + "ITSM Tickets" + ], + summary: "Get a specific ticket by ID", + operationId: "getTicket", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "ticketId", + in: "path", + required: true, + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Ticket retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/ItsmTicket" + } + } + } + } + } + } + } + }, + put: { + tags: [ + "ITSM Tickets" + ], + summary: "Update a ticket", + operationId: "updateTicket", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "ticketId", + in: "path", + required: true, + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + title: { + type: "string" + }, + 
description: { + type: "string" + }, + status: { + type: "string", + enum: [ + "new", + "open", + "in_progress", + "on_hold", + "resolved", + "closed" + ] + }, + priority: { + type: "string", + enum: [ + "low", + "medium", + "high", + "critical" + ] + }, + assignedTo: { + type: "string" + }, + resolutionNotes: { + type: "string" + } + } + } + } + } + }, + responses: { + "200": { + description: "Ticket updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/ItsmTicket" + } + } + } + } + } + } + } + }, + delete: { + tags: [ + "ITSM Tickets" + ], + summary: "Delete a ticket", + operationId: "deleteTicket", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "ticketId", + in: "path", + required: true, + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Ticket deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + message: { + type: "string", + example: "Ticket deleted successfully" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/tickets/{ticketId}/status": { + patch: { + tags: [ + "ITSM Tickets" + ], + summary: "Update ticket status only", + operationId: "updateTicketStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "ticketId", + in: "path", + required: true, + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "new", + "open", + "in_progress", + "on_hold", + "resolved", + "closed" + ] + } + }, + required: [ + "status" + ] + }, + examples: { + markInProgress: { + summary: "Mark ticket as in progress", + value: { + status: "in_progress" + } + }, + resolveTicket: { + summary: "Mark ticket as resolved", + value: { + status: "resolved" + } + } + } + } + } + }, + responses: { + "200": { + description: "Status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/ItsmTicket" + } + } + } + } + } + } + } + } + }, + "/{worldId}/tickets/{ticketId}/work-notes": { + patch: { + tags: [ + "ITSM Tickets" + ], + summary: "Update ticket work notes", + operationId: "updateTicketWorkNotes", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + { + name: "ticketId", + in: "path", + required: true, + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + workNotes: { + type: "array", + items: { + $ref: "#/components/schemas/WorkNote" + } + } + }, + required: [ + "workNotes" + 
] + }, + examples: { + addProgressNote: { + summary: "Add progress update", + value: { + workNotes: [ + { + author: "tech_support_1", + note: "Issue identified. Working on resolution.", + isPublic: true + } + ] + } + } + } + } + } + }, + responses: { + "200": { + description: "Work notes updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/ItsmTicket" + } + } + } + } + } + } + } + } + }, + +}; diff --git a/packages/controlmart/src/docs/paths/tms.paths.ts b/packages/controlmart/src/docs/paths/tms.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..7a9d5ac7deb7585fb1602ea9bbcd4222f9ee12ca --- /dev/null +++ b/packages/controlmart/src/docs/paths/tms.paths.ts @@ -0,0 +1,4639 @@ +export const tmsPaths = { + "/{worldId}/tms/shipments": { + post: { + tags: [ + "TMS" + ], + summary: "Create a new TMS shipment", + description: "\n## Create TMS Shipment\n\nCreate a new shipment in the Transportation Management System with comprehensive tracking and logistics information.\n\n### Features\n- **Complete Shipment Definition**: Origin, destination, carrier, cargo, and routing information\n- **Status Management**: Automatic status initialization to PLANNED\n- **Auto ID Generation**: Automatic shipmentId generation if not provided\n- **Audit Trail**: Full audit logging of shipment creation\n\n### Shipment Status Flow\nPLANNED → TENDERED → ACCEPTED → PICKED_UP → IN_TRANSIT → DELIVERED\n\n### Required Fields\n- shipmentNumber: Unique business identifier\n- origin: Complete origin location information\n- destination: Complete destination location information\n ", + operationId: "createTMSShipment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/TMSShipmentInput" + }, + examples: { + standardShipment: { + summary: "Standard outbound shipment", + value: { + shipmentNumber: "SHIP-2024-001234", + shipmentType: "OUTBOUND", + serviceLevel: "STANDARD", + origin: { + locationId: "DC_001", + locationName: "Main Distribution Center", + address: { + street1: "1000 Industrial Blvd", + city: "Atlanta", + state: "GA", + postalCode: "30309", + country: "US" + }, + coordinates: { + latitude: 33.749, + longitude: -84.388 + }, + contactName: "Shipping Manager", + contactPhone: "555-0123" + }, + destination: { + locationId: "CUST_001", + locationName: "Customer Warehouse", + address: { + street1: "500 Commerce St", + city: "Memphis", + state: "TN", + postalCode: "38103", + country: "US" + }, + coordinates: { + latitude: 35.1495, + longitude: -90.049 + }, + contactName: "Receiving Clerk", + contactPhone: "555-0456" + }, + dates: { + plannedPickupDate: "2024-11-27T08:00:00.000Z", + plannedDeliveryDate: "2024-11-29T17:00:00.000Z" + }, + cargo: { + totalWeight: 15000, + totalWeightUOM: "LBS", + totalVolume: 800, + totalVolumeUOM: "CUFT", + palletCount: 20, + packageCount: 100, + commodityType: "General Merchandise", + hazmat: false, + temperatureControlled: false + }, + references: { + orderId: "ORD-2024-5678", + purchaseOrderNumber: "PO-2024-9012", + customerReference: "CUST-REF-789" + } + } + } + } + } + } + }, + responses: { + "201": { 
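+        // Editor's note (illustrative sketch, not part of the generated spec): the typical shipment
+        // lifecycle described above, expressed via the endpoints defined later in this file;
+        // identifiers are examples only.
+        //   POST /{worldId}/tms/shipments                           -> created as PLANNED
+        //   PUT  /{worldId}/tms/shipments/{shipmentId}/tender       -> TENDERED
+        //   PUT  /{worldId}/tms/shipments/{shipmentId}/accept       -> ACCEPTED
+        //   PUT  /{worldId}/tms/shipments/{shipmentId}/status       -> PICKED_UP / IN_TRANSIT / DELIVERED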
+ description: "Shipment created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + $ref: "#/components/schemas/TMSShipment" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid shipment data provided in the request body", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "TMS" + ], + summary: "Get shipments by status with filtering", + description: "\n## Get TMS Shipments by Status\n\nRetrieve shipments filtered by status with advanced filtering and pagination capabilities.\n\n### Features\n- **Multi-Status Filtering**: Filter by multiple statuses using comma-separated values\n- **Advanced Filters**: Filter by carrier, date range, shipment type\n- **Cursor Pagination**: Efficient pagination for large result sets\n- **Performance Optimized**: Indexed queries for fast response times\n\n### Status Values\n- PLANNED: Shipment created but not yet tendered\n- TENDERED: Shipment tendered to carrier\n- ACCEPTED: Carrier accepted the shipment\n- PICKED_UP: Cargo picked up from origin\n- IN_TRANSIT: Shipment in transit\n- OUT_FOR_DELIVERY: Out for final delivery\n- DELIVERED: Successfully delivered\n- CANCELLED: Shipment cancelled\n- DELAYED: Shipment experiencing delays\n- EXCEPTION: Exception occurred during transit\n ", + operationId: "getTMSShipmentsByStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: true, + description: "Filter by shipment status (comma-separated for multiple)", + schema: { + type: "string", + example: "IN_TRANSIT,OUT_FOR_DELIVERY" + } + }, + { + name: "carrierId", + in: "query", + required: false, + description: "Filter by carrier ID", + schema: { + type: "string", + example: "CARRIER_FEDEX_001" + } + }, + { + name: "from", + in: "query", + required: false, + description: "Filter shipments from this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00.000Z" + } + }, + { + name: "to", + in: "query", + required: false, + description: "Filter shipments until this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59.999Z" + } + }, + { + name: "shipmentType", + in: "query", + required: false, + description: "Filter by shipment type", + schema: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "TRANSFER", + "RETURN" + ], + example: "OUTBOUND" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return", + 
schema: { + type: "integer", + minimum: 1, + maximum: 500, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Shipments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/TMSShipment" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 156 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439012" + }, + previousCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439010" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required status parameter for shipment filtering", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status query parameter is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/in-transit": { + get: { + tags: [ + "TMS" + ], + summary: "Get all in-transit shipments", + description: "\n## Get In-Transit TMS Shipments\n\nRetrieve all shipments currently in transit with real-time location and ETA information.\n\n### Features\n- **Real-Time Tracking**: Current location and movement data\n- **ETA Calculations**: Estimated delivery times based on current position\n- **Exception Monitoring**: Identify shipments with delays or exceptions\n- **Performance Metrics**: Transit time and on-time delivery tracking\n\n### In-Transit Status Definition\nShipments with status IN_TRANSIT, including those with recent location updates and active tracking.\n ", + operationId: "getTMSInTransitShipments", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return", + schema: { + type: "integer", + minimum: 1, + maximum: 500, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "In-transit shipments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/TMSShipment" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 42 
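+        // Editor's note (illustrative sketch, not part of the generated spec): cursor pagination as
+        // documented above — repeat the request with the previous response's nextCursor until
+        // hasMore is false.
+        //   GET /{worldId}/tms/shipments/in-transit?limit=50
+        //   GET /{worldId}/tms/shipments/in-transit?limit=50&cursor=<nextCursor from previous page>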
+ }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + nullable: true, + example: null + }, + previousCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439010" + } + } + } + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}": { + get: { + tags: [ + "TMS" + ], + summary: "Get shipment by ID with events", + description: "\n## Get TMS Shipment Details\n\nRetrieve comprehensive shipment information including all related status events and tracking history.\n\n### Features\n- **Complete Shipment Data**: All shipment fields including cargo, routing, and costs\n- **Event History**: Full chronological history of status changes and updates\n- **Location Tracking**: Current and historical location data\n- **Delay Information**: Any delays and their impact on delivery\n\n### Response Includes\n- Shipment details with current status\n- Related status events ordered by timestamp\n- Current location and tracking information\n- Carrier and routing details\n ", + operationId: "getTMSShipmentById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP-2024-001234" + } + } + ], + responses: { + "200": { + description: "Shipment retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + shipment: { + $ref: "#/components/schemas/TMSShipment" + }, + events: { + type: "array", + items: { + $ref: "#/components/schemas/TMSShipmentStatusEvent" + }, + description: "Status events ordered by timestamp (most recent first)" + } + } + } + } + } + } + } + }, + "404": { + description: "Not Found - Shipment with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment SHIP-2024-001234 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/carrier/{carrierId}": { + get: { + tags: [ + "TMS" + ], + summary: "Get shipments by carrier", + description: "\n## Get Shipments by Carrier\n\nRetrieve all shipments assigned to a specific carrier with optional status filtering.\n\n### Features\n- **Carrier-Specific View**: All shipments for a particular carrier\n- **Performance Analytics**: Carrier performance metrics and KPIs\n- **Status Breakdown**: Distribution of 
shipments by status\n- **Historical Data**: Past shipment performance for the carrier\n\n### Use Cases\n- Carrier performance monitoring\n- Capacity planning and allocation\n- Service level agreement (SLA) tracking\n- Carrier scorecard generation\n ", + operationId: "getTMSShipmentsByCarrier", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "carrierId", + in: "path", + required: true, + description: "Unique carrier identifier", + schema: { + type: "string", + example: "CARRIER_FEDEX_001" + } + }, + { + name: "from", + in: "query", + required: false, + description: "Filter shipments from this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00.000Z" + } + }, + { + name: "to", + in: "query", + required: false, + description: "Filter shipments until this date (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59.999Z" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return", + schema: { + type: "integer", + minimum: 1, + maximum: 500, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Carrier shipments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/TMSShipment" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 87 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439025" + }, + previousCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439015" + } + } + } + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}/tender": { + put: { + tags: [ + "TMS" + ], + summary: "Tender shipment to carrier", + description: "\n## Tender Shipment to Carrier\n\nTender a shipment to a specific carrier, transitioning from PLANNED to TENDERED status.\n\n### Features\n- **Carrier Assignment**: Assign specific carrier with complete information\n- **Status Transition**: PLANNED → TENDERED with audit trail\n- **Automatic Events**: Creates status change event automatically\n- **Validation**: Ensures shipment is in PLANNED status before tendering\n\n### Business Process\n1. Shipment must be in PLANNED status\n2. Carrier information is validated and assigned\n3. Status changes to TENDERED\n4. Status change event is automatically created\n5. 
Carrier can now accept or decline the shipment\n\n### Required Carrier Information\n- carrierId: Unique carrier identifier\n- carrierName: Display name of the carrier\n- carrierCode: Standard carrier code\n- scacCode: Standard Carrier Alpha Code\n ", + operationId: "tenderTMSShipment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP-2024-001234" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + carrierId: { + type: "string", + description: "Unique carrier identifier", + example: "CARRIER_FEDEX_001" + }, + carrierName: { + type: "string", + description: "Carrier display name", + example: "FedEx Freight" + }, + carrierCode: { + type: "string", + description: "Standard carrier code", + example: "FDXF" + }, + scacCode: { + type: "string", + description: "Standard Carrier Alpha Code", + example: "FXFE" + } + }, + required: [ + "carrierId", + "carrierName", + "carrierCode", + "scacCode" + ] + } + } + } + }, + responses: { + "200": { + description: "Shipment tendered successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid tender request missing carrier information", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Carrier information is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found or not in PLANNED status for tendering", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment SHIP-2024-001234 not found or not in PLANNED status", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}/accept": { + put: { + tags: [ + "TMS" + ], + summary: "Accept shipment by carrier", + description: "\n## Accept Shipment by Carrier\n\nCarrier accepts a tendered shipment, transitioning from TENDERED to ACCEPTED status.\n\n### Features\n- **Carrier Acceptance**: Confirms carrier commitment to transport\n- **Tracking Information**: Optional PRO number and tracking details\n- **Pickup Scheduling**: Optional estimated pickup date\n- **Status Transition**: TENDERED → ACCEPTED with audit trail\n\n### Business Process\n1. Shipment must be in TENDERED status\n2. Carrier provides acceptance with optional details\n3. Status changes to ACCEPTED\n4. Pickup can now be scheduled\n5. 
Status change event is automatically created\n\n### Optional Carrier Data\n- proNumber: Carrier's Progressive Number\n- trackingNumber: Carrier's tracking reference\n- estimatedPickupDate: When carrier plans to pickup\n ", + operationId: "acceptTMSShipment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP-2024-001234" + } + } + ], + requestBody: { + required: false, + content: { + "application/json": { + schema: { + type: "object", + properties: { + proNumber: { + type: "string", + description: "Carrier's Progressive Number", + example: "PRO123456789" + }, + trackingNumber: { + type: "string", + description: "Carrier's tracking reference number", + example: "TRK987654321" + }, + estimatedPickupDate: { + type: "string", + format: "date-time", + description: "Estimated pickup date and time", + example: "2024-11-27T08:00:00.000Z" + } + } + } + } + } + }, + responses: { + "200": { + description: "Shipment accepted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSShipment" + } + } + } + } + } + }, + "404": { + description: "Not Found - Shipment not found or not in TENDERED status for acceptance", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment SHIP-2024-001234 not found or not in TENDERED status", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}/location": { + put: { + tags: [ + "TMS" + ], + summary: "Update shipment location", + description: "\n## Update Shipment Location\n\nUpdate the current location of a shipment with GPS coordinates and optional location details.\n\n### Features\n- **Real-Time Tracking**: GPS coordinate updates for live tracking\n- **Multiple Sources**: Support for EDI, GPS, Manual, and Carrier Portal updates\n- **Location History**: Maintains complete location tracking history\n- **Automatic Events**: Creates location update events automatically\n\n### Data Sources\n- **GPS**: Automatic GPS device updates\n- **EDI**: EDI 214 location status updates\n- **MANUAL**: Manual location entry by operators\n- **CARRIER_PORTAL**: Updates from carrier web portals\n\n### Location Accuracy\n- Latitude/longitude coordinates are required\n- Optional city/state for human-readable location\n- Timestamp defaults to current time if not provided\n ", + operationId: "updateTMSShipmentLocation", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique 
shipment identifier", + schema: { + type: "string", + example: "SHIP-2024-001234" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + latitude: { + type: "number", + description: "GPS latitude coordinate", + example: 35.1495 + }, + longitude: { + type: "number", + description: "GPS longitude coordinate", + example: -90.049 + }, + city: { + type: "string", + description: "Current city location", + example: "Memphis" + }, + state: { + type: "string", + description: "Current state/province", + example: "TN" + }, + timestamp: { + type: "string", + format: "date-time", + description: "Timestamp of location update", + example: "2024-11-26T14:30:00.000Z" + }, + source: { + type: "string", + enum: [ + "EDI", + "MANUAL", + "GPS", + "CARRIER_PORTAL" + ], + description: "Source of the location update", + example: "GPS" + } + }, + required: [ + "latitude", + "longitude" + ] + } + } + } + }, + responses: { + "200": { + description: "Location updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid location data missing required coordinates", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Latitude and longitude are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}/status": { + put: { + tags: [ + "TMS" + ], + summary: "Update shipment status", + description: "\n## Update Shipment Status\n\nUpdate the current status of a shipment with optional contextual information.\n\n### Features\n- **Status Management**: Update to any valid shipment status\n- **Context Information**: Optional location, notes, and source tracking\n- **Audit Trail**: Complete audit trail of all status changes\n- **Automatic Events**: Creates status change events automatically\n\n### Valid Status Values\n- **PLANNED**: Initial shipment planning\n- **TENDERED**: Shipment offered to carrier\n- **ACCEPTED**: Carrier accepted shipment\n- **PICKED_UP**: Cargo collected from origin\n- **IN_TRANSIT**: Shipment in transit\n- **OUT_FOR_DELIVERY**: Final delivery stage\n- **DELIVERED**: Successfully delivered\n- **CANCELLED**: Shipment cancelled\n- **DELAYED**: Experiencing delays\n- **EXCEPTION**: Exception occurred\n\n### Status Transitions\nWhile any status can be set, typical flow:\nPLANNED → TENDERED → ACCEPTED → PICKED_UP → IN_TRANSIT → OUT_FOR_DELIVERY → DELIVERED\n ", + operationId: "updateTMSShipmentStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: 
"SHIP-2024-001234" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "PLANNED", + "TENDERED", + "ACCEPTED", + "PICKED_UP", + "IN_TRANSIT", + "OUT_FOR_DELIVERY", + "DELIVERED", + "CANCELLED", + "DELAYED", + "EXCEPTION" + ], + description: "New shipment status", + example: "IN_TRANSIT" + }, + timestamp: { + type: "string", + format: "date-time", + description: "Timestamp of status change", + example: "2024-11-26T14:30:00.000Z" + }, + location: { + type: "string", + description: "Location where status change occurred", + example: "Memphis, TN" + }, + note: { + type: "string", + description: "Additional notes about status change", + example: "Shipment departed Memphis hub" + }, + source: { + type: "string", + description: "Source of the status update", + example: "EDI_214" + } + }, + required: [ + "status" + ] + }, + examples: { + inTransit: { + summary: "Mark shipment as in transit", + value: { + status: "IN_TRANSIT", + timestamp: "2024-11-26T14:30:00.000Z", + location: "Memphis, TN", + note: "Shipment departed Memphis hub", + source: "EDI_214" + } + }, + delivered: { + summary: "Mark shipment as delivered", + value: { + status: "DELIVERED", + timestamp: "2024-11-29T16:45:00.000Z", + location: "Customer Dock Door 3", + note: "Delivered successfully, signed by J.Smith", + source: "CARRIER_PORTAL" + } + } + } + } + } + }, + responses: { + "200": { + description: "Status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status data missing required status field", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Status is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}/events": { + post: { + tags: [ + "TMS" + ], + summary: "Create shipment status event", + description: "\n## Create Shipment Status Event\n\nCreate a new status event for shipment tracking and audit trail purposes.\n\n### Features\n- **Event Types**: Support for various event types including status changes, location updates, ETAs, delays, exceptions, and milestones\n- **Flexible Data**: Contextual information based on event type\n- **Audit Trail**: Complete event history for compliance and tracking\n- **Event Sources**: Track source of events (API, EDI, Manual, GPS, Carrier Portal)\n\n### Event Types\n- **STATUS_CHANGE**: Shipment status transitions\n- **LOCATION_UPDATE**: GPS or location changes\n- **ETA_UPDATE**: Estimated time of arrival updates\n- **DELAY**: Delay notifications and impacts\n- **EXCEPTION**: Exception handling and resolution\n- **MILESTONE**: Important shipment milestones\n\n### Event Context\nDifferent event types support different contextual data:\n- Status changes: previous/new status\n- Location 
updates: coordinates and location details\n- ETA updates: previous/new ETA and delay calculations\n- Exceptions: severity, description, and resolution\n ", + operationId: "createTMSShipmentStatusEvent", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP-2024-001234" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/TMSShipmentStatusEventInput" + }, + examples: { + statusChange: { + summary: "Status change event", + value: { + eventType: "STATUS_CHANGE", + eventTime: "2024-11-26T14:30:00.000Z", + statusInfo: { + previousStatus: "ACCEPTED", + newStatus: "PICKED_UP" + }, + source: "CARRIER_PORTAL" + } + }, + locationUpdate: { + summary: "Location update event", + value: { + eventType: "LOCATION_UPDATE", + eventTime: "2024-11-26T15:45:00.000Z", + locationInfo: { + latitude: 35.1495, + longitude: -90.049, + city: "Memphis", + state: "TN" + }, + source: "GPS" + } + }, + exception: { + summary: "Exception event", + value: { + eventType: "EXCEPTION", + eventTime: "2024-11-26T16:00:00.000Z", + exceptionInfo: { + exceptionType: "WEATHER_DELAY", + severity: "MEDIUM", + description: "Delayed due to severe weather conditions in route", + resolution: "Monitoring weather conditions for safe transit" + }, + source: "MANUAL" + } + } + } + } + } + }, + responses: { + "201": { + description: "Event created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + $ref: "#/components/schemas/TMSShipmentStatusEvent" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid event data missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Event data is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/shipments/{shipmentId}/delays": { + post: { + tags: [ + "TMS" + ], + summary: "Add shipment delay", + description: "\n## Add Shipment Delay\n\nRecord a delay for a shipment with detailed information about the cause and impact.\n\n### Features\n- **Delay Tracking**: Comprehensive delay recording and impact analysis\n- **Delay Types**: Categorized delay types for better analytics\n- **Time Tracking**: Start time, end time, and estimated delay duration\n- **Impact Analysis**: Understand delay impact on delivery schedules\n- **Customer Communication**: Data for proactive customer notifications\n\n### Delay Types\n- **WEATHER**: Weather-related delays\n- **TRAFFIC**: Traffic congestion or road closures\n- **MECHANICAL**: Vehicle or equipment issues\n- **CARRIER**: Carrier operational issues\n- **CUSTOMS**: Customs or border delays (international)\n- **OTHER**: 
Other unforeseen circumstances\n\n### Delay Management\n- Delays can be ongoing (no end time) or completed\n- Estimated delay helps with ETA recalculation\n- Detailed reasons support customer communication\n- Delay patterns help improve route planning\n ", + operationId: "addTMSShipmentDelay", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Unique shipment identifier", + schema: { + type: "string", + example: "SHIP-2024-001234" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + delayType: { + type: "string", + enum: [ + "WEATHER", + "TRAFFIC", + "MECHANICAL", + "CARRIER", + "CUSTOMS", + "OTHER" + ], + description: "Category of delay", + example: "WEATHER" + }, + reason: { + type: "string", + description: "Detailed reason for the delay", + example: "Severe thunderstorms in Memphis area causing safety delays" + }, + startTime: { + type: "string", + format: "date-time", + description: "When the delay started", + example: "2024-11-26T16:00:00.000Z" + }, + estimatedDelay: { + type: "number", + description: "Estimated delay in minutes", + example: 120 + }, + endTime: { + type: "string", + format: "date-time", + description: "When the delay ended (optional, for ongoing delays)", + example: "2024-11-26T18:00:00.000Z" + } + }, + required: [ + "delayType", + "reason" + ] + }, + examples: { + weatherDelay: { + summary: "Weather-related delay", + value: { + delayType: "WEATHER", + reason: "Severe thunderstorms in Memphis area causing safety delays", + startTime: "2024-11-26T16:00:00.000Z", + estimatedDelay: 120 + } + }, + mechanicalIssue: { + summary: "Mechanical delay with resolution", + value: { + delayType: "MECHANICAL", + reason: "Truck tire blowout, replacement required", + startTime: "2024-11-26T14:30:00.000Z", + endTime: "2024-11-26T16:45:00.000Z", + estimatedDelay: 135 + } + } + } + } + } + }, + responses: { + "200": { + description: "Delay added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid delay data missing type or reason", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Delay type and reason are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/carriers": { + post: { + tags: [ + "TMS" + ], + summary: "Create a new TMS carrier", + description: "\n## Create TMS Carrier\n\nRegister a new carrier in the Transportation Management System with complete profile and compliance information.\n\n### Features\n- **Complete Carrier Profile**: Business details, contact info, and operational capabilities\n- **Compliance 
Tracking**: DOT numbers, insurance, safety ratings, and certifications\n- **Service Region Management**: Define geographical service areas\n- **Performance Initialization**: Set up performance tracking metrics\n- **Automatic Validation**: Ensure unique carrier IDs and codes\n\n### Required Fields\n- carrierCode: Unique business identifier (SCAC or internal code)\n- carrierName: Official business name\n- carrierType: Transportation mode and service type\n\n### Carrier Types\n- **LTL**: Less Than Truckload\n- **FTL**: Full Truckload \n- **PARCEL**: Small package delivery\n- **INTERMODAL**: Rail/truck combination\n- **COURIER**: Express/overnight\n- **RAIL**: Rail transportation\n- **AIR**: Air freight\n- **OCEAN**: Ocean/maritime\n ", + operationId: "createTMSCarrier", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/TMSCarrierInput" + }, + examples: { + truckingCarrier: { + summary: "Standard FTL trucking carrier", + value: { + carrierCode: "ACME", + carrierName: "ACME Transportation", + carrierType: "FTL", + contact: { + primaryContactName: "John Smith", + email: "dispatch@acmetransport.com", + phone: "555-0199", + address: { + street: "100 Logistics Blvd", + city: "Atlanta", + state: "GA", + zipCode: "30309", + country: "US" + } + }, + compliance: { + dotNumber: "12345678", + mcNumber: "MC-987654", + scacCode: "ACME", + smartWayCertified: true, + insuranceExpiry: "2025-12-31T23:59:59.999Z", + safetyRating: "SATISFACTORY" + }, + serviceRegions: [ + "GA", + "FL", + "SC", + "NC", + "TN" + ], + performance: { + onTimeDeliveryRate: 0.95, + damageClaimRate: 0.002, + totalShipmentsCompleted: 0 + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Carrier created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + $ref: "#/components/schemas/TMSCarrier" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid carrier data or duplicate carrier code", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Carrier ID and code are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "TMS" + ], + summary: "Get active carriers with filtering", + description: "\n## Get Active TMS Carriers\n\nRetrieve active carriers with advanced filtering and pagination capabilities.\n\n### Features\n- **Active Status Filter**: Only returns carriers with ACTIVE status\n- **Multi-Criteria Filtering**: Filter by type, region, and certifications\n- **SmartWay Certification**: Filter by EPA SmartWay certified 
carriers\n- **Service Region Matching**: Find carriers serving specific regions\n- **Cursor Pagination**: Efficient pagination for large carrier lists\n\n### Carrier Status Values\n- **ACTIVE**: Available for shipment assignments\n- **INACTIVE**: Not accepting new assignments\n- **SUSPENDED**: Temporarily suspended\n- **PENDING_APPROVAL**: Awaiting approval process\n\n### Use Cases\n- Carrier selection for shipment tendering\n- Capacity planning and sourcing\n- Compliance and certification verification\n- Regional coverage analysis\n ", + operationId: "getTMSActiveCarriers", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "carrierType", + in: "query", + required: false, + description: "Filter by carrier transportation type", + schema: { + type: "string", + enum: [ + "LTL", + "FTL", + "PARCEL", + "INTERMODAL", + "COURIER", + "RAIL", + "AIR", + "OCEAN" + ], + example: "FTL" + } + }, + { + name: "serviceRegion", + in: "query", + required: false, + description: "Filter carriers serving specific region/state", + schema: { + type: "string", + example: "GA" + } + }, + { + name: "smartWayCertified", + in: "query", + required: false, + description: "Filter by EPA SmartWay certification status", + schema: { + type: "boolean", + example: true + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return", + schema: { + type: "integer", + minimum: 1, + maximum: 500, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Active carriers retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/TMSCarrier" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 247 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439025" + }, + previousCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439015" + } + } + } + } + }, + example: { + success: true, + status: 200, + data: [ + { + id: "674c5b8a5e8f0123456789ab", + carrierId: "CARRIER_FEDEX_001", + carrierCode: "FEDX_001", + companyName: "FedEx Corporation", + status: "ACTIVE", + carrierType: "FTL", + serviceRegions: [ + "US_NATIONWIDE" + ], + compliance: { + smartWayCertified: true + } + } + ], + pagination: { + totalCount: 247, + limit: 50, + hasMore: true, + nextCursor: "507f1f77bcf86cd799439025", + previousCursor: null + } + } + } + } + }, + "400": { + description: "Bad Request - Missing worldId path parameter", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + 
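// Illustrative client sketch (not part of the generated spec): paging through active carriers
// with the documented carrierType / smartWayCertified filters and cursor pagination.
// The base URL, a global fetch (Bun / Node 18+), and the { data, pagination } envelope
// are assumptions taken from the response examples above.
async function listActiveFTLCarriers(baseUrl: string, worldId: string): Promise<unknown[]> {
  const carriers: unknown[] = [];
  let cursor: string | undefined;
  do {
    const qs = new URLSearchParams({ carrierType: "FTL", smartWayCertified: "true", limit: "50" });
    if (cursor) qs.set("cursor", cursor); // continue from the previous page
    const res = await fetch(`${baseUrl}/${encodeURIComponent(worldId)}/tms/carriers?${qs}`);
    if (!res.ok) throw new Error(`Carrier listing failed with HTTP ${res.status}`);
    const body = await res.json();
    carriers.push(...body.data); // TMSCarrier entries per the 200 schema
    cursor = body.pagination?.hasMore ? body.pagination.nextCursor : undefined;
  } while (cursor);
  return carriers;
}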
}, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/carriers/search": { + get: { + tags: [ + "TMS" + ], + summary: "Search carriers by name or code", + description: "\n## Search TMS Carriers\n\nFull-text search across carrier names, codes, and SCAC codes with optional filtering.\n\n### Features\n- **Multi-Field Search**: Search across carrier name, code, and SCAC\n- **Case-Insensitive**: Flexible matching regardless of case\n- **Advanced Filtering**: Combine search with type, status, and region filters\n- **Performance Optimized**: Indexed search for fast response times\n- **Limited Results**: Capped at 50 results for performance\n\n### Search Fields\n- **carrierName**: Company name (e.g., \"FedEx\", \"UPS\")\n- **carrierCode**: Internal carrier code\n- **scacCode**: Standard Carrier Alpha Code\n\n### Use Cases\n- Carrier lookup during shipment planning\n- Vendor management and selection\n- Compliance verification\n- Directory browsing\n ", + operationId: "searchTMSCarriers", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "searchTerm", + in: "query", + required: true, + description: "Search term for carrier name, code, or SCAC", + schema: { + type: "string", + example: "FedEx" + } + }, + { + name: "carrierType", + in: "query", + required: false, + description: "Filter by carrier transportation type", + schema: { + type: "string", + enum: [ + "LTL", + "FTL", + "PARCEL", + "INTERMODAL", + "COURIER", + "RAIL", + "AIR", + "OCEAN" + ], + example: "PARCEL" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by carrier status", + schema: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "SUSPENDED", + "PENDING_APPROVAL" + ], + example: "ACTIVE" + } + }, + { + name: "serviceRegion", + in: "query", + required: false, + description: "Filter carriers serving specific region", + schema: { + type: "string", + example: "CA" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return (capped at 50)", + schema: { + type: "integer", + minimum: 1, + maximum: 50, + example: 20 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Carriers found successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/TMSCarrier" + }, + maxItems: 50 + } + } + }, + example: { + success: true, + status: 200, + data: [ + { + id: "674c5b8a5e8f0123456789ab", + carrierId: "CARRIER_FEDEX_001", + carrierCode: "FEDX_001", + companyName: "FedEx Corporation", + status: "ACTIVE", + carrierType: "PARCEL", + compliance: { + scacCode: "FEDX" + } + }, + { + id: "674c5b8a5e8f0123456789ac", + carrierId: "CARRIER_FEDEX_002", + carrierCode: "FEDX_002", + companyName: "FedEx Express", + status: "ACTIVE", + carrierType: "PARCEL", + compliance: { + scacCode: "FDXE" + } + } + ] + } + } + } + }, + "400": { + description: "Bad Request - Missing 
searchTerm query parameter", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "searchTerm query parameter is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/carriers/performance": { + get: { + tags: [ + "TMS" + ], + summary: "Get carriers by performance criteria", + description: "\n## Get Carriers by Performance\n\nFind carriers that meet specific performance criteria and service level requirements.\n\n### Features\n- **Performance Filtering**: Filter by on-time rate, damage rate, and transit time\n- **Quality Assurance**: Find top-performing carriers for critical shipments\n- **Benchmarking**: Compare carrier performance against standards\n- **Service Level Optimization**: Match carriers to shipment requirements\n- **Performance Sorting**: Results sorted by performance metrics\n\n### Performance Metrics\n- **On-Time Delivery Rate**: Percentage of shipments delivered on time\n- **Damage Claim Rate**: Percentage of shipments with damage claims\n- **Average Transit Time**: Average hours in transit\n- **Total Shipments**: Historical volume completed\n\n### Use Cases\n- Premium service lane setup\n- Carrier performance benchmarking\n- Service level agreement compliance\n- Quality-focused carrier selection\n ", + operationId: "getTMSCarriersByPerformance", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "minOnTimeRate", + in: "query", + required: false, + description: "Minimum on-time delivery rate (0.0-1.0)", + schema: { + type: "number", + minimum: 0, + maximum: 1, + example: 0.95 + } + }, + { + name: "maxDamageRate", + in: "query", + required: false, + description: "Maximum damage claim rate (0.0-1.0)", + schema: { + type: "number", + minimum: 0, + maximum: 1, + example: 0.01 + } + }, + { + name: "maxTransitTime", + in: "query", + required: false, + description: "Maximum average transit time in hours", + schema: { + type: "number", + minimum: 1, + example: 72 + } + }, + { + name: "carrierType", + in: "query", + required: false, + description: "Filter by carrier transportation type", + schema: { + type: "string", + enum: [ + "LTL", + "FTL", + "PARCEL", + "INTERMODAL", + "COURIER", + "RAIL", + "AIR", + "OCEAN" + ], + example: "FTL" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return", + schema: { + type: "integer", + minimum: 1, + maximum: 500, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Performance-filtered carriers retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + 
$ref: "#/components/schemas/TMSCarrier" + } + } + } + }, + example: { + success: true, + status: 200, + data: [ + { + id: "674c5b8a5e8f0123456789ab", + carrierId: "CARRIER_FEDEX_001", + carrierCode: "FEDX_001", + companyName: "FedEx Corporation", + status: "ACTIVE", + carrierType: "FTL", + performance: { + onTimeDeliveryRate: 0.98, + damageClaimRate: 0.001, + averageTransitTime: 45.2 + } + }, + { + id: "674c5b8a5e8f0123456789ac", + carrierId: "CARRIER_UPS_001", + carrierCode: "UPS_001", + companyName: "United Parcel Service", + status: "ACTIVE", + carrierType: "FTL", + performance: { + onTimeDeliveryRate: 0.97, + damageClaimRate: 0.0005, + averageTransitTime: 48.1 + } + } + ] + } + } + } + }, + "400": { + description: "Bad Request - Missing worldId path parameter", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/carriers/id/{carrierId}": { + get: { + tags: [ + "TMS" + ], + summary: "Get carrier by ID", + description: "\n## Get TMS Carrier by ID\n\nRetrieve complete carrier information using the unique carrier identifier.\n\n### Features\n- **Complete Carrier Profile**: All carrier details including performance metrics\n- **Compliance Information**: DOT numbers, certifications, and safety ratings\n- **Service Capabilities**: Transportation types and regional coverage\n- **Performance History**: Current metrics and trends\n\n### Response Includes\n- Basic carrier information and contact details\n- Compliance and certification status\n- Performance metrics and history\n- Service regions and capabilities\n ", + operationId: "getTMSCarrierById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "carrierId", + in: "path", + required: true, + description: "Unique carrier identifier", + schema: { + type: "string", + example: "CARRIER_FEDEX_001" + } + } + ], + responses: { + "200": { + description: "Carrier retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSCarrier" + } + } + } + } + } + }, + "404": { + description: "Not Found - Carrier with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Carrier CARRIER_FEDEX_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + 
"/{worldId}/tms/carriers/code/{carrierCode}": { + get: { + tags: [ + "TMS" + ], + summary: "Get carrier by code", + description: "\n## Get TMS Carrier by Code\n\nRetrieve carrier information using the carrier's business code (SCAC or internal code).\n\n### Features\n- **Code-Based Lookup**: Find carriers using business-friendly codes\n- **SCAC Integration**: Support for Standard Carrier Alpha Codes\n- **EDI Compatibility**: Perfect for EDI transaction processing\n- **Complete Profile**: Full carrier information and capabilities\n\n### Use Cases\n- EDI transaction processing and mapping\n- Business partner integration\n- Legacy system integration\n- Quick carrier lookup during operations\n ", + operationId: "getTMSCarrierByCode", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "carrierCode", + in: "path", + required: true, + description: "Carrier business code or SCAC", + schema: { + type: "string", + example: "FDXF" + } + } + ], + responses: { + "200": { + description: "Carrier retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSCarrier" + } + } + } + } + } + }, + "404": { + description: "Not Found - Carrier with specified code does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Carrier with code FDXF not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/carriers/{carrierId}/status": { + put: { + tags: [ + "TMS" + ], + summary: "Update carrier status", + description: "\n## Update TMS Carrier Status\n\nChange the operational status of a carrier to control availability for shipment assignments.\n\n### Features\n- **Status Management**: Control carrier availability and operations\n- **Workflow Integration**: Support operational workflows and approvals\n- **Audit Trail**: Track status changes for compliance\n- **Real-Time Impact**: Immediately affects carrier eligibility\n\n### Status Values and Meanings\n- **ACTIVE**: Available for new shipment assignments\n- **INACTIVE**: Not accepting new assignments (maintenance, etc.)\n- **SUSPENDED**: Temporarily suspended due to performance/compliance issues\n- **PENDING_APPROVAL**: New carrier awaiting approval process\n\n### Business Rules\n- Only ACTIVE carriers appear in shipment tendering\n- Status changes are immediately effective\n- Historical performance data is preserved regardless of status\n ", + operationId: "updateTMSCarrierStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "carrierId", + in: "path", + required: true, + description: "Unique carrier identifier", + schema: { + type: "string", + 
example: "CARRIER_FEDEX_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "SUSPENDED", + "PENDING_APPROVAL" + ], + description: "New carrier status", + example: "SUSPENDED" + } + }, + required: [ + "status" + ] + }, + examples: { + suspend: { + summary: "Suspend carrier due to performance issues", + value: { + status: "SUSPENDED" + } + }, + reactivate: { + summary: "Reactivate previously suspended carrier", + value: { + status: "ACTIVE" + } + } + } + } + } + }, + responses: { + "200": { + description: "Carrier status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSCarrier" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid or missing status value", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Status is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Carrier with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Carrier CARRIER_FEDEX_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/carriers/{carrierId}/performance": { + put: { + tags: [ + "TMS" + ], + summary: "Update carrier performance metrics", + description: "\n## Update TMS Carrier Performance\n\nUpdate carrier performance metrics based on completed shipments and operational data.\n\n### Features\n- **Performance Tracking**: Update key performance indicators\n- **Historical Preservation**: Maintains performance history and trends\n- **Automatic Timestamping**: Records when metrics were last updated\n- **Selective Updates**: Update individual metrics without affecting others\n- **Impact Analysis**: Performance changes affect carrier scoring and selection\n\n### Performance Metrics\n- **On-Time Delivery Rate**: Percentage (0.0-1.0) of shipments delivered on time\n- **Damage Claim Rate**: Percentage (0.0-1.0) of shipments with damage claims \n- **Average Transit Time**: Average hours from pickup to delivery\n- **Total Shipments Completed**: Cumulative shipment count for volume tracking\n\n### Update Strategy\n- Metrics can be updated individually or in combination\n- Last update timestamp is automatically recorded\n- Historical trends are preserved for analysis\n- Performance changes immediately affect carrier rankings\n ", + operationId: "updateTMSCarrierPerformance", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "carrierId", + in: "path", + required: true, + description: 
"Unique carrier identifier", + schema: { + type: "string", + example: "CARRIER_FEDEX_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + onTimeDeliveryRate: { + type: "number", + minimum: 0, + maximum: 1, + description: "On-time delivery rate (0.0-1.0)", + example: 0.96 + }, + damageClaimRate: { + type: "number", + minimum: 0, + maximum: 1, + description: "Damage claim rate (0.0-1.0)", + example: 0.001 + }, + averageTransitTime: { + type: "number", + minimum: 0, + description: "Average transit time in hours", + example: 48.5 + }, + totalShipmentsCompleted: { + type: "integer", + minimum: 0, + description: "Total number of completed shipments", + example: 1250 + } + }, + minProperties: 1 + }, + examples: { + monthlyUpdate: { + summary: "Monthly performance metrics update", + value: { + onTimeDeliveryRate: 0.96, + damageClaimRate: 0.001, + averageTransitTime: 48.5, + totalShipmentsCompleted: 1250 + } + }, + onTimeOnly: { + summary: "Update only on-time delivery rate", + value: { + onTimeDeliveryRate: 0.98 + } + } + } + } + } + }, + responses: { + "200": { + description: "Carrier performance updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSCarrier" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid or missing performance data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Performance data is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Carrier with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Carrier CARRIER_FEDEX_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/carriers/{carrierId}/metrics": { + get: { + tags: [ + "TMS" + ], + summary: "Get carrier performance and compliance metrics", + description: "\n## Get TMS Carrier Metrics\n\nRetrieve comprehensive performance and compliance metrics for a specific carrier.\n\n### Features\n- **Current Performance**: Real-time performance indicators\n- **Recent Trends**: Historical performance trend data \n- **Compliance Status**: DOT, MC, insurance, and safety information\n- **Operational Intelligence**: Data for carrier selection and evaluation\n\n### Performance Metrics Included\n- **On-Time Delivery Rate**: Current percentage (0.0-1.0)\n- **Damage Claim Rate**: Current damage rate (0.0-1.0)\n- **Average Transit Time**: Current average in hours\n- **Total Shipments**: Lifetime completed shipment count\n- **Last Update**: When metrics were last refreshed\n\n### Compliance Metrics Included\n- **DOT Number**: Department of Transportation registration\n- **MC Number**: Motor Carrier authority number\n- **SCAC Code**: 
Standard Carrier Alpha Code\n- **SmartWay Certification**: Environmental compliance status\n- **Insurance Expiry**: Current insurance coverage expiration\n- **Safety Rating**: Current DOT safety assessment\n\n### Business Use Cases\n- Carrier selection and ranking algorithms\n- Performance benchmarking and analysis\n- Compliance monitoring and alerts\n- Contract negotiation data\n- Risk assessment and mitigation\n ", + operationId: "getTMSCarrierMetrics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "carrierId", + in: "path", + required: true, + description: "Unique carrier identifier", + schema: { + type: "string", + example: "CARRIER_FEDEX_001" + } + } + ], + responses: { + "200": { + description: "Carrier metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + carrier: { + $ref: "#/components/schemas/TMSCarrier" + }, + metrics: { + type: "object", + properties: { + currentPerformance: { + type: "object", + properties: { + onTimeDeliveryRate: { + type: "number", + description: "Current on-time delivery rate (0.0-1.0)", + example: 0.96 + }, + damageClaimRate: { + type: "number", + description: "Current damage claim rate (0.0-1.0)", + example: 0.001 + }, + averageTransitTime: { + type: "number", + description: "Current average transit time in hours", + example: 48.5 + }, + totalShipmentsCompleted: { + type: "integer", + description: "Total completed shipments", + example: 1250 + } + } + }, + recentTrends: { + type: "object", + properties: { + lastPerformanceUpdate: { + type: "string", + format: "date-time", + description: "When performance metrics were last updated", + example: "2024-01-15T10:30:00.000Z" + } + } + }, + complianceStatus: { + type: "object", + properties: { + dotNumber: { + type: "string", + description: "DOT registration number", + example: "123456" + }, + mcNumber: { + type: "string", + description: "Motor Carrier authority number", + example: "MC-123456" + }, + scacCode: { + type: "string", + description: "Standard Carrier Alpha Code", + example: "FEDX" + }, + smartWayCertified: { + type: "boolean", + description: "SmartWay environmental certification status", + example: true + }, + insuranceExpiry: { + type: "string", + format: "date-time", + description: "Insurance coverage expiration date", + example: "2024-12-31T23:59:59.000Z" + }, + safetyRating: { + type: "string", + description: "DOT safety rating", + example: "SATISFACTORY" + } + } + } + } + } + } + } + } + }, + example: { + success: true, + status: 200, + data: { + carrier: { + id: "674c5b8a5e8f0123456789ab", + carrierId: "CARRIER_FEDEX_001", + carrierCode: "FEDX_001", + companyName: "FedEx Corporation", + status: "ACTIVE", + contactInfo: { + primaryContact: { + name: "John Smith", + title: "Account Manager", + phone: "1-800-463-3339", + email: "john.smith@fedex.com" + }, + dispatchContact: { + name: "Sarah Johnson", + title: "Dispatch Coordinator", + phone: "1-800-463-3340", + email: "dispatch@fedex.com" + }, + billingContact: { + name: "Mike Davis", + title: "Billing Manager", + phone: "1-800-463-3341", + email: "billing@fedex.com" + } + }, + services: [ + "LTL", + "FTL", + "EXPRESS" + ], + equipmentTypes: [ + "DRY_VAN", + 
"REFRIGERATED", + "FLATBED" + ], + serviceAreas: [ + "US_NATIONWIDE", + "CANADA", + "MEXICO" + ], + performance: { + onTimeDeliveryRate: 0.96, + damageClaimRate: 0.001, + averageTransitTime: 48.5, + totalShipmentsCompleted: 1250, + lastPerformanceUpdate: "2024-01-15T10:30:00.000Z" + }, + compliance: { + dotNumber: "123456", + mcNumber: "MC-123456", + scacCode: "FEDX", + smartWayCertified: true, + insuranceExpiry: "2024-12-31T23:59:59.000Z", + safetyRating: "SATISFACTORY" + }, + createdAt: "2024-01-01T00:00:00.000Z", + updatedAt: "2024-01-15T10:30:00.000Z" + }, + metrics: { + currentPerformance: { + onTimeDeliveryRate: 0.96, + damageClaimRate: 0.001, + averageTransitTime: 48.5, + totalShipmentsCompleted: 1250 + }, + recentTrends: { + lastPerformanceUpdate: "2024-01-15T10:30:00.000Z" + }, + complianceStatus: { + dotNumber: "123456", + mcNumber: "MC-123456", + scacCode: "FEDX", + smartWayCertified: true, + insuranceExpiry: "2024-12-31T23:59:59.000Z", + safetyRating: "SATISFACTORY" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required worldId and carrierId parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and carrierId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Carrier with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Carrier CARRIER_FEDEX_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error - Failed to retrieve carrier metrics", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Failed to get carrier metrics: Database connection failed", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers": { + post: { + tags: [ + "TMS" + ], + summary: "Create new inbound trailer", + description: "\n## Create TMS Inbound Trailer\n\nCreate a new inbound trailer record for dock appointment scheduling and warehouse operations management.\n\n### Features\n- **Complete Trailer Profile**: Comprehensive carrier, cargo, and facility information\n- **Appointment Scheduling**: Initial scheduling data with flexible timing options\n- **Status Management**: Automatic status tracking throughout the trailer lifecycle\n- **Cargo Documentation**: Purchase orders, expected freight, and compliance details\n- **Driver Information**: Contact details for communication and coordination\n\n### Trailer Lifecycle Status Flow\n1. **SCHEDULED**: Initial creation with appointment details\n2. **EN_ROUTE**: Carrier confirms trailer is in transit to facility\n3. **CHECKED_IN**: Trailer arrives and checks in at facility gate\n4. **AT_DOCK**: Assigned to specific dock door for operations\n5. **UNLOADING**: Active unloading process in progress\n6. **UNLOADED**: Unloading completed, ready for departure\n7. **DEPARTED**: Trailer has left the facility\n8. **CANCELLED**: Appointment cancelled\n9. 
**DELAYED**: Delayed arrival reported\n\n### Required Information\n- **trailerNumber**: Physical trailer identification (license plate/number)\n- **appointmentInfo.scheduledArrival**: Required arrival date and time\n- **facilityInfo.dcId**: Target distribution center identifier\n\n### Business Rules\n- trailerId is auto-generated with unique identifier if not provided\n- scheduledArrival is mandatory for all trailer appointments\n- trailerNumber should be unique within the carrier's fleet\n- Status automatically defaults to SCHEDULED upon creation\n- appointmentInfo, facilityInfo, and trailerNumber are required fields\n- carrierInfo and cargo details are optional but recommended for operations\n ", + operationId: "createTMSInboundTrailer", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + trailerId: { + type: "string", + description: "Unique trailer identifier (auto-generated if not provided)", + example: "TRAILER_001" + }, + trailerNumber: { + type: "string", + description: "Physical trailer number/license plate", + example: "TR-12345" + }, + carrierInfo: { + type: "object", + properties: { + carrierId: { + type: "string", + description: "Associated carrier identifier", + example: "CARRIER_FEDEX_001" + }, + carrierName: { + type: "string", + description: "Carrier company name", + example: "FedEx Corporation" + }, + driverName: { + type: "string", + description: "Driver full name", + example: "John Smith" + }, + driverPhone: { + type: "string", + description: "Driver contact phone", + example: "+1-555-123-4567" + } + } + }, + appointmentInfo: { + type: "object", + properties: { + scheduledArrival: { + type: "string", + format: "date-time", + description: "Scheduled arrival date and time", + example: "2024-01-20T08:00:00.000Z" + }, + scheduledDeparture: { + type: "string", + format: "date-time", + description: "Scheduled departure time", + example: "2024-01-20T16:00:00.000Z" + }, + dockDoor: { + type: "string", + description: "Assigned dock door", + example: "DOCK-A-001" + } + }, + required: [ + "scheduledArrival" + ] + }, + facilityInfo: { + type: "object", + properties: { + dcId: { + type: "string", + description: "Distribution center identifier", + example: "DC_ATL_001" + }, + facilityName: { + type: "string", + description: "Facility name", + example: "Atlanta Distribution Center" + } + }, + required: [ + "dcId" + ] + }, + cargo: { + type: "object", + properties: { + purchaseOrders: { + type: "array", + items: { + type: "string" + }, + description: "Associated purchase order numbers", + example: [ + "PO-2024-001", + "PO-2024-002" + ] + }, + expectedPallets: { + type: "integer", + description: "Expected number of pallets", + example: 20 + }, + trailerType: { + type: "string", + enum: [ + "DRY_VAN", + "REEFER", + "FLATBED", + "TANKER", + "INTERMODAL" + ], + description: "Type of trailer equipment", + example: "DRY_VAN" + }, + sealNumber: { + type: "string", + description: "Trailer seal number for security", + example: "SEAL-789456" + } + } + } + }, + required: [ + "trailerNumber", + "appointmentInfo", + "facilityInfo" + ] + } + } + } + }, + responses: { + "201": { + description: "Inbound trailer created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: 
{ + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid data or missing required fields in trailer creation", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "TMS" + ], + summary: "Get trailers by status with filtering", + description: "\n## Get TMS Trailers by Status\n\nRetrieve inbound trailers based on their operational status with comprehensive filtering and pagination capabilities.\n\n### Features\n- **Multi-Status Filtering**: Filter by one or multiple status values using comma-separated list\n- **Date Range Filtering**: Filter by scheduled arrival date ranges for time-based queries\n- **Facility Filtering**: Filter by specific distribution center for location-based operations\n- **Carrier Filtering**: Filter by carrier ID for carrier-specific views\n- **Cursor Pagination**: Efficient pagination handling for large datasets\n- **Sorted Results**: Results automatically ordered by scheduled arrival time\n- **Flexible Queries**: Combine multiple filter parameters for precise results\n\n### Available Status Values\n- **SCHEDULED**: Trailer appointment scheduled and confirmed\n- **EN_ROUTE**: In transit to facility (carrier reported)\n- **CHECKED_IN**: Arrived at facility and checked in at gate\n- **AT_DOCK**: Assigned to dock door and positioned for unloading\n- **UNLOADING**: Active unloading process in progress\n- **UNLOADED**: Unloading completed, ready for departure\n- **DEPARTED**: Trailer has left the facility\n- **CANCELLED**: Appointment cancelled by carrier or facility\n- **DELAYED**: Delayed arrival reported with updated ETA\n\n### Query Parameters\n- **status**: Required - Single status or comma-separated list of statuses\n- **dcId**: Optional - Distribution center identifier for facility-specific filtering\n- **carrierId**: Optional - Carrier identifier for carrier-specific filtering\n- **startDate**: Optional - Start date for scheduled arrival filtering\n- **endDate**: Optional - End date for scheduled arrival filtering\n- **limit**: Optional - Maximum results per page (default: 50, max: 500)\n- **cursor**: Optional - Pagination cursor for efficient large dataset handling\n\n### Use Cases\n- **Dock Scheduling**: View trailers by status for dock assignment planning\n- **Driver Communication**: Find trailers needing driver notifications\n- **Operational Monitoring**: Real-time status tracking and performance monitoring\n- **Analytics & Reporting**: Generate status-based reports and KPI dashboards\n- **Facility Management**: Track trailer flow through facility operations\n ", + operationId: "getTMSInboundTrailersByStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: 
"query", + required: true, + description: "Comma-separated list of status values to filter by", + schema: { + type: "string", + example: "SCHEDULED,EN_ROUTE,CHECKED_IN" + } + }, + { + name: "carrierId", + in: "query", + required: false, + description: "Filter by specific carrier", + schema: { + type: "string", + example: "CARRIER_FEDEX_001" + } + }, + { + name: "dcId", + in: "query", + required: false, + description: "Filter by distribution center", + schema: { + type: "string", + example: "DC_ATL_001" + } + }, + { + name: "from", + in: "query", + required: false, + description: "Start date for scheduled arrival filtering", + schema: { + type: "string", + format: "date-time", + example: "2024-01-20T00:00:00.000Z" + } + }, + { + name: "to", + in: "query", + required: false, + description: "End date for scheduled arrival filtering", + schema: { + type: "string", + format: "date-time", + example: "2024-01-20T23:59:59.000Z" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return", + schema: { + type: "integer", + minimum: 1, + maximum: 500, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Trailers retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 127 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: true + }, + nextCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439025" + }, + previousCursor: { + type: "string", + nullable: true, + example: "507f1f77bcf86cd799439015" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required status query parameter", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status query parameter is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/appointments": { + get: { + tags: [ + "TMS" + ], + summary: "Get trailers by appointment date", + description: "\n## Get Trailers by Appointment Date\n\nRetrieve all trailers scheduled for appointments on a specific date at a distribution center, organized by appointment time for daily operations planning.\n\n### Features\n- **Daily Scheduling View**: Complete view of all trailers for specific appointment dates\n- **Facility-Specific Filtering**: Results filtered by distribution center identifier\n- **Chronological Organization**: Results automatically ordered by scheduled arrival time\n- **Date Flexibility**: Defaults to current date if appointmentDate not specified\n- **Operational Planning**: Support daily dock scheduling and resource allocation\n- **Efficient Pagination**: Handle large appointment volumes with cursor-based pagination\n\n### Query Parameters\n- **dcId**: Required - Distribution center identifier for facility-specific results\n- **appointmentDate**: Optional - Target appointment date (defaults to current 
date)\n- **limit**: Optional - Maximum results per page (default: 50, max: 500)\n- **cursor**: Optional - Pagination cursor for efficient data retrieval\n\n### Business Logic\n- Filters trailers by scheduledArrival date matching the requested appointment date\n- Results include all trailer statuses for comprehensive daily view\n- Appointments are sorted chronologically by scheduled arrival time\n- Supports same-day, future date, and historical date queries\n- Returns complete trailer details including status, carrier, and cargo information\n\n### Use Cases\n- **Daily Operations**: Comprehensive daily dock scheduling and coordination\n- **Driver Coordination**: Communicate with drivers for scheduled appointments\n- **Capacity Planning**: Analyze and plan daily dock resource allocation\n- **Real-time Monitoring**: Track appointment progress throughout the day\n- **Operations Dashboard**: Display daily appointment schedules and status\n- **Historical Analysis**: Review past appointment performance and patterns\n ", + operationId: "getTMSTrailersByAppointmentDate", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "dcId", + in: "query", + required: true, + description: "Distribution center identifier", + schema: { + type: "string", + example: "DC_ATL_001" + } + }, + { + name: "appointmentDate", + in: "query", + required: false, + description: "Appointment date (defaults to current date)", + schema: { + type: "string", + format: "date-time", + example: "2024-01-20T00:00:00.000Z" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results to return", + schema: { + type: "integer", + minimum: 1, + maximum: 500, + example: 50 + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Trailers by appointment date retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 15 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + nullable: true, + example: null + }, + previousCursor: { + type: "string", + nullable: true, + example: null + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required dcId query parameter", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "dcId query parameter is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}": { + 
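// Illustrative client sketch (not part of the generated spec): pulling one day's scheduled
// trailers for a distribution center via the appointments endpoint documented above.
// The base URL, the dcId value, a global fetch (Bun / Node 18+), and the { data, pagination }
// envelope are assumptions taken from the response examples.
async function getTrailerAppointmentsForDay(baseUrl: string, worldId: string, dcId: string, day: Date) {
  const qs = new URLSearchParams({
    dcId,
    appointmentDate: day.toISOString(), // defaults to the current date when omitted
    limit: "50",
  });
  const res = await fetch(`${baseUrl}/${encodeURIComponent(worldId)}/tms/trailers/appointments?${qs}`);
  if (!res.ok) throw new Error(`Appointment lookup failed with HTTP ${res.status}`);
  const body = await res.json();
  return body.data; // TMSInboundTrailer[] ordered by scheduled arrival
}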
get: { + tags: [ + "TMS" + ], + summary: "Get trailer by ID", + description: "\n## Get TMS Trailer by ID\n\nRetrieve complete trailer information using the unique trailer identifier for detailed operations management.\n\n### Features\n- **Complete Trailer Profile**: Full trailer details including identification, status, and operational data\n- **Carrier Information**: Comprehensive carrier and driver contact details for coordination\n- **Cargo Manifest**: Purchase orders, expected deliveries, and freight specifications\n- **Appointment Details**: Scheduling information, dock assignments, and timing data\n- **Status Tracking**: Current operational status and historical progression\n- **Facility Assignment**: Distribution center and dock door information\n\n### Response Data Includes\n- **Identification**: Trailer ID, trailer number, and system identifiers\n- **Status**: Current operational status and last update timestamp\n- **Carrier Details**: Carrier ID, company name, driver name and contact\n- **Appointment Info**: Scheduled/actual arrival/departure, dock assignments\n- **Facility Info**: Distribution center details and address information\n- **Cargo**: Purchase orders, pallet counts, trailer type, seal numbers\n- **Operational Timestamps**: Created, updated, and status change times\n\n### Use Cases\n- **Status Verification**: Check current trailer status and location\n- **Driver Coordination**: Access driver contact information for communication\n- **Dock Planning**: Review appointment details for dock scheduling\n- **Cargo Tracking**: Verify expected deliveries and purchase orders\n- **Operational Monitoring**: Track trailer progress through facility operations\n ", + operationId: "getTMSInboundTrailerById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + responses: { + "200": { + description: "Trailer retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "404": { + description: "Not Found - Trailer with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inbound trailer TRAILER_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Internal server error", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}/schedule": { + post: { + tags: [ + "TMS" + ], + summary: "Schedule trailer appointment", + description: "\n## Schedule TMS Trailer Appointment\n\nSchedule or reschedule a trailer appointment with dock assignment and timing details.\n\n### Features\n- **Appointment Management**: Schedule arrival and departure times\n- **Dock Assignment**: Assign specific 
dock doors for operations\n- **Facility Coordination**: Link with distribution center operations\n- **Status Update**: Automatically updates trailer status to SCHEDULED\n- **Validation**: Ensures trailer is in valid status for scheduling\n\n### Business Rules\n- Trailer must be in SCHEDULED or EN_ROUTE status\n- Scheduled arrival time is required\n- Distribution center ID must be provided\n- Appointment ID is auto-generated if not provided\n- Dock door assignment is optional during scheduling\n\n### Workflow Integration\n- Updates trailer status to SCHEDULED\n- Triggers facility notification systems\n- Updates capacity planning calculations\n- Enables driver communication workflows\n ", + operationId: "scheduleTMSTrailerAppointment", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + appointmentId: { + type: "string", + description: "Appointment identifier (auto-generated if not provided)", + example: "APPT-ATL-001" + }, + scheduledArrival: { + type: "string", + format: "date-time", + description: "Scheduled arrival date and time", + example: "2024-01-20T08:00:00.000Z" + }, + scheduledDeparture: { + type: "string", + format: "date-time", + description: "Scheduled departure time", + example: "2024-01-20T16:00:00.000Z" + }, + dockDoor: { + type: "string", + description: "Assigned dock door", + example: "DOCK-A-001" + }, + dcId: { + type: "string", + description: "Distribution center identifier", + example: "DC_ATL_001" + }, + facilityName: { + type: "string", + description: "Facility name for driver reference", + example: "Atlanta Distribution Center" + } + }, + required: [ + "scheduledArrival", + "dcId" + ] + } + } + } + }, + responses: { + "200": { + description: "Trailer appointment scheduled successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "404": { + description: "Not Found - Trailer not found or not in valid status for scheduling", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Trailer TRAILER_001 not found or not in valid status for scheduling", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}/status": { + put: { + tags: [ + "TMS" + ], + summary: "Update trailer status", + description: "\n## Update TMS Trailer Status\n\nUpdate the operational status of a trailer with comprehensive timing and location data for complete lifecycle tracking.\n\n### Features\n- **Status Management**: Control trailer operational state throughout the facility workflow\n- **Timing Updates**: Record actual arrival/departure times with precision\n- **Location Tracking**: Update and manage dock door assignments\n- **Workflow Integration**: Automatically trigger downstream facility processes\n- **Audit Trail**: Maintain complete history of all 
status changes with timestamps\n- **Data Validation**: Ensure status transitions follow business rules\n\n### Valid Status Transitions\n- **SCHEDULED** → **EN_ROUTE**: Carrier confirms trailer has departed for facility\n- **EN_ROUTE** → **CHECKED_IN**: Trailer arrives and checks in at facility gate\n- **CHECKED_IN** → **AT_DOCK**: Trailer assigned and positioned at dock door\n- **AT_DOCK** → **UNLOADING**: Begin active unloading operations\n- **UNLOADING** → **UNLOADED**: Complete unloading process\n- **UNLOADED** → **DEPARTED**: Trailer exits facility\n- **Any Status** → **CANCELLED**: Appointment cancelled\n- **Any Status** → **DELAYED**: Delayed arrival reported\n\n### Optional Timing Updates\n- **actualArrival**: Record precise arrival time for performance tracking\n- **actualDeparture**: Record departure time for turnaround analysis\n- **estimatedArrival**: Update arrival estimate for planning adjustments\n- **dockDoor**: Assign or reassign dock door for operational flexibility\n\n### Business Rules\n- Status field is required for all updates\n- Timing fields must be valid ISO 8601 date-time strings\n- Dock door assignments must reference valid dock identifiers\n- Status transitions are logged for audit compliance\n ", + operationId: "updateTMSTrailerStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "SCHEDULED", + "EN_ROUTE", + "CHECKED_IN", + "AT_DOCK", + "UNLOADING", + "UNLOADED", + "DEPARTED", + "CANCELLED", + "DELAYED" + ], + description: "New trailer status", + example: "CHECKED_IN" + }, + actualArrival: { + type: "string", + format: "date-time", + description: "Actual arrival timestamp", + example: "2024-01-20T08:15:00.000Z" + }, + actualDeparture: { + type: "string", + format: "date-time", + description: "Actual departure timestamp", + example: "2024-01-20T17:30:00.000Z" + }, + estimatedArrival: { + type: "string", + format: "date-time", + description: "Updated estimated arrival", + example: "2024-01-20T08:30:00.000Z" + }, + dockDoor: { + type: "string", + description: "Assigned dock door", + example: "DOCK-A-001" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Trailer status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status value or missing required data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Status is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Trailer with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: 
"Trailer TRAILER_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}/checkin": { + post: { + tags: [ + "TMS" + ], + summary: "Check-in trailer at facility", + description: "\n## Check-in TMS Trailer\n\nRegister trailer arrival at the facility and complete the check-in process with comprehensive verification and documentation.\n\n### Features\n- **Arrival Registration**: Record precise actual arrival time for performance tracking\n- **Driver Verification**: Capture and validate driver information and contact details\n- **Security Compliance**: Record and verify seal numbers and trailer condition\n- **Optional Dock Assignment**: Assign dock door during check-in if available\n- **Automatic Status Update**: Updates trailer status to CHECKED_IN\n- **Notification Integration**: Trigger alerts to operations team and stakeholders\n\n### Check-in Process Workflow\n1. **Identity Verification**: Verify trailer number against scheduled appointment\n2. **Arrival Documentation**: Record actual arrival time with timestamp precision\n3. **Driver Information**: Capture driver name and contact information for coordination\n4. **Security Check**: Verify seal integrity and record seal number for compliance\n5. **Dock Assignment**: Optionally assign dock door if available and ready\n6. **Status Update**: Update trailer status and notify relevant teams\n7. **Workflow Trigger**: Enable next phase of facility operations\n\n### Required Information\n- **actualArrival**: Precise timestamp when trailer arrived at facility (required)\n\n### Optional Information\n- **driverName**: Full name of the driver for contact and coordination\n- **driverPhone**: Driver contact number for communication\n- **sealNumber**: Trailer seal number for security verification and compliance\n- **dockDoor**: Dock door assignment if available during check-in\n\n### Business Rules\n- actualArrival timestamp is mandatory for all check-ins\n- Driver information updates carrier records for communication\n- Seal verification supports security and compliance requirements\n- Dock assignment during check-in is optional but recommended for efficiency\n ", + operationId: "checkInTMSTrailer", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + actualArrival: { + type: "string", + format: "date-time", + description: "Actual arrival timestamp", + example: "2024-01-20T08:15:00.000Z" + }, + driverName: { + type: "string", + description: "Driver full name", + example: "John Smith" + }, + driverPhone: { + type: "string", + description: "Driver contact phone number", + example: "+1-555-123-4567" + }, + sealNumber: { + type: "string", + description: "Trailer seal number", + example: "SEAL-789456" + }, + dockDoor: { + type: "string", + description: "Assigned dock door", + example: "DOCK-A-001" + } + }, + required: [ + "actualArrival" + ] + } + } + } + }, + responses: { + "200": { + description: "Trailer checked in successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { 
+ type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required check-in data including actualArrival", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Check-in data with actualArrival is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Trailer not found or not in valid status for check-in", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Trailer TRAILER_001 not found or not in valid status for check-in", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}/assign-dock": { + post: { + tags: [ + "TMS" + ], + summary: "Assign trailer to dock door", + description: "\n## Assign TMS Trailer to Dock Door\n\nAssign a checked-in trailer to a specific dock door for unloading operations with comprehensive validation and workflow integration.\n\n### Features\n- **Dock Door Assignment**: Assign specific dock door based on availability and operational needs\n- **Automatic Status Update**: Updates trailer status from CHECKED_IN to AT_DOCK\n- **Resource Management**: Coordinate dock door availability and prevent conflicts\n- **Operational Flow Integration**: Enable seamless transition to unloading workflow\n- **Real-time Location Tracking**: Update trailer location for operational visibility\n- **Notification System**: Alert operations team and driver of dock assignment\n\n### Assignment Process Workflow\n1. **Status Verification**: Confirm trailer is in CHECKED_IN status\n2. **Dock Availability**: Validate requested dock door is available for assignment\n3. **Conflict Prevention**: Ensure no existing trailers assigned to same dock\n4. **Assignment Execution**: Assign trailer to specified dock door\n5. **Status Update**: Update trailer status to AT_DOCK automatically\n6. **Team Notification**: Notify operations team and driver of assignment\n7. 
**Workflow Enablement**: Enable unloading process workflow and resource preparation\n\n### Business Rules\n- **Status Requirement**: Trailer must be in CHECKED_IN status for dock assignment\n- **Dock Availability**: Specified dock door must be available and not occupied\n- **Single Assignment**: Each trailer can only be assigned to one dock at a time\n- **Status Progression**: Assignment automatically advances trailer to AT_DOCK status\n- **Resource Coordination**: Assignment updates dock utilization and capacity planning\n\n### Required Information\n- **dockDoor**: Valid dock door identifier (must reference existing dock)\n\n### Use Cases\n- **Operations Planning**: Assign trailers to optimize dock utilization\n- **Workflow Management**: Progress trailers through facility operations\n- **Resource Coordination**: Manage dock door assignments and availability\n- **Performance Tracking**: Monitor trailer flow and dock efficiency\n ", + operationId: "assignTMSTrailerToDock", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + dockDoor: { + type: "string", + description: "Dock door identifier for assignment", + example: "DOCK-A-001" + } + }, + required: [ + "dockDoor" + ] + } + } + } + }, + responses: { + "200": { + description: "Trailer assigned to dock successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required dock door assignment", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "dockDoor is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Trailer with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Trailer TRAILER_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}/start-unloading": { + post: { + tags: [ + "TMS" + ], + summary: "Start trailer unloading process", + description: "\n## Start TMS Trailer Unloading\n\nInitiate the formal unloading process for a trailer assigned to a dock door with comprehensive workflow integration and resource coordination.\n\n### Features\n- **Process Initiation**: Begin formal unloading operations with proper documentation\n- **Automatic Status Update**: Updates trailer status from AT_DOCK to UNLOADING\n- **Time Tracking**: Automatically record unloading start time for performance metrics\n- **Resource Coordination**: Signal dock crew assignment and equipment allocation\n- **Workflow Integration**: Enable real-time progress tracking and completion monitoring\n- **Safety Protocols**: Ensure proper safety procedures are 
followed before starting\n\n### Unloading Process Workflow\n1. **Pre-Start Verification**: Confirm trailer is properly positioned at dock (AT_DOCK status)\n2. **Safety Check**: Verify dock crew safety protocols and equipment readiness\n3. **Resource Assignment**: Confirm dock crew availability and equipment allocation\n4. **Process Initiation**: Start formal unloading operations\n5. **Status Update**: Update trailer status to UNLOADING automatically\n6. **Time Logging**: Record start time for performance tracking and analytics\n7. **Progress Enablement**: Enable real-time progress monitoring and completion workflow\n\n### Business Rules\n- **Status Requirement**: Trailer must be in AT_DOCK status to start unloading\n- **Dock Assignment**: Valid dock door assignment is required\n- **Automatic Timestamping**: System automatically records start time\n- **Single Operation**: Only one trailer can be unloading per dock at a time\n- **Workflow Progression**: Starting unloading enables completion tracking workflow\n- **Safety Compliance**: Must meet safety requirements before operations begin\n\n### No Request Body Required\n- System automatically captures start time and updates status\n- All required information derived from current trailer state\n\n### Use Cases\n- **Operations Management**: Track unloading operations across facility\n- **Performance Analytics**: Measure unloading start times and efficiency\n- **Resource Planning**: Coordinate dock crew and equipment assignments\n- **Safety Compliance**: Ensure proper operational procedures are followed\n ", + operationId: "startTMSTrailerUnloading", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + responses: { + "200": { + description: "Trailer unloading started successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "404": { + description: "Not Found - Trailer not found or not in valid status for unloading", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Trailer TRAILER_001 not found or not in valid status for unloading", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}/complete-unloading": { + post: { + tags: [ + "TMS" + ], + summary: "Complete trailer unloading process", + description: "\n## Complete TMS Trailer Unloading\n\nFinalize unloading operations with comprehensive verification, count reconciliation, and timing documentation to enable departure workflow.\n\n### Features\n- **Process Finalization**: Complete and close unloading operations with proper documentation\n- **Automatic Status Update**: Updates trailer status from UNLOADING to UNLOADED\n- **Count Reconciliation**: Record and verify actual pallet counts against expected quantities\n- **Precision Time Recording**: Capture exact completion timestamp for performance analytics\n- **Quality Verification**: Ensure all cargo has been 
properly unloaded and verified\n- **Departure Enablement**: Automatically enable trailer departure workflow\n\n### Completion Process Workflow\n1. **Final Verification**: Confirm all cargo has been unloaded from trailer\n2. **Count Reconciliation**: Count actual pallets received and verify against manifest\n3. **Quality Check**: Inspect cargo condition and document any discrepancies\n4. **Documentation**: Complete unloading documentation and paperwork\n5. **Time Recording**: Capture precise completion timestamp\n6. **Status Update**: Update trailer status to UNLOADED automatically\n7. **Departure Preparation**: Enable departure workflow and dock release\n\n### Required Information\n- **completionTime**: Precise timestamp when unloading operations were completed (required)\n\n### Optional Information\n- **actualPallets**: Actual number of pallets received for reconciliation (recommended)\n\n### Business Rules\n- **Status Requirement**: Trailer must be in UNLOADING status to complete\n- **Completion Time**: Precise timestamp is mandatory for performance tracking\n- **Count Reconciliation**: Actual pallet count helps with inventory accuracy\n- **Workflow Progression**: Completion automatically enables departure workflow\n- **Documentation**: Completion triggers generation of unloading documentation\n- **Performance Tracking**: Completion data feeds into operational analytics\n\n### Use Cases\n- **Inventory Management**: Reconcile received quantities with expected deliveries\n- **Performance Analytics**: Track unloading completion times and efficiency\n- **Quality Control**: Document cargo condition and any issues\n- **Departure Coordination**: Enable timely trailer departure and dock availability\n ", + operationId: "completeTMSTrailerUnloading", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + completionTime: { + type: "string", + format: "date-time", + description: "Timestamp when unloading was completed", + example: "2024-01-20T15:30:00.000Z" + }, + actualPallets: { + type: "integer", + description: "Actual number of pallets received", + example: 18 + } + }, + required: [ + "completionTime" + ] + } + } + } + }, + responses: { + "200": { + description: "Trailer unloading completed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required completion data including completionTime", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Completion data with completionTime is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Trailer not found or not in valid status for completion", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { 
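+                      // For illustration only (assembled from the field examples above, not an
+                      // exhaustive request): completing unloading for this trailer might look like
+                      //   POST /{worldId}/tms/trailers/TRAILER_001/complete-unloading
+                      //   { "completionTime": "2024-01-20T15:30:00.000Z", "actualPallets": 18 }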
+ success: false, + status: 404, + error: "Trailer TRAILER_001 not found or not in valid status for completion", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/trailers/{trailerId}/delays": { + post: { + tags: [ + "TMS" + ], + summary: "Add delay information to trailer", + description: "\n## Add TMS Trailer Delay\n\nRecord and track delay information for trailers to monitor operational disruptions, support proactive communication, and enable data-driven delay prevention strategies.\n\n### Features\n- **Comprehensive Delay Tracking**: Record various types of operational delays with detailed categorization\n- **Impact Assessment**: Capture estimated delay duration for planning adjustments\n- **Root Cause Analysis**: Categorize delay reasons for trend analysis and prevention\n- **Proactive Communication**: Support customer and stakeholder notification workflows\n- **Historical Analytics**: Enable delay pattern analysis and operational improvements\n- **Multiple Delay Support**: Track multiple delays for single trailer if needed\n\n### Delay Type Categories\n- **TRAFFIC**: Traffic congestion, road construction, or transportation infrastructure issues\n- **WEATHER**: Weather-related delays including storms, snow, ice, or extreme conditions\n- **CARRIER**: Carrier operational issues such as equipment failure or driver availability\n- **DOCK_AVAILABILITY**: Facility dock scheduling conflicts or capacity constraints\n- **MECHANICAL**: Vehicle or equipment mechanical issues requiring repair\n- **REGULATORY**: Inspection delays, permit issues, or regulatory compliance holds\n- **OTHER**: Other operational disruptions not covered by standard categories\n\n### Delay Information Requirements\n- **delayType**: Required - Standardized categorization for analytics and reporting\n- **reason**: Required - Detailed explanation for communication and analysis\n- **reportedAt**: Required - Timestamp when delay was first identified and reported\n- **estimatedDelay**: Optional - Expected delay duration in minutes for planning\n\n### Business Rules\n- **Multiple Delays**: Multiple delay records can be added to same trailer for complex situations\n- **Independent Tracking**: Each delay is timestamped and tracked independently\n- **Cumulative Impact**: Multiple delays contribute to overall arrival time adjustments\n- **Status Integration**: Delays may trigger automatic status updates (DELAYED)\n- **Historical Preservation**: All delay records maintained for analytics and audit\n- **Communication Triggers**: Delay addition can trigger automated notification workflows\n\n### Use Cases\n- **Operational Visibility**: Real-time tracking of trailer delays and impacts\n- **Customer Communication**: Proactive notification of delivery delays\n- **Performance Analytics**: Analyze delay patterns and identify improvement opportunities\n- **Resource Planning**: Adjust dock schedules and resource allocation based on delays\n- **Carrier Performance**: Track carrier reliability and delay frequency\n ", + operationId: "addTMSTrailerDelay", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "trailerId", + in: "path", + required: true, + description: "Unique trailer identifier", + schema: { + type: "string", + example: "TRAILER_001" + } + } + ], + requestBody: { + required: true, + content: { + 
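+            // For illustration only (values taken from the schema examples below): a delay
+            // report for this endpoint might be posted as
+            //   POST /{worldId}/tms/trailers/TRAILER_001/delays
+            //   { "delayType": "TRAFFIC", "reason": "Heavy traffic on I-75 due to construction",
+            //     "reportedAt": "2024-01-20T07:15:00.000Z", "estimatedDelay": 45 }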
"application/json": { + schema: { + type: "object", + properties: { + delayType: { + type: "string", + enum: [ + "TRAFFIC", + "WEATHER", + "CARRIER", + "DOCK_AVAILABILITY", + "MECHANICAL", + "REGULATORY", + "OTHER" + ], + description: "Type of delay encountered", + example: "TRAFFIC" + }, + reason: { + type: "string", + description: "Detailed reason for the delay", + example: "Heavy traffic on I-75 due to construction" + }, + reportedAt: { + type: "string", + format: "date-time", + description: "When the delay was first reported", + example: "2024-01-20T07:15:00.000Z" + }, + estimatedDelay: { + type: "integer", + description: "Estimated delay duration in minutes", + example: 45 + } + }, + required: [ + "delayType", + "reason", + "reportedAt" + ] + } + } + } + }, + responses: { + "200": { + description: "Delay information added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/TMSInboundTrailer" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required delay data fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Delay data with delayType, reason, and reportedAt are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Trailer with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Trailer TRAILER_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/tms/dock-doors/available": { + get: { + tags: [ + "TMS" + ], + summary: "Get available dock doors for time period", + description: "\n## Get Available TMS Dock Doors\n\nRetrieve available dock doors for a specific distribution center and time period to support appointment scheduling and dock capacity management.\n\n### Features\n- **Real-Time Availability**: Find open dock doors based on current schedules\n- **Time Window Analysis**: Check availability for specific time ranges\n- **Conflict Detection**: Automatically exclude doors occupied by existing appointments\n- **Capacity Planning**: Support resource allocation and scheduling decisions\n- **Standards Compliance**: Returns standardized dock door identifiers\n\n### Business Logic\n- Analyzes trailers with status AT_DOCK or UNLOADING within the time window\n- Considers scheduled arrival and departure time overlaps\n- Handles edge cases for appointments without departure times (assumes 8-hour default)\n- Returns dock door identifiers in standard format (DOCK-01 through DOCK-20)\n- Excludes doors with active or overlapping appointments\n\n### Query Parameters\n- **dcId**: Required - Distribution center identifier for facility-specific availability\n- **startTime**: Required - Beginning of availability window in ISO 8601 format\n- **endTime**: Required - End of availability window in ISO 8601 format\n\n### Use Cases\n- **Appointment Scheduling**: Find available slots during trailer booking\n- **Dock Capacity Management**: Monitor and plan dock utilization\n- **Resource Allocation**: Optimize dock assignments for operational efficiency\n- **Real-time Dashboards**: Display current 
availability status\n- **Conflict Prevention**: Avoid double-booking dock resources\n ", + operationId: "getTMSAvailableDockDoors", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "dcId", + in: "query", + required: true, + description: "Distribution center identifier", + schema: { + type: "string", + example: "DC_ATL_001" + } + }, + { + name: "startTime", + in: "query", + required: true, + description: "Start time for availability window", + schema: { + type: "string", + format: "date-time", + example: "2024-01-20T08:00:00.000Z" + } + }, + { + name: "endTime", + in: "query", + required: true, + description: "End time for availability window", + schema: { + type: "string", + format: "date-time", + example: "2024-01-20T18:00:00.000Z" + } + } + ], + responses: { + "200": { + description: "Available dock doors retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + type: "string", + description: "Available dock door identifier", + example: "DOCK-01" + }, + description: "List of available dock door identifiers" + } + } + }, + example: { + success: true, + status: 200, + data: [ + "DOCK-01", + "DOCK-02", + "DOCK-05", + "DOCK-07", + "DOCK-10", + "DOCK-12", + "DOCK-15", + "DOCK-18", + "DOCK-19", + "DOCK-20" + ] + } + } + } + }, + "400": { + description: "Bad Request - Missing required dcId, startTime, or endTime parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "dcId, startTime, and endTime query parameters are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "500": { + description: "Internal Server Error - Failed to retrieve available dock doors", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "Failed to get available dock doors: Database connection failed", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/verification.paths.ts b/packages/controlmart/src/docs/paths/verification.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..ff4b450ea00fab16368226042a8f6e0bc4faae29 --- /dev/null +++ b/packages/controlmart/src/docs/paths/verification.paths.ts @@ -0,0 +1,180 @@ + +export const verificationPaths = { + "/{worldId}/verification/verify-ticket": { + post: { + tags: ["Verification"], + summary: "Forensic Verification of a Ticket", + description: + "Executes the verification engine against a specific historical FAILURE TICKET. Use this to analyze root causes of past failures. 
It re-evaluates the invariant checks in the context of the *current* world state, focusing on the entities linked to the original ticket.", + operationId: "verifyTicket", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the business world (tenant) where the verification runs.", + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: ["ticketId"], + properties: { + ticketId: { + type: "string", + description: "The ID of the ITSM ticket representing the failure to analyze.", + example: "697c66182c864f4be38f0a1e" + } + } + } + } + } + }, + responses: { + "200": { + description: "Verification report generated successfully.", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/VerificationResult" + } + } + } + }, + "404": { + description: "Ticket not found.", + content: { + "application/json": { + schema: { + type: "object", + properties: { + error: { type: "string", example: "Ticket not found" } + } + } + } + } + }, + "500": { + description: "Internal verification engine failure.", + content: { + "application/json": { + schema: { + type: "object", + properties: { + error: { type: "string" }, + details: { type: "string" } + } + } + } + } + } + } + } + }, + + "/{worldId}/verification/verify-entity": { + post: { + tags: ["Verification"], + summary: "On-Demand Process Verification", + description: + "Executes the verification engine on demand against a specific LIVE ENTITY to confirm process health. Use this after applying fixes to verify that a specific Order, Task, or Workflow is now valid. This creates a virtual verification context and does *not* require an existing failure ticket.", + operationId: "verifyEntity", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the business world (tenant).", + schema: { + type: "string", + example: "550e8400-e29b-41d4-a716-446655440000" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: ["odId"], + properties: { + odId: { + type: "string", + description: "The Operational Descriptor (OD) ID representing the process/workflow type.", + example: "outbound-category-flow" + }, + entityId: { + type: "string", + description: "The unique ID of the primary entity (e.g., Order ID, Shipment ID) to verify. Optional for some ODs.", + example: "wms:outbound-order:697c660f2c864f4be38f0a1e" + }, + entityType: { + type: "string", + description: "The type of the entity being verified. Defaults to 'ORDER'.", + default: "ORDER", + enum: ["ORDER", "SHIPMENT", "WAVE", "TASK", "INVENTORY"], + example: "ORDER" + }, + metadata: { + type: "object", + description: "Additional context or metadata to pass to the verifier.", + example: { + snapshotId: "snap-12345", + triggerReason: "manual-check" + } + } + } + } + } + } + }, + responses: { + "200": { + description: "Verification report generated successfully.", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/VerificationResult" + } + } + } + }, + "400": { + description: "Invalid input parameters (e.g., 
invalid OD ID).", + content: { + "application/json": { + schema: { + type: "object", + properties: { + error: { type: "string" } + } + } + } + } + }, + "500": { + description: "Internal verification engine failure.", + content: { + "application/json": { + schema: { + type: "object", + properties: { + error: { type: "string" }, + details: { type: "string" } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/cycleCount.wms.ts b/packages/controlmart/src/docs/paths/wms/cycleCount.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..619b8297b92aa2229e752c532feea60fb4c96c0a --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/cycleCount.wms.ts @@ -0,0 +1,1448 @@ +export const cycleCountPaths = { + "/{worldId}/wms/cycle-counts": { + post: { + tags: [ + "WMS" + ], + summary: "Create new cycle count", + description: "\n## Create WMS Cycle Count\n\nCreate a new cycle count for inventory accuracy verification with comprehensive scheduling, scope definition, and assignment capabilities.\n\n### Features\n- **Flexible Counting Types**: Support for daily, weekly, monthly, ABC, full, spot, and blind counts\n- **Precision Scheduling**: Define exact scheduling dates and execution windows\n- **Scope Configuration**: Target specific zones, bins, products, or ABC classifications\n- **User Assignment**: Assign specific users to count tasks with bin allocations\n- **Status Tracking**: Comprehensive status management throughout count lifecycle\n- **Result Aggregation**: Structured count results with variance analysis\n\n### Count Type Categories\n- **DAILY**: Daily cycle count operations\n- **WEEKLY**: Weekly scheduled cycle counts\n- **MONTHLY**: Monthly comprehensive counts\n- **ABC**: ABC classification-based counts\n- **FULL**: Full warehouse inventory counts\n- **SPOT**: Spot checks for specific items or locations\n- **BLIND**: Blind counts without system quantity display\n\n### Count Status Flow\n- **SCHEDULED**: Count scheduled and ready for execution\n- **IN_PROGRESS**: Count execution in progress\n- **COMPLETED**: Count execution completed\n- **APPROVED**: Count results approved\n- **REJECTED**: Count results rejected for recount\n- **CANCELLED**: Count cancelled before completion\n\n### Business Rules\n- cycleCountId is auto-generated with unique identifier if not provided\n- warehouseId is required for warehouse scoping\n- countType determines counting methodology and workflow\n- Scheduled date must be present or future date\n- Scope configuration defines count boundaries and targets\n- User assignments can be configured during creation or later\n ", + operationId: "createWMSCycleCount", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + cycleCountId: { + type: "string", + description: "Unique cycle count identifier (auto-generated if not provided)", + example: "CC_ATL_2024_001" + }, + warehouseId: { + type: "string", + description: "Target warehouse identifier", + example: "WH_ATL_001" + }, + countType: { + type: "string", + enum: [ + "DAILY", + "WEEKLY", + "MONTHLY", + "ABC", + "FULL", + "SPOT", + "BLIND" + ], + description: "Type of cycle count for methodology determination", + example: "ABC" + }, + countStatus: { + type: "string", + enum: [ + 
"SCHEDULED", + "IN_PROGRESS", + "COMPLETED", + "APPROVED", + "REJECTED", + "CANCELLED" + ], + description: "Initial count status (defaults to SCHEDULED)", + example: "SCHEDULED" + }, + schedule: { + type: "object", + properties: { + scheduledDate: { + type: "string", + format: "date-time", + description: "Date and time when count is scheduled to begin", + example: "2024-01-25T08:00:00.000Z" + }, + startDate: { + type: "string", + format: "date-time", + description: "Actual start date/time when count begins", + example: "2024-01-25T08:15:00.000Z" + }, + completedDate: { + type: "string", + format: "date-time", + description: "Date/time when count was completed", + example: "2024-01-25T16:30:00.000Z" + } + }, + required: [ + "scheduledDate" + ] + }, + scope: { + type: "object", + properties: { + zoneId: { + type: "string", + description: "Specific zone to include in count", + example: "ZONE_PICK_A" + }, + binIds: { + type: "array", + items: { + type: "string" + }, + description: "Specific bins to include in count", + example: [ + "BIN_ATL_A01_001", + "BIN_ATL_A01_002" + ] + }, + productIds: { + type: "array", + items: { + type: "string" + }, + description: "Specific products to count across warehouse", + example: [ + "PROD_12345", + "PROD_67890" + ] + }, + abcClassification: { + type: "string", + enum: [ + "A", + "B", + "C" + ], + description: "ABC classification for targeted counting", + example: "A" + } + } + }, + assignments: { + type: "array", + items: { + type: "object", + properties: { + userId: { + type: "string", + description: "User identifier for count assignment", + example: "USER_001" + }, + userName: { + type: "string", + description: "User display name for assignment tracking", + example: "John Smith" + }, + assignedBins: { + type: "array", + items: { + type: "string" + }, + description: "Specific bins assigned to this user", + example: [ + "BIN_ATL_A01_001", + "BIN_ATL_A01_002" + ] + }, + status: { + type: "string", + enum: [ + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED" + ], + description: "Status of user assignment", + example: "ASSIGNED" + } + }, + required: [ + "userId", + "userName" + ] + }, + description: "User assignments for count execution" + }, + notes: { + type: "string", + description: "Additional notes or instructions for count", + example: "Focus on high-value items in A classification" + }, + customFields: { + type: "object", + description: "Additional warehouse-specific count attributes", + example: { + priority: "HIGH", + countReason: "Quarterly ABC Analysis", + requiresApproval: true + } + } + }, + required: [ + "warehouseId", + "countType", + "schedule" + ] + } + } + } + }, + responses: { + "201": { + description: "Cycle count created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Cycle count created successfully" + }, + data: { + $ref: "#/components/schemas/WMSCycleCount" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid data or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "warehouseId, countType, and schedule are required", + meta: { event: "createWMSCycleCount", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/status": { + get: { + tags: [ + "WMS" + ], + 
summary: "Get cycle counts by status with filtering", + description: "\n## Get WMS Cycle Counts by Status\n\nRetrieve cycle counts filtered by status with comprehensive filtering capabilities for operational management and monitoring.\n\n### Features\n- **Multi-Status Filtering**: Filter by one or multiple status values using array syntax\n- **Warehouse Scoping**: Filter by specific warehouse for facility-focused operations\n- **Count Type Filtering**: Filter by count type for methodology-specific views\n- **Date Range Filtering**: Filter by date ranges for time-based analysis\n- **Cursor Pagination**: Efficient pagination for large datasets with cursor-based navigation\n- **Flexible Queries**: Combine multiple filter parameters for precise result sets\n\n### Query Parameters\n- **status**: Required - Single status or array of statuses for filtering\n- **warehouseId**: Optional - Filter by specific warehouse identifier\n- **countType**: Optional - Filter by count types (supports array of types)\n- **dateStart**: Optional - Start date for scheduled date filtering (ISO 8601 format)\n- **dateEnd**: Optional - End date for scheduled date filtering (ISO 8601 format)\n- **cursor**: Optional - Pagination cursor for efficient large dataset handling\n- **limit**: Optional - Maximum results per page (default: 50, max: 500)\n\n### Available Status Values\n- **SCHEDULED**: Counts scheduled and ready for execution\n- **IN_PROGRESS**: Counts currently being executed\n- **COMPLETED**: Count execution completed, pending approval\n- **APPROVED**: Count results approved and applied\n- **REJECTED**: Count results rejected, requiring recount\n- **CANCELLED**: Count cancelled before completion\n\n### Business Logic\n- Status parameter is required and supports multiple values\n- Date filtering applies to scheduled date field\n- Results sorted by scheduled date and creation time\n- Pagination preserves filter context across page requests\n- Returns complete count details including assignments and progress\n\n### Use Cases\n- **Operational Monitoring**: Track counts by status for operational oversight\n- **Workflow Management**: Manage count execution and approval workflows\n- **Performance Analysis**: Analyze count completion rates and timelines\n- **Resource Planning**: Plan resources based on scheduled and in-progress counts\n ", + operationId: "getWMSCycleCountsByStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: true, + description: "Filter by count status (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "SCHEDULED", + "IN_PROGRESS", + "COMPLETED", + "APPROVED", + "REJECTED", + "CANCELLED" + ] + }, + example: [ + "SCHEDULED", + "IN_PROGRESS" + ] + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter by specific warehouse identifier", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "countType", + in: "query", + required: false, + description: "Filter by count types (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "DAILY", + "WEEKLY", + "MONTHLY", + "ABC", + "FULL", + "SPOT", + "BLIND" + ] + }, + example: [ + "ABC", + "DAILY" + ] + } + }, + { + name: "dateStart", + in: "query", + required: false, + 
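+        // For illustration only (combining the query parameters documented for this operation):
+        //   GET /{worldId}/wms/cycle-counts/status?status=SCHEDULED,IN_PROGRESS&warehouseId=WH_ATL_001&limit=50
+        // dateStart/dateEnd below filter on the count's scheduled date and may be combined with
+        // the status, countType, warehouseId, cursor, and limit parameters.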
description: "Start date for scheduled date filtering", + schema: { + type: "string", + format: "date-time", + example: "2024-01-01T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for scheduled date filtering", + schema: { + type: "string", + format: "date-time", + example: "2024-01-31T23:59:59.999Z" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for efficient data retrieval", + schema: { + type: "string", + example: "eyJfaWQiOiI2NWE5M2Q4NjQyOWM0YzAwMTNiMWQ4YjUifQ==" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 500, + default: 50, + example: 50 + } + } + ], + responses: { + "200": { + description: "Cycle counts retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Cycle counts by status retrieved successfully" + }, + data: { + type: "object", + properties: { + counts: { + type: "array", + items: { + $ref: "#/components/schemas/WMSCycleCount" + } + }, + pagination: { + type: "object", + properties: { + totalCount: { + type: "integer", + example: 25 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + nullable: true, + example: null + }, + previousCursor: { + type: "string", + nullable: true, + example: null + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required status parameter", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status parameter is required", + meta: { event: "getWMSCycleCountsByStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/{cycleCountId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get cycle count by ID", + description: "\n## Get WMS Cycle Count by ID\n\nRetrieve detailed information for a specific cycle count including complete count results, assignments, and variance analysis.\n\n### Features\n- **Complete Count Profile**: Full cycle count details including scope, schedule, and assignments\n- **Count Results**: Detailed count results with expected vs. 
actual quantities\n- **Variance Analysis**: Comprehensive variance reporting and analysis\n- **Assignment Details**: User assignments with status and progress tracking\n- **Audit Information**: Complete audit trail with timestamps and user actions\n- **Progress Tracking**: Current status and completion percentage\n\n### Response Data Includes\n- **Identification**: Cycle count ID, type, and status information\n- **Scheduling**: Scheduled, start, and completion timestamps\n- **Scope**: Target zones, bins, products, and classifications\n- **Assignments**: User assignments with bin allocations and status\n- **Count Results**: Item-by-item count results with variance analysis\n- **Summary**: Overall count accuracy and variance statistics\n- **Audit Trail**: Creation, updates, and approval information\n\n### Use Cases\n- **Count Execution**: Access count details during execution process\n- **Result Review**: Review count results and variance analysis\n- **Approval Workflow**: Support count approval and rejection workflows\n- **Progress Monitoring**: Track count progress and completion status\n- **Audit Review**: Historical analysis and audit trail examination\n ", + operationId: "getWMSCycleCountById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "cycleCountId", + in: "path", + required: true, + description: "Unique cycle count identifier", + schema: { + type: "string", + example: "CC_ATL_2024_001" + } + } + ], + responses: { + "200": { + description: "Cycle count retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Cycle count retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSCycleCount" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and cycleCountId are required", + meta: { event: "getWMSCycleCountById", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Cycle count does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Cycle count CC_ATL_2024_001 not found", + meta: { event: "getWMSCycleCountById", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/{cycleCountId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update cycle count status", + description: "\n## Update WMS Cycle Count Status\n\nUpdate the operational status of a cycle count with comprehensive status transition management and audit logging.\n\n### Features\n- **Status Transition Management**: Control count status throughout lifecycle\n- **Completion Tracking**: Record completion user and timestamp information\n- **Workflow Integration**: Trigger downstream processes based on status changes\n- **Audit Trail**: Track all status changes with user attribution and timestamps\n- **Validation**: Ensure valid status transitions according to business rules\n- **Notification Integration**: Support notification workflows for 
status changes\n\n### Valid Status Transitions\n- **SCHEDULED** → **IN_PROGRESS**: Begin count execution\n- **IN_PROGRESS** → **COMPLETED**: Complete count execution\n- **COMPLETED** → **APPROVED**: Approve count results\n- **COMPLETED** → **REJECTED**: Reject count results for recount\n- **Any Status** → **CANCELLED**: Cancel count operation\n- **REJECTED** → **SCHEDULED**: Reschedule after rejection\n- **APPROVED** → **COMPLETED**: Revert approval (admin only)\n\n### Business Rules\n- Status field is required for all updates\n- completedBy is required when transitioning to COMPLETED status\n- Status changes trigger automatic timestamp updates\n- Some transitions may require specific user permissions\n- Status history is maintained for audit compliance\n\n### Use Cases\n- **Count Execution**: Update status as count progresses through workflow\n- **Approval Management**: Approve or reject completed count results\n- **Workflow Control**: Manage count lifecycle and progression\n- **Exception Handling**: Handle cancellations and rejections\n ", + operationId: "updateWMSCycleCountStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "cycleCountId", + in: "path", + required: true, + description: "Unique cycle count identifier", + schema: { + type: "string", + example: "CC_ATL_2024_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "SCHEDULED", + "IN_PROGRESS", + "COMPLETED", + "APPROVED", + "REJECTED", + "CANCELLED" + ], + description: "New count status", + example: "COMPLETED" + }, + completedBy: { + type: "string", + description: "User identifier who completed the count (required for COMPLETED status)", + example: "USER_001" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Cycle count status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Cycle count status updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSCycleCount" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status is required", + meta: { event: "updateWMSCycleCountStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Cycle count does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Cycle count CC_ATL_2024_001 not found", + meta: { event: "updateWMSCycleCountStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/{cycleCountId}/assign": { + put: { + tags: [ + "WMS" + ], + summary: "Assign user to cycle count", + description: "\n## Assign User to WMS Cycle Count\n\nAssign specific users to cycle count tasks with precise bin allocations and assignment tracking for efficient count execution.\n\n### Features\n- **User Assignment Management**: 
Assign users to specific cycle count tasks\n- **Bin Allocation**: Specify exact bins assigned to each user for counting\n- **Assignment Tracking**: Track assignment status and progress\n- **Workload Distribution**: Distribute counting workload across available users\n- **Flexible Assignment**: Support individual and bulk user assignments\n- **Status Monitoring**: Monitor assignment completion and progress\n\n### Assignment Process\n- **User Identification**: Specify user ID and display name for assignment tracking\n- **Bin Allocation**: Define specific bins assigned to user for counting\n- **Status Initialization**: Set initial assignment status (typically ASSIGNED)\n- **Workflow Integration**: Enable assignment tracking and progress monitoring\n\n### Business Rules\n- userId and userName are required for all assignments\n- assignedBins array defines specific bins for user to count\n- Assignment status tracks individual user progress\n- Multiple users can be assigned to single cycle count\n- Bin assignments should not overlap between users for accuracy\n\n### Use Cases\n- **Task Distribution**: Distribute cycle count tasks across available personnel\n- **Workload Management**: Balance counting workload for efficient execution\n- **Progress Tracking**: Track individual user progress and completion\n- **Resource Planning**: Plan counting resources and user allocations\n ", + operationId: "assignUserToWMSCycleCount", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "cycleCountId", + in: "path", + required: true, + description: "Unique cycle count identifier", + schema: { + type: "string", + example: "CC_ATL_2024_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + userId: { + type: "string", + description: "Unique user identifier for assignment", + example: "USER_001" + }, + userName: { + type: "string", + description: "User display name for assignment tracking and communication", + example: "John Smith" + }, + assignedBins: { + type: "array", + items: { + type: "string" + }, + description: "Array of bin identifiers assigned to this user for counting", + example: [ + "BIN_ATL_A01_001", + "BIN_ATL_A01_002", + "BIN_ATL_A01_003" + ] + } + }, + required: [ + "userId", + "userName", + "assignedBins" + ] + } + } + } + }, + responses: { + "200": { + description: "User assigned to cycle count successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "User assigned to cycle count successfully" + }, + data: { + $ref: "#/components/schemas/WMSCycleCount" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required assignment data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "userId, userName, and assignedBins are required", + meta: { event: "assignUserToWMSCycleCount", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Cycle count does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + 
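+                    // For illustration only (values taken from the request schema examples above):
+                    //   PUT /{worldId}/wms/cycle-counts/CC_ATL_2024_001/assign
+                    //   { "userId": "USER_001", "userName": "John Smith",
+                    //     "assignedBins": ["BIN_ATL_A01_001", "BIN_ATL_A01_002", "BIN_ATL_A01_003"] }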
error: "Cycle count CC_ATL_2024_001 not found", + meta: { event: "assignUserToWMSCycleCount", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/{cycleCountId}/results": { + post: { + tags: [ + "WMS" + ], + summary: "Add count result to cycle count", + description: "\n## Add Count Result to WMS Cycle Count\n\nRecord individual count results for cycle count execution with precise variance calculation and audit tracking.\n\n### Features\n- **Precise Count Recording**: Record exact counted quantities for each item/location\n- **Automatic Variance Calculation**: Calculate variance between expected and actual quantities\n- **User Attribution**: Track who performed each count for accountability\n- **Timestamp Recording**: Capture exact timing of count execution\n- **Notes Support**: Add contextual notes for count discrepancies or observations\n- **Lot Tracking**: Support lot number tracking for detailed inventory control\n\n### Count Result Data\n- **Location Information**: Bin and product identification for precise tracking\n- **Quantity Reconciliation**: Expected vs. actual quantity comparison\n- **Variance Analysis**: Automatic calculation of quantity and percentage variances\n- **User Accountability**: Record counting user and timestamp for audit trail\n- **Additional Context**: Optional notes for explaining variances or observations\n\n### Business Rules\n- binId and productId are required for location and item identification\n- expectedQuantity and actualQuantity enable variance calculation\n- countedBy and countedAt provide user attribution and timing\n- variance is calculated automatically (actualQuantity - expectedQuantity)\n- All count results are preserved for audit and analysis purposes\n\n### Use Cases\n- **Count Execution**: Record results during cycle count execution\n- **Variance Tracking**: Track and analyze inventory variances\n- **Audit Documentation**: Provide detailed audit trail for count results\n- **Accuracy Analysis**: Support inventory accuracy reporting and improvement\n ", + operationId: "addWMSCycleCountResult", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "cycleCountId", + in: "path", + required: true, + description: "Unique cycle count identifier", + schema: { + type: "string", + example: "CC_ATL_2024_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + binId: { + type: "string", + description: "Bin identifier where count was performed", + example: "BIN_ATL_A01_001" + }, + productId: { + type: "string", + description: "Product identifier for counted item", + example: "PROD_12345" + }, + sku: { + type: "string", + description: "SKU identifier for product identification", + example: "SKU-WIDGET-001" + }, + expectedQuantity: { + type: "number", + description: "Expected quantity from system records", + example: 150 + }, + actualQuantity: { + type: "number", + description: "Actual counted quantity", + example: 148 + }, + variance: { + type: "number", + description: "Calculated variance (actualQuantity - expectedQuantity)", + example: -2 + }, + countedBy: { + type: "string", + description: "User identifier who performed the count", + example: "USER_001" + }, + countedAt: { + type: "string", + format: "date-time", + description: "Timestamp when count was performed", 
+ example: "2024-01-25T14:30:00.000Z" + }, + notes: { + type: "string", + description: "Optional notes about count or observed discrepancies", + example: "Found damaged units, excluded from count" + } + }, + required: [ + "binId", + "productId", + "sku", + "expectedQuantity", + "actualQuantity", + "variance", + "countedBy", + "countedAt" + ] + } + } + } + }, + responses: { + "200": { + description: "Count result added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Count result added successfully" + }, + data: { + $ref: "#/components/schemas/WMSCycleCount" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required count result data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "binId, productId, sku, expectedQuantity, actualQuantity, variance, countedBy, and countedAt are required", + meta: { event: "addWMSCycleCountResult", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Cycle count does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Cycle count CC_ATL_2024_001 not found", + meta: { event: "addWMSCycleCountResult", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/warehouse/{warehouseId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get cycle counts by warehouse", + description: "\n## Get WMS Cycle Counts by Warehouse\n\nRetrieve all cycle counts for a specific warehouse with comprehensive filtering capabilities for warehouse-focused operational management.\n\n### Features\n- **Warehouse-Scoped Retrieval**: Get all cycle counts for specific warehouse facility\n- **Status Filtering**: Filter by count status for workflow management\n- **Type-Based Filtering**: Filter by count types for methodology-specific views\n- **Date Range Filtering**: Filter by scheduled dates for time-based analysis\n- **Complete Count Details**: Returns full cycle count information including results\n- **Operational Context**: Warehouse-focused view for facility management\n\n### Query Parameters\n- **status**: Optional - Filter by count status (supports multiple values)\n- **countType**: Optional - Filter by count types (supports multiple values)\n- **dateStart**: Optional - Start date for scheduled date filtering\n- **dateEnd**: Optional - End date for scheduled date filtering\n\n### Business Logic\n- warehouseId from path parameter scopes all results to specific warehouse\n- All filter parameters support array syntax for multiple values\n- Date filtering applies to scheduled date field\n- Results include complete count details with assignments and results\n- Sorted by scheduled date and creation time for operational relevance\n\n### Use Cases\n- **Warehouse Management**: Manage all cycle counts within specific warehouse\n- **Operational Planning**: Plan warehouse counting operations and resources\n- **Performance Monitoring**: Monitor warehouse counting performance and accuracy\n- **Historical Analysis**: Analyze warehouse counting history and trends\n ", + operationId: "getWMSCycleCountsByWarehouse", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + 
description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse identifier to retrieve counts for", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by count status (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "SCHEDULED", + "IN_PROGRESS", + "COMPLETED", + "APPROVED", + "REJECTED", + "CANCELLED" + ] + }, + example: [ + "SCHEDULED", + "IN_PROGRESS" + ] + } + }, + { + name: "countType", + in: "query", + required: false, + description: "Filter by count types (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "DAILY", + "WEEKLY", + "MONTHLY", + "ABC", + "FULL", + "SPOT", + "BLIND" + ] + }, + example: [ + "ABC", + "DAILY" + ] + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for scheduled date filtering", + schema: { + type: "string", + format: "date-time", + example: "2024-01-01T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for scheduled date filtering", + schema: { + type: "string", + format: "date-time", + example: "2024-01-31T23:59:59.999Z" + } + } + ], + responses: { + "200": { + description: "Warehouse cycle counts retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Cycle counts by warehouse retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSCycleCount" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and warehouseId are required", + meta: { event: "getWMSCycleCountsByWarehouse", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/variance-report": { + get: { + tags: [ + "WMS" + ], + summary: "Get cycle count variance report", + description: "\n## Get WMS Cycle Count Variance Report\n\nGenerate comprehensive variance analysis report for cycle count results with statistical analysis and operational insights.\n\n### Features\n- **Comprehensive Variance Analysis**: Detailed variance statistics and trends\n- **Multi-Dimensional Filtering**: Filter by warehouse, date range, and count type\n- **Statistical Insights**: Variance percentages, accuracy metrics, and trend analysis\n- **Financial Impact**: Variance value calculations and cost impact analysis\n- **Operational Metrics**: Count accuracy, completion rates, and efficiency indicators\n- **Trend Analysis**: Historical variance patterns and improvement tracking\n\n### Report Data Includes\n- **Overall Variance Statistics**: Total variances, accuracy percentages, and trends\n- **Variance Distribution**: Variance patterns by location, product, and user\n- **Financial Impact**: Monetary impact of variances and cost analysis\n- **Accuracy Metrics**: Count accuracy rates and improvement trends\n- **Operational Performance**: Count completion rates and efficiency 
metrics\n\n### Query Parameters\n- **warehouseId**: Optional - Scope report to specific warehouse\n- **dateStart**: Optional - Start date for report period\n- **dateEnd**: Optional - End date for report period\n- **countType**: Optional - Filter by count types for methodology analysis\n\n### Business Logic\n- Analyzes completed and approved cycle counts for variance calculation\n- Calculates statistical variance metrics and accuracy percentages\n- Includes financial impact analysis based on product costs\n- Provides trending analysis for operational improvement\n- Aggregates results by multiple dimensions for comprehensive insights\n\n### Use Cases\n- **Performance Analysis**: Analyze cycle count accuracy and variance trends\n- **Operational Improvement**: Identify areas for counting process improvement\n- **Financial Impact**: Assess monetary impact of inventory variances\n- **Management Reporting**: Generate variance reports for management review\n ", + operationId: "getWMSCycleCountVarianceReport", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter report to specific warehouse", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for report period", + schema: { + type: "string", + format: "date-time", + example: "2024-01-01T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for report period", + schema: { + type: "string", + format: "date-time", + example: "2024-01-31T23:59:59.999Z" + } + }, + { + name: "countType", + in: "query", + required: false, + description: "Filter by count types (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "DAILY", + "WEEKLY", + "MONTHLY", + "ABC", + "FULL", + "SPOT", + "BLIND" + ] + }, + example: [ + "ABC", + "FULL" + ] + } + } + ], + responses: { + "200": { + description: "Cycle count variance report generated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Cycle count variance report retrieved successfully" + }, + data: { + type: "object", + properties: { + summary: { + type: "object", + properties: { + totalCounts: { + type: "integer", + example: 45 + }, + totalVariances: { + type: "integer", + example: 128 + }, + accuracyPercent: { + type: "number", + example: 94.2 + }, + totalVarianceValue: { + type: "number", + example: 1247.85 + }, + avgVariancePercent: { + type: "number", + example: 2.3 + } + } + }, + variancesByType: { + type: "array", + items: { + type: "object", + properties: { + countType: { + type: "string", + example: "ABC" + }, + totalCounts: { + type: "integer", + example: 15 + }, + varianceCount: { + type: "integer", + example: 28 + }, + accuracyPercent: { + type: "number", + example: 96.5 + }, + avgVarianceValue: { + type: "number", + example: 25.67 + } + } + } + }, + variancesByWarehouse: { + type: "array", + items: { + type: "object", + properties: { + warehouseId: { + type: "string", + example: "WH_ATL_001" + }, + totalCounts: { + type: "integer", + example: 30 + }, + accuracyPercent: { + 
type: "number", + example: 95.1 + }, + totalVarianceValue: { + type: "number", + example: 892.45 + } + } + } + }, + trends: { + type: "object", + properties: { + monthlyAccuracy: { + type: "array", + items: { + type: "object", + properties: { + month: { + type: "string", + example: "2024-01" + }, + accuracyPercent: { + type: "number", + example: 94.8 + }, + totalVarianceValue: { + type: "number", + example: 445.67 + } + } + } + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "getWMSCycleCountVarianceReport", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/cycle-counts/scheduled": { + get: { + tags: [ + "WMS" + ], + summary: "Get scheduled cycle counts", + description: "\n## Get Scheduled WMS Cycle Counts\n\nRetrieve cycle counts scheduled within a specific date range for operational planning and resource allocation.\n\n### Features\n- **Date Range Filtering**: Get counts scheduled within specific time periods\n- **Warehouse Scoping**: Optional filtering by specific warehouse facility\n- **Planning Support**: Support daily, weekly, and monthly planning activities\n- **Resource Planning**: Enable resource allocation and scheduling coordination\n- **Operational Visibility**: Provide visibility into upcoming count activities\n\n### Query Parameters\n- **from**: Required - Start date for scheduled date range (ISO 8601 format)\n- **to**: Required - End date for scheduled date range (ISO 8601 format)\n- **warehouseId**: Optional - Filter to specific warehouse for facility planning\n\n### Business Logic\n- from and to parameters define the scheduled date range for filtering\n- Returns only counts with SCHEDULED status within the date range\n- Optional warehouse filtering for facility-specific planning\n- Results sorted by scheduled date for chronological planning\n- Includes complete count details for planning purposes\n\n### Use Cases\n- **Daily Planning**: Plan daily counting activities and resource allocation\n- **Weekly Coordination**: Coordinate weekly counting schedules across facilities\n- **Resource Planning**: Allocate personnel and equipment for scheduled counts\n- **Operational Scheduling**: Schedule and coordinate counting operations\n ", + operationId: "getScheduledWMSCycleCounts", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "from", + in: "query", + required: true, + description: "Start date for scheduled date range", + schema: { + type: "string", + format: "date-time", + example: "2024-01-25T00:00:00.000Z" + } + }, + { + name: "to", + in: "query", + required: true, + description: "End date for scheduled date range", + schema: { + type: "string", + format: "date-time", + example: "2024-01-31T23:59:59.999Z" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Optional warehouse identifier for facility-specific filtering", + schema: { + type: "string", + example: "WH_ATL_001" + } + } + ], + responses: { + "200": { + description: "Scheduled cycle counts retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + 
example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Scheduled cycle counts retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSCycleCount" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId, from, and to parameters are required", + meta: { event: "getScheduledWMSCycleCounts", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/inbound.wms.ts b/packages/controlmart/src/docs/paths/wms/inbound.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..5a135e7ed562efdc0df718bb62434a7bf4033d19 --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/inbound.wms.ts @@ -0,0 +1,2586 @@ +export const inboundPaths = { + "/{worldId}/wms/inbound-orders": { + post: { + tags: [ + "WMS" + ], + summary: "Create new inbound order", + description: "\n## Create WMS Inbound Order\n\nCreate a new inbound order for receiving inventory into the warehouse, supporting purchase orders, transfers, returns, and sample deliveries.\n\n### Features\n- **Order Type Support**: PO, RETURN, TRANSFER, SAMPLE order types\n- **Vendor Management**: Complete vendor information and contact details\n- **Line-Item Tracking**: Detailed product lines with quantities and specifications\n- **Status Lifecycle**: Comprehensive order status from EXPECTED to CLOSED\n- **Appointment Integration**: Optional appointment scheduling for receiving coordination\n- **Batch Tracking**: Lot number and expiration date management for compliance\n\n### Business Logic\n- Validates required fields: warehouseId and order lines are mandatory\n- Prevents duplicate PO numbers within the same warehouse\n- Auto-generates unique inboundOrderId using WMS service prefix\n- Initializes order status to EXPECTED for incoming inventory management\n- Calculates total expected lines and quantities from line items\n- Establishes audit trail for all subsequent modifications and receiving activities\n\n### Use Cases\n- **Purchase Order Processing**: Create inbound orders from ERP purchase orders\n- **Transfer Management**: Handle warehouse-to-warehouse inventory transfers\n- **Return Processing**: Manage returned merchandise and defective product receipts\n- **Sample Tracking**: Process product samples and promotional materials\n- **Vendor Coordination**: Coordinate receiving schedules with supplier deliveries\n ", + operationId: "createWMSInboundOrder", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "warehouseId", + "lines" + ], + properties: { + warehouseId: { + type: "string", + description: "Warehouse identifier where inventory will be received", + example: "wms_warehouse_674565c1234567890abcdef" + }, + poNumber: { + type: "string", + description: "Purchase order number for ERP integration", + example: "PO-2024-001234" + }, + asnNumber: { + type: "string", + description: "Advanced Ship Notice number from vendor", + example: "ASN-VND001-20241127" + }, + vendor: { + type: "object", + 
description: "Vendor information and contact details", + properties: { + vendorId: { + type: "string", + description: "Unique vendor identifier from ERP", + example: "VND-SWIFT-001" + }, + vendorName: { + type: "string", + description: "Vendor company name", + example: "Swift Manufacturing Co." + }, + contactEmail: { + type: "string", + format: "email", + description: "Primary vendor contact email", + example: "receiving@swift-mfg.com" + }, + contactPhone: { + type: "string", + description: "Primary vendor contact phone", + example: "+1-555-0123" + } + } + }, + orderType: { + type: "string", + enum: [ + "PO", + "RETURN", + "TRANSFER", + "SAMPLE" + ], + description: "Type of inbound order for processing workflow", + example: "PO" + }, + dates: { + type: "object", + description: "Important dates for receiving coordination", + properties: { + expectedArrival: { + type: "string", + format: "date-time", + description: "Expected delivery date and time", + example: "2024-11-28T10:00:00Z" + }, + actualArrival: { + type: "string", + format: "date-time", + description: "Actual delivery date and time", + example: "2024-11-28T09:45:00Z" + } + } + }, + appointmentId: { + type: "string", + description: "Associated appointment ID for dock scheduling", + example: "tms_appointment_674565c1234567890abcdef" + }, + totals: { + type: "object", + description: "Order totals for capacity planning", + properties: { + pallets: { + type: "number", + description: "Total number of pallets expected", + example: 5 + }, + cases: { + type: "number", + description: "Total number of cases expected", + example: 120 + }, + units: { + type: "number", + description: "Total number of individual units", + example: 2400 + }, + expectedLines: { + type: "number", + description: "Number of different product lines", + example: 8 + } + } + }, + lines: { + type: "array", + description: "Product line items with receiving specifications", + items: { + type: "object", + required: [ + "productId", + "expectedQuantity" + ], + properties: { + lineNumber: { + type: "number", + description: "Sequential line number for tracking", + example: 1 + }, + productId: { + type: "string", + description: "Product identifier from catalog", + example: "PROD-WIDGET-001" + }, + sku: { + type: "string", + description: "Stock keeping unit identifier", + example: "SKU-WDG-BLU-SM" + }, + productName: { + type: "string", + description: "Human-readable product name", + example: "Blue Widget Small" + }, + expectedQuantity: { + type: "number", + description: "Expected quantity to receive", + example: 300 + }, + uom: { + type: "string", + description: "Unit of measure (EA, CS, PLT, etc.)", + example: "EA" + }, + lotNumber: { + type: "string", + description: "Lot number for batch tracking", + example: "LOT-2024-W47" + }, + expirationDate: { + type: "string", + format: "date-time", + description: "Product expiration date", + example: "2025-11-27T00:00:00Z" + } + } + } + }, + receivingNotes: { + type: "string", + description: "Special instructions for receiving team", + example: "Handle with care - fragile items. Check lot numbers carefully." 
+ } + }, + example: { + warehouseId: "wms_warehouse_674565c1234567890abcdef", + poNumber: "PO-2024-001234", + asnNumber: "ASN-VND001-20241127", + vendor: { + vendorId: "VND-SWIFT-001", + vendorName: "Swift Manufacturing Co.", + contactEmail: "receiving@swift-mfg.com", + contactPhone: "+1-555-0123" + }, + orderType: "PO", + dates: { + expectedArrival: "2024-11-28T10:00:00Z" + }, + appointmentId: "tms_appointment_674565c1234567890abcdef", + totals: { + pallets: 5, + cases: 120, + units: 2400, + expectedLines: 8 + }, + lines: [ + { + lineNumber: 1, + productId: "PROD-WIDGET-001", + sku: "SKU-WDG-BLU-SM", + productName: "Blue Widget Small", + expectedQuantity: 300, + uom: "EA", + lotNumber: "LOT-2024-W47", + expirationDate: "2025-11-27T00:00:00Z" + } + ], + receivingNotes: "Handle with care - fragile items. Check lot numbers carefully." + } + } + } + } + }, + responses: { + "201": { + description: "Inbound order created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Validation errors for inbound order creation", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse ID and order lines are required", + meta: { event: "createWMSInboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "409": { + description: "Conflict - Duplicate PO number already exists in warehouse", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 409, + error: "Inbound order with PO number PO-2024-001234 already exists", + meta: { event: "createWMSInboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/status": { + get: { + tags: [ + "WMS" + ], + summary: "Get inbound orders by status", + description: "\n## Get WMS Inbound Orders by Status\n\nRetrieve filtered list of inbound orders by operational status with comprehensive filtering options for warehouse management and receiving coordination.\n\n### Features\n- **Multi-Status Filtering**: Support for multiple order statuses simultaneously\n- **Warehouse Scoping**: Filter orders by specific warehouse facilities\n- **Vendor Analysis**: Filter by vendor for supplier performance tracking\n- **Date Range Analysis**: Historical and scheduled delivery date filtering\n- **Priority Management**: Filter by priority levels for operational focus\n- **Performance Optimization**: Sorted results for efficient receiving workflows\n\n### Business Logic\n- Status array parameter enables multi-status queries for operational dashboards\n- Warehouse filtering supports multi-facility operations\n- Vendor filtering enables supplier performance analysis and coordination\n- Date range filtering uses expectedDeliveryDate for scheduling and planning\n- Priority filtering supports operational prioritization of receiving activities\n- Results sorted by expected delivery date and priority for workflow optimization\n\n### Query Parameters\n- **status**: Required - Order status array (EXPECTED, IN_TRANSIT, RECEIVING, RECEIVED, CLOSED, CANCELLED)\n- **warehouseId**: Optional - Filter by specific warehouse facility\n- **vendorId**: Optional - Filter by vendor for supplier-specific analysis\n- **dateStart**: 
Optional - Start date for delivery date filtering\n- **dateEnd**: Optional - End date for delivery date filtering \n- **priority**: Optional - Priority level filtering for operational focus\n\n### Use Cases\n- **Receiving Dashboard**: View orders by status for daily receiving operations\n- **Vendor Management**: Track orders by vendor for supplier coordination\n- **Capacity Planning**: Analyze incoming orders by date range for resource planning\n- **Priority Operations**: Focus on high-priority orders for critical receiving\n- **Status Monitoring**: Track order progression through receiving lifecycle\n ", + operationId: "getWMSInboundOrdersByStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: true, + description: "Order status values for filtering - supports multiple values", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "EXPECTED", + "IN_TRANSIT", + "RECEIVING", + "RECEIVED", + "CLOSED", + "CANCELLED" + ] + } + }, + style: "form", + explode: true, + example: [ + "EXPECTED", + "IN_TRANSIT" + ] + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "vendorId", + in: "query", + required: false, + description: "Filter by vendor identifier for supplier analysis", + schema: { + type: "string", + example: "VND-SWIFT-001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for expected delivery filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for expected delivery filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + }, + { + name: "priority", + in: "query", + required: false, + description: "Priority level filtering - supports multiple values", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "LOW", + "NORMAL", + "HIGH", + "URGENT" + ] + } + }, + style: "form", + explode: true, + example: [ + "HIGH", + "URGENT" + ] + } + ], + responses: { + "200": { + description: "Inbound orders retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required query parameters for status filtering", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "getWMSInboundOrdersByStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/{inboundOrderId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get inbound order by ID", + description: "\n## Get WMS Inbound Order Details\n\nRetrieve comprehensive information about a specific inbound order including all line items, receiving progress, and vendor details.\n\n### Features\n- **Complete Order Details**: Full order information with vendor and 
line items\n- **Receiving Progress**: Real-time receiving status for each product line\n- **Appointment Integration**: Associated appointment details for dock coordination\n- **Status Tracking**: Current order status and historical progression\n- **Audit Information**: Creation and modification timestamps for compliance\n\n### Business Logic\n- orderId must reference an existing inbound order within the world\n- Returns complete order information including nested line items and vendor details\n- Includes receiving progress tracking for each line item\n- Shows appointment associations for dock scheduling coordination\n- Provides comprehensive order status and timing information\n\n### Path Parameters\n- **orderId**: Required - Unique identifier for the inbound order\n\n### Use Cases\n- **Receiving Operations**: View detailed order information during receiving process\n- **Status Verification**: Check current order status and receiving progress\n- **Vendor Coordination**: Access vendor contact information for communication\n- **Appointment Management**: Verify appointment associations and scheduling\n- **Audit Tracking**: Review order creation and modification history\n ", + operationId: "getWMSInboundOrderById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "inboundOrderId", + in: "path", + required: true, + description: "Unique identifier for the inbound order", + schema: { + type: "string", + example: "wms_inbound-order_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Inbound order retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required path parameters for order retrieval", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and inboundOrderId are required", + meta: { event: "getWMSInboundOrderById", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inbound order with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inbound Order wms_inbound-order_674565c1234567890abcdef not found", + meta: { event: "getWMSInboundOrderById", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + patch: { + tags: [ + "WMS" + ], + summary: "Partially update inbound order", + description: "\n## Patch WMS Inbound Order\n\nPartially update an inbound order with only the specified fields. 
This is useful for updating specific properties without affecting other fields.\n\n### Allowed Fields\n- **orderStatus**: Update the order status (EXPECTED, IN_TRANSIT, RECEIVING, RECEIVED, CLOSED, CANCELLED)\n- **priority**: Update the order priority (RUSH, URGENT, NORMAL, STANDARD)\n- **dates**: Update date fields like expectedArrival\n\n### Features\n- Partial updates - only specified fields are modified\n- Automatically updates the updatedAt timestamp\n- Supports dot notation for nested fields (e.g., \"dates.expectedArrival\")\n\n### Use Cases\n- Update order status as it progresses through receiving workflow\n- Change priority based on business needs\n- Update expected arrival dates\n ", + operationId: "patchWMSInboundOrder", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "inboundOrderId", + in: "path", + required: true, + description: "Unique identifier for the inbound order", + schema: { + type: "string", + example: "wms_inbound-order_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + orderStatus: { + type: "string", + enum: [ + "EXPECTED", + "IN_TRANSIT", + "RECEIVING", + "RECEIVED", + "CLOSED", + "CANCELLED" + ], + description: "New status for the order" + }, + priority: { + type: "string", + enum: [ + "RUSH", + "URGENT", + "NORMAL", + "STANDARD" + ], + description: "New priority for the order" + }, + dates: { + type: "object", + properties: { + expectedArrival: { + type: "string", + format: "date-time", + description: "Updated expected arrival date" + } + } + }, + "dates.expectedArrival": { + type: "string", + format: "date-time", + description: "Updated expected arrival date (dot notation)" + } + } + }, + examples: { + updateStatus: { + summary: "Update order status to RECEIVING", + value: { + orderStatus: "RECEIVING" + } + }, + updatePriority: { + summary: "Update order priority to RUSH", + value: { + priority: "RUSH" + } + }, + updateExpectedArrival: { + summary: "Update expected arrival date", + value: { + "dates.expectedArrival": "2024-12-01T10:00:00Z" + } + } + } + } + } + }, + responses: { + "200": { + description: "Inbound order updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing parameters or no valid updates provided for patch operation", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and inboundOrderId are required", + meta: { event: "patchWMSInboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inbound order with specified ID does not exist for update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inbound order not found", + meta: { event: "patchWMSInboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/po/{poNumber}": { + get: { + tags: [ + "WMS" + ], + summary: "Get inbound order by PO number", + description: "\n## Get 
WMS Inbound Order by PO Number\n\nRetrieve inbound order information using purchase order number for ERP integration and vendor coordination.\n\n### Features\n- **ERP Integration**: Direct lookup by purchase order number from ERP systems\n- **Vendor Communication**: Quick access to orders during vendor coordination\n- **Receiving Verification**: Verify incoming deliveries against purchase orders\n- **Cross-Reference Lookup**: Alternative lookup method for operational flexibility\n- **Real-Time Data**: Current order status and receiving progress information\n\n### Business Logic\n- poNumber must reference an existing purchase order within the world\n- Returns complete order information including vendor details and line items\n- Supports ERP integration workflows and vendor communication processes\n- Enables receiving verification against purchase order documentation\n- Provides alternative lookup method when order ID is not available\n\n### Path Parameters\n- **poNumber**: Required - Purchase order number from ERP system\n\n### Use Cases\n- **ERP Integration**: Lookup orders during automated ERP synchronization\n- **Vendor Coordination**: Verify order details during vendor communication\n- **Receiving Verification**: Confirm incoming deliveries against purchase orders\n- **Customer Service**: Assist with order inquiries using purchase order references\n- **Cross-System Lookup**: Bridge between ERP and WMS systems using common identifiers\n ", + operationId: "getWMSInboundOrderByPoNumber", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "poNumber", + in: "path", + required: true, + description: "Purchase order number from ERP system", + schema: { + type: "string", + example: "PO-2024-001234" + } + } + ], + responses: { + "200": { + description: "Inbound order retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required path parameters for PO lookup", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and poNumber are required", + meta: { event: "getWMSInboundOrderByPoNumber", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inbound order with specified PO number does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inbound Order with PO PO-2024-001234 not found", + meta: { event: "getWMSInboundOrderByPoNumber", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/{inboundOrderId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update inbound order status", + description: "\n## Update WMS Inbound Order Status\n\nUpdate the operational status of an inbound order with automatic timestamp tracking for comprehensive receiving lifecycle management.\n\n### Features\n- **Status Lifecycle Management**: EXPECTED → IN_TRANSIT → RECEIVING → RECEIVED → CLOSED transitions\n- **Automatic Timestamps**: Status-specific date field updates based on status changes\n- **Business 
Rule Enforcement**: Validates logical status progression\n- **Audit Trail**: Complete history of status modifications with timestamps\n- **ERP Integration**: Status updates can trigger downstream ERP notifications\n\n### Business Logic\n- orderId must reference an existing inbound order within the world\n- Status must be one of the valid enumerated values\n- Automatic timestamp updates based on status:\n - IN_TRANSIT: Updates dates.actualArrival if provided\n - RECEIVING: Updates dates.receivingStarted to current timestamp\n - RECEIVED: Updates dates.receivingCompleted to current timestamp\n- Status changes are tracked with automatic timestamps for audit compliance\n\n### Path Parameters\n- **orderId**: Required - Unique identifier for the inbound order\n\n### Request Body Fields\n- **status**: Required - New operational status (EXPECTED, IN_TRANSIT, RECEIVING, RECEIVED, CLOSED, CANCELLED)\n- **statusDate**: Optional - Specific date/time for status change (defaults to current time)\n\n### Business Rules\n- RECEIVING status indicates active receiving operations in progress\n- RECEIVED status marks completion of all receiving activities\n- CLOSED status finalizes order and prevents further modifications\n- CANCELLED status handles order cancellations with audit trail\n\n### Use Cases\n- **Receiving Workflow**: Update status as orders progress through receiving\n- **ERP Synchronization**: Sync status changes with upstream ERP systems\n- **Performance Tracking**: Monitor receiving timelines and efficiency\n- **Exception Handling**: Mark orders as cancelled or handle receiving issues\n- **Compliance Reporting**: Maintain detailed audit trail of status changes\n ", + operationId: "updateWMSInboundOrderStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "inboundOrderId", + in: "path", + required: true, + description: "Unique identifier for the inbound order", + schema: { + type: "string", + example: "wms_inbound-order_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "EXPECTED", + "IN_TRANSIT", + "RECEIVING", + "RECEIVED", + "CLOSED", + "CANCELLED" + ], + description: "New operational status for the inbound order", + example: "RECEIVING" + }, + statusDate: { + type: "string", + format: "date-time", + description: "Optional specific date/time for status change", + example: "2024-11-27T14:30:00Z" + } + }, + example: { + status: "RECEIVING", + statusDate: "2024-11-27T14:30:00Z" + } + } + } + } + }, + responses: { + "200": { + description: "Inbound order status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing or invalid status value for order status update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status is required", + meta: { event: "updateWMSInboundOrderStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inbound order with specified ID does not 
exist for status update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inbound Order wms_inbound-order_674565c1234567890abcdef not found", + meta: { event: "updateWMSInboundOrderStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/{inboundOrderId}/receiving-progress": { + put: { + tags: [ + "WMS" + ], + summary: "Update receiving progress", + description: "\n## Update WMS Inbound Order Receiving Progress\n\nUpdate the receiving progress for specific line items within an inbound order, tracking quantities received and batch information.\n\n### Features\n- **Line-Item Tracking**: Update receiving progress for individual product lines\n- **Quantity Management**: Track received quantities against expected quantities\n- **Batch Tracking**: Record lot numbers and expiration dates for compliance\n- **Progress Calculation**: Automatic calculation of receiving completion percentages\n- **Real-Time Updates**: Immediate visibility into receiving operations progress\n\n### Business Logic\n- orderId must reference an existing inbound order within the world\n- lineNumber must exist within the order's line items\n- receivedQuantity is added to existing received quantity (cumulative)\n- Line status automatically updated based on receiving progress:\n - RECEIVING: When receivedQuantity > 0 but < expectedQuantity\n - RECEIVED: When receivedQuantity >= expectedQuantity\n- Lot number and expiration date updates support batch tracking requirements\n\n### Path Parameters\n- **orderId**: Required - Unique identifier for the inbound order\n\n### Request Body Fields\n- **lineNumber**: Required - Line number within the order for specific product\n- **receivedQuantity**: Required - Quantity received in this receiving session\n- **lotNumber**: Optional - Lot number for batch tracking and traceability\n- **expirationDate**: Optional - Product expiration date for perishable items\n\n### Business Rules\n- Received quantities are cumulative across multiple receiving sessions\n- Line status updates automatically based on receiving completion\n- Lot numbers and expiration dates support regulatory compliance\n- Progress tracking enables real-time receiving dashboard updates\n\n### Use Cases\n- **Receiving Operations**: Track progress as products are received and processed\n- **Quality Control**: Record batch information during receiving inspection\n- **Inventory Management**: Update inventory levels with received quantities\n- **Compliance Tracking**: Maintain lot numbers and expiration dates for regulations\n- **Performance Monitoring**: Monitor receiving efficiency and completion rates\n ", + operationId: "updateWMSInboundOrderReceivingProgress", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "inboundOrderId", + in: "path", + required: true, + description: "Unique identifier for the inbound order", + schema: { + type: "string", + example: "wms_inbound-order_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "lineNumber", + "receivedQuantity" + ], + properties: { + lineNumber: { + type: "number", + description: "Line number within the order for specific product", + example: 1 + }, + 
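+ // Illustrative sketch (assumption): receivedQuantity (defined just below) is
+ // cumulative across sessions, so receiving an expected 300 units in two passes
+ // might look like this. `patchProgress` is a hypothetical helper that PUTs the
+ // body to /{worldId}/wms/inbound-orders/{inboundOrderId}/receiving-progress.
+ //
+ //   // session 1: 150 of 300 received -> line status becomes RECEIVING
+ //   await patchProgress({ lineNumber: 1, receivedQuantity: 150, lotNumber: "LOT-2024-W47" });
+ //   // session 2: remaining 150 received -> cumulative 300 >= expected -> RECEIVED
+ //   await patchProgress({ lineNumber: 1, receivedQuantity: 150, lotNumber: "LOT-2024-W47" });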
receivedQuantity: { + type: "number", + description: "Quantity received in this receiving session", + example: 150 + }, + lotNumber: { + type: "string", + description: "Lot number for batch tracking and traceability", + example: "LOT-2024-W47" + }, + expirationDate: { + type: "string", + format: "date-time", + description: "Product expiration date for perishable items", + example: "2025-11-27T00:00:00Z" + } + }, + example: { + lineNumber: 1, + receivedQuantity: 150, + lotNumber: "LOT-2024-W47", + expirationDate: "2025-11-27T00:00:00Z" + } + } + } + } + }, + responses: { + "200": { + description: "Receiving progress updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required fields for receiving progress update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "lineNumber and receivedQuantity are required", + meta: { event: "updateWMSInboundOrderReceivingProgress", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inbound order with specified ID does not exist for progress update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inbound Order wms_inbound-order_674565c1234567890abcdef not found", + meta: { event: "updateWMSInboundOrderReceivingProgress", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/expected-today": { + get: { + tags: [ + "WMS" + ], + summary: "Get orders expected today", + description: "\n## Get WMS Inbound Orders Expected Today\n\nRetrieve inbound orders scheduled for delivery today or a specific target date, supporting daily receiving planning and resource allocation.\n\n### Features\n- **Daily Planning**: Focus on today's expected deliveries for receiving operations\n- **Flexible Date**: Support for custom target dates beyond today\n- **Warehouse Scoping**: Filter by specific warehouse for facility-focused planning\n- **Status Filtering**: Only active orders (EXPECTED, IN_TRANSIT, RECEIVING) included\n- **Priority Sorting**: Results sorted by delivery time and priority for workflow optimization\n\n### Business Logic\n- targetDate defaults to current date if not specified\n- Filters orders whose expected arrival date falls within the target date (start to end of day)\n- Only includes orders with active statuses (EXPECTED, IN_TRANSIT, RECEIVING)\n- Results sorted by expected arrival date and priority for operational efficiency\n- Supports daily receiving planning and resource allocation workflows\n\n### Query Parameters\n- **warehouseId**: Required - Warehouse identifier for delivery planning scope\n- **targetDate**: Optional - Specific date for expected deliveries (ISO 8601) - defaults to today\n\n### Use Cases\n- **Daily Operations**: Plan receiving activities for today's expected deliveries\n- **Resource Planning**: Allocate staff and equipment based on expected arrivals\n- **Dock Scheduling**: Coordinate dock door assignments with expected deliveries\n- **Vendor Coordination**: Communicate with vendors about today's expected deliveries\n- **Performance Monitoring**: Track on-time delivery performance 
against schedules\n ", + operationId: "getWMSInboundOrdersExpectedToday", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: true, + description: "Warehouse identifier for delivery planning scope", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "targetDate", + in: "query", + required: false, + description: "Specific date for expected deliveries (ISO 8601) - defaults to today", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T00:00:00Z" + } + } + ], + responses: { + "200": { + description: "Expected orders retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters for expected orders query", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and warehouseId are required", + meta: { event: "getWMSInboundOrdersExpectedToday", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/metrics": { + get: { + tags: [ + "WMS" + ], + summary: "Get receiving performance metrics", + description: "\n## Get WMS Inbound Order Receiving Metrics\n\nRetrieve comprehensive receiving performance analytics including order completion rates, timing analysis, vendor performance, and operational efficiency metrics.\n\n### Features\n- **Performance Analytics**: Complete receiving metrics for operational insight\n- **Vendor Analysis**: Top vendor performance with on-time delivery tracking\n- **Timing Metrics**: Average receiving times and efficiency measurements\n- **Accuracy Tracking**: Receiving accuracy and quality performance indicators\n- **Status Distribution**: Order status breakdown for operational visibility\n- **Historical Analysis**: Date range filtering for trend analysis and reporting\n\n### Business Logic\n- Calculates comprehensive receiving metrics across specified filters\n- Vendor performance includes order count, on-time percentage, and total lines\n- Receiving accuracy based on expected vs. received quantities\n- Average receiving time calculated from receivingStarted to receivingCompleted\n- On-time receipts determined by comparing actual vs. 
expected arrival dates\n- Status distribution provides operational dashboard insights\n\n### Query Parameters\n- **warehouseId**: Optional - Filter metrics by specific warehouse facility\n- **dateStart**: Optional - Start date for metrics analysis period\n- **dateEnd**: Optional - End date for metrics analysis period\n- **vendorId**: Optional - Filter metrics for specific vendor analysis\n\n### Metrics Included\n- **Total Orders**: Complete count of orders in analysis period\n- **Completion Rate**: Percentage of orders fully received\n- **Average Receiving Time**: Mean time from start to completion of receiving\n- **On-Time Performance**: Ratio of orders received on schedule\n- **Receiving Accuracy**: Quality metric for receiving precision\n- **Status Distribution**: Breakdown of orders by current status\n- **Vendor Rankings**: Top vendors by performance metrics\n\n### Use Cases\n- **Performance Monitoring**: Track receiving efficiency and operational performance\n- **Vendor Management**: Evaluate vendor performance for supplier relationship management\n- **Operational Planning**: Use metrics for capacity planning and resource allocation\n- **Quality Management**: Monitor receiving accuracy and identify improvement opportunities\n- **Executive Reporting**: Generate comprehensive receiving performance reports\n ", + operationId: "getWMSInboundOrderReceivingMetrics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter metrics by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for metrics analysis period (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-20T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for metrics analysis period (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T23:59:59Z" + } + }, + { + name: "vendorId", + in: "query", + required: false, + description: "Filter metrics for specific vendor analysis", + schema: { + type: "string", + example: "VND-SWIFT-001" + } + } + ], + responses: { + "200": { + description: "Receiving metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + totalOrders: { + type: "number", + description: "Total number of inbound orders in analysis period", + example: 156 + }, + completedOrders: { + type: "number", + description: "Number of fully received orders", + example: 142 + }, + averageReceivingTime: { + type: "number", + description: "Average receiving time in hours", + example: 4.2 + }, + onTimeReceipts: { + type: "number", + description: "Number of orders received on time", + example: 134 + }, + lateReceipts: { + type: "number", + description: "Number of orders received late", + example: 8 + }, + receivingAccuracy: { + type: "number", + description: "Receiving accuracy percentage", + example: 98.7 + }, + ordersByStatus: { + type: "array", + description: "Distribution of orders by current status", + items: { + type: "object", + properties: { + status: { + type: "string", + 
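+ // Illustrative sketch (assumption, simplified from the description above): how
+ // the timing metrics could be derived per order once receiving is complete.
+ // `order` is a hypothetical inbound-order document carrying the dates fields
+ // used elsewhere in this spec (receivingStarted, receivingCompleted, arrivals).
+ //
+ //   const receivingHours =
+ //     (Date.parse(order.dates.receivingCompleted) - Date.parse(order.dates.receivingStarted)) / 3_600_000;
+ //   const onTime =
+ //     Date.parse(order.dates.actualArrival) <= Date.parse(order.dates.expectedArrival);
+ //   // averageReceivingTime averages receivingHours across completed orders;
+ //   // onTimeReceipts / lateReceipts count orders by the onTime flag.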
description: "Order status", + example: "RECEIVED" + }, + count: { + type: "number", + description: "Number of orders with this status", + example: 142 + } + } + } + }, + topVendors: { + type: "array", + description: "Top performing vendors by order volume and performance", + items: { + type: "object", + properties: { + vendorId: { + type: "string", + description: "Vendor identifier", + example: "VND-SWIFT-001" + }, + vendorName: { + type: "string", + description: "Vendor company name", + example: "Swift Manufacturing Co." + }, + orderCount: { + type: "number", + description: "Number of orders from this vendor", + example: 24 + }, + totalLines: { + type: "number", + description: "Total product lines from this vendor", + example: 186 + }, + onTimePercentage: { + type: "number", + description: "On-time delivery percentage for this vendor", + example: 95.8 + } + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters for metrics retrieval", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "getWMSInboundOrderReceivingMetrics", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/vendor/{vendorId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get orders by vendor", + description: "\n## Get WMS Inbound Orders by Vendor\n\nRetrieve inbound orders filtered by specific vendor for supplier relationship management and vendor performance analysis.\n\n### Features\n- **Vendor-Specific Analysis**: Complete view of orders from specific vendors\n- **Multi-Filter Support**: Warehouse, status, and date range filtering\n- **Performance Tracking**: Vendor-specific order history and patterns\n- **Supplier Coordination**: Support vendor communication and coordination\n- **Historical Analysis**: Date-based filtering for vendor performance trends\n\n### Business Logic\n- vendorId must reference orders with matching vendor.vendorId field\n- Supports filtering by warehouse for multi-facility vendor analysis\n- Status filtering enables analysis of vendor orders by operational stage\n- Date range filtering uses expectedDeliveryDate for delivery performance analysis\n- Results sorted by expected delivery date (descending) for chronological view\n\n### Path Parameters\n- **vendorId**: Required - Unique identifier for the vendor\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse facility\n- **status**: Optional - Filter by order status for operational analysis\n- **dateStart**: Optional - Start date for vendor order analysis\n- **dateEnd**: Optional - End date for vendor order analysis\n\n### Use Cases\n- **Vendor Management**: Review all orders from specific vendor for relationship management\n- **Performance Analysis**: Analyze vendor delivery patterns and reliability\n- **Supplier Communication**: Access vendor-specific order details during coordination\n- **Contract Management**: Review vendor order history for contract negotiations\n- **Quality Tracking**: Monitor vendor order quality and delivery performance\n ", + operationId: "getWMSInboundOrdersByVendor", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "vendorId", + in: "path", + required: true, + description: 
"Unique identifier for the vendor", + schema: { + type: "string", + example: "VND-SWIFT-001" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by order status for operational analysis", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "EXPECTED", + "IN_TRANSIT", + "RECEIVING", + "RECEIVED", + "CLOSED", + "CANCELLED" + ] + } + }, + style: "form", + explode: true, + example: [ + "EXPECTED", + "IN_TRANSIT" + ] + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for vendor order analysis (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for vendor order analysis (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + } + ], + responses: { + "200": { + description: "Vendor orders retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInboundOrder" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters for vendor orders query", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and vendorId are required", + meta: { event: "getWMSInboundOrdersByVendor", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/receiving-transactions": { + post: { + tags: [ + "WMS" + ], + summary: "Create receiving transaction", + description: "\n## Create WMS Receiving Transaction\n\nCreate a new receiving transaction to record the receipt of inventory items from inbound orders.\n\n### Features\n- **Receiving Documentation**: Create structured records of goods received\n- **Quality Control Integration**: Record quality inspection and condition assessment\n- **Batch/Serial Tracking**: Support for lot numbers and serial number recording\n- **Damage Recording**: Document damaged items with detailed notes\n- **Status Management**: Track receiving status through workflow stages\n- **Auto-ID Generation**: Automatic transaction ID assignment for unique tracking\n\n### Business Logic\n- warehouseId and inboundOrderId are required for transaction creation\n- Transaction automatically receives a unique receivingId (auto-generated)\n- receivingStatus defaults to \"RECEIVED\" upon creation\n- Quality status defaults to \"PENDING\" until inspection\n- Each transaction represents receipt of specific products/quantities\n- Supports putaway location assignment for warehouse management\n\n**CRITICAL NOTE**: The model defines the primary identifier as 'receivingId' but the implementation uses 'transactionId' in queries. 
This documentation reflects the actual API behavior (transactionId parameter usage).\n\n### Use Cases\n- **Inbound Receipt Processing**: Record receipt of purchase order items\n- **Quality Control Documentation**: Track inspection results and quality status\n- **Batch Compliance**: Maintain lot number and expiration date records\n- **Damage Claims**: Document damaged goods for vendor claims processing\n- **Inventory Updates**: Trigger inventory adjustments based on received quantities\n ", + operationId: "createWMSReceivingTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "warehouseId", + "inboundOrderId", + "productId", + "receivedQuantity" + ], + properties: { + warehouseId: { + type: "string", + description: "Unique identifier of the warehouse facility", + example: "wms_warehouse_674565c1234567890abcdef" + }, + inboundOrderId: { + type: "string", + description: "Reference to the inbound order being received", + example: "wms_inbound-order_674565c1234567890abcdef" + }, + inboundLineId: { + type: "string", + description: "Specific line item within the inbound order", + example: "line_001" + }, + productId: { + type: "string", + description: "Product being received", + example: "prod_12345" + }, + sku: { + type: "string", + description: "Stock keeping unit code", + example: "ABC-123-XL" + }, + productName: { + type: "string", + description: "Human-readable product name", + example: "Premium Wireless Headphones" + }, + licensePlateNumber: { + type: "string", + description: "Container or pallet identifier", + example: "LP-20241201-001" + }, + lotNumber: { + type: "string", + description: "Lot number for batch tracking", + example: "LOT-2024-Q4-001" + }, + receivedQuantity: { + type: "number", + description: "Quantity actually received", + example: 50, + minimum: 0 + }, + uom: { + type: "string", + description: "Unit of measure", + example: "EA" + }, + dockDoorId: { + type: "string", + description: "Dock door where goods were received", + example: "wms_dock-door_674565c1234567890abcdef" + }, + receivingStatus: { + type: "string", + enum: [ + "RECEIVED", + "QC_HOLD", + "PUTAWAY_PENDING", + "COMPLETED", + "REJECTED" + ], + description: "Current status of the receiving transaction", + example: "RECEIVED" + }, + quality: { + type: "object", + description: "Quality control information", + properties: { + status: { + type: "string", + enum: [ + "PASS", + "FAIL", + "PENDING" + ], + description: "Quality inspection result", + example: "PENDING" + }, + inspectedBy: { + type: "string", + description: "User who performed quality inspection", + example: "user_qc_inspector_001" + }, + notes: { + type: "string", + description: "Quality inspection notes", + example: "Visual inspection completed, minor packaging damage noted" + } + } + }, + damage: { + type: "object", + description: "Damage assessment information", + properties: { + hasDamage: { + type: "boolean", + description: "Whether damage was observed", + example: false + }, + description: { + type: "string", + description: "Detailed damage description", + example: "Dented corner on 3 units, functionality unaffected" + }, + quantity: { + type: "number", + description: "Number of damaged units", + example: 3 + }, + reportedBy: { + type: "string", + description: 
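+ // Illustrative minimal create body (sketch; fields per the required array above, values from the property examples in this schema):
+ //   { "warehouseId": "wms_warehouse_674565c1234567890abcdef", "inboundOrderId": "wms_inbound-order_674565c1234567890abcdef", "productId": "prod_12345", "receivedQuantity": 50 }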
"User who reported the damage", + example: "user_receiver_002" + } + } + }, + putaway: { + type: "object", + description: "Putaway location assignment", + properties: { + assignedLocation: { + type: "string", + description: "Designated storage location", + example: "A-01-01" + }, + assignedBy: { + type: "string", + description: "User who assigned the location", + example: "user_warehouse_manager_001" + }, + notes: { + type: "string", + description: "Putaway instructions", + example: "Stack carefully - fragile items" + } + } + }, + customFields: { + type: "object", + description: "Additional custom data", + additionalProperties: true, + example: { + temperatureZone: "ambient", + vendorRefNumber: "VEN-REF-12345" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Receiving transaction created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Receiving transaction created successfully" + }, + data: { + $ref: "#/components/schemas/WMSReceivingTransaction" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid input data for receiving transaction creation", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse ID and inbound order ID are required", + meta: { event: "createWMSReceivingTransaction", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "WMS" + ], + summary: "Get all receiving transactions", + description: "\n## Get All WMS Receiving Transactions\n\nRetrieve a paginated list of receiving transactions with comprehensive filtering capabilities.\n\n### Features\n- **Advanced Filtering**: Filter by warehouse, order, status, user, and date ranges\n- **Pagination Support**: Cursor-based pagination for efficient data retrieval\n- **Status Filtering**: Multi-status filtering for workflow management\n- **Date Range Analysis**: Time-based filtering for reporting and analytics\n- **User Tracking**: Filter by specific users for performance analysis\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse facility\n- **inboundOrderId**: Optional - Filter by specific inbound order\n- **status**: Optional - Filter by receiving status (can be array)\n- **userId**: Optional - Filter by user who processed the transaction\n- **dateStart/dateEnd**: Optional - Filter by date range\n- **cursor**: Optional - Pagination cursor for next page\n- **limit**: Optional - Maximum results per page (default: system limit)\n\n### Business Logic\n- Results ordered by creation date (newest first)\n- Cursor-based pagination ensures consistent results during concurrent operations\n- Status filtering supports multiple values for workflow analysis\n- Date filtering uses creation timestamp for temporal analysis\n\n**CRITICAL NOTE**: The model defines the primary identifier as 'receivingId' but the implementation uses 'transactionId' in queries. 
This documentation reflects the actual API behavior.\n\n### Use Cases\n- **Operational Dashboard**: Real-time view of receiving operations\n- **Performance Analysis**: User and warehouse performance tracking\n- **Status Monitoring**: Track transactions through receiving workflow\n- **Reporting**: Generate receiving transaction reports by various criteria\n- **Audit Trail**: Complete transaction history for compliance\n ", + operationId: "getAllWMSReceivingTransactions", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter transactions by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "inboundOrderId", + in: "query", + required: false, + description: "Filter transactions by specific inbound order", + schema: { + type: "string", + example: "wms_inbound-order_674565c1234567890abcdef" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by receiving status (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "RECEIVED", + "QC_HOLD", + "PUTAWAY_PENDING", + "COMPLETED", + "REJECTED" + ] + } + }, + style: "form", + explode: true, + example: [ + "RECEIVED", + "PUTAWAY_PENDING" + ] + }, + { + name: "userId", + in: "query", + required: false, + description: "Filter transactions by user who processed them", + schema: { + type: "string", + example: "user_receiver_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page of results", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 1000, + example: 50 + } + } + ], + responses: { + "200": { + description: "Receiving transactions retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "All receiving transactions retrieved successfully" + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSReceivingTransaction" + } + }, + totalCount: { + type: "integer", + description: "Total number of transactions matching filters", + example: 1250 + }, + limit: { + type: "integer", + description: "Number of results per page", + example: 50 + }, + hasMore: { + type: "boolean", + description: "Whether more results are available", + example: true + }, + nextCursor: { + type: "string", + description: "Cursor for next page of results", + example: "507f1f77bcf86cd799439011" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters for transaction 
listing", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid date range provided", + meta: { event: "getAllWMSReceivingTransactions", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/receiving-transactions/{transactionId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get receiving transaction by ID", + description: "\n## Get WMS Receiving Transaction by ID\n\nRetrieve a specific receiving transaction with complete details including quality control, damage assessment, and putaway information.\n\n### Features\n- **Complete Transaction Details**: Full transaction record with all nested data\n- **Quality Control Information**: Inspection results and quality status\n- **Damage Documentation**: Comprehensive damage assessment details\n- **Putaway Tracking**: Location assignment and putaway progress\n- **Item-Level Details**: Detailed product and quantity information\n- **Audit Information**: Creation, update, and status change tracking\n\n### Response Data Includes\n- **Transaction Metadata**: IDs, references, and timestamps\n- **Product Information**: SKU, product name, quantities, and UOM\n- **Quality Control**: Inspection status, inspector, and notes\n- **Damage Assessment**: Damage flags, descriptions, and quantities\n- **Putaway Details**: Location assignments and instructions\n- **Status Information**: Current status and workflow progression\n- **Custom Fields**: Additional configurable data points\n\n**CRITICAL NOTE**: The URL parameter uses 'transactionId' but the model field is 'receivingId'. This reflects actual implementation behavior.\n\n### Use Cases\n- **Transaction Details View**: Complete receiving transaction information\n- **Quality Control Review**: Access quality inspection results\n- **Damage Assessment**: Review damage documentation and claims\n- **Putaway Coordination**: Check location assignments and instructions\n- **Audit and Compliance**: Transaction traceability and documentation\n ", + operationId: "getWMSReceivingTransactionById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "transactionId", + in: "path", + required: true, + description: "Unique identifier of the receiving transaction (maps to receivingId in model)", + schema: { + type: "string", + example: "wms_receiving-transaction_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Receiving transaction retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Receiving transaction retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSReceivingTransaction" + } + } + } + } + } + }, + "404": { + description: "Not Found - Receiving transaction with specified ID does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Receiving transaction not found", + meta: { event: "getWMSReceivingTransactionById", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/receiving-transactions/{transactionId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update 
receiving transaction status", + description: "\n## Update WMS Receiving Transaction Status\n\nUpdate the status of a receiving transaction with optional notes for workflow management and audit trail.\n\n### Features\n- **Status Workflow Management**: Progress transactions through defined workflow stages\n- **Audit Trail**: Automatic timestamp recording of status changes\n- **Notes Support**: Optional notes for status change documentation\n- **Business Rule Enforcement**: Validates status transitions per business logic\n- **Real-time Updates**: Immediate status reflection across the system\n\n### Status Workflow\n- **RECEIVED**: Initial status when goods are received at dock door\n- **QC_HOLD**: Quality control inspection required or failed\n- **PUTAWAY_PENDING**: Ready for putaway to storage locations\n- **COMPLETED**: Fully processed and stored\n- **REJECTED**: Rejected due to quality, damage, or other issues\n\n### Business Logic\n- Status transitions follow defined workflow rules\n- Status updates include automatic timestamp recording\n- Optional notes provide context for status changes\n- System validates status transitions for data integrity\n- Status changes trigger downstream workflow notifications\n\n**CRITICAL NOTE**: Parameter uses 'transactionId' but queries model's 'receivingId' field due to implementation mapping.\n\n### Use Cases\n- **Workflow Management**: Progress transactions through receiving workflow\n- **Quality Control**: Update status after inspection completion\n- **Exception Handling**: Mark transactions as on hold or rejected\n- **Process Completion**: Mark transactions as completed\n- **Audit Documentation**: Add notes for compliance and tracking\n ", + operationId: "updateWMSReceivingTransactionStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "transactionId", + in: "path", + required: true, + description: "Unique identifier of the receiving transaction", + schema: { + type: "string", + example: "wms_receiving-transaction_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "RECEIVED", + "QC_HOLD", + "PUTAWAY_PENDING", + "COMPLETED", + "REJECTED" + ], + description: "New status for the receiving transaction", + example: "PUTAWAY_PENDING" + }, + notes: { + type: "string", + description: "Optional notes explaining the status change", + example: "Quality inspection completed successfully, ready for putaway", + maxLength: 1000 + } + } + } + } + } + }, + responses: { + "200": { + description: "Transaction status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Transaction status updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSReceivingTransaction" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status transition or missing required data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid status transition", + meta: { event: "updateWMSReceivingTransactionStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } 
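+ // Illustrative call (sketch; path and body fields as defined above):
+ //   PUT /{worldId}/wms/receiving-transactions/{transactionId}/status
+ //   body: { "status": "PUTAWAY_PENDING", "notes": "Quality inspection completed successfully, ready for putaway" }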
+ } + }, + "404": { + description: "Not Found - Receiving transaction with specified ID does not exist for status update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Transaction not found", + meta: { event: "updateWMSReceivingTransactionStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/receiving-transactions/{transactionId}/items": { + post: { + tags: [ + "WMS" + ], + summary: "Add item to receiving transaction", + description: "\n## Add Item to WMS Receiving Transaction\n\nAdd a new item to an existing receiving transaction for multi-item receiving operations.\n\n### Features\n- **Multi-Item Support**: Add multiple products to single receiving transaction\n- **Batch Tracking**: Lot numbers and serial number support for traceability\n- **Condition Recording**: Item condition assessment and documentation\n- **Location Assignment**: Specific location assignment for received items\n- **Quantity Validation**: Expected vs received quantity tracking\n- **Real-time Updates**: Immediate transaction update with new item data\n\n### Item Data Includes\n- **Product Information**: SKU, product name, and identification\n- **Quantity Tracking**: Expected vs received quantity comparison\n- **Batch/Serial Data**: Lot numbers and serial number arrays\n- **Condition Assessment**: Item condition for quality control\n- **Location Assignment**: Specific bin/zone location targeting\n- **Measurement Units**: Unit of measure specification\n\n### Business Logic\n- Items are appended to existing transaction's item array\n- Each item maintains independent quantity and condition tracking\n- Location assignments can be bin-specific or zone-level\n- Serial numbers stored as array for individual item tracking\n- Condition assessment supports quality control workflows\n\n**CRITICAL NOTE**: Parameter uses 'transactionId' but targets model's 'receivingId' field.\n\n### Use Cases\n- **Multi-Product Receiving**: Add different products to single transaction\n- **Partial Receiving**: Add items as they are processed sequentially\n- **Location-Specific Storage**: Assign items to specific storage locations\n- **Batch Management**: Maintain lot number and serial tracking\n- **Quality Segregation**: Separate items by condition assessment\n ", + operationId: "addItemToWMSReceivingTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "transactionId", + in: "path", + required: true, + description: "Unique identifier of the receiving transaction", + schema: { + type: "string", + example: "wms_receiving-transaction_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "sku", + "productName", + "expectedQuantity", + "receivedQuantity", + "unitOfMeasure" + ], + properties: { + sku: { + type: "string", + description: "Stock keeping unit code for the item", + example: "ABC-123-XL" + }, + productName: { + type: "string", + description: "Human-readable product name", + example: "Premium Wireless Headphones" + }, + expectedQuantity: { + type: "number", + description: "Expected quantity to be received", + example: 50, + minimum: 0 + }, + receivedQuantity: { + type: "number", + description: "Actual quantity 
received", + example: 48, + minimum: 0 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure for quantities", + example: "EA" + }, + lotNumber: { + type: "string", + description: "Lot number for batch tracking", + example: "LOT-2024-Q4-001" + }, + serialNumbers: { + type: "array", + items: { + type: "string" + }, + description: "Array of serial numbers for individual item tracking", + example: [ + "SN001", + "SN002", + "SN003" + ] + }, + condition: { + type: "string", + description: "Condition assessment of received items", + example: "GOOD" + }, + location: { + type: "object", + description: "Storage location assignment for the item", + properties: { + binId: { + type: "string", + description: "Specific bin location identifier", + example: "BIN-A-01-01" + }, + zoneId: { + type: "string", + description: "Zone identifier for location grouping", + example: "ZONE-A" + } + } + } + } + } + } + } + }, + responses: { + "200": { + description: "Item added to transaction successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Item added to transaction successfully" + }, + data: { + $ref: "#/components/schemas/WMSReceivingTransaction" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid or missing item data for transaction", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid item data provided", + meta: { event: "addItemToWMSReceivingTransaction", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Receiving transaction with specified ID does not exist for item addition", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Transaction not found", + meta: { event: "addItemToWMSReceivingTransaction", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/receiving-transactions/metrics": { + get: { + tags: [ + "WMS" + ], + summary: "Get receiving transaction metrics", + description: "\n## Get WMS Receiving Transaction Metrics\n\nGenerate comprehensive metrics and analytics for receiving transaction performance analysis.\n\n### Features\n- **Performance Analytics**: Transaction completion rates, processing times, and accuracy metrics\n- **User Performance**: Individual user performance tracking and comparison\n- **Daily Volume Analysis**: Daily transaction volumes and trends\n- **Accuracy Tracking**: Receiving accuracy percentages and discrepancy analysis\n- **Temporal Filtering**: Date range analysis for trend identification\n- **Multi-Dimensional Analysis**: Warehouse and user-specific metrics\n\n### Metrics Included\n- **Transaction Volumes**: Total, completed, and pending transaction counts\n- **Accuracy Metrics**: Receiving accuracy percentages and discrepancy tracking\n- **Processing Times**: Average processing times and efficiency indicators\n- **User Performance**: Individual user statistics and performance comparison\n- **Daily Trends**: Daily transaction volumes and accuracy trends\n- **Discrepancy Analysis**: Items received vs expected variance tracking\n\n### Query Parameters\n- **warehouseId**: Optional - Scope metrics to specific warehouse\n- **userId**: Optional - Focus on specific user performance\n- **dateStart/dateEnd**: 
Optional - Time range for metric calculation\n\n### Business Logic\n- Metrics calculated from completed and approved transactions\n- Processing time calculated from creation to completion timestamp\n- Accuracy based on received vs expected quantity comparisons\n- User performance aggregated across all transactions in period\n- Daily volume trends show transaction distribution patterns\n\n**CRITICAL NOTE**: Metrics queries use 'transactionId' field references in aggregations.\n\n### Use Cases\n- **Performance Dashboard**: Real-time receiving operation insights\n- **User Management**: Identify training needs and top performers\n- **Process Optimization**: Identify bottlenecks and improvement opportunities\n- **Executive Reporting**: High-level receiving performance summaries\n- **Trend Analysis**: Long-term performance trend identification\n ", + operationId: "getWMSReceivingTransactionMetrics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter metrics by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "userId", + in: "query", + required: false, + description: "Filter metrics for specific user performance", + schema: { + type: "string", + example: "user_receiver_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for metrics calculation (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for metrics calculation (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + } + ], + responses: { + "200": { + description: "Receiving metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Receiving metrics retrieved successfully" + }, + data: { + type: "object", + properties: { + totalTransactions: { + type: "integer", + description: "Total number of receiving transactions", + example: 1250 + }, + completedTransactions: { + type: "integer", + description: "Number of completed transactions", + example: 1180 + }, + pendingTransactions: { + type: "integer", + description: "Number of pending transactions", + example: 70 + }, + discrepancies: { + type: "integer", + description: "Number of transactions with quantity discrepancies", + example: 45 + }, + totalItemsReceived: { + type: "integer", + description: "Total quantity of items received", + example: 15750 + }, + totalItemsExpected: { + type: "integer", + description: "Total quantity of items expected", + example: 16000 + }, + receivingAccuracy: { + type: "number", + format: "float", + description: "Receiving accuracy percentage", + example: 98.44 + }, + averageProcessingTime: { + type: "number", + format: "float", + description: "Average processing time in minutes", + example: 23.5 + }, + userPerformance: { + type: "array", + items: { + type: "object", + properties: { + userId: { + type: "string", + example: "user_receiver_001" + }, + transactionCount: { + type: "integer", + example: 125 + }, + accuracy: { + type: "number", + format: "float", + 
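+ // Note (inference consistent with the example values above): receivingAccuracy is the received-vs-expected ratio,
+ // e.g. totalItemsReceived / totalItemsExpected * 100 = 15750 / 16000 * 100 ≈ 98.44%.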
example: 99.2 + }, + averageProcessingTime: { + type: "number", + format: "float", + example: 18.7 + } + } + } + }, + dailyVolume: { + type: "array", + items: { + type: "object", + properties: { + date: { + type: "string", + format: "date", + example: "2024-11-15" + }, + transactionCount: { + type: "integer", + example: 45 + }, + itemsReceived: { + type: "integer", + example: 580 + }, + accuracy: { + type: "number", + format: "float", + example: 97.8 + } + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters for metrics calculation", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid date range provided", + meta: { event: "getWMSReceivingTransactionMetrics", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inbound-orders/{inboundOrderId}/relations": { + get: { + tags: [ + "WMS" + ], + summary: "Get inbound order cross-service relations", + description: "\n## Get WMS Inbound Order Relations\n\nRetrieve cross-service related data for an inbound order, including linked ERP orders, EDI documents, and finance transactions.\n\n### Features\n- **ERP Integration**: Link to source ERP purchase order by PO number\n- **EDI Documents**: Related EDI transactions (856 ASN, etc.) by PO number or vendor\n- **Finance Tracking**: Associated payment_out finance transactions\n- **Cross-Reference**: Connect warehouse receiving to enterprise systems\n\n### Related Data Types\n- **erpOrder**: Original ERP purchase order with status and amount\n- **ediDocuments**: EDI documents (ASN, invoices) linked to the order\n- **financeTransaction**: Payment transactions associated with the order\n\n### Business Logic\n- Uses PO number to find related ERP order\n- Searches EDI documents by businessDocumentNumber or vendor ID\n- Finds finance transactions with matching sourceId\n- Returns empty arrays/undefined for missing relations\n\n### Use Cases\n- **Order Traceability**: Track order from PO to receiving to payment\n- **EDI Reconciliation**: View related EDI documents for compliance\n- **Finance Integration**: Link receiving to accounts payable\n- **Audit Trail**: Complete visibility across enterprise systems\n ", + operationId: "getWMSInboundOrderRelations", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "inboundOrderId", + in: "path", + required: true, + description: "Unique identifier for the inbound order", + schema: { + type: "string", + example: "wms_inbound-order_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Inbound order relations retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + erpOrder: { + type: "object", + description: "Related ERP purchase order", + properties: { + orderId: { + type: "string", + example: "PO-2024-001234" + }, + status: { + type: "string", + example: "APPROVED" + }, + totalAmount: { + type: "number", + example: 15000.00 + }, + customerId: { + type: "string", + example: "CUST-001" + }, + partnerId: { + type: "string", + example: "VENDOR-001" + }, + poType: { + type: "string", + example: "STANDARD" + }, + orderDate: { + type: "string", 
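+ // Cross-reference sketch (assumption, based on the business logic described above): the erpOrder is matched by PO number,
+ // ediDocuments by businessDocumentNumber or vendor ID (e.g. an 856 ASN carrying "PO-2024-001234"), and financeTransaction by a matching sourceId.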
+ format: "date-time", + example: "2024-01-10T00:00:00Z" + } + } + }, + ediDocuments: { + type: "array", + description: "Related EDI documents", + items: { + type: "object", + properties: { + transactionId: { + type: "string", + example: "edi_txn_674565c1234567890abcdef" + }, + docType: { + type: "string", + example: "856" + }, + status: { + type: "string", + example: "PROCESSED" + }, + direction: { + type: "string", + example: "INBOUND" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00Z" + }, + businessDocumentNumber: { + type: "string", + example: "PO-2024-001234" + } + } + } + }, + financeTransaction: { + type: "object", + description: "Related finance transaction", + properties: { + transactionId: { + type: "string", + example: "fin_txn_674565c1234567890abcdef" + }, + type: { + type: "string", + example: "payment_out" + }, + amount: { + type: "number", + example: 15000.00 + }, + status: { + type: "string", + example: "COMPLETED" + }, + processedAt: { + type: "string", + format: "date-time", + example: "2024-01-20T14:30:00Z" + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and orderId are required", + meta: { event: "getWMSInboundOrderRelations", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inbound order not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inbound order wms_inbound-order_674565c1234567890abcdef not found", + meta: { event: "getWMSInboundOrderRelations", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/index.ts b/packages/controlmart/src/docs/paths/wms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..dd8f295d88129802b73d63325bb87a9e45a07fec --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/index.ts @@ -0,0 +1,20 @@ + +import { warehousePaths } from './warehouse.wms'; +import { cycleCountPaths } from './cycleCount.wms'; +import { metricsPaths } from './metrics.wms'; +import { inboundPaths } from './inbound.wms'; +import { inventoryPaths } from './inventory.wms'; +import { outboundPaths } from './outbound.wms'; +import { replenishmentPaths } from './replenishment.wms'; +import { tasksPaths } from './tasks.wms'; + +export const wmsPaths = { + ...warehousePaths, + ...cycleCountPaths, + ...metricsPaths, + ...inboundPaths, + ...inventoryPaths, + ...outboundPaths, + ...replenishmentPaths, + ...tasksPaths, +}; diff --git a/packages/controlmart/src/docs/paths/wms/inventory.wms.ts b/packages/controlmart/src/docs/paths/wms/inventory.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..9e85f1dac03095bf9efe373a9dfc7e3fb6ff593c --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/inventory.wms.ts @@ -0,0 +1,1800 @@ +export const inventoryPaths = { + "/{worldId}/wms/operations-dashboard": { + get: { + tags: [ + "WMS" + ], + summary: "Get operations dashboard metrics", + description: "\n## Get WMS Operations Dashboard\n\nRetrieve aggregated metrics for the WMS Command Center dashboard, providing real-time KPIs across all warehouse operations.\n\n### Features\n- **Real-Time Aggregation**: Live metrics calculated from 
current operational data\n- **Multi-Dimensional Metrics**: Inventory, receiving, fulfillment, and task statistics\n- **Performance Indicators**: Key operational metrics for management oversight\n- **Parallel Processing**: Optimized concurrent data aggregation for performance\n\n### Metric Categories\n- **Inventory**: Total items, status breakdown, low stock alerts, expiring items\n- **Receiving**: Total/pending inbound orders, orders due today, receiving status\n- **Fulfillment**: Active orders, status breakdown by workflow stage, rush orders\n- **Tasks**: Total/pending/in-progress tasks, completed today, breakdown by type\n\n### Use Cases\n- **Executive Dashboard**: High-level operational overview for management\n- **Operational Monitoring**: Real-time tracking of warehouse performance\n- **Alert Management**: Track low stock and expiring inventory alerts\n- **Workload Planning**: Monitor task distribution and completion rates\n ", + operationId: "getWMSOperationsDashboard", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Operations dashboard retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + inventory: { + type: "object", + properties: { + totalItems: { + type: "integer", + description: "Total inventory items on hand", + example: 45890 + }, + byStatus: { + type: "object", + description: "Count of items by status", + additionalProperties: { + type: "integer" + }, + example: { + AVAILABLE: 42000, + ALLOCATED: 3500, + HOLD: 390 + } + }, + lowStockAlerts: { + type: "integer", + description: "Number of low stock alerts", + example: 12 + }, + expiringAlerts: { + type: "integer", + description: "Items expiring within 7 days", + example: 45 + } + } + }, + receiving: { + type: "object", + properties: { + total: { + type: "integer", + example: 156 + }, + pending: { + type: "integer", + example: 23 + }, + expected: { + type: "integer", + example: 15 + }, + inTransit: { + type: "integer", + example: 5 + }, + receiving: { + type: "integer", + example: 3 + }, + received: { + type: "integer", + example: 133 + }, + dueToday: { + type: "integer", + example: 8 + } + } + }, + fulfillment: { + type: "object", + properties: { + total: { + type: "integer", + example: 2450 + }, + active: { + type: "integer", + example: 125 + }, + created: { + type: "integer", + example: 45 + }, + released: { + type: "integer", + example: 30 + }, + allocated: { + type: "integer", + example: 25 + }, + picking: { + type: "integer", + example: 20 + }, + picked: { + type: "integer", + example: 15 + }, + packing: { + type: "integer", + example: 10 + }, + packed: { + type: "integer", + example: 25 + }, + shipped: { + type: "integer", + example: 2280 + }, + rushOrders: { + type: "integer", + example: 8 + } + } + }, + tasks: { + type: "object", + properties: { + total: { + type: "integer", + example: 3420 + }, + pending: { + type: "integer", + example: 156 + }, + inProgress: { + type: "integer", + example: 45 + }, + completedToday: { + type: "integer", + example: 234 + }, + byType: { + type: "object", + additionalProperties: { + type: "integer" + }, + example: { + PICK: 1500, + PUTAWAY: 800, + REPLENISHMENT: 420, + CYCLE_COUNT: 200 + } + } + } + } + } + } + } + } + } + } + }, + 
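+ // Illustrative consumer (sketch; assumes a fetch-style client and this API's base URL):
+ //   const res = await fetch(`${baseUrl}/${worldId}/wms/operations-dashboard`);
+ //   const { data } = await res.json();
+ //   // e.g. data.inventory.lowStockAlerts, data.receiving.dueToday, data.fulfillment.rushOrders, data.tasks.pending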
"400": { + description: "Bad Request - worldId is required", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory": { + get: { + tags: [ + "WMS" + ], + summary: "Get inventory list", + description: "\n## Get WMS Inventory List\n\nRetrieve a paginated list of inventory items with comprehensive filtering options for inventory management and monitoring.\n\n### Features\n- **Paginated Results**: Efficient handling of large inventory datasets\n- **Multi-Filter Support**: Filter by status, warehouse, expiration, and search terms\n- **Search Capability**: Search by SKU or product name with case-insensitive matching\n- **Alert Filtering**: Filter for expiring items requiring attention\n- **Sorting**: Results sorted by last movement date and SKU\n\n### Query Parameters\n- **status**: Filter by inventory status (supports multiple values)\n- **warehouseId**: Filter by specific warehouse facility\n- **expiringSoon**: Filter for items expiring within 7 days\n- **search**: Search by SKU or product name\n- **limit**: Maximum results per page (default: 50, max: 100)\n- **offset**: Pagination offset for result navigation\n\n### Use Cases\n- **Inventory Management**: Browse and manage warehouse inventory\n- **Expiration Monitoring**: Track items approaching expiration dates\n- **Stock Lookup**: Find specific products by SKU or name\n- **Status Tracking**: Monitor inventory status distribution\n ", + operationId: "getWMSInventoryList", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by inventory status (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "AVAILABLE", + "ALLOCATED", + "QUARANTINE", + "HOLD", + "DAMAGED", + "EXPIRED" + ] + } + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "expiringSoon", + in: "query", + required: false, + description: "Filter for items expiring within 7 days", + schema: { + type: "string", + enum: ["true", "false"], + example: "true" + } + }, + { + name: "search", + in: "query", + required: false, + description: "Search by SKU or product name (case-insensitive)", + schema: { + type: "string", + example: "WIDGET-001" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum results per page (default: 50, max: 100)", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 50 + } + }, + { + name: "offset", + in: "query", + required: false, + description: "Pagination offset", + schema: { + type: "integer", + minimum: 0, + default: 0, + example: 0 + } + } + ], + responses: { + "200": { + description: "Inventory list retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + type: "object", + properties: { + inventoryId: { + type: 
"string", + example: "wms_inventory_674565c1234567890abcdef" + }, + sku: { + type: "string", + example: "WIDGET-001" + }, + productName: { + type: "string", + example: "Premium Widget" + }, + warehouseId: { + type: "string", + example: "WH_ATL_001" + }, + binId: { + type: "string", + example: "BIN-A01-01-01" + }, + quantityOnHand: { + type: "integer", + example: 150 + }, + quantityAllocated: { + type: "integer", + example: 25 + }, + quantityAvailable: { + type: "integer", + example: 125 + }, + inventoryStatus: { + type: "string", + example: "AVAILABLE" + }, + lotNumber: { + type: "string", + example: "LOT-2024-001" + }, + expirationDate: { + type: "string", + format: "date-time", + example: "2025-06-30T00:00:00Z" + }, + lastMovementAt: { + type: "string", + format: "date-time", + example: "2024-01-15T14:30:00Z" + } + } + } + }, + pagination: { + type: "object", + properties: { + total: { + type: "integer", + example: 1250 + }, + limit: { + type: "integer", + example: 50 + }, + offset: { + type: "integer", + example: 0 + }, + hasMore: { + type: "boolean", + example: true + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - worldId is required", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory/{inventoryId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update inventory status", + description: "\n## Update WMS Inventory Status\n\nUpdate the status of a specific inventory item for inventory control and management operations.\n\n### Features\n- **Status Management**: Change inventory item status for operational control\n- **Validation**: Ensures valid status values are provided\n- **Audit Trail**: Automatically updates timestamp on status change\n\n### Valid Status Values\n- **AVAILABLE**: Ready for allocation and fulfillment\n- **HOLD**: Temporarily held for review or special handling\n- **QUARANTINE**: Isolated for quality inspection\n- **ALLOCATED**: Reserved for specific orders\n- **EXPIRED**: Past expiration date, not available for sale\n\n### Use Cases\n- **Quality Control**: Place items on hold or quarantine for inspection\n- **Inventory Adjustment**: Update status based on physical inspection\n- **Expiration Management**: Mark expired inventory appropriately\n ", + operationId: "updateWMSInventoryStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "inventoryId", + in: "path", + required: true, + description: "Unique identifier for the inventory record", + schema: { + type: "string", + example: "wms_inventory_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: ["inventoryStatus"], + properties: { + inventoryStatus: { + type: "string", + enum: [ + "AVAILABLE", + "HOLD", + "QUARANTINE", + "ALLOCATED", + "EXPIRED" + ], + description: "New status for the inventory item", + example: "HOLD" + } + } + } + } + } + }, + responses: { + "200": { + description: "Inventory status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + 
data: { + $ref: "#/components/schemas/WMSInventory" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status or missing parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "inventoryStatus is required in request body", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inventory item not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inventory item not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory/{inventoryId}": { + patch: { + tags: [ + "WMS" + ], + summary: "Partially update inventory item", + description: "\n## Patch WMS Inventory Item\n\nPartially update an inventory record with only the specified fields. This is useful for updating specific properties like status, lot number, or location without affecting other fields.\n\n### Allowed Fields\n- **inventoryStatus**: Update the inventory status (AVAILABLE, ALLOCATED, QUARANTINE, HOLD, DAMAGED, EXPIRED)\n- **lotNumber**: Update or assign a lot number for batch tracking\n- **expirationDate**: Update the expiration date for perishable items\n- **binId**: Update the storage location (move inventory to a different bin)\n\n### Features\n- Partial updates - only specified fields are modified\n- Automatically updates the updatedAt timestamp\n- Supports quick status changes for inventory management\n\n### Use Cases\n- Put inventory on hold for quality issues\n- Update lot numbers after verification\n- Move inventory to different bins\n- Update expiration dates\n- Mark inventory as damaged or expired\n ", + operationId: "patchWMSInventory", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "inventoryId", + in: "path", + required: true, + description: "Unique identifier for the inventory record", + schema: { + type: "string", + example: "wms_inventory_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + inventoryStatus: { + type: "string", + enum: [ + "AVAILABLE", + "ALLOCATED", + "QUARANTINE", + "HOLD", + "DAMAGED", + "EXPIRED" + ], + description: "New status for the inventory" + }, + lotNumber: { + type: "string", + description: "Lot number for batch tracking", + example: "LOT-2024-001" + }, + expirationDate: { + type: "string", + format: "date-time", + description: "Expiration date for perishable items", + example: "2025-06-30T00:00:00Z" + }, + binId: { + type: "string", + description: "New bin location for the inventory", + example: "BIN-A01-01-01" + } + } + }, + examples: { + updateStatus: { + summary: "Put inventory on hold", + value: { + inventoryStatus: "HOLD" + } + }, + updateLotNumber: { + summary: "Update lot number", + value: { + lotNumber: "LOT-2024-002" + } + }, + moveInventory: { + summary: "Move inventory to different bin", + value: { + binId: "BIN-B02-03-02" + } + }, + updateExpiration: { + summary: "Update expiration date", + value: { + expirationDate: "2025-12-31T00:00:00Z" + } + } + } + } + } + }, + responses: { + "200": { + description: "Inventory 
updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSInventory" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing parameters or no updates provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and inventoryId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Inventory record not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Inventory record not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory-transactions": { + post: { + tags: [ + "WMS" + ], + summary: "Create inventory transaction", + description: "\n## Create WMS Inventory Transaction\n\nCreate a new inventory transaction to record movement, adjustment, or other inventory operations within the warehouse.\n\n### Features\n- **Transaction Recording**: Document all inventory movements and changes\n- **Multi-Transaction Types**: Support for RECEIVE, PUTAWAY, PICK, MOVE, ADJUST, CYCLE_COUNT, RETURN, DAMAGE, SHIP\n- **Bin-to-Bin Tracking**: Record inventory movements between storage locations\n- **Reference Integration**: Link transactions to orders, tasks, and cycle counts\n- **Lot/License Plate Tracking**: Support for batch and container tracking\n- **Auto-ID Generation**: Automatic transaction ID assignment for unique tracking\n\n### Business Logic\n- warehouseId, transactionType, productId, and quantity are required\n- transactionId is auto-generated with wms_inventory-transaction prefix\n- transactionDate defaults to current timestamp if not provided\n- Supports bin-to-bin movement tracking with fromBinId/toBinId\n- referenceType must be one of: PO, ORDER, TASK, CYCLE_COUNT\n\n### Use Cases\n- **Receiving Operations**: Record inventory receipt from inbound orders\n- **Putaway Activities**: Track movement from receiving to storage locations\n- **Picking Operations**: Document inventory removal for outbound orders\n- **Inventory Adjustments**: Record quantity adjustments and corrections\n- **Cycle Count Updates**: Apply cycle count variances to inventory\n- **Returns Processing**: Handle returned merchandise transactions\n- **Damage Recording**: Document damaged inventory write-offs\n ", + operationId: "createWMSInventoryTransaction", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "warehouseId", + "transactionType", + "productId", + "quantity" + ], + properties: { + warehouseId: { + type: "string", + description: "Unique identifier of the warehouse facility", + example: "wms_warehouse_674565c1234567890abcdef" + }, + transactionType: { + type: "string", + enum: [ + "RECEIVE", + "PUTAWAY", + "PICK", + "MOVE", + "ADJUST", + "CYCLE_COUNT", + "RETURN", + "DAMAGE", + "SHIP" + ], + description: "Type of inventory transaction operation", + example: "PUTAWAY" + }, + productId: { + type: "string", + 
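+ // Illustrative PUTAWAY transaction body (sketch; values from the property examples in this schema):
+ //   { "warehouseId": "wms_warehouse_674565c1234567890abcdef", "transactionType": "PUTAWAY", "productId": "prod_12345", "quantity": 25, "fromBinId": "BIN-RECV-001", "toBinId": "BIN-A-01-01" }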
description: "Product identifier for the inventory transaction", + example: "prod_12345" + }, + sku: { + type: "string", + description: "Stock keeping unit code", + example: "ABC-123-XL" + }, + fromBinId: { + type: "string", + description: "Source bin identifier for movement transactions", + example: "BIN-RECV-001" + }, + toBinId: { + type: "string", + description: "Destination bin identifier for movement transactions", + example: "BIN-A-01-01" + }, + lotNumber: { + type: "string", + description: "Lot number for batch tracking", + example: "LOT-2024-Q4-001" + }, + licensePlateNumber: { + type: "string", + description: "Container or pallet identifier", + example: "LP-20241201-001" + }, + quantity: { + type: "number", + description: "Transaction quantity (positive for additions, negative for reductions)", + example: 25 + }, + uom: { + type: "string", + description: "Unit of measure", + example: "EA" + }, + referenceType: { + type: "string", + enum: [ + "PO", + "ORDER", + "TASK", + "CYCLE_COUNT" + ], + description: "Type of reference document", + example: "ORDER" + }, + referenceId: { + type: "string", + description: "Reference document identifier", + example: "wms_outbound-order_674565c1234567890abcdef" + }, + transactionDate: { + type: "string", + format: "date-time", + description: "Transaction timestamp (defaults to current time)", + example: "2024-12-01T14:30:00.000Z" + }, + userId: { + type: "string", + description: "User who performed the transaction", + example: "user_warehouse_worker_001" + }, + userName: { + type: "string", + description: "Human-readable user name", + example: "John Smith" + }, + reasonCode: { + type: "string", + description: "Reason code for adjustments or special transactions", + example: "CYCLE_COUNT_ADJUSTMENT" + }, + notes: { + type: "string", + description: "Additional notes about the transaction", + example: "Putaway completed after quality inspection" + }, + customFields: { + type: "object", + description: "Additional custom transaction data", + additionalProperties: true, + example: { + priority: "HIGH", + equipment: "Forklift-002" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Inventory transaction created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Inventory transaction created successfully" + }, + data: { + $ref: "#/components/schemas/WMSInventoryTransaction" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid input data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse ID, transaction type, product ID, and quantity are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory-transactions/product/{productId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get inventory transactions by product", + description: "\n## Get WMS Inventory Transactions by Product\n\nRetrieve paginated inventory transactions for a specific product with comprehensive filtering capabilities.\n\n### Features\n- **Product-Specific Filtering**: All transactions for a specific product across warehouses\n- **Advanced Filtering**: Filter by warehouse, transaction type, date range, and bin location\n- **Cursor-Based Pagination**: Efficient pagination for large transaction sets\n- **Multi-Transaction 
Type**: Support for filtering by multiple transaction types\n- **Bin Movement Tracking**: Filter by specific bin involvement (fromBin or toBin)\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse facility\n- **transactionType**: Optional - Filter by transaction type (supports arrays)\n- **dateStart/dateEnd**: Optional - Filter by transaction date range\n- **binId**: Optional - Filter transactions involving specific bin (fromBinId OR toBinId)\n- **cursor**: Optional - Pagination cursor for next page\n- **limit**: Optional - Maximum results per page\n\n### Business Logic\n- Results ordered by transaction date (newest first)\n- Cursor-based pagination ensures consistency during concurrent operations\n- binId filter uses OR logic (fromBinId OR toBinId) to capture all bin involvement\n- Date filtering uses transactionDate field for temporal analysis\n\n### Use Cases\n- **Product Movement Analysis**: Track all movements for specific products\n- **Inventory Audit**: Review complete transaction history for products\n- **Performance Analysis**: Analyze transaction patterns by product\n- **Compliance Reporting**: Generate product-specific transaction reports\n- **Troubleshooting**: Investigate inventory discrepancies for products\n ", + operationId: "getWMSInventoryTransactionsByProduct", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "productId", + in: "path", + required: true, + description: "Product identifier to filter transactions", + schema: { + type: "string", + example: "prod_12345" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter transactions by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "transactionType", + in: "query", + required: false, + description: "Filter by transaction type (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "RECEIVE", + "PUTAWAY", + "PICK", + "MOVE", + "ADJUST", + "CYCLE_COUNT", + "RETURN", + "DAMAGE", + "SHIP" + ] + } + }, + style: "form", + explode: true, + example: [ + "RECEIVE", + "PUTAWAY" + ] + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + }, + { + name: "binId", + in: "query", + required: false, + description: "Filter transactions involving specific bin (fromBinId OR toBinId)", + schema: { + type: "string", + example: "BIN-A-01-01" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page of results", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 1000, + example: 50 + } + } + ], + responses: { + "200": { + description: "Product transactions retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + 
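+ // Editor's note (assumed client-side sketch, not part of the generated spec): the cursor-based
+ // pagination described above can be consumed by looping on the hasMore/nextCursor fields of the
+ // data object documented below (worldId and productId assumed in scope):
+ //   let cursor: string | undefined;
+ //   do {
+ //     const qs = new URLSearchParams({ limit: "50", ...(cursor ? { cursor } : {}) });
+ //     const page = await fetch(`/${worldId}/wms/inventory-transactions/product/${productId}?${qs}`)
+ //       .then(r => r.json());
+ //     cursor = page.data.hasMore ? page.data.nextCursor : undefined;
+ //   } while (cursor);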
success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Transactions by product retrieved successfully" + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInventoryTransaction" + } + }, + totalCount: { + type: "integer", + description: "Total number of transactions for this product", + example: 847 + }, + limit: { + type: "integer", + description: "Number of results per page", + example: 50 + }, + hasMore: { + type: "boolean", + description: "Whether more results are available", + example: true + }, + nextCursor: { + type: "string", + description: "Cursor for next page of results", + example: "507f1f77bcf86cd799439011" + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid date range provided", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory-transactions/bin/{binId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get inventory transactions by bin", + description: "\n## Get WMS Inventory Transactions by Bin\n\nRetrieve all inventory transactions involving a specific bin location, including both inbound and outbound movements.\n\n### Features\n- **Bin-Centric View**: All transactions where bin is source (fromBinId) or destination (toBinId)\n- **Comprehensive Filtering**: Filter by warehouse, transaction type, date range, and product\n- **Movement Direction Tracking**: See both incoming and outgoing transactions for the bin\n- **Real-Time Analysis**: Current transaction data for operational decisions\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse facility\n- **transactionType**: Optional - Filter by transaction type (supports arrays)\n- **dateStart/dateEnd**: Optional - Filter by transaction date range\n- **productId**: Optional - Filter transactions for specific product\n\n### Business Logic\n- Uses OR query logic: (fromBinId = binId OR toBinId = binId)\n- Results ordered by transaction date (newest first)\n- Includes all transaction types that involve the specified bin\n- Date filtering based on transactionDate for temporal analysis\n\n### Use Cases\n- **Bin Activity Analysis**: Review all activity for specific storage location\n- **Location Utilization**: Understand bin usage patterns and frequency\n- **Inventory Tracking**: Track product flow through specific locations\n- **Operational Planning**: Optimize bin assignments based on activity\n- **Compliance Audit**: Review all movements involving specific locations\n ", + operationId: "getWMSInventoryTransactionsByBin", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "binId", + in: "path", + required: true, + description: "Bin identifier to filter transactions (fromBinId OR toBinId)", + schema: { + type: "string", + example: "BIN-A-01-01" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter transactions by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "transactionType", + in: "query", + required: false, + description: 
"Filter by transaction type (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "RECEIVE", + "PUTAWAY", + "PICK", + "MOVE", + "ADJUST", + "CYCLE_COUNT", + "RETURN", + "DAMAGE", + "SHIP" + ] + } + }, + style: "form", + explode: true, + example: [ + "PUTAWAY", + "PICK" + ] + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + }, + { + name: "productId", + in: "query", + required: false, + description: "Filter transactions for specific product", + schema: { + type: "string", + example: "prod_12345" + } + } + ], + responses: { + "200": { + description: "Bin transactions retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Transactions by bin retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInventoryTransaction" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid date range provided", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory-transactions/reference/{referenceType}/{referenceId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get transactions by reference", + description: "\n## Get Transactions by Reference\n\nRetrieve inventory transactions associated with a specific reference document (e.g., Order, PO).\n\n### Features\n- **Reference-Based Filtering**: All transactions linked to specific orders, tasks, or counts\n- **Cross-Document Tracking**: Link inventory movements to their originating documents\n- **Complete Transaction Trail**: See all inventory impacts from a single reference\n- **Multi-Reference Support**: Support for PO, ORDER, TASK, CYCLE_COUNT references\n\n### Business Logic\n- Filters transactions where referenceType and referenceId match parameters\n- Results ordered by transaction date (newest first)\n- Includes all transaction types linked to the reference document\n\n### Use Cases\n- **Order Impact Analysis**: See all inventory transactions for specific orders\n- **Task Completion Tracking**: Review inventory movements from warehouse tasks\n- **Cycle Count Adjustments**: Track adjustments made from cycle count results\n- **Purchase Order Receiving**: Monitor all receipts against purchase orders\n- **Audit Trail**: Complete inventory transaction history for reference documents\n ", + operationId: "getWMSInventoryTransactionsByReference", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "referenceType", + in: "path", + required: true, + description: "Type of reference document (ORDER, PO, WORK_ORDER, etc.)", + schema: { + type: "string" + }, + example: "ORDER" + }, + { + name: "referenceId", + in: 
"path", + required: true, + description: "Reference document identifier", + schema: { + type: "string", + example: "ORD-2024-001" + } + } + ], + responses: { + "200": { + description: "Reference transactions retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Transactions by reference retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInventoryTransaction" + } + } + } + } + } + } + }, + "500": { + description: "Internal Server Error - Route/Controller parameter mismatch", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 500, + error: "referenceType parameter undefined - route configuration issue", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory-transactions/movement-report": { + get: { + tags: [ + "WMS" + ], + summary: "Get inventory movement report", + description: "\n## Get WMS Inventory Movement Report\n\nGenerate comprehensive inventory movement analytics with transaction summaries, top moving products, and temporal trends.\n\n### Features\n- **Movement Analytics**: Total transactions and quantity summaries by type\n- **Product Performance**: Identify top moving products with transaction volumes\n- **Temporal Trends**: Daily movement patterns and transaction distribution\n- **Multi-Dimensional Filtering**: Filter by warehouse, products, transaction types, and date range\n- **Executive Reporting**: High-level inventory movement insights for management\n\n### Report Data Includes\n- **Transaction Summary**: Total transaction counts by type with quantity totals\n- **Top Moving Products**: Products with highest transaction volumes and quantities\n- **Daily Trends**: Day-by-day transaction counts and movement quantities\n- **Movement Patterns**: Identify peak activity periods and seasonal trends\n\n### Query Parameters\n- **warehouseId**: Optional - Scope report to specific warehouse\n- **productIds**: Optional - Focus report on specific products (supports arrays)\n- **transactionType**: Optional - Filter by specific transaction types\n- **dateStart/dateEnd**: Optional - Time range for movement analysis\n\n### Business Logic\n- Aggregates transaction data across all selected criteria\n- Groups results by transaction type, product, and date\n- Calculates movement volumes and transaction frequencies\n- Orders results by transaction volume and activity levels\n\n### Use Cases\n- **Executive Dashboard**: High-level inventory movement insights\n- **Operational Planning**: Understand movement patterns for resource allocation\n- **Product Analysis**: Identify fast/slow moving products\n- **Performance Monitoring**: Track warehouse productivity and efficiency\n- **Trend Analysis**: Identify seasonal patterns and growth trends\n ", + operationId: "getWMSInventoryMovementReport", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter report by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "productIds", + in: "query", + 
required: false, + description: "Filter report by specific products (supports multiple values)", + schema: { + type: "array", + items: { + type: "string" + } + }, + style: "form", + explode: true, + example: [ + "prod_12345", + "prod_67890" + ] + }, + { + name: "transactionType", + in: "query", + required: false, + description: "Filter by transaction type (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "RECEIVE", + "PUTAWAY", + "PICK", + "MOVE", + "ADJUST", + "CYCLE_COUNT", + "RETURN", + "DAMAGE", + "SHIP" + ] + } + }, + style: "form", + explode: true, + example: [ + "RECEIVE", + "PUTAWAY", + "PICK" + ] + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for movement report analysis (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for movement report analysis (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + } + ], + responses: { + "200": { + description: "Movement report generated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Inventory movement report retrieved successfully" + }, + data: { + type: "object", + properties: { + totalTransactions: { + type: "integer", + description: "Total number of inventory transactions in period", + example: 2847 + }, + transactionsByType: { + type: "array", + items: { + type: "object", + properties: { + transactionType: { + type: "string", + description: "Type of inventory transaction", + example: "PUTAWAY" + }, + count: { + type: "integer", + description: "Number of transactions of this type", + example: 425 + }, + totalQuantity: { + type: "number", + description: "Total quantity moved in this transaction type", + example: 12750 + } + } + } + }, + topMovingProducts: { + type: "array", + items: { + type: "object", + properties: { + productId: { + type: "string", + description: "Product identifier", + example: "prod_12345" + }, + sku: { + type: "string", + description: "Stock keeping unit", + example: "ABC-123-XL" + }, + totalQuantity: { + type: "number", + description: "Total quantity moved for this product", + example: 1847 + }, + transactionCount: { + type: "integer", + description: "Number of transactions for this product", + example: 89 + } + } + } + }, + movementsByDate: { + type: "array", + items: { + type: "object", + properties: { + date: { + type: "string", + format: "date", + description: "Date of movements", + example: "2024-11-15" + }, + transactionCount: { + type: "integer", + description: "Number of transactions on this date", + example: 127 + }, + totalQuantity: { + type: "number", + description: "Total quantity moved on this date", + example: 3845 + } + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid date range provided", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory-transactions/history": { + get: { + tags: [ + "WMS" + ], + summary: "Get inventory transaction history", + description: "\n## Get WMS Inventory 
Transaction History\n\nRetrieve chronological inventory transaction history with comprehensive filtering options for operational analysis.\n\n### Features\n- **Complete Transaction History**: Chronological view of all inventory transactions\n- **Multi-Dimensional Filtering**: Filter by warehouse, product, bin, transaction type, and date range\n- **Operational Analysis**: Support for troubleshooting and audit requirements\n- **Flexible Limiting**: Configurable result limits for performance optimization\n- **Real-Time Data**: Current transaction data for immediate operational decisions\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse facility\n- **productId**: Optional - Filter transactions for specific product\n- **binId**: Optional - Filter transactions involving specific bin (fromBin OR toBin)\n- **transactionType**: Optional - Filter by transaction type (supports arrays)\n- **dateStart/dateEnd**: Optional - Filter by transaction date range\n- **limit**: Optional - Maximum number of results (performance control)\n\n### Business Logic\n- Results ordered by transaction date (newest first)\n- binId filter uses OR logic for comprehensive bin involvement tracking\n- Date filtering based on transactionDate field\n- Supports multiple transaction type filtering for workflow analysis\n\n### Use Cases\n- **Audit Trail**: Complete chronological transaction history\n- **Troubleshooting**: Investigate inventory discrepancies and issues\n- **Performance Analysis**: Analyze transaction patterns and frequencies\n- **Compliance Reporting**: Generate transaction history for regulatory requirements\n- **Operational Review**: Review recent transaction activity for management\n ", + operationId: "getWMSInventoryTransactionHistory", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter transactions by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "productId", + in: "query", + required: false, + description: "Filter transactions for specific product", + schema: { + type: "string", + example: "prod_12345" + } + }, + { + name: "binId", + in: "query", + required: false, + description: "Filter transactions involving specific bin (fromBinId OR toBinId)", + schema: { + type: "string", + example: "BIN-A-01-01" + } + }, + { + name: "transactionType", + in: "query", + required: false, + description: "Filter by transaction type (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "RECEIVE", + "PUTAWAY", + "PICK", + "MOVE", + "ADJUST", + "CYCLE_COUNT", + "RETURN", + "DAMAGE", + "SHIP" + ] + } + }, + style: "form", + explode: true, + example: [ + "RECEIVE", + "PUTAWAY", + "PICK" + ] + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for transaction filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results 
(performance optimization)", + schema: { + type: "integer", + minimum: 1, + maximum: 10000, + example: 1000 + } + } + ], + responses: { + "200": { + description: "Transaction history retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Transaction history retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInventoryTransaction" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid date range or limit value", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/inventory-transactions/adjustments": { + get: { + tags: [ + "WMS" + ], + summary: "Get inventory adjustments", + description: "\n## Get WMS Inventory Adjustments\n\nRetrieve inventory adjustment transactions for audit, compliance, and variance analysis.\n\n### Features\n- **Adjustment-Specific Filtering**: Automatically filters for transactionType = \"ADJUST\"\n- **Comprehensive Filtering**: Filter by warehouse, date range, and user\n- **Audit Trail**: Complete adjustment history for compliance requirements\n- **User Tracking**: Identify who made specific adjustments for accountability\n- **Temporal Analysis**: Date-based filtering for trend analysis and reporting\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse facility\n- **dateStart/dateEnd**: Optional - Filter by adjustment date range\n- **userId**: Optional - Filter adjustments by specific user\n\n### Business Logic\n- Automatically filters transactions where transactionType = \"ADJUST\"\n- Results ordered by transaction date (newest first)\n- Date filtering based on transactionDate field\n- User filtering for accountability and audit purposes\n\n### Use Cases\n- **Audit Compliance**: Review all inventory adjustments for audit purposes\n- **Variance Analysis**: Analyze adjustment patterns and identify trends\n- **User Accountability**: Track adjustments made by specific users\n- **Cycle Count Impact**: Review adjustments resulting from cycle counts\n- **Financial Reporting**: Generate adjustment reports for financial impact analysis\n ", + operationId: "getWMSInventoryAdjustments", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter adjustments by specific warehouse facility", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for adjustment filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-01T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for adjustment filtering (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-30T23:59:59Z" + } + }, + { + name: "userId", + in: "query", + required: false, + description: "Filter adjustments by specific user", + schema: { + type: "string", + example: 
"user_inventory_manager_001" + } + } + ], + responses: { + "200": { + description: "Inventory adjustments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Inventory adjustments retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSInventoryTransaction" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid date range provided", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/metrics.wms.ts b/packages/controlmart/src/docs/paths/wms/metrics.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..32c91e98766747f2a797f1574f111845afda38c3 --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/metrics.wms.ts @@ -0,0 +1,1172 @@ +export const metricsPaths = { + "/{worldId}/wms/daily-metrics": { + post: { + tags: [ + "WMS" + ], + summary: "Create new daily metrics record", + description: "\n## Create WMS Daily Metrics\n\nCreate a new daily metrics record for comprehensive warehouse performance tracking and operational analysis.\n\n### Features\n- **Comprehensive Metrics**: Track inbound, putaway, picking, packing, shipping, labor, inventory, and quality metrics\n- **Multi-Zone Support**: Record metrics by specific warehouse zones for detailed analysis\n- **Shift-Based Tracking**: Support multiple shifts with independent metrics\n- **Performance Monitoring**: Real-time operational performance measurement\n- **Quality Tracking**: Monitor errors, accuracy, and quality metrics\n- **Labor Analytics**: Track productivity, utilization, and efficiency metrics\n- **Custom Fields**: Support warehouse-specific operational metrics\n\n### Metric Categories\n- **Inbound**: Purchase orders received, units processed, receiving rates\n- **Putaway**: Tasks completed, pallets processed, productivity rates\n- **Picking**: Orders fulfilled, pick accuracy, productivity metrics\n- **Packing**: Orders packed, packaging rates, efficiency metrics\n- **Shipping**: Shipments created, carrier dispatch, package volumes\n- **Labor**: Worker counts, hours tracked, utilization percentages\n- **Inventory**: On-hand units, inventory values, turnover rates\n- **Quality**: Error tracking, damage reports, returns processed\n\n### Business Rules\n- metricId is auto-generated with unique identifier if not provided\n- warehouseId and date are required for metrics recording\n- Duplicate prevention for same date/warehouse/shift/zone combination\n- All numeric metrics are optional to support partial data collection\n- Custom fields support warehouse-specific operational requirements\n ", + operationId: "createWMSDailyMetrics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + warehouseId: { + type: "string", + description: "Warehouse identifier for metrics recording", + example: "WH_ATL_001" + }, + date: { + type: "string", + format: 
"date", + description: "Date for metrics recording", + example: "2024-11-27" + }, + shift: { + type: "string", + description: "Optional shift identifier for shift-based metrics", + example: "DAY_SHIFT_1" + }, + zoneId: { + type: "string", + description: "Optional zone identifier for zone-based metrics", + example: "ZONE_PICK_A" + }, + inbound: { + type: "object", + description: "Inbound receiving metrics", + properties: { + poReceived: { + type: "number", + description: "Number of POs received", + example: 45 + }, + linesReceived: { + type: "number", + description: "Number of lines received", + example: 320 + }, + unitsReceived: { + type: "number", + description: "Total units received", + example: 2450 + }, + palletsReceived: { + type: "number", + description: "Number of pallets received", + example: 28 + }, + receivingHours: { + type: "number", + description: "Total receiving hours", + example: 32.5 + }, + unitsPerHour: { + type: "number", + description: "Units processed per hour", + example: 75.4 + } + } + }, + putaway: { + type: "object", + description: "Putaway operation metrics", + properties: { + putawayTasks: { + type: "number", + description: "Number of putaway tasks completed", + example: 28 + }, + palletsPutaway: { + type: "number", + description: "Number of pallets put away", + example: 26 + }, + putawayHours: { + type: "number", + description: "Total putaway hours", + example: 18.5 + }, + palletsPerHour: { + type: "number", + description: "Pallets put away per hour", + example: 1.4 + } + } + }, + picking: { + type: "object", + description: "Picking operation metrics", + properties: { + ordersShipped: { + type: "number", + description: "Number of orders shipped", + example: 125 + }, + linesPicked: { + type: "number", + description: "Number of lines picked", + example: 890 + }, + unitsPicked: { + type: "number", + description: "Total units picked", + example: 2240 + }, + pickingHours: { + type: "number", + description: "Total picking hours", + example: 45.5 + }, + linesPerHour: { + type: "number", + description: "Lines picked per hour", + example: 19.6 + }, + unitsPerHour: { + type: "number", + description: "Units picked per hour", + example: 49.2 + }, + pickAccuracy: { + type: "number", + description: "Pick accuracy percentage", + example: 99.2 + } + } + }, + packing: { + type: "object", + description: "Packing operation metrics", + properties: { + ordersPacked: { + type: "number", + description: "Number of orders packed", + example: 120 + }, + packagesPacked: { + type: "number", + description: "Number of packages packed", + example: 98 + }, + packingHours: { + type: "number", + description: "Total packing hours", + example: 24 + }, + ordersPerHour: { + type: "number", + description: "Orders packed per hour", + example: 5 + } + } + }, + shipping: { + type: "object", + description: "Shipping operation metrics", + properties: { + shipmentsCreated: { + type: "number", + description: "Number of shipments created", + example: 85 + }, + carriersDispatched: { + type: "number", + description: "Number of carriers dispatched", + example: 12 + }, + packagesShipped: { + type: "number", + description: "Total packages shipped", + example: 96 + } + } + }, + labor: { + type: "object", + description: "Labor and workforce metrics", + properties: { + totalWorkers: { + type: "number", + description: "Total workers on shift", + example: 24 + }, + totalHours: { + type: "number", + description: "Total labor hours worked", + example: 192 + }, + productiveHours: { + type: "number", + description: 
"Productive labor hours", + example: 165.5 + }, + indirectHours: { + type: "number", + description: "Indirect labor hours", + example: 26.5 + }, + utilizationPercent: { + type: "number", + description: "Labor utilization percentage", + example: 86.2 + } + } + }, + inventory: { + type: "object", + description: "Inventory management metrics", + properties: { + onHandUnits: { + type: "number", + description: "Total units on hand", + example: 45890 + }, + inventoryValue: { + type: "number", + description: "Total inventory value in dollars", + example: 2456780.5 + }, + turnoverRate: { + type: "number", + description: "Inventory turnover rate", + example: 8.4 + } + } + }, + quality: { + type: "object", + description: "Quality and error tracking metrics", + properties: { + pickErrors: { + type: "number", + description: "Number of pick errors", + example: 7 + }, + packErrors: { + type: "number", + description: "Number of pack errors", + example: 3 + }, + damageReports: { + type: "number", + description: "Number of damage reports", + example: 2 + }, + returnsProcessed: { + type: "number", + description: "Number of returns processed", + example: 15 + } + } + }, + customFields: { + type: "object", + description: "Additional warehouse-specific metrics", + example: { + temperatureControlledZones: 4, + hazMatHandling: true, + specialEquipmentUsage: 12.5 + } + } + }, + required: [ + "warehouseId", + "date" + ] + } + } + } + }, + responses: { + "201": { + description: "Daily metrics created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Daily metrics created successfully" + }, + data: { + $ref: "#/components/schemas/WMSDailyMetrics" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid data or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse ID and date are required", + meta: { event: "createWMSDailyMetrics", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "409": { + description: "Conflict - Daily metrics already exist for this combination", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 409, + error: "Daily metrics already exist for this date/warehouse/shift combination", + meta: { event: "createWMSDailyMetrics", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/daily-metrics/daterange": { + get: { + tags: [ + "WMS" + ], + summary: "Get daily metrics by date range", + description: "\n## Get WMS Daily Metrics by Date Range\n\nRetrieve daily warehouse metrics within a specified date range with optional filtering by warehouse, shift, and zone for comprehensive operational analysis.\n\n### Features\n- **Date Range Filtering**: Retrieve metrics for any specified date range\n- **Warehouse Scoping**: Filter metrics by specific warehouse facility\n- **Shift-Based Filtering**: Optional filtering by shift for shift-specific analysis\n- **Zone-Based Filtering**: Optional filtering by warehouse zone for zone-specific metrics\n- **Chronological Ordering**: Results sorted by date in descending order (newest first)\n- **Comprehensive Data**: Full metrics data for all operational categories\n- **Performance Analysis**: Support for trend analysis and performance monitoring\n\n### 
Query Parameters\n- **warehouseId**: Required - Warehouse identifier for metrics filtering\n- **dateStart**: Required - Start date for metrics range (ISO 8601 format)\n- **dateEnd**: Required - End date for metrics range (ISO 8601 format)\n- **shift**: Optional - Filter by specific shift identifier\n- **zoneId**: Optional - Filter by specific zone identifier\n\n### Business Logic\n- warehouseId parameter is required for warehouse-specific filtering\n- dateStart and dateEnd define the inclusive date range for filtering\n- Optional shift and zoneId filters further refine the results\n- Results include all metric categories for comprehensive analysis\n- Date range is inclusive of both start and end dates\n- Maximum date range recommended: 90 days for performance optimization\n\n### Use Cases\n- **Performance Trends**: Analyze operational performance over time\n- **Shift Comparisons**: Compare metrics across different shifts\n- **Zone Analysis**: Evaluate zone-specific operational performance\n- **Monthly Reporting**: Generate monthly operational reports\n- **Capacity Planning**: Analyze historical data for capacity planning\n- **Productivity Analysis**: Track productivity trends and patterns\n ", + operationId: "getWMSDailyMetricsByDateRange", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: true, + description: "Warehouse identifier for metrics filtering", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "dateStart", + in: "query", + required: true, + description: "Start date for metrics range", + schema: { + type: "string", + format: "date", + example: "2024-11-01" + } + }, + { + name: "dateEnd", + in: "query", + required: true, + description: "End date for metrics range", + schema: { + type: "string", + format: "date", + example: "2024-11-27" + } + }, + { + name: "shift", + in: "query", + required: false, + description: "Optional shift identifier for filtering", + schema: { + type: "string", + example: "DAY_SHIFT_1" + } + }, + { + name: "zoneId", + in: "query", + required: false, + description: "Optional zone identifier for filtering", + schema: { + type: "string", + example: "ZONE_PICK_A" + } + } + ], + responses: { + "200": { + description: "Daily metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Daily metrics by date range retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSDailyMetrics" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId, warehouseId, dateStart, and dateEnd are required", + meta: { event: "getWMSDailyMetricsByDateRange", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/daily-metrics/{metricsId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get daily metrics by ID", + description: "\n## Get WMS Daily Metrics by ID\n\nRetrieve a specific daily metrics record using its unique identifier for detailed analysis and data access.\n\n### Features\n- **Direct Access**: Retrieve metrics using 
unique metric identifier\n- **Complete Data**: Full metrics record with all operational categories\n- **Single Record Focus**: Detailed view of specific day's performance\n- **Cross-Reference Support**: Support for metric ID-based references\n- **Audit Trail Support**: Enable audit trail and historical reference tracking\n\n### Path Parameters\n- **metricsId**: Required - Unique identifier for the specific metrics record\n\n### Business Logic\n- metricsId must be a valid, existing metrics record identifier\n- Returns complete metrics record with all categories and fields\n- Includes metadata like creation and update timestamps\n- Null response if metrics record is not found\n- Full data structure for comprehensive analysis\n\n### Use Cases\n- **Detailed Analysis**: Deep dive into specific day's performance\n- **Data Verification**: Verify specific metrics data for accuracy\n- **Cross-Reference**: Reference specific metrics from other systems\n- **Historical Review**: Review historical performance data\n- **Audit Support**: Support audit trail and compliance requirements\n ", + operationId: "getWMSDailyMetricsById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "metricsId", + in: "path", + required: true, + description: "Unique identifier for the metrics record", + schema: { + type: "string", + example: "wms_daily-metrics_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Daily metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Daily metrics retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSDailyMetrics" + } + } + } + } + } + }, + "404": { + description: "Not Found - Metrics record not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Daily metrics record not found", + meta: { event: "getWMSDailyMetricsById", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "WMS" + ], + summary: "Update daily metrics record", + description: "\n## Update WMS Daily Metrics\n\nUpdate an existing daily metrics record with new or corrected data to maintain accurate operational performance tracking.\n\n### Features\n- **Partial Updates**: Support partial field updates without affecting other data\n- **Data Correction**: Enable correction of recorded metrics data\n- **Real-time Updates**: Update metrics as operations progress throughout the day\n- **Flexible Modification**: Update any combination of metric categories\n- **Audit Trail**: Maintain update history through audit plugin\n- **Validation**: Ensure data integrity during update operations\n\n### Update Categories\n- **Inbound Metrics**: Update receiving and inbound processing data\n- **Putaway Metrics**: Modify putaway operation performance data\n- **Picking Metrics**: Update picking operation metrics and accuracy\n- **Packing Metrics**: Modify packing operation performance data\n- **Shipping Metrics**: Update shipping and dispatch metrics\n- **Labor Metrics**: Modify workforce and labor utilization data\n- **Inventory Metrics**: Update inventory levels and turnover data\n- **Quality Metrics**: Modify error rates and quality 
tracking data\n\n### Business Rules\n- metricsId must reference an existing metrics record\n- Only provided fields are updated (partial update support)\n- Core identifiers (warehouseId, date, metricId) cannot be modified\n- Custom fields support flexible warehouse-specific updates\n- Audit trail automatically tracks all modifications\n ", + operationId: "updateWMSDailyMetrics", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "metricsId", + in: "path", + required: true, + description: "Unique identifier for the metrics record to update", + schema: { + type: "string", + example: "wms_daily-metrics_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + description: "Partial metrics data for updating specific fields", + properties: { + inbound: { + type: "object", + description: "Updated inbound receiving metrics", + properties: { + poReceived: { + type: "number", + example: 47 + }, + linesReceived: { + type: "number", + example: 335 + }, + unitsReceived: { + type: "number", + example: 2580 + }, + palletsReceived: { + type: "number", + example: 30 + }, + receivingHours: { + type: "number", + example: 34 + }, + unitsPerHour: { + type: "number", + example: 75.9 + } + } + }, + picking: { + type: "object", + description: "Updated picking operation metrics", + properties: { + pickAccuracy: { + type: "number", + example: 99.4 + }, + ordersShipped: { + type: "number", + example: 128 + } + } + }, + quality: { + type: "object", + description: "Updated quality metrics", + properties: { + pickErrors: { + type: "number", + example: 5 + }, + damageReports: { + type: "number", + example: 1 + } + } + }, + customFields: { + type: "object", + description: "Updated custom warehouse-specific metrics", + example: { + temperatureControlledZones: 5, + specialEquipmentUsage: 15.2 + } + } + } + } + } + } + }, + responses: { + "200": { + description: "Daily metrics updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Daily metrics updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSDailyMetrics" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid update data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Invalid update data provided", + meta: { event: "updateWMSDailyMetrics", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Metrics record not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Daily metrics record not found for update", + meta: { event: "updateWMSDailyMetrics", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/daily-metrics/summary": { + get: { + tags: [ + "WMS" + ], + summary: "Get comprehensive metrics summary", + description: "\n## Get WMS Daily Metrics Summary\n\nGenerate a comprehensive summary of warehouse performance metrics with averages, totals, and trend analysis for strategic decision-making and operational planning.\n\n### Features\n- **Aggregated 
Analytics**: Calculate average performance metrics across date ranges\n- **Multi-Category Summary**: Comprehensive summary across all operational categories\n- **Trend Analysis**: Historical trend data for performance pattern identification\n- **Flexible Filtering**: Filter by warehouse, date range, and zone for targeted analysis\n- **Performance Benchmarking**: Establish performance baselines and benchmarks\n- **Executive Reporting**: High-level metrics for executive reporting and dashboards\n\n### Summary Categories\n- **Inbound Operations**: Average POs received, units processed, productivity rates\n- **Picking Operations**: Average orders shipped, pick accuracy, productivity metrics\n- **Packing Operations**: Average orders packed, packing rates, efficiency metrics\n- **Inventory Management**: Average accuracy and turnover rate metrics\n- **Trend Analysis**: Daily performance trends with key operational indicators\n\n### Query Parameters\n- **warehouseId**: Optional - Filter summary by specific warehouse facility\n- **dateStart**: Optional - Start date for summary period\n- **dateEnd**: Optional - End date for summary period\n- **zoneId**: Optional - Filter summary by specific zone\n\n### Business Logic\n- All query parameters are optional for maximum flexibility\n- Date range filtering is inclusive of both start and end dates\n- Warehouse filtering enables facility-specific analysis\n- Zone filtering provides zone-specific performance summaries\n- Averages calculated from actual recorded daily metrics\n- Trend data includes key performance indicators over time\n\n### Use Cases\n- **Executive Dashboards**: High-level performance metrics for leadership\n- **Performance Benchmarking**: Establish and track performance standards\n- **Operational Planning**: Use historical data for future planning\n- **Facility Comparisons**: Compare performance across different warehouses\n- **Zone Analysis**: Evaluate zone-specific operational efficiency\n- **Strategic Analysis**: Long-term performance trend analysis\n ", + operationId: "getWMSMetricsSummary", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Optional warehouse identifier for filtering", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Optional start date for summary period", + schema: { + type: "string", + format: "date", + example: "2024-11-01" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "Optional end date for summary period", + schema: { + type: "string", + format: "date", + example: "2024-11-27" + } + }, + { + name: "zoneId", + in: "query", + required: false, + description: "Optional zone identifier for filtering", + schema: { + type: "string", + example: "ZONE_PICK_A" + } + } + ], + responses: { + "200": { + description: "Metrics summary retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Metrics summary retrieved successfully" + }, + data: { + type: "object", + properties: { + totalDays: { + type: "number", + description: "Total days included in summary", + example: 27 + }, + averageMetrics: { + type: "object", + description: 
"Average metrics across all categories", + properties: { + inbound: { + type: "object", + properties: { + avgPoReceived: { + type: "number", + example: 42.5 + }, + avgUnitsReceived: { + type: "number", + example: 2345.8 + }, + avgUnitsPerHour: { + type: "number", + example: 74.2 + } + } + }, + picking: { + type: "object", + properties: { + avgOrdersShipped: { + type: "number", + example: 118.3 + }, + avgLinesPicked: { + type: "number", + example: 845.6 + }, + avgLinesPerHour: { + type: "number", + example: 18.9 + }, + avgPickAccuracy: { + type: "number", + example: 99.1 + } + } + }, + packing: { + type: "object", + properties: { + avgOrdersPacked: { + type: "number", + example: 115.2 + }, + avgPackingHours: { + type: "number", + example: 23.1 + }, + avgOrdersPerHour: { + type: "number", + example: 4.98 + } + } + }, + inventory: { + type: "object", + properties: { + avgAccuracy: { + type: "number", + example: 99.6 + }, + avgTurnover: { + type: "number", + example: 8.2 + } + } + } + } + }, + trends: { + type: "array", + description: "Daily trend data with key performance indicators", + items: { + type: "object", + properties: { + date: { + type: "string", + example: "2024-11-27" + }, + unitsReceived: { + type: "number", + example: 2450 + }, + unitsShipped: { + type: "number", + example: 2240 + }, + pickAccuracy: { + type: "number", + example: 99.2 + } + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/daily-metrics/trends": { + get: { + tags: [ + "WMS" + ], + summary: "Get performance trend analysis", + description: "\n## Get WMS Performance Trends\n\nRetrieve detailed performance trend analysis for specific metric categories over time to identify patterns, improvements, and areas requiring attention.\n\n### Features\n- **Category-Specific Analysis**: Focus on specific operational categories for detailed analysis\n- **Time-Series Data**: Chronological trend data for pattern identification\n- **Metric Type Filtering**: Filter by inbound, picking, packing, or inventory metrics\n- **Date Range Flexibility**: Analyze trends over any specified date range\n- **Performance Monitoring**: Track performance improvements and degradations over time\n- **Operational Intelligence**: Generate insights for operational optimization\n\n### Supported Metric Types\n- **inbound**: Receiving operations, PO processing, receiving productivity\n- **picking**: Order fulfillment, pick accuracy, picking productivity\n- **packing**: Order packing, packaging productivity, packing efficiency\n- **inventory**: Inventory levels, accuracy, turnover rates\n\n### Query Parameters\n- **warehouseId**: Required - Warehouse identifier for trend analysis\n- **metricType**: Required - Type of metrics for trend analysis\n- **dateStart**: Required - Start date for trend analysis\n- **dateEnd**: Required - End date for trend analysis\n\n### Business Logic\n- warehouseId is required for warehouse-specific trend analysis\n- metricType determines which category of metrics to analyze\n- Date range defines the period for trend analysis\n- Results sorted chronologically for time-series analysis\n- Each day's metrics returned as complete metric object for the specified category\n- Missing days in range will not appear in results\n\n### Use Cases\n- **Performance Monitoring**: Track daily performance trends over time\n- **Seasonal Analysis**: Identify seasonal performance patterns\n- **Improvement Tracking**: Monitor the impact of operational changes\n- **Capacity Planning**: Use historical trends for future capacity 
planning\n- **Problem Identification**: Identify periods of declining performance\n- **Benchmarking**: Compare current performance against historical trends\n ", + operationId: "getWMSPerformanceTrends", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: true, + description: "Warehouse identifier for trend analysis", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "metricType", + in: "query", + required: true, + description: "Type of metrics for trend analysis", + schema: { + type: "string", + enum: [ + "inbound", + "picking", + "packing", + "inventory" + ], + example: "picking" + } + }, + { + name: "dateStart", + in: "query", + required: true, + description: "Start date for trend analysis", + schema: { + type: "string", + format: "date", + example: "2024-11-01" + } + }, + { + name: "dateEnd", + in: "query", + required: true, + description: "End date for trend analysis", + schema: { + type: "string", + format: "date", + example: "2024-11-27" + } + } + ], + responses: { + "200": { + description: "Performance trends retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Performance trends retrieved successfully" + }, + data: { + type: "array", + items: { + type: "object", + properties: { + date: { + type: "string", + description: "Date for metrics", + example: "2024-11-27" + }, + metrics: { + type: "object", + description: "Complete metrics object for the specified category", + example: { + ordersShipped: 125, + linesPicked: 890, + unitsPicked: 2240, + pickingHours: 45.5, + linesPerHour: 19.6, + unitsPerHour: 49.2, + pickAccuracy: 99.2 + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "warehouseId, metricType, dateStart, and dateEnd are required", + meta: { event: "getWMSPerformanceTrends", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/daily-metrics/zone-comparison": { + get: { + tags: [ + "WMS" + ], + summary: "Get zone performance comparison", + description: "\n## Get WMS Zone Performance Comparison\n\nCompare operational performance metrics across different warehouse zones to identify high-performing areas and zones requiring optimization.\n\n### Features\n- **Multi-Zone Analysis**: Compare performance across all zones with recorded data\n- **Picking Performance Focus**: Specialized analysis of picking operations by zone\n- **Productivity Metrics**: Calculate productivity ratios and efficiency indicators\n- **Accuracy Tracking**: Monitor accuracy performance across different zones\n- **Date Range Analysis**: Analyze zone performance over specified time periods\n- **Benchmarking Support**: Identify best-performing zones for benchmarking\n\n### Performance Metrics\n- **Total Orders**: Cumulative orders processed by each zone\n- **Total Lines**: Cumulative lines picked by each zone\n- **Average Pick Time**: Calculated average time per line picked\n- **Accuracy**: Average pick accuracy percentage by zone\n\n### Query Parameters\n- **warehouseId**: Required - 
Warehouse identifier for zone comparison\n- **dateStart**: Required - Start date for comparison period\n- **dateEnd**: Required - End date for comparison period\n\n### Business Logic\n- warehouseId is required for warehouse-specific zone analysis\n- Date range defines the period for zone performance comparison\n- Only zones with recorded metrics data are included in results\n- Average pick time calculated as total picking hours divided by total lines\n- Accuracy calculated as average of daily accuracy percentages\n- Results include only zones with valid zoneId values (not null/empty)\n\n### Calculation Details\n- **averagePickTime**: Total picking hours ÷ total lines picked (in hours per line)\n- **accuracy**: Average of daily pick accuracy percentages across the date range\n- **totalOrders**: Sum of all orders processed by the zone\n- **totalLines**: Sum of all lines picked by the zone\n\n### Use Cases\n- **Zone Optimization**: Identify zones requiring operational improvements\n- **Performance Benchmarking**: Compare zone performance against best practices\n- **Resource Allocation**: Allocate resources based on zone performance data\n- **Layout Analysis**: Evaluate warehouse layout effectiveness by zone\n- **Training Needs**: Identify zones where additional training may be needed\n- **Capacity Planning**: Plan zone capacity based on historical performance\n ", + operationId: "getWMSZonePerformanceComparison", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: true, + description: "Warehouse identifier for zone comparison", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "dateStart", + in: "query", + required: true, + description: "Start date for comparison period", + schema: { + type: "string", + format: "date", + example: "2024-11-01" + } + }, + { + name: "dateEnd", + in: "query", + required: true, + description: "End date for comparison period", + schema: { + type: "string", + format: "date", + example: "2024-11-27" + } + } + ], + responses: { + "200": { + description: "Zone performance comparison retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Zone performance comparison retrieved successfully" + }, + data: { + type: "array", + items: { + type: "object", + properties: { + zoneId: { + type: "string", + description: "Zone identifier", + example: "ZONE_PICK_A" + }, + metrics: { + type: "object", + description: "Performance metrics for the zone", + properties: { + totalOrders: { + type: "number", + description: "Total orders processed", + example: 2850 + }, + totalLines: { + type: "number", + description: "Total lines picked", + example: 19420 + }, + averagePickTime: { + type: "number", + description: "Average hours per line picked", + example: 0.0234 + }, + accuracy: { + type: "number", + description: "Average pick accuracy percentage", + example: 99.3 + } + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "warehouseId, dateStart, and dateEnd are required", + meta: { event: 
"getWMSZonePerformanceComparison", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/outbound.wms.ts b/packages/controlmart/src/docs/paths/wms/outbound.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..b4edb234672e084dba21876207f2a9c9ad48083f --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/outbound.wms.ts @@ -0,0 +1,2983 @@ +export const outboundPaths = { + "/{worldId}/wms/outbound-orders": { + post: { + summary: "Create new outbound order", + description: "\n**Create New Outbound Order**\n\nCreates a new outbound order for warehouse fulfillment with comprehensive line item management.\n\n**Key Features:**\n- Multi-line order support with detailed product specifications\n- Customer and shipping address management \n- Priority-based order classification\n- Integrated timing workflow tracking\n- Warehouse-scoped order processing\n\n**Validation Requirements:**\n- warehouseId, orderNumber, and lines array are required\n- orderNumber must be unique within world scope\n- lines array must contain at least one item\n\n**Business Workflow:**\n1. Order created in PENDING status\n2. Released to warehouse (RELEASED status)\n3. Inventory allocated (ALLOCATED status)\n4. Picking process (PICKING → PICKED)\n5. Packing and shipping (PACKED → SHIPPED)\n ", + operationId: "createWMSOutboundOrder", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + }, + example: "550e8400-e29b-41d4-a716-446655440000" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + orderId: { + type: "string", + description: "Business primary identifier (consistent naming verified)", + example: "ORD-2024-001234" + }, + orderNumber: { + type: "string", + description: "Human-readable order number (unique per world)", + example: "WO-20241201-001" + }, + warehouseId: { + type: "string", + description: "Required - source warehouse identifier", + example: "WH-MAIN-001" + }, + customerId: { + type: "string", + description: "Customer account identifier", + example: "CUST-ABC-123" + }, + customerName: { + type: "string", + description: "Customer display name", + example: "ABC Corporation" + }, + orderDate: { + type: "string", + format: "date-time", + description: "Order creation timestamp", + example: "2024-12-01T09:00:00.000Z" + }, + requestedShipDate: { + type: "string", + format: "date-time", + description: "Customer delivery requirement", + example: "2024-12-03T17:00:00.000Z" + }, + priority: { + type: "string", + enum: [ + "URGENT", + "HIGH", + "NORMAL", + "LOW" + ], + description: "Order processing priority", + example: "HIGH" + }, + orderType: { + type: "string", + enum: [ + "STANDARD", + "EXPRESS", + "BULK", + "RETURNS" + ], + description: "Order classification type", + example: "STANDARD" + }, + lines: { + type: "array", + description: "Order line items (required, length > 0)", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + description: "Sequential line identifier within order", + example: 1 + }, + itemId: { + type: "string", + description: "SKU/product code", + example: "SKU-WIDGET-001" + }, + itemDescription: { + type: "string", + description: "Product display name", + example: "Premium Widget Assembly" + }, + orderedQuantity: { + type: "number", + description: "Customer requested 
amount", + example: 25 + }, + unitOfMeasure: { + type: "string", + description: "UOM code (EA, CS, LB, KG, etc.)", + example: "EA" + }, + unitPrice: { + type: "number", + description: "Price per unit (optional)", + example: 49.99 + }, + specialInstructions: { + type: "string", + description: "Line-specific handling notes", + example: "Handle with care - fragile" + } + }, + required: [ + "lineNumber", + "itemId", + "itemDescription", + "orderedQuantity", + "unitOfMeasure" + ] + } + }, + shippingAddress: { + type: "object", + description: "Delivery destination", + properties: { + street1: { + type: "string", + example: "123 Main Street" + }, + street2: { + type: "string", + example: "Suite 456" + }, + city: { + type: "string", + example: "Anytown" + }, + state: { + type: "string", + example: "CA" + }, + zipCode: { + type: "string", + example: "90210" + }, + country: { + type: "string", + example: "USA" + } + }, + required: [ + "street1", + "city", + "state", + "zipCode", + "country" + ] + }, + carrierInfo: { + type: "object", + description: "Optional shipping carrier details", + properties: { + carrierId: { + type: "string", + example: "CARRIER-UPS" + }, + carrierName: { + type: "string", + example: "UPS" + }, + serviceLevel: { + type: "string", + example: "GROUND" + }, + trackingNumber: { + type: "string", + example: "1Z999AA1234567890" + } + } + }, + specialInstructions: { + type: "string", + description: "Order-level notes", + example: "Deliver to loading dock" + } + }, + required: [ + "orderId", + "orderNumber", + "warehouseId", + "customerId", + "customerName", + "orderDate", + "requestedShipDate", + "priority", + "orderType", + "lines", + "shippingAddress" + ] + } + } + } + }, + responses: { + "201": { + description: "Outbound order created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Outbound order created successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid input data or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse ID, order number, and order lines are required", + meta: { event: "createWMSOutboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "409": { + description: "Conflict - Duplicate order number already exists in the system", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 409, + error: "Outbound order with number WO-20241201-001 already exists", + meta: { event: "createWMSOutboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/status": { + get: { + summary: "Get orders filtered by status with advanced filtering", + description: "\n**Status-Based Order Filtering**\n\nRetrieve outbound orders filtered by status with comprehensive query capabilities.\n\n**Advanced Filtering Options:**\n- Multiple status selection\n- Warehouse-specific filtering \n- Customer-specific orders\n- Order type filtering\n- Date range filtering\n- Priority-based filtering\n- Cursor-based pagination\n\n**Sorting Logic:**\n- Primary: Priority (ascending - URGENT first)\n- Secondary: Order date 
(ascending - oldest first)\n\n**Pagination:**\n- Cursor-based with overflow detection\n- Configurable page limits\n- Total count included\n ", + operationId: "getWMSOutboundOrdersByStatus", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "status", + in: "query", + required: true, + description: "Order status filter (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "PENDING", + "RELEASED", + "ALLOCATED", + "PICKING", + "PICKED", + "PACKED", + "SHIPPED", + "CANCELLED" + ] + } + }, + example: [ + "RELEASED", + "ALLOCATED" + ] + }, + { + name: "warehouseId", + in: "query", + description: "Optional warehouse filter", + schema: { + type: "string" + }, + example: "WH-MAIN-001" + }, + { + name: "customerId", + in: "query", + description: "Optional customer filter", + schema: { + type: "string" + }, + example: "CUST-ABC-123" + }, + { + name: "orderType", + in: "query", + description: "Optional order type filter", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "STANDARD", + "EXPRESS", + "BULK", + "RETURNS" + ] + } + } + }, + { + name: "priority", + in: "query", + description: "Optional priority filter", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "URGENT", + "HIGH", + "NORMAL", + "LOW" + ] + } + } + }, + { + name: "dateStart", + in: "query", + description: "Optional date range start", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "Optional date range end", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "cursor", + in: "query", + description: "Pagination cursor from previous response", + schema: { + type: "string" + } + }, + { + name: "limit", + in: "query", + description: "Page size (default: global page limit)", + schema: { + type: "number" + }, + example: 50 + } + ], + responses: { + "200": { + description: "Orders retrieved successfully with pagination metadata", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + }, + totalCount: { + type: "number", + description: "Total matching records across all pages", + example: 142 + }, + limit: { + type: "number", + description: "Applied page limit", + example: 50 + }, + hasMore: { + type: "boolean", + description: "More results available for pagination", + example: true + }, + nextCursor: { + type: "string", + description: "Cursor for next page (if hasMore is true)", + example: "507f1f77bcf86cd799439011" + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/{orderId}": { + get: { + summary: "Get single order by primary identifier", + description: "\n**Order Lookup by ID**\n\nRetrieve a specific outbound order using its primary identifier.\n\n**Field Consistency Verified:**\n- Uses consistent `orderId` field across model, controller, and repository\n- World-scoped lookup for multi-tenant isolation\n- Returns complete order object with all nested data\n ", + operationId: "getWMSOutboundOrderById", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier 
for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Primary order identifier (consistent naming verified)", + schema: { + type: "string" + }, + example: "ORD-2024-001234" + } + ], + responses: { + "200": { + description: "Order retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + }, + "404": { + description: "Not Found - Outbound order does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Outbound order not found", + meta: { event: "getWMSOutboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + patch: { + tags: [ + "WMS" + ], + summary: "Partially update outbound order", + description: "\n## Patch WMS Outbound Order\n\nPartially update an outbound order with only the specified fields. This is useful for updating specific properties without affecting other fields.\n\n### Allowed Fields\n- **orderStatus**: Update the order status (CREATED, RELEASED, ALLOCATED, PICKING, PICKED, PACKING, PACKED, SHIPPED, CANCELLED)\n- **orderPriority**: Update the order priority (RUSH, URGENT, NORMAL, STANDARD)\n- **dates**: Update date fields like requiredShipDate or actualShipDate\n\n### Features\n- Partial updates - only specified fields are modified\n- Automatically updates the updatedAt timestamp\n- Supports dot notation for nested fields (e.g., \"dates.requiredShipDate\")\n- Auto-sets actualShipDate when status changes to SHIPPED\n\n### Use Cases\n- Update order status as it progresses through fulfillment workflow\n- Change priority based on customer requirements\n- Update shipping dates\n ", + operationId: "patchWMSOutboundOrder", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Unique identifier for the outbound order", + schema: { + type: "string", + example: "wms_outbound-order_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + orderStatus: { + type: "string", + enum: [ + "CREATED", + "RELEASED", + "ALLOCATED", + "PICKING", + "PICKED", + "PACKING", + "PACKED", + "SHIPPED", + "CANCELLED" + ], + description: "New status for the order" + }, + orderPriority: { + type: "string", + enum: [ + "RUSH", + "URGENT", + "NORMAL", + "STANDARD" + ], + description: "New priority for the order" + }, + dates: { + type: "object", + properties: { + requiredShipDate: { + type: "string", + format: "date-time", + description: "Updated required ship date" + }, + actualShipDate: { + type: "string", + format: "date-time", + description: "Actual ship date (auto-set when status is SHIPPED)" + } + } + }, + "dates.requiredShipDate": { + type: "string", + format: "date-time", + description: "Updated required ship date (dot notation)" + }, + "dates.actualShipDate": { + type: "string", + format: "date-time", + description: "Actual ship date (dot notation)" + } + } + }, + examples: { + updateStatus: { + summary: "Update order status to 
PICKING", + value: { + orderStatus: "PICKING" + } + }, + updatePriority: { + summary: "Update order priority to RUSH", + value: { + orderPriority: "RUSH" + } + }, + updateShipDate: { + summary: "Update required ship date", + value: { + "dates.requiredShipDate": "2024-12-01T10:00:00Z" + } + } + } + } + } + }, + responses: { + "200": { + description: "Outbound order updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing parameters or no updates provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and orderId are required", + meta: { event: "patchWMSOutboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Outbound order does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Outbound order not found", + meta: { event: "patchWMSOutboundOrder", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/number/{orderNumber}": { + get: { + summary: "Get single order by business order number", + description: "\n**Order Lookup by Business Number**\n\nRetrieve a specific outbound order using its business order number.\n\n**Business Key Lookup:**\n- Uses `orderNumber` field (unique per world)\n- Human-readable identifier for business operations\n- World-scoped uniqueness constraint\n ", + operationId: "getWMSOutboundOrderByNumber", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "orderNumber", + in: "path", + required: true, + description: "Business order number (unique per world)", + schema: { + type: "string" + }, + example: "WO-20241201-001" + } + ], + responses: { + "200": { + description: "Order retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + }, + "404": { + description: "Not Found - Outbound order with specified order number does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Outbound order not found", + meta: { event: "getWMSOutboundOrderByNumber", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/{orderId}/status": { + put: { + summary: "Update order status with automatic timestamp tracking", + description: "\n**Order Status Management**\n\nUpdate order status with automatic workflow timestamp tracking.\n\n**Automatic Timing Updates:**\n- RELEASED → `timing.releasedAt`\n- ALLOCATED → `timing.allocatedAt`\n- PICKING → `timing.pickingStartedAt`\n- PICKED → `timing.pickedAt`\n- PACKED → `timing.packedAt`\n- SHIPPED → `timing.shippedAt`\n\n**Business Workflow:**\nEach status represents a key milestone in order fulfillment with precise timing capture.\n ", + operationId: 
"updateWMSOutboundOrderStatus", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Order identifier", + schema: { + type: "string" + }, + example: "ORD-2024-001234" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "RELEASED", + "ALLOCATED", + "PICKING", + "PICKED", + "PACKED", + "SHIPPED" + ], + description: "New order status", + example: "ALLOCATED" + }, + statusDate: { + type: "string", + format: "date-time", + description: "Optional timestamp (defaults to current time)", + example: "2024-12-01T14:30:00.000Z" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Order status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Order status updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + }, + "404": { + description: "Not Found - Order does not exist for status update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Order not found", + meta: { event: "updateWMSOutboundOrderStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/{orderId}/lines/{lineId}/allocate": { + put: { + summary: "Allocate inventory to specific order line", + description: "\n**Line-Level Inventory Allocation**\n\nAllocate inventory to a specific order line with detailed bin-level tracking.\n\n**Allocation Features:**\n- Precise quantity allocation per line\n- Optional bin-level allocation details\n- Lot number tracking support\n- Automatic line status update to ALLOCATED\n\n**Repository Logic:**\n- Updates matching line by `lineNumber` field\n- Sets `allocatedQuantity` and `lineStatus`\n- Stores detailed `allocations` array if provided\n ", + operationId: "allocateWMSOrderLine", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Order identifier", + schema: { + type: "string" + } + }, + { + name: "lineId", + in: "path", + required: true, + description: "Line identifier (maps to lineNumber in array update)", + schema: { + type: "string" + }, + example: "1" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + allocatedQuantity: { + type: "number", + description: "Total allocated amount", + example: 20 + }, + allocationDetails: { + type: "array", + description: "Optional bin-level allocations", + items: { + type: "object", + properties: { + binId: { + type: "string", + description: "Source bin identifier", + example: "BIN-A1-001" + }, + quantity: { + type: "number", + description: "Quantity from this bin", + example: 10 + }, + lotNumber: { + type: "string", + description: "Optional lot tracking", + example: "LOT-20241201-A" + } + }, + required: [ + 
"binId", + "quantity" + ] + } + } + }, + required: [ + "allocatedQuantity" + ] + } + } + } + }, + responses: { + "200": { + description: "Line allocation successful", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Order line allocated successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/{orderId}/picking-progress": { + put: { + summary: "Update picked quantities for order lines", + description: "\n**Picking Progress Tracking**\n\nUpdate picked quantities with automatic line status management.\n\n**Status Logic:**\n- `pickedQuantity > 0` → Line status becomes \"PICKED\"\n- `pickedQuantity = 0` → Line status remains \"PICKING\"\n\n**Use Cases:**\n- Real-time picking progress updates\n- Partial picking scenarios\n- Pick completion confirmation\n ", + operationId: "updateWMSPickingProgress", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Order identifier", + schema: { + type: "string" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + lineNumber: { + type: "number", + description: "Target line identifier", + example: 1 + }, + pickedQuantity: { + type: "number", + description: "Actual picked amount", + example: 18 + } + }, + required: [ + "lineNumber", + "pickedQuantity" + ] + } + } + } + }, + responses: { + "200": { + description: "Picking progress updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Picking progress updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/ready-for-picking/{warehouseId}": { + get: { + summary: "Get orders ready for picking", + description: "\n## Get Orders Ready for Picking\n\nRetrieve all outbound orders that are fully allocated and ready to be picked in a specific warehouse.\n\n**Business Logic:**\n- Filters for orders with status **ALLOCATED** or **PARTIALLY_PICKED**\n- Ensures inventory is reserved and available for picking\n- Prioritizes orders based on ship date and priority level\n\n**Use Cases:**\n- Warehouse floor operations planning\n- Wave picking generation\n- Labor resource allocation\n ", + operationId: "getWMSOrdersReadyForPicking", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse identifier to filter orders", + schema: { + type: "string" + }, + example: "WH-MAIN-001" + }, + { + name: "priority", + in: "query", + description: "Optional priority filter", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "URGENT", + "HIGH", + "NORMAL", + "LOW" + ] + } + } + }, + { + name: "orderType", + in: "query", + description: "Optional order type filter", + 
schema: { + type: "array", + items: { + type: "string", + enum: [ + "STANDARD", + "EXPRESS", + "BULK", + "RETURNS" + ] + } + } + }, + { + name: "customerId", + in: "query", + description: "Optional customer filter", + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Ready orders retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/metrics": { + get: { + summary: "Get comprehensive fulfillment analytics", + description: "\n**Fulfillment Analytics Dashboard**\n\nProvide comprehensive metrics for outbound order performance analysis.\n\n**Metrics Included:**\n- Total and completed order counts\n- Average fulfillment time (hours from order to ship)\n- On-time shipment performance\n- Fulfillment rate percentage\n- Order status distribution\n- Top customers by volume\n\n**Advanced Features:**\n- MongoDB aggregation pipeline for performance\n- Configurable reporting periods\n- Warehouse-specific metrics\n- Customer ranking by order volume\n ", + operationId: "getWMSOrderFulfillmentMetrics", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "warehouseId", + in: "query", + description: "Optional warehouse filter", + schema: { + type: "string" + }, + example: "WH-MAIN-001" + }, + { + name: "dateStart", + in: "query", + description: "Optional reporting period start", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "Optional reporting period end", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "orderType", + in: "query", + description: "Optional order type filter", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "STANDARD", + "EXPRESS", + "BULK", + "RETURNS" + ] + } + } + } + ], + responses: { + "200": { + description: "Fulfillment metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + totalOrders: { + type: "number", + description: "Total orders in period", + example: 1250 + }, + completedOrders: { + type: "number", + description: "Orders with SHIPPED status", + example: 1180 + }, + averageFulfillmentTime: { + type: "number", + description: "Average hours from order to ship", + example: 18.5 + }, + onTimeShipments: { + type: "number", + description: "Orders shipped by requested date", + example: 1050 + }, + fulfillmentRate: { + type: "number", + description: "Completion percentage", + example: 94.4 + }, + ordersByStatus: { + type: "array", + description: "Status distribution", + items: { + type: "object", + properties: { + status: { + type: "string", + example: "SHIPPED" + }, + count: { + type: "number", + example: 1180 + } + } + } + }, + topCustomers: { + type: "array", + description: "Customer ranking by volume (top 10)", + items: { + type: "object", + properties: { + customerId: { + type: "string", + example: "CUST-ABC-123" + }, + customerName: { + type: "string", + example: "ABC 
Corporation" + }, + orderCount: { + type: "number", + example: 45 + }, + totalLines: { + type: "number", + example: 128 + } + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/customer/{customerId}": { + get: { + summary: "Get all orders for specific customer", + description: "\n**Customer Order History**\n\nRetrieve all outbound orders for a specific customer with optional filtering.\n\n**Filtering Options:**\n- Warehouse-specific orders\n- Status-based filtering\n- Date range filtering\n\n**Sorting:**\n- Orders sorted by `orderDate` descending (newest first)\n- No pagination applied - returns all matching orders\n\n**Use Cases:**\n- Customer service inquiries\n- Order history analysis\n- Account management\n ", + operationId: "getWMSOrdersByCustomer", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "customerId", + in: "path", + required: true, + description: "Customer identifier", + schema: { + type: "string" + }, + example: "CUST-ABC-123" + }, + { + name: "warehouseId", + in: "query", + description: "Optional warehouse filter", + schema: { + type: "string" + } + }, + { + name: "status", + in: "query", + description: "Optional status filter", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "PENDING", + "RELEASED", + "ALLOCATED", + "PICKING", + "PICKED", + "PACKED", + "SHIPPED", + "CANCELLED" + ] + } + } + }, + { + name: "dateStart", + in: "query", + description: "Optional date range start", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "Optional date range end", + schema: { + type: "string", + format: "date-time" + } + } + ], + responses: { + "200": { + description: "Customer orders retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments": { + post: { + summary: "Create new outbound shipment", + description: "\n**Create New Outbound Shipment**\n\nCreates a new outbound shipment for carrier dispatch with comprehensive line item and address management.\n\n**Key Features:**\n- Multi-line shipment support with order references\n- Carrier integration with SCAC codes and service levels\n- Address validation and routing\n- Status workflow tracking from creation to delivery\n- Document management and tracking event history\n\n**Validation Requirements:**\n- warehouseId, lines array, and toAddress are required\n- Unique shipmentId generation via service identifier\n- Proper carrier and service level specification\n\n**Status Workflow:**\nCREATED → MANIFESTED → LOADING → LOADED → SHIPPED → IN_TRANSIT → DELIVERED\n ", + operationId: "createWMSOutboundShipment", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + }, + example: "550e8400-e29b-41d4-a716-446655440000" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + shipmentId: { + type: "string", + description: "Business shipment identifier (auto-generated if 
not provided)", + example: "SHIP-2024-001234" + }, + warehouseId: { + type: "string", + description: "Required - source warehouse identifier", + example: "WH-MAIN-001" + }, + carrier: { + type: "object", + description: "Carrier information for shipment", + properties: { + name: { + type: "string", + description: "Carrier company name", + example: "UPS" + }, + scac: { + type: "string", + description: "Standard Carrier Alpha Code", + example: "UPSN" + }, + mode: { + type: "string", + enum: [ + "PARCEL", + "LTL", + "TL" + ], + description: "Transportation mode", + example: "PARCEL" + } + } + }, + serviceLevel: { + type: "string", + description: "Carrier service level", + example: "GROUND" + }, + trackingNumber: { + type: "string", + description: "Carrier tracking number (if available)", + example: "1Z999AA1234567890" + }, + trailerNumber: { + type: "string", + description: "Trailer identifier for LTL/TL shipments", + example: "TRL-001" + }, + dockDoorId: { + type: "string", + description: "Assigned dock door for loading", + example: "DOCK-A-001" + }, + orders: { + type: "array", + description: "Orders included in this shipment", + items: { + type: "object", + properties: { + orderId: { + type: "string", + description: "Order identifier", + example: "ORD-2024-001234" + } + }, + required: [ + "orderId" + ] + } + }, + lines: { + type: "array", + description: "Shipment line items (required)", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + description: "Sequential line number", + example: 1 + }, + orderId: { + type: "string", + description: "Source order identifier", + example: "ORD-2024-001234" + }, + orderLineId: { + type: "string", + description: "Source order line identifier", + example: "LINE-001" + }, + sku: { + type: "string", + description: "Product SKU", + example: "SKU-WIDGET-001" + }, + productName: { + type: "string", + description: "Product description", + example: "Premium Widget Assembly" + }, + quantityShipped: { + type: "number", + description: "Quantity included in shipment", + example: 25 + }, + quantityOrdered: { + type: "number", + description: "Originally ordered quantity", + example: 25 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure", + example: "EA" + }, + lotNumber: { + type: "string", + description: "Lot/batch number", + example: "LOT-20241201-A" + }, + serialNumbers: { + type: "array", + items: { + type: "string" + }, + description: "Serial numbers for serialized items", + example: [ + "SN123456", + "SN123457" + ] + }, + palletId: { + type: "string", + description: "Pallet identifier", + example: "PLT-001" + }, + packageCount: { + type: "number", + description: "Number of packages", + example: 2 + }, + weight: { + type: "number", + description: "Line weight", + example: 15.5 + } + }, + required: [ + "lineNumber", + "sku", + "quantityShipped" + ] + } + }, + fromAddress: { + type: "object", + description: "Origin address", + properties: { + street: { + type: "string", + example: "100 Warehouse Blvd" + }, + city: { + type: "string", + example: "Atlanta" + }, + state: { + type: "string", + example: "GA" + }, + zipCode: { + type: "string", + example: "30309" + }, + country: { + type: "string", + example: "USA" + } + } + }, + toAddress: { + type: "object", + description: "Required destination address", + properties: { + street: { + type: "string", + example: "123 Customer Ave" + }, + city: { + type: "string", + example: "Miami" + }, + state: { + type: "string", + example: "FL" + }, + zipCode: { + type: "string", + 
example: "33101" + }, + country: { + type: "string", + example: "USA" + } + }, + required: [ + "street", + "city", + "state", + "zipCode" + ] + }, + totals: { + type: "object", + description: "Shipment totals and metrics", + properties: { + packages: { + type: "number", + example: 5 + }, + pallets: { + type: "number", + example: 2 + }, + weight: { + type: "number", + example: 150.5 + }, + cube: { + type: "number", + example: 12.3 + }, + value: { + type: "number", + example: 1249.99 + } + } + }, + dates: { + type: "object", + description: "Scheduled dates and delivery requirements", + properties: { + shipDate: { + type: "string", + format: "date-time", + description: "Planned ship date", + example: "2024-12-02T09:00:00.000Z" + }, + estimatedDeliveryDate: { + type: "string", + format: "date-time", + description: "Estimated delivery date", + example: "2024-12-04T17:00:00.000Z" + } + } + }, + documents: { + type: "array", + description: "Shipment documents", + items: { + type: "object", + properties: { + type: { + type: "string", + example: "BOL" + }, + url: { + type: "string", + example: "https://docs.example.com/bol123.pdf" + }, + documentType: { + type: "string", + example: "PDF" + } + } + } + } + }, + required: [ + "warehouseId", + "lines", + "toAddress" + ] + } + } + } + }, + responses: { + "201": { + description: "Shipment created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Shipment created successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid input data or missing required fields for shipment creation", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse ID, shipment lines, and to address are required", + meta: { event: "createWMSShipment", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/status": { + get: { + summary: "Get shipments filtered by status with advanced filtering", + description: "\n**Status-Based Shipment Filtering**\n\nRetrieve outbound shipments filtered by status with comprehensive query capabilities.\n\n**Advanced Filtering Options:**\n- Multiple status selection\n- Warehouse-specific filtering\n- Carrier-specific filtering (uses carrier.name field)\n- Service level filtering\n- Date range filtering\n\n**Sorting Logic:**\n- Primary: Created date (descending - newest first)\n\n**Field Mapping Verified:**\n- Status field: `shipmentStatus` (consistent throughout model)\n- Carrier filter: `carrier.name` field mapping\n ", + operationId: "getWMSShipmentsByStatus", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "status", + in: "query", + required: true, + description: "Shipment status filter (supports multiple values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "CREATED", + "MANIFESTED", + "LOADING", + "LOADED", + "SHIPPED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ] + } + }, + example: [ + "MANIFESTED", + "LOADING" + ] + }, + { + name: "warehouseId", + in: "query", + description: "Optional warehouse filter", + schema: { + type: 
"string" + }, + example: "WH-MAIN-001" + }, + { + name: "carrierId", + in: "query", + description: "Optional carrier filter (maps to carrier.name)", + schema: { + type: "string" + }, + example: "UPS" + }, + { + name: "serviceLevel", + in: "query", + description: "Optional service level filter", + schema: { + type: "string" + }, + example: "GROUND" + }, + { + name: "dateStart", + in: "query", + description: "Optional date range start", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "Optional date range end", + schema: { + type: "string", + format: "date-time" + } + } + ], + responses: { + "200": { + description: "Shipments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipments retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/{shipmentId}": { + get: { + summary: "Get single shipment by identifier", + description: "\n**Shipment Lookup by ID**\n\nRetrieve a specific outbound shipment using its shipmentId.\n\n**Field Consistency Verified:**\n- Uses consistent `shipmentId` field across model, controller, and repository\n- World-scoped lookup for multi-tenant isolation\n- Returns complete shipment object with all nested data\n ", + operationId: "getWMSShipmentById", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Shipment identifier", + schema: { + type: "string" + }, + example: "SHIP-2024-001234" + } + ], + responses: { + "200": { + description: "Shipment retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + }, + "404": { + description: "Not Found - Shipment does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "getWMSShipment", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/{shipmentId}/status": { + put: { + summary: "Update shipment status with automatic timestamp tracking", + description: "\n**Shipment Status Management**\n\nUpdate shipment status with automatic workflow timestamp tracking.\n\n**Automatic Timestamp Updates:**\n- MANIFESTED → `dates.manifestDate`\n- SHIPPED → `dates.actualShipTime` (can include tracking number)\n- IN_TRANSIT → Ensures `dates.actualShipTime` is set\n- DELIVERED → `dates.actualDeliveryDate`\n\n**Tracking Integration:**\nShipped status updates can optionally include tracking number assignment.\n ", + operationId: "updateWMSShipmentStatus", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + 
}, + { + name: "shipmentId", + in: "path", + required: true, + description: "Shipment identifier", + schema: { + type: "string" + }, + example: "SHIP-2024-001234" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "CREATED", + "MANIFESTED", + "LOADING", + "LOADED", + "SHIPPED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ], + description: "New shipment status", + example: "SHIPPED" + }, + statusDate: { + type: "string", + format: "date-time", + description: "Optional timestamp (defaults to current time)", + example: "2024-12-01T16:00:00.000Z" + }, + trackingNumber: { + type: "string", + description: "Optional tracking number (for SHIPPED status)", + example: "1Z999AA1234567890" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Shipment status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment status updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + }, + "404": { + description: "Not Found - Shipment does not exist for status update", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "updateWMSShipmentStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/warehouse/{warehouseId}": { + get: { + summary: "Get shipments by warehouse with filtering", + description: "\n## Get Shipments by Warehouse\n\nRetrieve all outbound shipments for a specific warehouse with optional status filtering.\n\n**Use Cases:**\n- Warehouse shipping dock management\n- Daily shipping volume analysis\n- Carrier pickup coordination\n- Outbound logistics planning\n\n**Field Mapping:**\n- Filters by `warehouseId`\n- Optional status filtering using `shipmentStatus` field\n ", + operationId: "getWMSShipmentsByWarehouse", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse identifier", + schema: { + type: "string" + }, + example: "WH-MAIN-001" + }, + { + name: "status", + in: "query", + description: "Filter by status(es)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "CREATED", + "MANIFESTED", + "LOADING", + "LOADED", + "SHIPPED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ] + } + } + }, + { + name: "dateStart", + in: "query", + description: "Optional date range start", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "Optional date range end", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "carrier", + in: "query", + description: "Optional carrier filter", + schema: { + type: "string" + }, + example: "UPS" + } + ], + responses: { + "200": { + description: "Warehouse shipments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + 
example: 200 + }, + message: { + type: "string", + example: "Shipments by warehouse retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/{shipmentId}/tracking-events": { + post: { + summary: "Add tracking event to shipment", + description: "\n**Shipment Tracking Event Management**\n\nAdd tracking events to shipment history for status monitoring and customer visibility.\n\n**Event Types:**\n- Carrier status updates\n- Exception events\n- Delivery confirmations\n- Custom milestone events\n\n**Integration:**\n- Supports carrier event codes for EDI integration\n- Location-based tracking\n- Timestamp precision for accurate tracking\n ", + operationId: "addWMSTrackingEvent", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "shipmentId", + in: "path", + required: true, + description: "Shipment identifier", + schema: { + type: "string" + }, + example: "SHIP-2024-001234" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + eventType: { + type: "string", + description: "Type of tracking event", + example: "DEPARTED_FACILITY" + }, + eventDate: { + type: "string", + format: "date-time", + description: "Timestamp of the event", + example: "2024-12-01T18:30:00.000Z" + }, + location: { + type: "string", + description: "Location where event occurred", + example: "Atlanta, GA" + }, + description: { + type: "string", + description: "Human-readable event description", + example: "Package departed Atlanta facility" + }, + carrierEventCode: { + type: "string", + description: "Optional carrier-specific event code", + example: "DP" + } + }, + required: [ + "eventType", + "eventDate", + "location", + "description" + ] + } + } + } + }, + responses: { + "200": { + description: "Tracking event added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Tracking event added successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/metrics": { + get: { + summary: "Get comprehensive shipment analytics", + description: "\n**Shipment Performance Analytics**\n\nProvide comprehensive metrics for outbound shipment performance analysis.\n\n**Metrics Included:**\n- Total, shipped, delivered, and exception shipment counts\n- Average transit time calculations\n- On-time delivery performance tracking\n- Carrier performance breakdown with individual metrics\n- Daily shipment volume trends\n\n**Advanced Features:**\n- MongoDB aggregation pipeline for complex analytics\n- Configurable reporting periods\n- Warehouse-specific metrics\n- Multi-carrier performance comparison\n- Transit time calculations in days\n ", + operationId: "getWMSShipmentMetrics", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "warehouseId", + in: "query", + description: "Optional warehouse filter", + schema: { + type: "string" + }, + example: "WH-MAIN-001" + }, + { + name: 
"dateStart", + in: "query", + description: "Optional reporting period start", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "Optional reporting period end", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "carrier", + in: "query", + description: "Optional carrier filter (supports multiple values)", + schema: { + type: "array", + items: { + type: "string" + } + }, + example: [ + "UPS", + "FEDEX" + ] + } + ], + responses: { + "200": { + description: "Shipment metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipment metrics retrieved successfully" + }, + data: { + type: "object", + properties: { + totalShipments: { + type: "number", + description: "Total shipments in period", + example: 850 + }, + shippedShipments: { + type: "number", + description: "Shipments in transit or delivered", + example: 820 + }, + deliveredShipments: { + type: "number", + description: "Successfully delivered shipments", + example: 795 + }, + exceptionShipments: { + type: "number", + description: "Shipments with exceptions", + example: 12 + }, + averageTransitTime: { + type: "number", + description: "Average days from ship to delivery", + example: 2.3 + }, + onTimeDeliveryRate: { + type: "number", + description: "On-time delivery percentage", + example: 94.2 + }, + shipmentsByCarrier: { + type: "array", + description: "Carrier performance breakdown", + items: { + type: "object", + properties: { + carrier: { + type: "string", + example: "UPS" + }, + count: { + type: "number", + example: 450 + }, + onTimeRate: { + type: "number", + example: 96.1 + }, + avgTransitTime: { + type: "number", + example: 2.1 + } + } + } + }, + shipmentsByDay: { + type: "array", + description: "Daily volume trends", + items: { + type: "object", + properties: { + date: { + type: "string", + example: "2024-12-01" + }, + shipmentCount: { + type: "number", + example: 45 + }, + deliveredCount: { + type: "number", + example: 42 + } + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/ready-to-ship/{warehouseId}": { + get: { + summary: "Get shipments ready for dispatch", + description: "\n## Get Shipments Ready for Dispatch\n\nRetrieve all outbound shipments that are manifested and ready to be dispatched from a specific warehouse.\n\n**Business Logic:**\n- Filters for shipments with status **MANIFESTED**\n- Prioritizes shipments based on estimated delivery date and carrier service level\n- Ensures all required documentation is complete\n\n**Filtering Capabilities:**\n- Carrier-specific filtering\n- Service level filtering\n- Priority order handling\n\n**Sorting Logic:**\n- Priority (descending - high priority first)\n- Estimated delivery date (ascending - urgent deliveries first)\n- Created date (ascending - oldest first)\n ", + operationId: "getWMSShipmentsReadyToShip", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse identifier to filter shipments", + schema: { + type: "string" + }, + example: "WH-MAIN-001" + }, + { + name: "carrier", + in: "query", + description: "Optional carrier filter", + 
schema: { + type: "string" + }, + example: "UPS" + }, + { + name: "serviceLevel", + in: "query", + description: "Optional service level filter", + schema: { + type: "string" + }, + example: "GROUND" + }, + { + name: "priorityOrders", + in: "query", + description: "Filter for priority orders only", + schema: { + type: "boolean" + }, + example: true + } + ], + responses: { + "200": { + description: "Ready shipments retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipments ready to ship retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/shipments/tracking/{trackingNumber}": { + get: { + summary: "Get shipment by tracking number", + description: "\n**Shipment Tracking Lookup**\n\nRetrieve shipment information using carrier tracking number.\n\n**Use Cases:**\n- Customer service inquiries\n- Carrier integration callbacks\n- Delivery confirmation tracking\n- Exception handling\n\n**Field Mapping:**\n- Uses `trackingNumber` field for lookup\n- Returns single shipment object (tracking numbers are typically unique)\n ", + operationId: "getWMSShipmentsByTrackingNumber", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "trackingNumber", + in: "path", + required: true, + description: "Carrier tracking number", + schema: { + type: "string" + }, + example: "1Z999AA1234567890" + } + ], + responses: { + "200": { + description: "Shipment retrieved by tracking number successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Shipments by tracking number retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundShipment" + } + } + } + } + } + }, + "404": { + description: "Not Found - Shipment with specified tracking number does not exist", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Shipment not found", + meta: { event: "getWMSShipmentsByTrackingNumber", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/{orderId}/priority": { + put: { + tags: [ + "WMS" + ], + summary: "Update outbound order priority", + description: "\n## Update WMS Outbound Order Priority\n\nUpdate the priority level of an outbound order to expedite or deprioritize fulfillment.\n\n### Features\n- **Priority Management**: Change order priority for fulfillment sequencing\n- **Workflow Impact**: Higher priority orders are processed first\n- **Audit Trail**: Priority changes are tracked for reporting\n\n### Priority Levels\n- **RUSH**: Highest priority, immediate processing required\n- **URGENT**: High priority, expedited processing\n- **NORMAL**: Standard priority, regular processing\n- **STANDARD**: Lowest priority, process when capacity allows\n\n### Use Cases\n- **Customer Escalation**: Elevate priority for VIP customers\n- **Deadline Management**: Rush orders with tight ship dates\n- **Resource 
Balancing**: Deprioritize orders to manage capacity\n ", + operationId: "updateWMSOutboundOrderPriority", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Unique identifier for the outbound order", + schema: { + type: "string", + example: "wms_outbound-order_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: ["priority"], + properties: { + priority: { + type: "string", + enum: [ + "RUSH", + "URGENT", + "NORMAL", + "STANDARD" + ], + description: "New priority level for the order", + example: "RUSH" + } + } + } + } + } + }, + responses: { + "200": { + description: "Order priority updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "Order priority updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSOutboundOrder" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Priority is required", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + error: "priority is required in request body" + } + } + } + }, + "404": { + description: "Not Found - Order not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + error: "Order wms_outbound-order_674565c1234567890abcdef not found" + } + } + } + } + } + } + }, + "/{worldId}/wms/outbound-orders/{orderId}/relations": { + get: { + tags: [ + "WMS" + ], + summary: "Get outbound order cross-service relations", + description: "\n## Get WMS Outbound Order Relations\n\nRetrieve cross-service related data for an outbound order, including linked ERP orders, EDI documents, and finance transactions.\n\n### Features\n- **ERP Integration**: Link to source ERP sales order\n- **EDI Documents**: Related EDI transactions (850 PO, 810 Invoice, etc.)\n- **Finance Tracking**: Associated payment_in finance transactions\n- **Cross-Reference**: Connect warehouse fulfillment to enterprise systems\n\n### Related Data Types\n- **erpOrder**: Original ERP sales order with status and amount\n- **ediDocuments**: EDI documents (purchase orders, invoices) linked to the order\n- **financeTransaction**: Payment transactions associated with the order\n\n### Business Logic\n- Uses order number to find related ERP order\n- Searches EDI documents by businessDocumentNumber or customer ID\n- Finds finance transactions with matching sourceId\n- Returns empty arrays/undefined for missing relations\n\n### Use Cases\n- **Order Traceability**: Track order from sales to fulfillment to payment\n- **EDI Reconciliation**: View related EDI documents for compliance\n- **Finance Integration**: Link fulfillment to accounts receivable\n- **Audit Trail**: Complete visibility across enterprise systems\n ", + operationId: "getWMSOutboundOrderRelations", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "orderId", + in: "path", + required: true, + description: "Unique identifier 
for the outbound order", + schema: { + type: "string", + example: "wms_outbound-order_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Outbound order relations retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + erpOrder: { + type: "object", + description: "Related ERP sales order", + properties: { + orderId: { + type: "string", + example: "SO-2024-001234" + }, + status: { + type: "string", + example: "APPROVED" + }, + totalAmount: { + type: "number", + example: 25000.00 + }, + customerId: { + type: "string", + example: "CUST-001" + }, + partnerId: { + type: "string", + example: "PARTNER-001" + }, + poType: { + type: "string", + example: "SALES" + }, + orderDate: { + type: "string", + format: "date-time", + example: "2024-01-10T00:00:00Z" + } + } + }, + ediDocuments: { + type: "array", + description: "Related EDI documents", + items: { + type: "object", + properties: { + transactionId: { + type: "string", + example: "edi_txn_674565c1234567890abcdef" + }, + docType: { + type: "string", + example: "850" + }, + status: { + type: "string", + example: "PROCESSED" + }, + direction: { + type: "string", + example: "INBOUND" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00Z" + }, + businessDocumentNumber: { + type: "string", + example: "WO-20241201-001" + } + } + } + }, + financeTransaction: { + type: "object", + description: "Related finance transaction", + properties: { + transactionId: { + type: "string", + example: "fin_txn_674565c1234567890abcdef" + }, + type: { + type: "string", + example: "payment_in" + }, + amount: { + type: "number", + example: 25000.00 + }, + status: { + type: "string", + example: "COMPLETED" + }, + processedAt: { + type: "string", + format: "date-time", + example: "2024-01-20T14:30:00Z" + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and orderId are required", + meta: { event: "getWMSOutboundOrderRelations", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Outbound order not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Outbound order wms_outbound-order_674565c1234567890abcdef not found", + meta: { event: "getWMSOutboundOrderRelations", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/replenishment.wms.ts b/packages/controlmart/src/docs/paths/wms/replenishment.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..14a6ee3bc6c73183df7abf9e4fd8f22eff221aa3 --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/replenishment.wms.ts @@ -0,0 +1,1003 @@ +export const replenishmentPaths = { + "/{worldId}/wms/replenishments": { + post: { + summary: "Create replenishment", + description: "\n## Create WMS Replenishment\n\nCreates a new inventory replenishment request for moving stock between bins.\n\n**Business Process:**\n- Creates replenishment suggestion with from/to bin movement\n- Sets initial status to SUGGESTED\n- Validates bin availability and capacity 
constraints\n- Supports various replenishment types (MIN_MAX, DEMAND, CYCLE)\n\n**Use Cases:**\n- Automatic stock replenishment based on min/max levels\n- Demand-driven replenishment for picking zones\n- Cycle-based replenishment scheduling\n- Manual replenishment requests\n\n**Field Mapping:**\n- Uses `replenishmentId` as primary identifier (consistent with model)\n- Complex nested `fromBin` and `toBin` structures with availability tracking\n- Quantity object with suggested/approved/actual values\n ", + operationId: "createWMSReplenishment", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + }, + example: "507f1f77bcf86cd799439011" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + replenishmentId: { + type: "string", + description: "Unique identifier for the replenishment request", + example: "REPL-001-WH001-20241127" + }, + warehouseId: { + type: "string", + description: "Source warehouse identifier", + example: "WH001" + }, + productId: { + type: "string", + description: "Product being replenished", + example: "PROD-12345" + }, + sku: { + type: "string", + description: "Product SKU for reference", + example: "ABC-XYZ-001" + }, + fromBin: { + type: "object", + description: "Source bin details with availability", + properties: { + binId: { + type: "string", + example: "BIN-A001" + }, + binCode: { + type: "string", + example: "A-001" + }, + availableQuantity: { + type: "number", + example: 500 + }, + currentQuantity: { + type: "number", + example: 1000 + } + } + }, + toBin: { + type: "object", + description: "Destination bin details with capacity", + properties: { + binId: { + type: "string", + example: "BIN-P001" + }, + binCode: { + type: "string", + example: "P-001" + }, + currentQuantity: { + type: "number", + example: 50 + }, + minQuantity: { + type: "number", + example: 100 + }, + maxQuantity: { + type: "number", + example: 200 + } + } + }, + quantity: { + type: "object", + description: "Quantity information for replenishment", + properties: { + suggested: { + type: "number", + example: 150 + }, + uom: { + type: "string", + example: "EA" + } + }, + required: [ + "suggested", + "uom" + ] + }, + replenishmentType: { + type: "string", + enum: [ + "MIN_MAX", + "DEMAND", + "CYCLE", + "MANUAL" + ], + example: "MIN_MAX" + }, + priority: { + type: "number", + description: "Priority level (1-10, 10 being highest)", + example: 5 + }, + dueDate: { + type: "string", + format: "date-time", + description: "When replenishment should be completed", + example: "2024-11-28T10:00:00Z" + } + }, + required: [ + "replenishmentId", + "warehouseId", + "productId", + "fromBin", + "toBin", + "quantity" + ] + } + } + } + }, + responses: { + "201": { + description: "Replenishment created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Replenishment created successfully" + }, + data: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/metrics": { + get: { + summary: "Get replenishment metrics", + description: "\n## Get Replenishment Metrics\n\nRetrieve comprehensive analytics and metrics for replenishment operations.\n\n**Metrics Provided:**\n- 
Total replenishments count\n- Status breakdown (pending vs completed)\n- Average completion time in hours\n- Performance by replenishment type\n- Top replenished products\n\n**Use Cases:**\n- Operational performance monitoring\n- Replenishment efficiency analysis\n- Resource planning and optimization\n- Executive reporting dashboards\n ", + operationId: "getWMSReplenishmentMetrics", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "warehouseId", + in: "query", + description: "Filter by specific warehouse", + schema: { + type: "string" + } + }, + { + name: "dateStart", + in: "query", + description: "Start date for metrics calculation", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "End date for metrics calculation", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "replenishmentType", + in: "query", + description: "Filter by replenishment type(s)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "MIN_MAX", + "DEMAND", + "CYCLE", + "MANUAL" + ] + } + } + } + ], + responses: { + "200": { + description: "Replenishment metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Replenishment metrics retrieved successfully" + }, + data: { + type: "object", + properties: { + totalReplenishments: { + type: "number", + example: 1250 + }, + pendingReplenishments: { + type: "number", + example: 45 + }, + completedReplenishments: { + type: "number", + example: 1180 + }, + averageCompletionTime: { + type: "number", + description: "Hours", + example: 2.5 + }, + replenishmentsByType: { + type: "array", + items: { + type: "object", + properties: { + type: { + type: "string", + example: "MIN_MAX" + }, + count: { + type: "number", + example: 650 + }, + completionRate: { + type: "number", + example: 94.5 + } + } + } + }, + topReplenishedProducts: { + type: "array", + items: { + type: "object", + properties: { + productId: { + type: "string", + example: "PROD-12345" + }, + sku: { + type: "string", + example: "ABC-XYZ-001" + }, + replenishmentCount: { + type: "number", + example: 45 + }, + totalQuantity: { + type: "number", + example: 6750 + } + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/status": { + get: { + summary: "Get replenishments by status", + description: "\n## Get Replenishments by Status\n\nRetrieve replenishments filtered by one or more status values with additional filtering options.\n\n**Status Workflow:**\n- SUGGESTED → Initial creation\n- APPROVED → Management approval\n- TASK_CREATED → Work order generated\n- IN_PROGRESS → Active execution\n- COMPLETED → Successfully finished\n- CANCELLED → Process terminated\n\n**Use Cases:**\n- Work queue management for operators\n- Status-based workflow processing \n- Priority-based task assignment\n- Performance monitoring by status\n\n**Field Mapping:**\n- Filters by `status` field using `$in` operator for multiple values\n- Supports priority filtering (greater than or equal to specified value)\n ", + operationId: "getWMSReplenishmentsByStatus", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: 
"World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "status", + in: "query", + required: true, + description: "Status value(s) to filter by", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "SUGGESTED", + "APPROVED", + "TASK_CREATED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED" + ] + } + }, + example: [ + "SUGGESTED", + "APPROVED" + ] + }, + { + name: "warehouseId", + in: "query", + description: "Filter by warehouse", + schema: { + type: "string" + } + }, + { + name: "productId", + in: "query", + description: "Filter by product", + schema: { + type: "string" + } + }, + { + name: "replenishmentType", + in: "query", + description: "Filter by replenishment type(s)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "MIN_MAX", + "DEMAND", + "CYCLE", + "MANUAL" + ] + } + } + }, + { + name: "priority", + in: "query", + description: "Minimum priority level (returns items with priority >= value)", + schema: { + type: "number" + } + } + ], + responses: { + "200": { + description: "Replenishments by status retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Replenishments by status retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/{replenishmentId}": { + get: { + summary: "Get replenishment by ID", + description: "\n## Get Replenishment by ID\n\nRetrieve detailed information for a specific replenishment request.\n\n**Use Cases:**\n- Detailed replenishment status checking\n- Work instruction display for operators\n- Audit trail and history review\n- Integration with task management systems\n\n**Field Mapping:**\n- Uses `replenishmentId` as lookup field (matches model schema)\n- Returns complete object with nested bin structures\n ", + operationId: "getWMSReplenishmentById", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "replenishmentId", + in: "path", + required: true, + description: "Unique replenishment identifier", + schema: { + type: "string" + }, + example: "REPL-001-WH001-20241127" + } + ], + responses: { + "200": { + description: "Replenishment retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Replenishment retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + }, + "404": { + description: "Not Found - Replenishment not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Replenishment not found", + meta: { event: "getWMSReplenishmentById", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/{replenishmentId}/status": { + put: { + summary: "Update replenishment status", + description: "\n## Update Replenishment Status\n\nUpdate the status of a replenishment request, optionally linking to a task.\n\n**Status 
Transitions:**\n- SUGGESTED → APPROVED (management approval)\n- APPROVED → TASK_CREATED (work order generation)\n- TASK_CREATED → IN_PROGRESS (execution start)\n- IN_PROGRESS → COMPLETED (successful finish)\n- Any status → CANCELLED (process termination)\n\n**Use Cases:**\n- Workflow progression tracking\n- Task system integration\n- Process state management\n- Audit trail maintenance\n\n**Field Mapping:**\n- Updates `status` field directly\n- Optionally sets `taskId` when status becomes TASK_CREATED\n ", + operationId: "updateWMSReplenishmentStatus", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "replenishmentId", + in: "path", + required: true, + description: "Unique replenishment identifier", + schema: { + type: "string" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "SUGGESTED", + "APPROVED", + "TASK_CREATED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED" + ], + description: "New status for the replenishment", + example: "APPROVED" + }, + taskId: { + type: "string", + description: "Task ID when status is TASK_CREATED", + example: "TASK-12345" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Replenishment status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Replenishment status updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/{replenishmentId}/approve": { + put: { + summary: "Approve replenishment", + description: "\n## Approve Replenishment Request\n\nApprove a suggested replenishment with optional quantity adjustments.\n\n**Business Process:**\n- Changes status from SUGGESTED to APPROVED\n- Sets approval metadata (approvedBy, approvedDate)\n- Allows quantity adjustments from original suggestion\n- Triggers downstream workflow processes\n\n**Use Cases:**\n- Management approval workflows\n- Quantity adjustment and optimization\n- Approval audit trail maintenance\n- Integration with approval systems\n ", + operationId: "approveWMSReplenishment", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "replenishmentId", + in: "path", + required: true, + description: "Unique replenishment identifier", + schema: { + type: "string" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + approvedBy: { + type: "string", + description: "User ID of the approving manager", + example: "MGR-001" + }, + approvedQuantity: { + type: "number", + description: "Approved quantity (if different from suggested)", + example: 120 + } + }, + required: [ + "approvedBy" + ] + } + } + } + }, + responses: { + "200": { + description: "Replenishment approved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + 
message: { + type: "string", + example: "Replenishment approved successfully" + }, + data: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/{replenishmentId}/cancel": { + put: { + summary: "Cancel replenishment", + description: "\n## Cancel Replenishment Request\n\nCancel a replenishment request with reason and audit information.\n\n**Business Process:**\n- Changes status to CANCELLED regardless of current state\n- Records cancellation metadata (reason, cancelledBy, cancelledDate)\n- Maintains audit trail for operational analysis\n- Prevents further workflow processing\n\n**Use Cases:**\n- Process termination and cleanup\n- Business rule changes\n- Error correction and recovery\n- Resource reallocation\n\n**Field Mapping:**\n- Sets `status` to CANCELLED\n- Updates cancellation audit fields\n ", + operationId: "cancelWMSReplenishment", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "replenishmentId", + in: "path", + required: true, + description: "Unique replenishment identifier", + schema: { + type: "string" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + reason: { + type: "string", + description: "Reason for cancellation", + example: "Product discontinued" + }, + cancelledBy: { + type: "string", + description: "User ID who cancelled the replenishment", + example: "MGR-002" + } + }, + required: [ + "reason", + "cancelledBy" + ] + } + } + } + }, + responses: { + "200": { + description: "Replenishment cancelled successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Replenishment cancelled successfully" + }, + data: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/product/{productId}": { + get: { + summary: "Get replenishments by product", + description: "\n## Get Replenishments by Product\n\nRetrieve all replenishments for a specific product with filtering options.\n\n**Use Cases:**\n- Product-specific replenishment analysis\n- Inventory movement tracking per SKU\n- Product performance monitoring\n- Supply chain optimization\n\n**Field Mapping:**\n- Filters by `productId` field directly\n- Supports date range filtering on `createdAt`\n- Optional status and warehouse filtering\n ", + operationId: "getWMSReplenishmentsByProduct", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "productId", + in: "path", + required: true, + description: "Product identifier to filter by", + schema: { + type: "string" + }, + example: "PROD-12345" + }, + { + name: "warehouseId", + in: "query", + description: "Filter by specific warehouse", + schema: { + type: "string" + } + }, + { + name: "status", + in: "query", + description: "Filter by status(es)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "SUGGESTED", + "APPROVED", + "TASK_CREATED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED" + ] + } + } + }, + { + name: "dateStart", + in: "query", + description: "Start date for 
filtering", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "End date for filtering", + schema: { + type: "string", + format: "date-time" + } + } + ], + responses: { + "200": { + description: "Replenishments by product retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Replenishments by product retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/replenishments/bin/{binId}": { + get: { + summary: "Get replenishments by bin", + description: "\n## Get Replenishments by Bin\n\nRetrieve replenishments involving a specific bin as either source or destination.\n\n**Bin Relationship Types:**\n- **source**: Bin is the fromBin (stock is taken from this bin)\n- **destination**: Bin is the toBin (stock is moved to this bin)\n\n**Use Cases:**\n- Bin utilization and activity analysis\n- Source/destination movement tracking\n- Bin-specific workflow management \n- Capacity planning and optimization\n\n**Field Mapping:**\n- Uses nested field queries: `fromBin.binId` or `toBin.binId`\n- Filters based on `type` parameter (source/destination)\n ", + operationId: "getWMSReplenishmentsByBin", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "binId", + in: "path", + required: true, + description: "Bin identifier to filter by", + schema: { + type: "string" + }, + example: "BIN-A001" + }, + { + name: "type", + in: "query", + required: true, + description: "Relationship type - source (fromBin) or destination (toBin)", + schema: { + type: "string", + enum: [ + "source", + "destination" + ] + }, + example: "source" + }, + { + name: "warehouseId", + in: "query", + description: "Filter by specific warehouse", + schema: { + type: "string" + } + }, + { + name: "status", + in: "query", + description: "Filter by status(es)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "SUGGESTED", + "APPROVED", + "TASK_CREATED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED" + ] + } + } + } + ], + responses: { + "200": { + description: "Replenishments by bin retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Replenishments by bin retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSReplenishment" + } + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/tasks.wms.ts b/packages/controlmart/src/docs/paths/wms/tasks.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..da35c29032bb4f754314622e3d461615321a89e5 --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/tasks.wms.ts @@ -0,0 +1,1460 @@ +export const tasksPaths = { + "/{worldId}/wms/tasks": { + post: { + summary: "Create task", + description: "\n## Create WMS Task\n\nCreates a new warehouse task for specific operations like picking, putaway, replenishment, etc.\n\n**Business Process:**\n- Creates structured work 
instructions for warehouse staff\n- Assigns task with priority-based sequencing\n- Supports various task types with detailed tracking\n- Enables performance measurement and optimization\n\n**Use Cases:**\n- Order fulfillment picking tasks\n- Inventory putaway operations \n- Replenishment movement tasks\n- Cycle counting assignments\n- Cross-docking operations\n\n**Field Mapping:**\n- Uses `taskId` as primary identifier (auto-generated using WMS service prefix)\n- Complex nested structures for assignment, timing, and performance tracking\n- Detailed product, location, and quantity specifications\n ", + operationId: "createWMSTask", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + }, + example: "507f1f77bcf86cd799439011" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + taskId: { + type: "string", + description: "Unique task identifier (auto-generated if not provided)", + example: "wms_task_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Warehouse identifier where task occurs", + example: "WH001" + }, + taskType: { + type: "string", + enum: [ + "PICK", + "PUTAWAY", + "REPLENISHMENT", + "CYCLE_COUNT", + "MOVE", + "LOAD", + "UNLOAD", + "PACK", + "SORT" + ], + description: "Type of warehouse operation", + example: "PICK" + }, + taskSubtype: { + type: "string", + enum: [ + "DISCRETE", + "BATCH", + "CLUSTER", + "ZONE" + ], + description: "Task execution methodology", + example: "DISCRETE" + }, + priority: { + type: "number", + description: "Task priority level (higher values = higher priority)", + default: 50, + example: 75 + }, + reference: { + type: "object", + description: "Reference to originating document", + properties: { + type: { + type: "string", + enum: [ + "PO", + "ORDER", + "WAVE", + "INBOUND", + "REPLENISHMENT" + ], + example: "ORDER" + }, + id: { + type: "string", + example: "ORD-12345" + } + } + }, + product: { + type: "object", + description: "Product information for the task", + properties: { + productId: { + type: "string", + example: "PROD-12345" + }, + sku: { + type: "string", + example: "ABC-XYZ-001" + }, + productName: { + type: "string", + example: "Widget Premium" + } + } + }, + from: { + type: "object", + description: "Source location details", + properties: { + binId: { + type: "string", + example: "BIN-A001" + }, + binCode: { + type: "string", + example: "A-001" + }, + zoneId: { + type: "string", + example: "ZONE-PICK" + } + } + }, + to: { + type: "object", + description: "Destination location details", + properties: { + binId: { + type: "string", + example: "BIN-SHIP-001" + }, + binCode: { + type: "string", + example: "SHIP-001" + }, + zoneId: { + type: "string", + example: "ZONE-SHIPPING" + } + } + }, + quantity: { + type: "object", + description: "Quantity requirements", + properties: { + requested: { + type: "number", + example: 24 + }, + uom: { + type: "string", + example: "EA" + } + } + }, + assignment: { + type: "object", + description: "Task assignment information", + properties: { + userId: { + type: "string", + example: "USER-001" + }, + userName: { + type: "string", + example: "John Smith" + }, + equipmentId: { + type: "string", + example: "FORK-001" + } + } + }, + timing: { + type: "object", + description: "Task timing estimates", + properties: { + estimatedDuration: { + type: "number", + description: "Estimated 
duration in minutes", + example: 15 + } + } + } + }, + required: [ + "warehouseId", + "taskType" + ] + } + } + } + }, + responses: { + "201": { + description: "Task created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Task created successfully" + }, + data: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + } + } + }, + get: { + summary: "Get task logs", + description: "\n## Get Task Logs\n\nRetrieve comprehensive task logs with advanced filtering capabilities for operational monitoring.\n\n**Use Cases:**\n- Operational audit trails and compliance\n- Performance analysis and bottleneck identification\n- Task completion tracking and verification\n- Historical data analysis and reporting\n\n**Field Mapping:**\n- Filters by `taskId`, `taskType`, `assignment.userId` arrays\n- Date filtering on `timing.createdAt` field\n- Zone-based filtering using `zoneId`\n- Status filtering using `taskStatus` field\n ", + operationId: "getWMSTaskLogs", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "taskIds", + in: "query", + description: "Filter by specific task IDs", + schema: { + type: "array", + items: { + type: "string" + } + } + }, + { + name: "taskTypes", + in: "query", + description: "Filter by task types", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "PICK", + "PUTAWAY", + "REPLENISHMENT", + "CYCLE_COUNT", + "MOVE", + "LOAD", + "UNLOAD", + "PACK", + "SORT" + ] + } + } + }, + { + name: "userIds", + in: "query", + description: "Filter by assigned user IDs", + schema: { + type: "array", + items: { + type: "string" + } + } + }, + { + name: "status", + in: "query", + description: "Filter by task status", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "CREATED", + "RELEASED", + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED", + "SUSPENDED" + ] + } + } + }, + { + name: "dateStart", + in: "query", + description: "Start date for filtering", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "End date for filtering", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "zoneId", + in: "query", + description: "Filter by zone", + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Task logs retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Task logs retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/user/{userId}": { + get: { + summary: "Get tasks by user", + description: "\n## Get Tasks by User\n\nRetrieve all tasks assigned to a specific user with optional status filtering.\n\n**Use Cases:**\n- Personal task queues for warehouse workers\n- Productivity tracking per user\n- Workload distribution analysis\n- Performance evaluation\n\n**Field Mapping:**\n- Filters by `assignment.userId` field\n- Optional status filtering on `taskStatus`\n- Sorted by priority 
descending, then creation date ascending\n ", + operationId: "getWMSTasksByUser", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "userId", + in: "path", + required: true, + description: "User identifier to filter tasks", + schema: { + type: "string" + }, + example: "USER-001" + }, + { + name: "status", + in: "query", + description: "Filter by status(es)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "CREATED", + "RELEASED", + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED", + "SUSPENDED" + ] + } + } + } + ], + responses: { + "200": { + description: "Tasks by user retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Tasks by user retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/timestamps": { + get: { + summary: "Get task timestamps", + description: "\n## Get Task Timestamps\n\nRetrieve timing data for tasks with detailed timestamp analysis capabilities.\n\n**Use Cases:**\n- Performance analysis and cycle time calculation\n- Process optimization and bottleneck identification\n- Labor planning and capacity modeling\n- SLA compliance monitoring\n\n**Field Mapping:**\n- Returns selected fields: `taskId`, `taskType`, `assignment.userId`, `timing` object\n- Excludes historical completed tasks unless `includeHistorical` is true\n- Date filtering on `timing.createdAt`\n ", + operationId: "getWMSTaskTimestamps", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "taskId", + in: "query", + description: "Specific task ID to retrieve", + schema: { + type: "string" + } + }, + { + name: "taskTypes", + in: "query", + description: "Filter by task types", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "PICK", + "PUTAWAY", + "REPLENISHMENT", + "CYCLE_COUNT", + "MOVE", + "LOAD", + "UNLOAD", + "PACK", + "SORT" + ] + } + } + }, + { + name: "userIds", + in: "query", + description: "Filter by user IDs", + schema: { + type: "array", + items: { + type: "string" + } + } + }, + { + name: "dateStart", + in: "query", + description: "Start date for filtering", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "End date for filtering", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "includeHistorical", + in: "query", + description: "Include completed historical tasks", + schema: { + type: "boolean", + default: false + } + } + ], + responses: { + "200": { + description: "Task timestamps retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Task timestamps retrieved successfully" + }, + data: { + type: "array", + items: { + type: "object", + properties: { + taskId: { + type: "string", + example: "wms_task_674565c1234567890abcdef" + }, + taskType: { + type: 
"string", + example: "PICK" + }, + userId: { + type: "string", + example: "USER-001", + nullable: true + }, + timestamps: { + type: "object", + properties: { + createdAt: { + type: "string", + format: "date-time", + nullable: true + }, + releasedAt: { + type: "string", + format: "date-time", + nullable: true + }, + assignedAt: { + type: "string", + format: "date-time", + nullable: true + }, + startedAt: { + type: "string", + format: "date-time", + nullable: true + }, + completedAt: { + type: "string", + format: "date-time", + nullable: true + }, + estimatedDuration: { + type: "number", + nullable: true + }, + actualDuration: { + type: "number", + nullable: true + } + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/{taskId}": { + patch: { + tags: [ + "WMS" + ], + summary: "Partially update task", + description: "\n## Patch WMS Task\n\nPartially update a warehouse task with only the specified fields. This is useful for updating specific properties without affecting other fields.\n\n### Allowed Fields\n- **taskStatus**: Update the task status (CREATED, RELEASED, ASSIGNED, IN_PROGRESS, COMPLETED, CANCELLED, SUSPENDED)\n- **assignment**: Update assignment information (userId, userName)\n- **priority**: Update the task priority (numeric value)\n\n### Features\n- Partial updates - only specified fields are modified\n- Automatically updates the updatedAt timestamp\n- Supports nested objects for assignment\n- Auto-sets timing fields based on status changes\n\n### Use Cases\n- Quick status updates during task execution\n- Re-assign tasks to different users\n- Adjust priority based on business needs\n ", + operationId: "patchWMSTask", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "taskId", + in: "path", + required: true, + description: "Unique identifier for the task", + schema: { + type: "string", + example: "wms_task_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + taskStatus: { + type: "string", + enum: [ + "CREATED", + "RELEASED", + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED", + "SUSPENDED" + ], + description: "New status for the task" + }, + assignment: { + type: "object", + description: "Assignment information", + properties: { + userId: { + type: "string", + description: "User ID to assign the task to" + }, + userName: { + type: "string", + description: "User name for display purposes" + } + } + }, + priority: { + type: "integer", + description: "Task priority (higher number = higher priority)", + minimum: 1, + maximum: 10, + example: 5 + } + } + }, + examples: { + updateStatus: { + summary: "Update task status to IN_PROGRESS", + value: { + taskStatus: "IN_PROGRESS" + } + }, + assignTask: { + summary: "Assign task to a user", + value: { + assignment: { + userId: "USER-001", + userName: "John Doe" + } + } + }, + updatePriority: { + summary: "Update task priority", + value: { + priority: 8 + } + } + } + } + } + }, + responses: { + "200": { + description: "Task updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing parameters or no updates provided", + 
content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and taskId are required", + meta: { event: "patchWMSTask", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Task not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Task not found", + meta: { event: "patchWMSTask", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/{taskId}/status": { + put: { + summary: "Update task status", + description: "\n## Update Task Status\n\nUpdate task status with automatic timestamp tracking and workflow progression.\n\n**Status Workflow:**\n- **CREATED** → RELEASED (task becomes available)\n- **RELEASED** → ASSIGNED (assigned to user) \n- **ASSIGNED** → IN_PROGRESS (work started)\n- **IN_PROGRESS** → COMPLETED (successfully finished)\n- Any status → CANCELLED/SUSPENDED (exception handling)\n\n**Use Cases:**\n- Workflow progression tracking\n- Real-time task status updates\n- Performance measurement\n- Exception handling and recovery\n\n**Field Mapping:**\n- Updates `taskStatus` field directly\n- Automatically sets corresponding timestamp fields in `timing` object\n- Updates assignment fields when user information provided\n ", + operationId: "updateWMSTaskStatus", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "taskId", + in: "path", + required: true, + description: "Task identifier to update", + schema: { + type: "string" + }, + example: "wms_task_674565c1234567890abcdef" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "CREATED", + "RELEASED", + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED", + "SUSPENDED" + ], + description: "New task status", + example: "IN_PROGRESS" + }, + timestamp: { + type: "string", + format: "date-time", + description: "Optional custom timestamp (defaults to current time)", + example: "2024-11-27T14:30:00Z" + }, + userId: { + type: "string", + description: "User ID for assignment or completion tracking", + example: "USER-001" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Task status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Task status updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + }, + "404": { + description: "Not Found - Task not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Task not found", + meta: { event: "updateWMSTaskStatus", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/metrics": { + get: { + summary: "Get task performance metrics", + description: "\n## Get Task Performance Metrics\n\nRetrieve comprehensive analytics for task performance and productivity measurement.\n\n**Metrics Provided:**\n- Total and completed 
task counts\n- Average task duration and on-time completion rates\n- Productivity analysis by user\n- Performance breakdown by task type\n\n**Use Cases:**\n- Operational performance monitoring\n- Labor productivity analysis\n- Process optimization insights\n- Executive reporting dashboards\n ", + operationId: "getWMSTaskPerformanceMetrics", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "taskTypes", + in: "query", + description: "Filter by task types", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "PICK", + "PUTAWAY", + "REPLENISHMENT", + "CYCLE_COUNT", + "MOVE", + "LOAD", + "UNLOAD", + "PACK", + "SORT" + ] + } + } + }, + { + name: "userIds", + in: "query", + description: "Filter by user IDs", + schema: { + type: "array", + items: { + type: "string" + } + } + }, + { + name: "dateStart", + in: "query", + description: "Start date for metrics calculation", + schema: { + type: "string", + format: "date-time" + } + }, + { + name: "dateEnd", + in: "query", + description: "End date for metrics calculation", + schema: { + type: "string", + format: "date-time" + } + } + ], + responses: { + "200": { + description: "Task performance metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Task performance metrics retrieved successfully" + }, + data: { + type: "object", + properties: { + totalTasks: { + type: "number", + example: 2450 + }, + completedTasks: { + type: "number", + example: 2380 + }, + averageDuration: { + type: "number", + description: "Average duration in minutes", + example: 12.5 + }, + onTimeCompletion: { + type: "number", + description: "Count of on-time completions", + example: 2156 + }, + productivityByUser: { + type: "array", + items: { + type: "object", + properties: { + userId: { + type: "string", + example: "USER-001" + }, + tasksCompleted: { + type: "number", + example: 145 + }, + averageDuration: { + type: "number", + example: 11.2 + }, + unitsPerHour: { + type: "number", + example: 32.5 + } + } + } + }, + taskTypeMetrics: { + type: "array", + items: { + type: "object", + properties: { + taskType: { + type: "string", + example: "PICK" + }, + count: { + type: "number", + example: 1250 + }, + averageDuration: { + type: "number", + example: 8.7 + } + } + } + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/active": { + get: { + summary: "Get active tasks", + description: "\n## Get Active Tasks\n\nRetrieve all currently active (non-completed) tasks with optional zone filtering.\n\n**Active Status Definition:**\n- CREATED, RELEASED, ASSIGNED, IN_PROGRESS (excludes COMPLETED, CANCELLED, SUSPENDED)\n\n**Use Cases:**\n- Real-time operational dashboards\n- Work-in-progress monitoring\n- Resource allocation planning\n- Zone-specific task management\n\n**Field Mapping:**\n- Filters using `taskStatus` field with `$in` operator\n- Optional `zoneId` filtering\n- Sorted by priority descending, then creation time ascending\n ", + operationId: "getWMSActiveTasks", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "zoneId", + in: "query", + 
description: "Filter by specific zone", + schema: { + type: "string" + }, + example: "ZONE-PICK" + } + ], + responses: { + "200": { + description: "Active tasks retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Active tasks retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/{taskId}/scans": { + post: { + summary: "Add task scan", + description: "\n## Add Task Scan\n\nRecord barcode/RFID scan data for task validation and verification.\n\n**Scan Types:**\n- **BIN**: Location verification\n- **PRODUCT**: Product identification \n- **LPN**: License plate number tracking\n- **DESTINATION**: Destination verification\n\n**Scan Results:**\n- **MATCH**: Scan matches expected value\n- **MISMATCH**: Scan does not match expected value\n- **OVERRIDE**: Manual override by supervisor\n\n**Use Cases:**\n- Pick accuracy validation\n- Location verification\n- Product identification\n- Quality control and audit trails\n\n**Field Mapping:**\n- Pushes scan data to `scans` array with automatic timestamp\n- Uses `taskId` for task identification\n ", + operationId: "addWMSTaskScan", + tags: [ + "WMS" + ], + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "World identifier for multi-tenant context", + schema: { + type: "string" + } + }, + { + name: "taskId", + in: "path", + required: true, + description: "Task identifier to add scan to", + schema: { + type: "string" + }, + example: "wms_task_674565c1234567890abcdef" + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + scanType: { + type: "string", + enum: [ + "BIN", + "PRODUCT", + "LPN", + "DESTINATION" + ], + description: "Type of scan being performed", + example: "PRODUCT" + }, + scannedValue: { + type: "string", + description: "Actual scanned value", + example: "ABC-XYZ-001" + }, + expectedValue: { + type: "string", + description: "Expected value for validation", + example: "ABC-XYZ-001" + }, + scanResult: { + type: "string", + enum: [ + "MATCH", + "MISMATCH", + "OVERRIDE" + ], + description: "Result of scan validation", + example: "MATCH" + } + }, + required: [ + "scanType" + ] + } + } + } + }, + responses: { + "200": { + description: "Task scan added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Task scan added successfully" + }, + data: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid scan data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "scan data with scanType is required", + meta: { event: "addWMSTaskScan", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/{taskId}/assign": { + put: { + tags: [ + "WMS" + ], + summary: "Assign task to user", + description: "\n## Assign WMS Task\n\nAssign a task to a specific user for execution. 
Updates the task's assigned user information and sets the status to ASSIGNED.\n\n### Features\n- **User Assignment**: Assign task to specific warehouse worker\n- **Status Update**: Automatically updates task status to ASSIGNED\n- **Audit Trail**: Records assignment timestamp for tracking\n\n### Required Fields\n- **userId**: Unique identifier of the user being assigned\n- **userName**: Display name of the user for reporting\n\n### Use Cases\n- **Manual Assignment**: Supervisors assign tasks to specific workers\n- **Workload Balancing**: Distribute tasks across available workers\n- **Reassignment**: Transfer task from one worker to another\n ", + operationId: "assignWMSTask", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "taskId", + in: "path", + required: true, + description: "Unique identifier for the task", + schema: { + type: "string", + example: "wms_task_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: ["userId", "userName"], + properties: { + userId: { + type: "string", + description: "Unique identifier of the user being assigned", + example: "user_picker_001" + }, + userName: { + type: "string", + description: "Display name of the user", + example: "John Smith" + } + } + } + } + } + }, + responses: { + "200": { + description: "Task assigned successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "userId and userName are required", + meta: { event: "assignWMSTask", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Task not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Task wms_task_674565c1234567890abcdef not found", + meta: { event: "assignWMSTask", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/tasks/{taskId}/priority": { + put: { + tags: [ + "WMS" + ], + summary: "Update task priority", + description: "\n## Update WMS Task Priority\n\nUpdate the priority level of a task to change its processing order in the work queue.\n\n### Features\n- **Priority Management**: Change task priority for execution sequencing\n- **Numeric Priority**: Higher numbers indicate higher priority\n- **Real-time Update**: Immediately affects task queue ordering\n\n### Priority Guidelines\n- **1-10**: Low priority, process when capacity allows\n- **11-50**: Normal priority, standard processing\n- **51-90**: High priority, expedited processing\n- **91-100**: Critical priority, immediate attention required\n\n### Use Cases\n- **Rush Orders**: Elevate priority for urgent customer orders\n- **SLA Management**: Prioritize tasks approaching deadlines\n- **Resource Optimization**: Balance workload across priority levels\n ", + operationId: "updateWMSTaskPriority", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique 
identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "taskId", + in: "path", + required: true, + description: "Unique identifier for the task", + schema: { + type: "string", + example: "wms_task_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: ["priority"], + properties: { + priority: { + type: "number", + description: "New priority level (1-100, higher = more urgent)", + minimum: 1, + maximum: 100, + example: 75 + } + } + } + } + } + }, + responses: { + "200": { + description: "Task priority updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSTask" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid priority value", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "priority must be a number", + meta: { event: "updateWMSTaskPriority", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Task not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Task wms_task_674565c1234567890abcdef not found", + meta: { event: "updateWMSTaskPriority", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/wms/warehouse.wms.ts b/packages/controlmart/src/docs/paths/wms/warehouse.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..61b02614bd385aef12b28215d06479adbda477d9 --- /dev/null +++ b/packages/controlmart/src/docs/paths/wms/warehouse.wms.ts @@ -0,0 +1,5644 @@ +export const warehousePaths = { + "/{worldId}/wms/warehouses": { + post: { + tags: [ + "WMS" + ], + summary: "Create new warehouse", + description: "\nCreate a new warehouse facility with comprehensive location and operational configuration.\n\n**Core Features**:\n- **Facility Setup**: Complete warehouse configuration with address and timezone\n- **Auto-Generated Codes**: Automatic warehouseId and warehouseCode generation from name\n- **Type Classification**: Support for multiple warehouse operational types\n- **Status Management**: Built-in status lifecycle with ACTIVE, DISABLED, ARCHIVED\n\n**Use Cases**:\n- **Network Expansion**: Add new warehouse facilities to distribution network\n- **Regional Setup**: Configure warehouses for specific geographic regions\n- **Operational Classification**: Define warehouse types for specialized operations\n- **Multi-tenant Support**: Create isolated warehouse environments per world\n ", + operationId: "createWarehouse", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "warehouseName", + "address", + "timezone" + ], + properties: { + warehouseName: { + type: "string", + description: "Human readable warehouse name", + example: "Atlanta Distribution Center" + }, + address: { + type: "object", + description: "Physical warehouse address", + required: 
[ + "street1", + "city", + "state", + "postalCode", + "country" + ], + properties: { + street1: { + type: "string", + example: "1234 Industrial Blvd" + }, + city: { + type: "string", + example: "Atlanta" + }, + state: { + type: "string", + example: "GA" + }, + postalCode: { + type: "string", + example: "30309" + }, + country: { + type: "string", + example: "USA" + }, + latitude: { + type: "number", + example: 33.749 + }, + longitude: { + type: "number", + example: -84.388 + } + } + }, + timezone: { + type: "string", + description: "IANA timezone identifier", + example: "America/New_York" + }, + warehouseType: { + type: "string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ], + description: "Warehouse operational classification", + example: "FULFILLMENT" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "DISABLED", + "ARCHIVED" + ], + description: "Warehouse operational status", + default: "ACTIVE", + example: "ACTIVE" + } + } + } + } + } + }, + responses: { + "201": { + description: "Warehouse created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Warehouse created successfully" + }, + data: { + $ref: "#/components/schemas/WMSWarehouse" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid warehouse data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse name, address, and timezone are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "WMS" + ], + summary: "Get all warehouses", + description: "\nRetrieve all warehouses with comprehensive filtering and pagination capabilities.\n\n**Core Features**:\n- **Complete Listing**: Get all warehouses in world environment\n- **Advanced Filtering**: Filter by type, status, and operational criteria\n- **Paginated Results**: Efficient handling of large warehouse datasets\n- **Cursor-based Pagination**: Optimized performance for large result sets\n\n**Use Cases**:\n- **Network Overview**: Get complete warehouse network visibility\n- **Operational Filtering**: Find warehouses by operational status or type\n- **Data Management**: Bulk operations and comprehensive reporting\n- **Performance Monitoring**: Track warehouse network performance metrics\n ", + operationId: "getAllWarehouses", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseType", + in: "query", + required: false, + description: "Filter by warehouse types (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ] + }, + example: [ + "FULFILLMENT", + "3PL" + ] + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by warehouse status (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "ACTIVE", + "DISABLED", + "ARCHIVED" + ] + }, + example: [ + "ACTIVE" + ] + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + 
example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Warehouses retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouses retrieved successfully" + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSWarehouse" + } + }, + totalCount: { + type: "integer", + example: 12 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + example: null + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/warehouses/code/{warehouseCode}": { + get: { + tags: [ + "WMS" + ], + summary: "Get warehouse by code", + description: "\nRetrieve warehouse by unique warehouse code for direct facility access.\n\n**Core Features**:\n- **Direct Access**: Get warehouse by auto-generated warehouse code\n- **Code-based Lookup**: Fast retrieval using slugified warehouse name\n- **Complete Data**: Returns full warehouse configuration and address\n- **Optimized Query**: Uses indexed warehouseCode field for performance\n\n**Use Cases**:\n- **Code Integration**: Access warehouses via human-readable codes\n- **System Integration**: External system references using warehouse codes\n- **URL-friendly Access**: Use slugified codes in user interfaces\n- **Quick Lookup**: Direct warehouse access without ID knowledge\n ", + operationId: "getWarehouseByCode", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseCode", + in: "path", + required: true, + description: "Warehouse code (auto-generated from warehouse name)", + schema: { + type: "string", + example: "atlanta-distribution-center" + } + } + ], + responses: { + "200": { + description: "Warehouse retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouse retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSWarehouse" + } + } + } + } + } + }, + "404": { + description: "Not Found - Warehouse not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Warehouse atlanta-distribution-center not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/warehouses/{warehouseId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get warehouse by ID", + description: "\nRetrieve warehouse by unique warehouse identifier for direct facility access and management.\n\n**Core Features**:\n- **Direct Access**: Get warehouse by unique warehouseId\n- **Complete Data**: Returns full warehouse configuration including address\n- **Fast Lookup**: Optimized query using indexed warehouseId field\n- **Reference Resolution**: Resolve 
warehouse references from other operations\n\n**Use Cases**:\n- **Facility Details**: Get complete warehouse information for management\n- **Reference Lookup**: Resolve warehouse references from orders and operations\n- **Configuration Review**: Access warehouse settings for updates\n- **Integration Support**: Direct API access for external systems\n ", + operationId: "getWarehouseById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse unique identifier", + schema: { + type: "string", + example: "WH_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Warehouse retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouse retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSWarehouse" + } + } + } + } + } + }, + "404": { + description: "Not Found - Warehouse not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Warehouse WH_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "WMS" + ], + summary: "Update warehouse configuration", + description: "\nUpdate warehouse configuration with partial data for operational adjustments.\n\n**Core Features**:\n- **Partial Updates**: Update specific warehouse fields without replacing entire record\n- **Configuration Changes**: Modify address, timezone, type, and operational settings\n- **Validation**: Ensures data consistency during updates\n- **Flexible Modification**: Support for incremental configuration changes\n\n**Use Cases**:\n- **Address Updates**: Update warehouse physical location information\n- **Timezone Changes**: Adjust timezone settings for operational requirements\n- **Type Modifications**: Convert warehouse purposes based on operational needs\n- **Configuration Maintenance**: Keep warehouse data current and accurate\n ", + operationId: "updateWarehouse", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse unique identifier", + schema: { + type: "string", + example: "WH_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + warehouseName: { + type: "string", + description: "Updated warehouse name", + example: "Enhanced Atlanta Distribution Center" + }, + address: { + type: "object", + description: "Updated warehouse address", + properties: { + street1: { + type: "string", + example: "5678 Logistics Parkway" + }, + city: { + type: "string", + example: "Atlanta" + }, + state: { + type: "string", + example: "GA" + }, + postalCode: { + type: "string", + example: "30309" + }, + country: { + type: "string", + example: "USA" + }, + latitude: { + type: "number", + example: 33.749 + }, + longitude: { + type: 
"number", + example: -84.388 + } + } + }, + timezone: { + type: "string", + description: "Updated IANA timezone identifier", + example: "America/New_York" + }, + warehouseType: { + type: "string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ], + description: "Updated warehouse operational type", + example: "3PL" + } + } + } + } + } + }, + responses: { + "200": { + description: "Warehouse updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouse updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSWarehouse" + } + } + } + } + } + }, + "404": { + description: "Not Found - Warehouse not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Warehouse WH_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/warehouses/type/{warehouseType}": { + get: { + tags: [ + "WMS" + ], + summary: "Get warehouses by type", + description: "\nRetrieve warehouses filtered by specific operational type for targeted facility management.\n\n**Core Features**:\n- **Type Filtering**: Get all warehouses of specific operational type\n- **Status Filtering**: Additional filtering by warehouse operational status\n- **Paginated Results**: Efficient handling of large warehouse datasets\n- **Operational Planning**: Support for type-specific warehouse operations\n\n**Use Cases**:\n- **Operational Planning**: Find warehouses for specific operational activities\n- **Network Analysis**: Analyze distribution network by facility types\n- **Resource Allocation**: Identify warehouses available for particular functions\n- **Capacity Planning**: Assess type-specific warehouse capabilities\n ", + operationId: "getWarehousesByType", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseType", + in: "path", + required: true, + description: "Warehouse type filter", + schema: { + type: "string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ], + example: "FULFILLMENT" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by warehouse status (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "ACTIVE", + "DISABLED", + "ARCHIVED" + ] + }, + example: [ + "ACTIVE" + ] + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Warehouses retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouses retrieved successfully" + }, + data: { + 
type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSWarehouse" + } + }, + totalCount: { + type: "integer", + example: 5 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + example: null + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/warehouses/active": { + get: { + tags: [ + "WMS" + ], + summary: "Get active warehouses", + description: "\nRetrieve all active warehouses for operational planning and current facility management.\n\n**Core Features**:\n- **Active Filtering**: Get only warehouses with ACTIVE status\n- **Type Filtering**: Optional filtering by warehouse operational type\n- **Paginated Results**: Efficient handling of active warehouse datasets\n- **Operational Focus**: Optimized for current operational planning\n\n**Use Cases**:\n- **Operational Planning**: Get currently operational warehouses\n- **Network Visibility**: View active distribution network facilities\n- **Resource Planning**: Plan operations using available warehouses\n- **Performance Monitoring**: Track active warehouse performance metrics\n ", + operationId: "getActiveWarehouses", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseType", + in: "query", + required: false, + description: "Filter by warehouse types (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ] + }, + example: [ + "FULFILLMENT" + ] + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor for next page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439012" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results per page", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: "Active warehouses retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Active warehouses retrieved successfully" + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSWarehouse" + } + }, + totalCount: { + type: "integer", + example: 8 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + example: null + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/warehouses/{warehouseId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update warehouse status", + description: "\nUpdate warehouse operational status for lifecycle management and facility control.\n\n**Core Features**:\n- **Status Management**: Update warehouse status between ACTIVE, DISABLED, ARCHIVED\n- **Lifecycle Control**: Support for warehouse operational lifecycle management\n- **Validation**: Ensures valid status transitions and data consistency\n- **Operational Control**: Enable/disable warehouse operations dynamically\n\n**Use Cases**:\n- **Facility Management**: Control warehouse operational availability\n- 
**Maintenance Mode**: Temporarily disable warehouses for maintenance\n- **Decommissioning**: Archive warehouses that are no longer operational\n- **Operational Control**: Dynamic warehouse activation and deactivation\n ", + operationId: "updateWarehouseStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse unique identifier", + schema: { + type: "string", + example: "WH_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "ACTIVE", + "DISABLED", + "ARCHIVED" + ], + description: "New warehouse operational status", + example: "DISABLED" + } + } + } + } + } + }, + responses: { + "200": { + description: "Warehouse status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouse status updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSWarehouse" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid status provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Valid status is required (ACTIVE, DISABLED, or ARCHIVED)", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Warehouse not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Warehouse WH_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/warehouses/search": { + get: { + tags: [ + "WMS" + ], + summary: "Search warehouses", + description: "\nSearch warehouses by name and other criteria with flexible filtering capabilities.\n\n**Core Features**:\n- **Text Search**: Search warehouses by name using flexible text matching\n- **Advanced Filtering**: Filter results by type and status\n- **Flexible Matching**: Support for partial name matches and text search\n- **Operational Filtering**: Combine text search with type and status filters\n\n**Use Cases**:\n- **Facility Discovery**: Find warehouses by name or partial name\n- **Dynamic Filtering**: Interactive warehouse search and filtering\n- **Integration Support**: API-based warehouse lookup for external systems\n- **User Interfaces**: Support for warehouse selection and discovery\n ", + operationId: "searchWarehouses", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "q", + in: "query", + required: true, + description: "Search term for warehouse name", + schema: { + type: "string", + example: "Atlanta" + } + }, + { + name: "warehouseType", + in: "query", + required: false, + description: "Filter by warehouse types (comma-separated)", + schema: { + type: "array", + items: { + type: 
"string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ] + }, + example: [ + "FULFILLMENT" + ] + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by warehouse status (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "ACTIVE", + "DISABLED", + "ARCHIVED" + ] + }, + example: [ + "ACTIVE" + ] + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 10 + } + } + ], + responses: { + "200": { + description: "Warehouse search completed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouse search completed successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSWarehouse" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Search term required", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Search term (q) is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/warehouses/timezone/{timezone}": { + get: { + tags: [ + "WMS" + ], + summary: "Get warehouses by timezone", + description: "\nRetrieve warehouses filtered by timezone for regional operational planning and coordination.\n\n**Core Features**:\n- **Timezone Filtering**: Get warehouses in specific IANA timezone\n- **Regional Planning**: Support for timezone-based operational coordination\n- **Advanced Filtering**: Additional filtering by type and status\n- **Operational Coordination**: Enable timezone-aware warehouse operations\n\n**Use Cases**:\n- **Regional Operations**: Coordinate operations within specific timezones\n- **Shift Planning**: Plan warehouse operations by timezone alignment\n- **Multi-regional Management**: Manage warehouse networks across timezones\n- **Operational Scheduling**: Schedule activities based on warehouse timezones\n ", + operationId: "getWarehousesByTimezone", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "timezone", + in: "path", + required: true, + description: "IANA timezone identifier", + schema: { + type: "string", + example: "America/New_York" + } + }, + { + name: "warehouseType", + in: "query", + required: false, + description: "Filter by warehouse types (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ] + }, + example: [ + "FULFILLMENT" + ] + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by warehouse status (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "ACTIVE", + "DISABLED", + "ARCHIVED" + ] + }, + example: [ + "ACTIVE" + ] + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of results", + schema: { + type: "integer", + minimum: 1, + maximum: 100, + default: 50, + example: 25 + } + } + ], + responses: { + "200": { + description: 
"Warehouses retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Warehouses retrieved successfully" + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSWarehouse" + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/bins": { + post: { + tags: [ + "WMS" + ], + summary: "Create new warehouse bin", + description: "\n## Create WMS Bin\n\nCreate a new warehouse bin for inventory storage and management with comprehensive location and capacity configuration.\n\n### Features\n- **Location Management**: Define precise warehouse location with aisle, bay, level, and position\n- **Capacity Configuration**: Set weight, volume, and pallet capacity constraints\n- **Type Classification**: Configure bin type for operational workflow optimization\n- **ABC Classification**: Support inventory velocity classification for efficient picking\n- **Status Management**: Initialize bin with appropriate operational status\n- **Warehouse Integration**: Link bin to specific warehouse and zone structures\n\n### Bin Type Categories\n- **PALLET**: Full pallet storage locations\n- **SHELF**: Shelf-based case or piece picking locations\n- **FLOOR**: Floor-level storage for large items\n- **CASE_FLOW**: Dynamic case flow storage systems\n- **RESERVE**: Reserve or bulk storage locations\n- **PICK_FACE**: Forward pick face locations\n\n### Location Type Categories\n- **STORAGE**: General inventory storage locations\n- **STAGING**: Temporary staging areas for operations\n- **DOCK**: Dock door staging and loading areas\n- **QC**: Quality control inspection areas\n- **RETURN**: Return merchandise processing areas\n\n### Status Values\n- **AVAILABLE**: Ready for inventory storage\n- **OCCUPIED**: Currently contains inventory\n- **RESERVED**: Reserved for specific operations\n- **DAMAGED**: Physically damaged, unusable\n- **BLOCKED**: Temporarily blocked from use\n\n### Business Rules\n- binId is auto-generated with unique identifier if not provided\n- binCode must be unique within the warehouse\n- warehouseId and zoneId are required for location hierarchy\n- Capacity settings define operational constraints\n- Custom fields support warehouse-specific requirements\n ", + operationId: "createWMSBin", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + binId: { + type: "string", + description: "Unique bin identifier (auto-generated if not provided)", + example: "BIN_ATL_A01_001" + }, + binCode: { + type: "string", + description: "Human-readable bin code", + example: "A01-B05-L02-P03" + }, + warehouseId: { + type: "string", + description: "Parent warehouse identifier", + example: "WH_ATL_001" + }, + zoneId: { + type: "string", + description: "Zone identifier within warehouse", + example: "ZONE_PICK_A" + }, + aisleId: { + type: "string", + description: "Aisle identifier (optional)", + example: "AISLE_A01" + }, + location: { + type: "object", + properties: { + aisle: { + type: "string", + description: "Aisle designation", + example: "A01" + }, + bay: { + type: "string", + description: "Bay designation within aisle", + example: "B05" + 
}, + level: { + type: "integer", + description: "Level/tier number", + example: 2 + }, + position: { + type: "string", + description: "Position within bay", + example: "P03" + } + } + }, + binType: { + type: "string", + enum: [ + "PALLET", + "SHELF", + "FLOOR", + "CASE_FLOW", + "RESERVE", + "PICK_FACE" + ], + description: "Type of bin for operational classification", + example: "PICK_FACE" + }, + locationType: { + type: "string", + enum: [ + "STORAGE", + "STAGING", + "DOCK", + "QC", + "RETURN" + ], + description: "Functional location type", + example: "STORAGE" + }, + capacity: { + type: "object", + properties: { + maxWeightLbs: { + type: "number", + description: "Maximum weight capacity in pounds", + example: 2000 + }, + maxCubicFeet: { + type: "number", + description: "Maximum volume capacity in cubic feet", + example: 50.5 + }, + maxPallets: { + type: "integer", + description: "Maximum pallet capacity", + example: 1 + } + } + }, + status: { + type: "string", + enum: [ + "AVAILABLE", + "OCCUPIED", + "RESERVED", + "DAMAGED", + "BLOCKED" + ], + description: "Initial operational status", + example: "AVAILABLE" + }, + abcClassification: { + type: "string", + enum: [ + "A", + "B", + "C" + ], + description: "ABC velocity classification for inventory management", + example: "A" + }, + pickable: { + type: "boolean", + description: "Whether bin is available for picking operations", + example: true + }, + customFields: { + type: "object", + description: "Additional warehouse-specific bin attributes", + example: { + temperatureControlled: true, + hazMatApproved: false + } + } + }, + required: [ + "binCode", + "warehouseId", + "zoneId" + ] + } + } + } + }, + responses: { + "201": { + description: "Bin created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + data: { + $ref: "#/components/schemas/WMSBin" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid data or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/bins/available": { + get: { + tags: [ + "WMS" + ], + summary: "Get available bins with filtering", + description: "\n## Get Available WMS Bins\n\nRetrieve available warehouse bins based on capacity requirements, location filters, and operational criteria for inventory placement and picking operations.\n\n### Features\n- **Capacity Filtering**: Filter by minimum weight, volume, and pallet requirements\n- **Zone-Based Filtering**: Limit results to specific warehouse zones\n- **Type-Based Filtering**: Filter by bin types for operational compatibility\n- **Warehouse Scoping**: Filter by specific warehouse for multi-facility operations\n- **Availability Status**: Only returns bins available for inventory placement\n- **Real-Time Availability**: Current availability based on inventory occupancy\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse\n- **zoneIds**: Optional - Array of zone identifiers to include\n- **binType**: Optional - Array of bin types to filter by\n- **minWeight**: Optional - Minimum weight capacity requirement in pounds\n- **minVolume**: Optional - Minimum volume capacity requirement in cubic feet\n- 
**minPallets**: Optional - Minimum pallet capacity requirement\n\n### Business Logic\n- Returns only bins with status AVAILABLE\n- Excludes bins that are OCCUPIED, RESERVED, DAMAGED, or BLOCKED\n- Capacity filters are cumulative (bin must meet all specified minimums)\n- Zone filtering allows multiple zones for flexible operations\n- Results sorted by zone, aisle, and position for efficient navigation\n\n### Use Cases\n- **Inventory Putaway**: Find suitable bins for incoming inventory\n- **Pick Path Optimization**: Locate bins for efficient picking routes\n- **Capacity Planning**: Assess available storage capacity\n- **Slotting Optimization**: Support slotting and re-slotting operations\n ", + operationId: "getAvailableWMSBins", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Filter by specific warehouse", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "zoneIds", + in: "query", + required: false, + description: "Filter by specific zones (comma-separated)", + schema: { + type: "array", + items: { + type: "string" + }, + example: [ + "ZONE_PICK_A", + "ZONE_PICK_B" + ] + } + }, + { + name: "binType", + in: "query", + required: false, + description: "Filter by bin types (comma-separated)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "PALLET", + "SHELF", + "FLOOR", + "CASE_FLOW", + "RESERVE", + "PICK_FACE" + ] + }, + example: [ + "PICK_FACE", + "SHELF" + ] + } + }, + { + name: "minWeight", + in: "query", + required: false, + description: "Minimum weight capacity requirement in pounds", + schema: { + type: "number", + minimum: 0, + example: 1000 + } + }, + { + name: "minVolume", + in: "query", + required: false, + description: "Minimum volume capacity requirement in cubic feet", + schema: { + type: "number", + minimum: 0, + example: 25.5 + } + }, + { + name: "minPallets", + in: "query", + required: false, + description: "Minimum pallet capacity requirement", + schema: { + type: "integer", + minimum: 1, + example: 1 + } + } + ], + responses: { + "200": { + description: "Available bins retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSBin" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/bins/code/{binCode}": { + get: { + tags: [ + "WMS" + ], + summary: "Get bin by code", + description: "\n## Get WMS Bin by Code\n\nRetrieve a specific warehouse bin using its human-readable bin code for operational identification and management.\n\n### Features\n- **Code-Based Lookup**: Find bins using human-readable bin codes\n- **Warehouse Scoping**: Optional warehouse filtering for multi-facility operations\n- **Complete Bin Profile**: Returns all bin attributes including location and capacity\n- **Status Information**: Current operational status and 
availability\n- **Inventory Context**: Location context for inventory management operations\n\n### Query Parameters\n- **warehouseId**: Optional - Scope search to specific warehouse for faster lookup\n\n### Business Rules\n- binCode must be exact match (case-sensitive)\n- If warehouseId provided, search limited to that warehouse\n- Returns 404 if bin not found or not accessible\n- Includes all bin configuration and status information\n\n### Use Cases\n- **Inventory Operations**: Look up bin details during putaway or picking\n- **Maintenance**: Access bin information for maintenance operations\n- **Verification**: Verify bin attributes and capacity constraints\n- **Location Services**: Support warehouse navigation and location services\n ", + operationId: "getWMSBinByCode", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "binCode", + in: "path", + required: true, + description: "Human-readable bin code identifier", + schema: { + type: "string", + example: "A01-B05-L02-P03" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Optional warehouse identifier for scoped search", + schema: { + type: "string", + example: "WH_ATL_001" + } + } + ], + responses: { + "200": { + description: "Bin retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/WMSBin" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and binCode are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Bin not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Bin A01-B05-L02-P03 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/bins/zone/{zoneId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get bins by zone with filtering", + description: "\n## Get WMS Bins by Zone\n\nRetrieve all bins within a specific warehouse zone with comprehensive filtering capabilities for operational management and planning.\n\n### Features\n- **Zone-Based Retrieval**: Get all bins within a specific warehouse zone\n- **Multi-Status Filtering**: Filter by one or multiple bin status values\n- **Type-Based Filtering**: Filter by bin types for operational compatibility\n- **Location Type Filtering**: Filter by functional location types\n- **ABC Classification**: Filter by inventory velocity classification\n- **Comprehensive Results**: Returns complete bin profiles with all attributes\n\n### Query Parameters\n- **status**: Optional - Filter by bin status (supports multiple values)\n- **binType**: Optional - Filter by bin types (supports multiple values)\n- **locationType**: Optional - Filter by location types (supports multiple values)\n- **abcClassification**: Optional - Filter by ABC classification (supports multiple values)\n\n### Available Status Values\n- **AVAILABLE**: Ready for 
inventory storage\n- **OCCUPIED**: Currently contains inventory\n- **RESERVED**: Reserved for specific operations\n- **DAMAGED**: Physically damaged, unusable\n- **BLOCKED**: Temporarily blocked from use\n\n### Business Rules\n- zoneId must be valid and accessible\n- All filter parameters support multiple values (comma-separated)\n- Results include complete bin configuration and current status\n- Sorted by aisle, bay, level, and position for operational efficiency\n\n### Use Cases\n- **Zone Operations**: Manage all bins within a specific zone\n- **Capacity Analysis**: Analyze zone capacity and utilization\n- **Maintenance Planning**: Identify bins requiring maintenance\n- **Operational Planning**: Plan putaway and picking operations by zone\n- **Slotting Analysis**: Support slotting optimization within zones\n ", + operationId: "getWMSBinsByZone", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneId", + in: "path", + required: true, + description: "Zone identifier to retrieve bins from", + schema: { + type: "string", + example: "ZONE_PICK_A" + } + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by bin status (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "AVAILABLE", + "OCCUPIED", + "RESERVED", + "DAMAGED", + "BLOCKED" + ] + }, + example: [ + "AVAILABLE", + "OCCUPIED" + ] + } + }, + { + name: "binType", + in: "query", + required: false, + description: "Filter by bin types (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "PALLET", + "SHELF", + "FLOOR", + "CASE_FLOW", + "RESERVE", + "PICK_FACE" + ] + }, + example: [ + "PICK_FACE", + "SHELF" + ] + } + }, + { + name: "locationType", + in: "query", + required: false, + description: "Filter by location types (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "STORAGE", + "STAGING", + "DOCK", + "QC", + "RETURN" + ] + }, + example: [ + "STORAGE" + ] + } + }, + { + name: "abcClassification", + in: "query", + required: false, + description: "Filter by ABC classification (supports multiple comma-separated values)", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "A", + "B", + "C" + ] + }, + example: [ + "A", + "B" + ] + } + } + ], + responses: { + "200": { + description: "Zone bins retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSBin" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and zoneId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/bins/{binId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update bin status", + description: "\n## Update WMS Bin Status\n\nUpdate the operational status of a warehouse bin with optional reason documentation for audit and operational tracking.\n\n### Features\n- 
**Status Management**: Control bin operational availability and usage\n- **Reason Documentation**: Optional reason field for audit trail and communication\n- **Workflow Integration**: Trigger downstream processes based on status changes\n- **Audit Trail**: Track all status changes with timestamps and reasons\n- **Operational Control**: Support maintenance, blocking, and reservation workflows\n\n### Status Transitions\n- **AVAILABLE** ↔ **OCCUPIED**: Inventory placement and removal\n- **AVAILABLE** ↔ **RESERVED**: Reserve for specific operations\n- **Any Status** → **DAMAGED**: Mark as damaged for maintenance\n- **Any Status** → **BLOCKED**: Temporarily block for operations\n- **DAMAGED** → **AVAILABLE**: Repair completion and restoration\n- **BLOCKED** → **AVAILABLE**: Remove operational block\n\n### Business Rules\n- Status field is required for all updates\n- Reason field is optional but recommended for non-standard changes\n- Status changes are logged for audit compliance\n- Some status changes may trigger workflow notifications\n- Bins with DAMAGED or BLOCKED status are excluded from availability\n\n### Use Cases\n- **Maintenance Management**: Mark bins as damaged for repair workflows\n- **Operational Control**: Block bins for cleaning or reorganization\n- **Reservation Management**: Reserve bins for specific operations or inventory\n- **Status Monitoring**: Track bin status for operational dashboards\n ", + operationId: "updateWMSBinStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "binId", + in: "path", + required: true, + description: "Unique bin identifier", + schema: { + type: "string", + example: "BIN_ATL_A01_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + type: "string", + enum: [ + "AVAILABLE", + "OCCUPIED", + "RESERVED", + "DAMAGED", + "BLOCKED" + ], + description: "New operational status for the bin", + example: "BLOCKED" + }, + reason: { + type: "string", + description: "Optional reason for status change (recommended for audit trail)", + example: "Scheduled maintenance - cleaning and inspection" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Bin status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/WMSBin" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing or invalid status", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Bin not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Bin BIN_ATL_A01_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/bins/{binId}/capacity": { + put: { + tags: [ + "WMS" + ], + summary: "Update bin capacity", + description: "\n## Update WMS Bin Capacity\n\nUpdate 
the capacity constraints for a warehouse bin to reflect changes in storage capabilities, equipment modifications, or operational requirements.\n\n### Features\n- **Capacity Management**: Update weight, volume, and pallet capacity constraints\n- **Operational Flexibility**: Modify capacity based on operational changes\n- **Safety Compliance**: Ensure capacity limits meet safety requirements\n- **Slotting Support**: Support slotting optimization with accurate capacity data\n- **Equipment Integration**: Reflect changes in storage equipment capabilities\n\n### Capacity Parameters\n- **maxWeightLbs**: Maximum weight capacity in pounds\n- **maxCubicFeet**: Maximum volume capacity in cubic feet\n- **maxPallets**: Maximum number of pallets that can be stored\n\n### Business Rules\n- At least one capacity parameter must be provided\n- Capacity values must be positive numbers\n- Weight and volume can include decimal values for precision\n- Pallet count must be integer value\n- Changes are logged for audit and capacity planning\n\n### Use Cases\n- **Equipment Changes**: Update capacity after storage equipment modifications\n- **Safety Updates**: Adjust capacity limits for safety compliance\n- **Slotting Optimization**: Update capacity data for slotting analysis\n- **Operational Changes**: Reflect operational procedure changes affecting capacity\n ", + operationId: "updateWMSBinCapacity", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "binId", + in: "path", + required: true, + description: "Unique bin identifier", + schema: { + type: "string", + example: "BIN_ATL_A01_001" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + maxWeightLbs: { + type: "number", + minimum: 0, + description: "Maximum weight capacity in pounds", + example: 2500 + }, + maxCubicFeet: { + type: "number", + minimum: 0, + description: "Maximum volume capacity in cubic feet", + example: 75.25 + }, + maxPallets: { + type: "integer", + minimum: 1, + description: "Maximum pallet capacity", + example: 2 + } + }, + anyOf: [ + { + required: [ + "maxWeightLbs" + ] + }, + { + required: [ + "maxCubicFeet" + ] + }, + { + required: [ + "maxPallets" + ] + } + ] + } + } + } + }, + responses: { + "200": { + description: "Bin capacity updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/WMSBin" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing or invalid capacity data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "capacity data is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Bin not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Bin BIN_ATL_A01_001 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/bins/utilization": { + get: { + tags: [ + "WMS" + ], + summary: "Get bin utilization metrics", + 
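// With the example figures in the response below, the utilization percentages line
// up with the capacity block as follows (a sketch of the arithmetic implied by the
// examples, not a guaranteed server-side formula):
//
//   const weightPercent = (1 - 687500 / 2500000) * 100; // 72.5
//   const volumePercent = (1 - 39625 / 125000) * 100;   // 68.3
//   const countPercent  = (650 / 1250) * 100;           // 52 (occupiedBins / totalBins)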
description: "\n## Get WMS Bin Utilization Metrics\n\nRetrieve comprehensive bin utilization metrics and analytics for warehouse capacity planning, performance monitoring, and operational optimization.\n\n### Features\n- **Utilization Analytics**: Calculate space utilization percentages across bins\n- **Zone-Based Analysis**: Analyze utilization by specific warehouse zones\n- **Capacity Planning**: Support capacity planning with current and projected utilization\n- **Performance Monitoring**: Track utilization trends for operational efficiency\n- **Warehouse Scoping**: Filter analysis by specific warehouse facilities\n\n### Metrics Included\n- **Space Utilization**: Percentage of capacity utilized by weight, volume, and count\n- **Bin Status Distribution**: Count of bins by operational status\n- **Zone Performance**: Utilization metrics aggregated by zone\n- **Capacity Analysis**: Available vs. utilized capacity across dimensions\n- **Efficiency Indicators**: Operational efficiency metrics and trends\n\n### Query Parameters\n- **zoneIds**: Optional - Filter analysis to specific zones (supports multiple)\n- **warehouseId**: Optional - Scope analysis to specific warehouse\n- **dateStart**: Optional - Start date for historical analysis\n- **dateEnd**: Optional - End date for historical analysis\n\n### Business Logic\n- Calculates real-time utilization based on current inventory\n- Excludes DAMAGED and BLOCKED bins from available capacity\n- Provides both absolute and percentage utilization metrics\n- Aggregates data by zone and warehouse for hierarchical analysis\n\n### Use Cases\n- **Capacity Planning**: Assess current capacity utilization for expansion planning\n- **Performance Monitoring**: Track warehouse operational efficiency\n- **Zone Optimization**: Identify underutilized or overcapacity zones\n- **Resource Allocation**: Optimize bin allocation and slotting strategies\n- **Operational Reporting**: Generate utilization reports for management\n ", + operationId: "getWMSBinUtilization", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneIds", + in: "query", + required: false, + description: "Filter analysis to specific zones (comma-separated)", + schema: { + type: "array", + items: { + type: "string" + }, + example: [ + "ZONE_PICK_A", + "ZONE_RESERVE_B" + ] + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Scope analysis to specific warehouse", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for historical utilization analysis", + schema: { + type: "string", + format: "date-time", + example: "2024-01-01T00:00:00.000Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for historical utilization analysis", + schema: { + type: "string", + format: "date-time", + example: "2024-01-31T23:59:59.999Z" + } + } + ], + responses: { + "200": { + description: "Bin utilization metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + overall: { + type: "object", + properties: { + totalBins: { + type: "integer", + example: 1250 + }, + availableBins: { 
+ type: "integer", + example: 450 + }, + occupiedBins: { + type: "integer", + example: 650 + }, + utilization: { + type: "object", + properties: { + weightPercent: { + type: "number", + example: 72.5 + }, + volumePercent: { + type: "number", + example: 68.3 + }, + countPercent: { + type: "number", + example: 52 + } + } + } + } + }, + byZone: { + type: "array", + items: { + type: "object", + properties: { + zoneId: { + type: "string", + example: "ZONE_PICK_A" + }, + totalBins: { + type: "integer", + example: 300 + }, + availableBins: { + type: "integer", + example: 125 + }, + occupiedBins: { + type: "integer", + example: 175 + }, + utilization: { + type: "object", + properties: { + weightPercent: { + type: "number", + example: 75.2 + }, + volumePercent: { + type: "number", + example: 71.8 + }, + countPercent: { + type: "number", + example: 58.3 + } + } + } + } + } + }, + capacity: { + type: "object", + properties: { + totalWeight: { + type: "number", + example: 2500000 + }, + availableWeight: { + type: "number", + example: 687500 + }, + totalVolume: { + type: "number", + example: 125000 + }, + availableVolume: { + type: "number", + example: 39625 + }, + totalPallets: { + type: "integer", + example: 1250 + }, + availablePallets: { + type: "integer", + example: 450 + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid query parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/distribution-centers": { + post: { + tags: [ + "WMS" + ], + summary: "Create new distribution center", + description: "\n## Create WMS Distribution Center\n\nCreate a new distribution center within a warehouse for comprehensive facility management and operational coordination.\n\n### Features\n- **Facility Management**: Complete distribution center configuration and management\n- **Multi-Type Support**: Support for various DC types (Fulfillment, Cross-dock, Cold storage, 3PL)\n- **Location Integration**: Full address and timezone configuration for accurate operations\n- **Operational Hours**: Configurable operating hours for scheduling and planning\n- **Contact Management**: Complete contact information for facility coordination\n- **Custom Configuration**: Flexible custom fields for facility-specific requirements\n\n### Distribution Center Types\n- **FULFILLMENT**: Standard fulfillment centers for order processing and shipping\n- **CROSS_DOCK**: Cross-docking facilities for rapid product flow and minimal storage\n- **COLD_STORAGE**: Temperature-controlled facilities for perishable goods\n- **3PL**: Third-party logistics facilities for outsourced operations\n\n### Operational Status Values\n- **ACTIVE**: Fully operational and accepting operations\n- **INACTIVE**: Temporarily inactive but available for activation\n- **MAINTENANCE**: Under maintenance, operations suspended\n\n### Business Rules\n- dcId is auto-generated with unique identifier if not provided\n- warehouseId and dcName are required for facility creation\n- dcName must be unique within the warehouse\n- Operating hours support full weekly schedule configuration\n- Custom fields support facility-specific operational requirements\n ", + operationId: "createWMSDistributionCenter", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique 
identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + warehouseId: { + type: "string", + description: "Parent warehouse identifier", + example: "WH_ATL_001" + }, + dcName: { + type: "string", + description: "Distribution center name", + example: "Atlanta Fulfillment Center East" + }, + dcType: { + type: "string", + enum: [ + "FULFILLMENT", + "CROSS_DOCK", + "COLD_STORAGE", + "3PL" + ], + description: "Type of distribution center operation", + example: "FULFILLMENT" + }, + address: { + type: "object", + description: "Physical address of the distribution center", + properties: { + street1: { + type: "string", + example: "1234 Industrial Blvd" + }, + city: { + type: "string", + example: "Atlanta" + }, + state: { + type: "string", + example: "GA" + }, + zipCode: { + type: "string", + example: "30309" + }, + country: { + type: "string", + example: "US" + } + } + }, + timezone: { + type: "string", + description: "Timezone for facility operations", + example: "America/New_York" + }, + totalSqFootage: { + type: "number", + description: "Total square footage of the facility", + example: 250000 + }, + operationalStatus: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "MAINTENANCE" + ], + description: "Initial operational status", + example: "ACTIVE" + }, + operatingHours: { + type: "object", + description: "Weekly operating schedule", + properties: { + monday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time (HH:MM format)", + example: "06:00" + }, + close: { + type: "string", + description: "Closing time (HH:MM format)", + example: "22:00" + } + } + }, + tuesday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + wednesday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + thursday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + friday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + saturday: { + type: "object", + properties: { + open: { + type: "string", + example: "08:00" + }, + close: { + type: "string", + example: "18:00" + } + } + }, + sunday: { + type: "object", + properties: { + open: { + type: "string", + example: "10:00" + }, + close: { + type: "string", + example: "16:00" + } + } + } + } + }, + contactInfo: { + type: "object", + description: "Contact information for the facility", + properties: { + phone: { + type: "string", + description: "Primary phone number", + example: "+1-404-555-0123" + }, + email: { + type: "string", + description: "Primary email address", + example: "ops@atlanta-east.company.com" + }, + manager: { + type: "string", + description: "Facility manager name", + example: "John Smith" + } + } + }, + customFields: { + type: "object", + description: "Additional facility-specific configuration", + example: { + hazmatCertified: true, + securityLevel: "HIGH", + temperatureControlZones: 4, + dockDoors: 24, + certification: [ + "ISO9001", + "SOC2" + ] + } + } + }, + required: [ + "warehouseId", + "dcName" + ] + } + } + } + }, + responses: { 
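// Per the required list above, warehouseId and dcName are the only mandatory fields;
// an illustrative minimal create call (hypothetical base URL as before, worldId as in
// the path parameter example) would be
//
//   await fetch(`http://localhost:7860/${worldId}/wms/distribution-centers`, {
//     method: "POST",
//     headers: { "Content-Type": "application/json" },
//     body: JSON.stringify({
//       warehouseId: "WH_ATL_001",
//       dcName: "Atlanta Fulfillment Center East",
//     }),
//   });
//
// Reusing a dcName within the same warehouse is expected to yield the 409 response below.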
+ "201": { + description: "Distribution center created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDistributionCenter" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid data or missing required fields", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Request body is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "409": { + description: "Conflict - Distribution center with same name already exists", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 409, + error: "Distribution Center with name Atlanta Fulfillment Center East already exists in warehouse", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + get: { + tags: [ + "WMS" + ], + summary: "Get all distribution centers with filtering", + description: "\n## Get All WMS Distribution Centers\n\nRetrieve all distribution centers within a world with comprehensive filtering options for facility management and operational oversight.\n\n### Features\n- **Comprehensive Listing**: Retrieve all distribution centers across warehouses\n- **Warehouse Filtering**: Filter by specific warehouse for facility-specific queries\n- **Type-Based Filtering**: Filter by distribution center types for operational categorization\n- **Status Filtering**: Filter by operational status for active facility management\n- **Multi-Filter Support**: Combine filters for precise facility selection\n- **Sorted Results**: Results sorted alphabetically by DC name for consistent ordering\n\n### Query Parameters\n- **warehouseId**: Optional - Filter by specific warehouse identifier\n- **dcType**: Optional - Array of DC types for type-based filtering\n- **operationalStatus**: Optional - Array of operational statuses for status-based filtering\n\n### Business Logic\n- All query parameters are optional for maximum flexibility\n- Multiple values for dcType and operationalStatus supported as arrays\n- Results include complete facility information for comprehensive management\n- Sorted alphabetically by dcName for consistent presentation\n- Cross-warehouse querying supported when warehouseId not specified\n\n### Use Cases\n- **Facility Management**: Overview of all facilities across operations\n- **Warehouse Coordination**: Facility listing for specific warehouse operations\n- **Status Monitoring**: Active facility monitoring and status tracking\n- **Type-Based Operations**: Operations specific to facility types\n- **Multi-Facility Planning**: Planning across multiple distribution centers\n ", + operationId: "getAllWMSDistributionCenters", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Optional warehouse identifier for filtering", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "dcType", + in: "query", + required: false, + description: "Optional DC type(s) for filtering", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "FULFILLMENT", + 
"CROSS_DOCK", + "COLD_STORAGE", + "3PL" + ] + }, + example: [ + "FULFILLMENT", + "CROSS_DOCK" + ] + }, + style: "form", + explode: true + }, + { + name: "operationalStatus", + in: "query", + required: false, + description: "Optional operational status(es) for filtering", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "MAINTENANCE" + ] + }, + example: [ + "ACTIVE" + ] + }, + style: "form", + explode: true + } + ], + responses: { + "200": { + description: "Distribution centers retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSDistributionCenter" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/distribution-centers/status": { + get: { + tags: [ + "WMS" + ], + summary: "Get distribution centers by operational status", + description: "\n## Get WMS Distribution Centers by Status\n\nRetrieve distribution centers filtered by specific operational statuses for targeted facility management and operational coordination.\n\n### Features\n- **Status-Based Filtering**: Primary filtering by one or more operational statuses\n- **Warehouse Scoping**: Secondary filtering by warehouse for facility-specific queries\n- **Type-Based Refinement**: Additional filtering by DC types for operational categorization\n- **Multi-Status Support**: Query multiple statuses simultaneously for comprehensive oversight\n- **Operational Focus**: Designed for status-driven facility management workflows\n\n### Query Parameters\n- **status**: Required - Array of operational statuses for filtering\n- **warehouseId**: Optional - Warehouse identifier for warehouse-specific filtering\n- **dcType**: Optional - Array of DC types for additional type-based filtering\n\n### Business Logic\n- status parameter is required and supports multiple values as array\n- Empty status array will return no results (intentional design)\n- warehouseId and dcType provide additional filtering refinement\n- Results sorted alphabetically by DC name for consistent presentation\n- Supports operational workflow patterns for status-specific operations\n\n### Operational Status Values\n- **ACTIVE**: Fully operational facilities accepting all operations\n- **INACTIVE**: Temporarily inactive facilities available for activation\n- **MAINTENANCE**: Facilities under maintenance with suspended operations\n\n### Use Cases\n- **Active Facility Management**: List all currently active facilities\n- **Maintenance Coordination**: Identify facilities under maintenance\n- **Status Monitoring**: Monitor facility status across operations\n- **Operational Planning**: Plan operations based on facility availability\n- **Multi-Status Queries**: Complex status-based facility selection\n ", + operationId: "getWMSDistributionCentersByStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "status", + in: "query", + required: true, + description: "Operational 
status(es) for filtering", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "MAINTENANCE" + ] + }, + minItems: 1, + example: [ + "ACTIVE", + "MAINTENANCE" + ] + }, + style: "form", + explode: true + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Optional warehouse identifier for additional filtering", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "dcType", + in: "query", + required: false, + description: "Optional DC type(s) for additional filtering", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "FULFILLMENT", + "CROSS_DOCK", + "COLD_STORAGE", + "3PL" + ] + }, + example: [ + "FULFILLMENT" + ] + }, + style: "form", + explode: true + } + ], + responses: { + "200": { + description: "Distribution centers by status retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSDistributionCenter" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing or invalid parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/distribution-centers/{dcId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get distribution center by ID", + description: "\n## Get WMS Distribution Center by ID\n\nRetrieve a specific distribution center using its unique identifier for detailed facility information and configuration access.\n\n### Features\n- **Direct Access**: Retrieve facility using unique distribution center identifier\n- **Complete Information**: Full facility details including configuration and contact information\n- **Single Facility Focus**: Detailed view of specific facility for management operations\n- **Configuration Access**: Access to complete facility configuration and settings\n\n### Path Parameters\n- **dcId**: Required - Unique identifier for the distribution center\n\n### Business Logic\n- dcId must be a valid, existing distribution center identifier\n- Returns complete facility record with all configuration details\n- Includes operational status, contact information, and custom fields\n- Null response if distribution center is not found\n- Full facility data for comprehensive management operations\n\n### Use Cases\n- **Facility Details**: Access complete facility information for operations\n- **Configuration Review**: Review facility configuration and settings\n- **Contact Information**: Access facility contact details for coordination\n- **Status Verification**: Verify current operational status and configuration\n- **Integration Support**: Provide facility data for integration with other systems\n ", + operationId: "getWMSDistributionCenterById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "dcId", + in: "path", + required: true, + description: "Unique identifier for the distribution center", + schema: { + type: "string", + example: "wms_distribution-center_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Distribution center 
retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDistributionCenter" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and dcId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Distribution center not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Distribution Center wms_distribution-center_674565c1234567890abcdef not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/distribution-centers/{dcId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update distribution center operational status", + description: "\n## Update WMS Distribution Center Status\n\nUpdate the operational status of a distribution center with optional reason tracking for facility management and operational coordination.\n\n### Features\n- **Status Management**: Update operational status for facility management\n- **Reason Tracking**: Optional reason documentation for status changes\n- **Timestamp Tracking**: Automatic timestamp recording for status change audit trail\n- **Validation**: Comprehensive validation of status values and facility existence\n- **Audit Trail**: Complete audit trail through status change tracking\n\n### Operational Status Values\n- **ACTIVE**: Fully operational, accepting all operations and workflows\n- **INACTIVE**: Temporarily inactive, facility available but not processing operations\n- **MAINTENANCE**: Under maintenance, all operations suspended for facility maintenance\n\n### Request Body\n- **status**: Required - New operational status for the facility\n- **reason**: Optional - Reason for status change (recommended for audit trail)\n\n### Business Rules\n- dcId must reference an existing distribution center\n- status must be a valid operational status value\n- Optional reason field is stored for audit trail and compliance\n- Automatic timestamp recording for status change tracking\n- Status change immediately affects facility operational availability\n\n### Use Cases\n- **Maintenance Mode**: Set facility to maintenance status during scheduled maintenance\n- **Activation**: Activate facilities for operational use\n- **Emergency Shutdown**: Quickly disable facility operations during emergencies\n- **Planned Downtime**: Schedule facility downtime with reason documentation\n- **Status Compliance**: Maintain facility status compliance and audit trails\n ", + operationId: "updateWMSDistributionCenterStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "dcId", + in: "path", + required: true, + description: "Unique identifier for the distribution center", + schema: { + type: "string", + example: "wms_distribution-center_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + status: { + 
type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "MAINTENANCE" + ], + description: "New operational status for the facility", + example: "MAINTENANCE" + }, + reason: { + type: "string", + description: "Optional reason for status change", + example: "Scheduled maintenance for HVAC system upgrade" + } + }, + required: [ + "status" + ] + } + } + } + }, + responses: { + "200": { + description: "Distribution center status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDistributionCenter" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters or invalid status", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Distribution center not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Distribution Center wms_distribution-center_674565c1234567890abcdef not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/distribution-centers/{dcId}/capacity": { + get: { + tags: [ + "WMS" + ], + summary: "Get distribution center capacity and utilization", + description: "\n## Get WMS Distribution Center Capacity\n\nRetrieve comprehensive capacity and utilization metrics for a distribution center including space utilization, zone metrics, and operational efficiency indicators.\n\n### Features\n- **Space Utilization**: Complete space utilization metrics and analysis\n- **Zone Metrics**: Total zones and operational zone configuration\n- **Bin Utilization**: Bin occupancy and utilization percentage tracking\n- **Operational Hours**: Complete operating hours schedule for capacity planning\n- **Capacity Planning**: Support for capacity planning and optimization analysis\n- **Real-Time Data**: Current utilization metrics for operational decision-making\n\n### Capacity Metrics\n- **Total Square Footage**: Complete facility square footage for space planning\n- **Total Zones**: Number of operational zones within the facility\n- **Total Bins**: Complete bin count for storage capacity assessment\n- **Occupied Bins**: Currently occupied bins for real-time utilization\n- **Utilization Percentage**: Calculated utilization percentage for capacity monitoring\n\n### Path Parameters\n- **dcId**: Required - Unique identifier for the distribution center\n\n### Business Logic\n- dcId must reference an existing distribution center\n- Utilization metrics calculated from real-time zone and bin data\n- Operating hours included for capacity planning coordination\n- Comprehensive metrics for operational efficiency analysis\n- Future implementation will include real-time zone and bin queries\n\n### Use Cases\n- **Capacity Planning**: Assess current capacity and plan for future growth\n- **Utilization Monitoring**: Monitor real-time facility utilization\n- **Operational Efficiency**: Analyze operational efficiency and optimization opportunities\n- **Space Management**: Optimize space allocation and utilization\n- **Resource Planning**: Plan resource allocation based on capacity metrics\n ", + operationId: 
"getWMSDistributionCenterCapacity", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "dcId", + in: "path", + required: true, + description: "Unique identifier for the distribution center", + schema: { + type: "string", + example: "wms_distribution-center_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Distribution center capacity retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + dcId: { + type: "string", + description: "Distribution center identifier", + example: "wms_distribution-center_674565c1234567890abcdef" + }, + dcName: { + type: "string", + description: "Distribution center name", + example: "Atlanta Fulfillment Center East" + }, + totalSqFootage: { + type: "number", + description: "Total facility square footage", + example: 250000 + }, + utilizationMetrics: { + type: "object", + description: "Comprehensive utilization metrics", + properties: { + totalZones: { + type: "number", + description: "Total operational zones", + example: 12 + }, + totalBins: { + type: "number", + description: "Total bin locations", + example: 5840 + }, + occupiedBins: { + type: "number", + description: "Currently occupied bins", + example: 4672 + }, + utilizationPercentage: { + type: "number", + description: "Facility utilization percentage", + example: 80 + } + } + }, + operationalHours: { + type: "object", + description: "Facility operating hours schedule", + properties: { + monday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + tuesday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + wednesday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + thursday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + friday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + saturday: { + type: "object", + properties: { + open: { + type: "string", + example: "08:00" + }, + close: { + type: "string", + example: "18:00" + } + } + }, + sunday: { + type: "object", + properties: { + open: { + type: "string", + example: "10:00" + }, + close: { + type: "string", + example: "16:00" + } + } + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and dcId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Distribution center not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Distribution Center not found", + meta: { event: "message", timestamp: 
"2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors": { + post: { + tags: [ + "WMS" + ], + summary: "Create new dock door", + description: "\n## Create WMS Dock Door\n\nCreate a new dock door within a warehouse for trailer loading/unloading operations and appointment scheduling.\n\n### Features\n- **Comprehensive Configuration**: Full facility setup including capabilities, equipment, and safety features\n- **Multi-Type Support**: INBOUND, OUTBOUND, or CROSS_DOCK operational modes\n- **Equipment Specifications**: Detailed equipment and capability tracking\n- **Safety Standards**: Emergency stop systems and inspection scheduling\n- **Operating Hours**: Configurable daily operating schedules\n- **Audit Trail**: Automatic creation and modification tracking\n\n### Business Logic\n- Validates required fields: warehouseId, doorNumber, and doorType\n- Prevents duplicate door numbers within the same warehouse\n- Auto-generates unique dockDoorId using WMS service prefix\n- Sets default status to AVAILABLE for immediate scheduling\n- Initializes safety equipment defaults (emergency stop and safety lights enabled)\n- Establishes audit trail for all subsequent modifications\n\n### Use Cases\n- **Facility Setup**: Initial dock door configuration during warehouse establishment\n- **Capacity Expansion**: Add new dock doors to increase warehouse throughput\n- **Equipment Upgrade**: Create new doors with enhanced capabilities\n- **Cross-Dock Operations**: Establish specialized cross-dock facilities\n- **Safety Compliance**: Ensure proper safety equipment configuration\n ", + operationId: "createWMSDockDoor", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "warehouseId", + "doorNumber", + "doorType" + ], + properties: { + warehouseId: { + type: "string", + description: "Warehouse identifier where dock door is located", + example: "wms_warehouse_674565c1234567890abcdef" + }, + doorNumber: { + type: "string", + description: "Physical door number or identifier", + example: "DOCK-01" + }, + doorType: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "CROSS_DOCK" + ], + description: "Operational type of dock door", + example: "INBOUND" + }, + zoneId: { + type: "string", + description: "Zone identifier for dock door location", + example: "wms_zone_674565c1234567890abcdef" + }, + capabilities: { + type: "object", + description: "Physical capabilities and specifications", + properties: { + maxTrailerLength: { + type: "number", + description: "Maximum trailer length in feet", + example: 53 + }, + maxTrailerHeight: { + type: "number", + description: "Maximum trailer height in feet", + example: 13.5 + }, + levelingDock: { + type: "boolean", + description: "Leveling dock capability", + example: true + }, + hydraulicLeveler: { + type: "boolean", + description: "Hydraulic leveling system", + example: true + }, + restraintSystem: { + type: "boolean", + description: "Trailer restraint system", + example: true + }, + weatherSeal: { + type: "boolean", + description: "Weather sealing capability", + example: true + } + } + }, + equipment: { + type: "object", + description: "Available equipment and systems", + properties: { + forkliftAccess: { + type: "boolean", + description: "Forklift access capability", 
+ example: true + }, + conveyorSystem: { + type: "boolean", + description: "Conveyor system availability", + example: false + }, + scales: { + type: "boolean", + description: "Weighing scales availability", + example: true + }, + lightSystem: { + type: "boolean", + description: "Lighting system capability", + example: true + } + } + }, + operatingHours: { + type: "object", + description: "Daily operating hours schedule", + properties: { + monday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + tuesday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + wednesday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + thursday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + friday: { + type: "object", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + saturday: { + type: "object", + properties: { + open: { + type: "string", + example: "08:00" + }, + close: { + type: "string", + example: "18:00" + } + } + }, + sunday: { + type: "object", + properties: { + open: { + type: "string", + example: "10:00" + }, + close: { + type: "string", + example: "16:00" + } + } + } + } + } + }, + example: { + warehouseId: "wms_warehouse_674565c1234567890abcdef", + doorNumber: "DOCK-01", + doorType: "INBOUND", + zoneId: "wms_zone_674565c1234567890abcdef", + capabilities: { + maxTrailerLength: 53, + maxTrailerHeight: 13.5, + levelingDock: true, + hydraulicLeveler: true, + restraintSystem: true, + weatherSeal: true + }, + equipment: { + forkliftAccess: true, + conveyorSystem: false, + scales: true, + lightSystem: true + }, + operatingHours: { + monday: { + open: "06:00", + close: "22:00" + }, + tuesday: { + open: "06:00", + close: "22:00" + }, + wednesday: { + open: "06:00", + close: "22:00" + }, + thursday: { + open: "06:00", + close: "22:00" + }, + friday: { + open: "06:00", + close: "22:00" + }, + saturday: { + open: "08:00", + close: "18:00" + }, + sunday: { + open: "10:00", + close: "16:00" + } + } + } + } + } + } + }, + responses: { + "201": { + description: "Dock door created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDockDoor" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Validation errors", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Warehouse ID, door number, and door type are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "409": { + description: "Conflict - Duplicate door number", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 409, + error: "Dock door with number DOCK-01 already exists in warehouse", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/{doorId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get dock door by ID", + 
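+ // Illustrative sketch (assumed baseUrl): fetching a single dock door by its identifier.
+ //   const res = await fetch(`${baseUrl}/${worldId}/wms/dock-doors/${doorId}`);
+ //   const { success, data } = await res.json(); // on success, data is a WMSDockDoor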
description: "\n## Get WMS Dock Door Details\n\nRetrieve comprehensive information about a specific dock door including current status, appointments, equipment capabilities, and operational configuration.\n\n### Features\n- **Complete Information**: Full dock door configuration and current state\n- **Real-Time Status**: Current appointment and trailer assignments\n- **Equipment Details**: Capabilities, safety equipment, and maintenance schedules\n- **Operating Hours**: Daily schedule for operational planning\n- **Audit Information**: Creation and modification timestamps\n\n### Business Logic\n- doorId must reference an existing dock door within the world\n- Returns comprehensive dock door information including nested objects\n- Includes current appointment details if door is occupied\n- Shows maintenance and safety inspection schedules\n- Provides operating hours for scheduling validation\n\n### Path Parameters\n- **doorId**: Required - Unique identifier for the dock door\n\n### Use Cases\n- **Appointment Planning**: Check door availability and capabilities for scheduling\n- **Maintenance Management**: Review maintenance schedules and safety inspections\n- **Operational Overview**: Get complete door status for warehouse management\n- **Equipment Verification**: Confirm door capabilities for specific trailer requirements\n- **Schedule Coordination**: Verify operating hours for appointment scheduling\n ", + operationId: "getWMSDockDoorById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "doorId", + in: "path", + required: true, + description: "Unique identifier for the dock door", + schema: { + type: "string", + example: "wms_dock-door_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Dock door retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDockDoor" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and dockDoorId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Dock door not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Dock Door wms_dock-door_674565c1234567890abcdef not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/warehouse/{warehouseId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get dock doors by warehouse", + description: "\n## Get WMS Dock Doors by Warehouse\n\nRetrieve filtered list of dock doors within a specific warehouse with advanced filtering capabilities for operational management and scheduling optimization.\n\n### Features\n- **Warehouse-Scoped**: All dock doors within specified warehouse facility\n- **Multi-Filter Support**: Door type, status, and zone filtering\n- **Pagination Support**: Efficient handling of large door inventories\n- **Real-Time Status**: Current availability and occupancy information\n- **Standards 
Compliance**: Structured response format for downstream integration\n\n### Business Logic\n- warehouseId must reference an existing warehouse\n- Supports multiple filter combinations for precise door selection\n- Returns paginated results with total count and navigation metadata\n- doorType and status support multiple values using array format\n- zoneId filters to specific warehouse zones for operational efficiency\n- Default pagination limit applies if not specified\n\n### Path Parameters\n- **warehouseId**: Required - Unique identifier for the warehouse\n\n### Query Parameters\n- **doorType**: Optional - Filter by door type (INBOUND, OUTBOUND, CROSS_DOCK)\n- **status**: Optional - Filter by operational status (AVAILABLE, OCCUPIED, MAINTENANCE, CLOSED)\n- **zoneId**: Optional - Filter by specific zone within warehouse\n\n### Use Cases\n- **Capacity Planning**: Assess total dock door capacity by type and availability\n- **Operational Scheduling**: Find available doors for appointment assignment\n- **Maintenance Management**: Identify doors requiring maintenance attention\n- **Zone Management**: Review door distribution across warehouse zones\n- **Status Monitoring**: Monitor real-time dock door utilization\n ", + operationId: "getWMSDockDoorsByWarehouse", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Unique identifier for the warehouse", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "doorType", + in: "query", + required: false, + description: "Filter by door type(s) - supports multiple values", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "CROSS_DOCK" + ] + } + }, + style: "form", + explode: true, + example: [ + "INBOUND", + "OUTBOUND" + ] + }, + { + name: "status", + in: "query", + required: false, + description: "Filter by operational status(es) - supports multiple values", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "AVAILABLE", + "OCCUPIED", + "MAINTENANCE", + "CLOSED" + ] + } + }, + style: "form", + explode: true, + example: [ + "AVAILABLE" + ] + }, + { + name: "zoneId", + in: "query", + required: false, + description: "Filter by specific zone identifier", + schema: { + type: "string", + example: "wms_zone_674565c1234567890abcdef" + } + } + ], + responses: { + "200": { + description: "Dock doors retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSDockDoor" + } + }, + totalCount: { + type: "number", + description: "Total number of dock doors matching criteria", + example: 12 + }, + limit: { + type: "number", + description: "Maximum items per page", + example: 50 + }, + hasMore: { + type: "boolean", + description: "Whether more items are available", + example: false + }, + nextCursor: { + type: "string", + description: "Cursor for next page navigation", + nullable: true, + example: null + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + 
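+ // Note (illustrative): the 200 payload above is cursor-paginated (items, totalCount, limit,
+ // hasMore, nextCursor). When hasMore is true, nextCursor identifies the next page; the query
+ // parameter that accepts this cursor is not defined in this section, so its name is an assumption.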
success: false, + status: 400, + error: "worldId and warehouseId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/{doorId}/status": { + put: { + tags: [ + "WMS" + ], + summary: "Update dock door status", + description: "\n## Update WMS Dock Door Status\n\nUpdate the operational status of a dock door with optional reason tracking for comprehensive status management and audit trail maintenance.\n\n### Features\n- **Status Lifecycle Management**: AVAILABLE → OCCUPIED → MAINTENANCE → CLOSED transitions\n- **Reason Tracking**: Optional status change reasoning for audit trails\n- **Timestamp Recording**: Automatic status change timestamps\n- **Validation Logic**: Ensures valid status transitions based on business rules\n- **Audit Trail**: Complete history of status modifications with timestamps and reasons\n\n### Business Logic\n- doorId must reference an existing dock door within the world\n- Status must be one of the valid enumerated values\n- Status changes are tracked with automatic timestamps\n- Optional reason field captures business justification for status changes\n- Previous status transitions are maintained in audit trail\n- Concurrent appointment management ensures data consistency\n\n### Path Parameters\n- **doorId**: Required - Unique identifier for the dock door\n\n### Request Body Fields\n- **status**: Required - New operational status (AVAILABLE, OCCUPIED, MAINTENANCE, CLOSED)\n- **reason**: Optional - Explanation for status change\n\n### Business Rules\n- OCCUPIED status typically reserved for doors with active appointments\n- MAINTENANCE status requires coordination with appointment scheduling\n- CLOSED status removes door from all operational scheduling\n- AVAILABLE status enables immediate appointment scheduling\n\n### Use Cases\n- **Operational Management**: Change door status based on operational requirements\n- **Maintenance Coordination**: Mark doors as under maintenance to prevent scheduling\n- **Emergency Closure**: Temporarily close doors due to safety or equipment issues\n- **Capacity Management**: Manage overall dock capacity by controlling door availability\n- **Audit Compliance**: Track status changes with timestamps and reasoning\n ", + operationId: "updateWMSDockDoorStatus", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "doorId", + in: "path", + required: true, + description: "Unique identifier for the dock door", + schema: { + type: "string", + example: "wms_dock-door_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "status" + ], + properties: { + status: { + type: "string", + enum: [ + "AVAILABLE", + "OCCUPIED", + "MAINTENANCE", + "CLOSED" + ], + description: "New operational status for the dock door", + example: "MAINTENANCE" + }, + reason: { + type: "string", + description: "Optional reason for status change", + example: "Scheduled weekly maintenance and safety inspection" + } + }, + example: { + status: "MAINTENANCE", + reason: "Scheduled weekly maintenance and safety inspection" + } + } + } + } + }, + responses: { + "200": { + description: "Dock door status updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: 
"boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDockDoor" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Validation errors", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "status is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "404": { + description: "Not Found - Dock door not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Dock Door wms_dock-door_674565c1234567890abcdef not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/{doorId}/assign": { + put: { + tags: [ + "WMS" + ], + summary: "Assign appointment to dock door", + description: "\n## Assign Appointment to WMS Dock Door\n\nAssign a trailer appointment to an available dock door, automatically updating door status and establishing operational schedule coordination.\n\n### Features\n- **Appointment Assignment**: Assign scheduled appointments to available dock doors\n- **Automatic Status Management**: Changes door status from AVAILABLE to OCCUPIED\n- **Schedule Coordination**: Validates appointment timing and door availability\n- **Carrier Integration**: Links carrier and trailer information to door operations\n- **Time Management**: Tracks start time and expected completion for scheduling\n- **Conflict Prevention**: Ensures doors are available before assignment\n\n### Business Logic\n- doorId must reference an existing dock door within the world\n- Door must be in AVAILABLE status for assignment (enforced at repository level)\n- appointmentId is required and must be unique within appointment system\n- Assignment automatically changes door status to OCCUPIED\n- Previous appointment data is cleared before new assignment\n- Start time and expected end time establish operational schedule\n\n### Path Parameters\n- **doorId**: Required - Unique identifier for the dock door\n\n### Request Body Fields\n- **appointmentId**: Required - Unique appointment identifier from scheduling system\n- **carrier**: Required - Carrier company name or identifier\n- **trailerNumber**: Required - Trailer identification number\n- **startTime**: Required - Scheduled appointment start time\n- **expectedEndTime**: Required - Expected completion time for planning\n\n### Business Rules\n- Door must be AVAILABLE for new appointments\n- Concurrent appointments on same door are prevented\n- Assignment coordinates with TMS trailer management\n- Time windows support operational planning and resource allocation\n\n### Use Cases\n- **Appointment Scheduling**: Assign confirmed appointments to available dock doors\n- **Operational Coordination**: Link trailers with specific dock facilities\n- **Resource Management**: Optimize dock door utilization through strategic assignment\n- **Schedule Optimization**: Coordinate appointment timing with dock availability\n- **Carrier Services**: Provide carriers with specific dock assignments for deliveries\n ", + operationId: "assignWMSAppointmentToDoor", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "doorId", + in: "path", + required: true, + description: "Unique 
identifier for the dock door", + schema: { + type: "string", + example: "wms_dock-door_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "appointmentId", + "carrier", + "trailerNumber", + "startTime", + "expectedEndTime" + ], + properties: { + appointmentId: { + type: "string", + description: "Unique identifier for the appointment", + example: "tms_appointment_674565c1234567890abcdef" + }, + carrier: { + type: "string", + description: "Carrier company name or identifier", + example: "Swift Transportation" + }, + trailerNumber: { + type: "string", + description: "Trailer identification number", + example: "TRL-98765" + }, + startTime: { + type: "string", + format: "date-time", + description: "Scheduled appointment start time", + example: "2024-11-27T09:00:00Z" + }, + expectedEndTime: { + type: "string", + format: "date-time", + description: "Expected completion time", + example: "2024-11-27T13:00:00Z" + } + }, + example: { + appointmentId: "tms_appointment_674565c1234567890abcdef", + carrier: "Swift Transportation", + trailerNumber: "TRL-98765", + startTime: "2024-11-27T09:00:00Z", + expectedEndTime: "2024-11-27T13:00:00Z" + } + } + } + } + }, + responses: { + "200": { + description: "Appointment assigned successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDockDoor" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Validation errors", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "appointment data with appointmentId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + }, + "422": { + description: "Unprocessable Entity - Door not available", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 422, + error: "Dock door not available for assignment", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/{doorId}/clear": { + put: { + tags: [ + "WMS" + ], + summary: "Clear appointment from dock door", + description: "\n## Clear Appointment from WMS Dock Door\n\nClear the current appointment from a dock door upon completion or cancellation, returning the door to available status for new scheduling.\n\n### Features\n- **Appointment Completion**: Clear completed appointments from dock doors\n- **Status Reset**: Automatically returns door status to AVAILABLE\n- **Completion Notes**: Optional notes for appointment closure documentation\n- **History Tracking**: Maintains completion timestamps and notes for audit\n- **Resource Liberation**: Frees dock door for new appointment scheduling\n\n### Business Logic\n- doorId must reference an existing dock door within the world\n- Clears currentAppointment object and resets door status to AVAILABLE\n- Optional completionNotes captured for operational documentation\n- lastAppointmentNotes and lastAppointmentCompleted fields updated for history\n- Door becomes immediately available for new appointment assignment\n\n### Path Parameters\n- **doorId**: Required - Unique identifier for the dock door\n\n### Request Body Fields\n- **completionNotes**: Optional - Notes about appointment completion or 
issues\n\n### Business Rules\n- Clearing appointment makes door immediately available for new scheduling\n- Completion notes provide operational feedback for continuous improvement\n- Historical appointment data preserved for reporting and analysis\n- Timestamp tracking enables utilization and efficiency metrics\n\n### Use Cases\n- **Appointment Completion**: Mark appointments as completed and free dock doors\n- **Operational Documentation**: Record completion notes for process improvement\n- **Schedule Management**: Clear doors for immediate reassignment\n- **Resource Optimization**: Maximize dock door utilization through quick turnaround\n- **Quality Control**: Document any issues or observations during appointment completion\n ", + operationId: "clearWMSAppointmentFromDoor", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "doorId", + in: "path", + required: true, + description: "Unique identifier for the dock door", + schema: { + type: "string", + example: "wms_dock-door_674565c1234567890abcdef" + } + } + ], + requestBody: { + required: false, + content: { + "application/json": { + schema: { + type: "object", + properties: { + completionNotes: { + type: "string", + description: "Optional notes about appointment completion", + example: "Unloading completed successfully. Minor delay due to trailer seal issues." + } + }, + example: { + completionNotes: "Unloading completed successfully. Minor delay due to trailer seal issues." + } + } + } + } + }, + responses: { + "200": { + description: "Appointment cleared successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + $ref: "#/components/schemas/WMSDockDoor" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and dockDoorId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/available": { + get: { + tags: [ + "WMS" + ], + summary: "Get available dock doors", + description: "\n## Get Available WMS Dock Doors\n\nRetrieve dock doors available for appointment scheduling with advanced filtering by capabilities, location, and time slots for optimal assignment coordination.\n\n### Features\n- **Availability Filtering**: Find doors with AVAILABLE status for immediate scheduling\n- **Capability Matching**: Filter by trailer length, leveling dock, and restraint systems\n- **Time Slot Validation**: Check availability for specific time periods\n- **Zone-Based Search**: Locate doors within specific warehouse zones\n- **Equipment Requirements**: Match doors with required equipment and capabilities\n- **Real-Time Status**: Current availability based on active appointments\n\n### Business Logic\n- warehouseId and doorType are required for warehouse-scoped searches\n- Availability determined by AVAILABLE status and no conflicting appointments\n- Time slot filtering validates against current appointment schedules\n- Capability filters ensure doors meet trailer and equipment requirements\n- Zone filtering supports location-based optimization within warehouses\n- Returns doors immediately ready for 
appointment assignment\n\n### Path Parameters\n- **warehouseId**: Required - Unique identifier for the warehouse (in route context)\n- **doorType**: Required - Type of door operations (INBOUND, OUTBOUND, CROSS_DOCK)\n\n### Query Parameters\n- **zoneId**: Optional - Filter by specific zone within warehouse\n- **maxTrailerLength**: Optional - Minimum trailer length capability required\n- **levelingDock**: Optional - Require leveling dock capability\n- **restraintSystem**: Optional - Require trailer restraint system\n- **startTime**: Optional - Availability window start time (ISO 8601)\n- **endTime**: Optional - Availability window end time (ISO 8601)\n\n### Use Cases\n- **Appointment Scheduling**: Find suitable doors for incoming trailer appointments\n- **Capability Matching**: Locate doors with specific equipment requirements\n- **Time Management**: Check door availability for specific time periods\n- **Operational Planning**: Identify available capacity for scheduling optimization\n- **Resource Allocation**: Match doors to trailer requirements for efficient operations\n ", + operationId: "getWMSAvailableDockDoors", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: true, + description: "Warehouse identifier for door search scope", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "doorType", + in: "query", + required: true, + description: "Type of dock door operations required", + schema: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "CROSS_DOCK" + ], + example: "INBOUND" + } + }, + { + name: "zoneId", + in: "query", + required: false, + description: "Filter by specific zone within warehouse", + schema: { + type: "string", + example: "wms_zone_674565c1234567890abcdef" + } + }, + { + name: "maxTrailerLength", + in: "query", + required: false, + description: "Minimum trailer length capability required (feet)", + schema: { + type: "number", + example: 53 + } + }, + { + name: "levelingDock", + in: "query", + required: false, + description: "Require leveling dock capability", + schema: { + type: "boolean", + example: true + } + }, + { + name: "restraintSystem", + in: "query", + required: false, + description: "Require trailer restraint system", + schema: { + type: "boolean", + example: true + } + }, + { + name: "startTime", + in: "query", + required: false, + description: "Availability window start time (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T09:00:00Z" + } + }, + { + name: "endTime", + in: "query", + required: false, + description: "Availability window end time (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T17:00:00Z" + } + } + ], + responses: { + "200": { + description: "Available dock doors retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "array", + items: { + $ref: "#/components/schemas/WMSDockDoor" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId, warehouseId, and doorType are required", + 
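+ // Illustrative query (assumed baseUrl): INBOUND doors in a warehouse that can take a 53 ft
+ // trailer with a restraint system, free during a given window:
+ //   GET `${baseUrl}/${worldId}/wms/dock-doors/available?warehouseId=wms_warehouse_674565c1234567890abcdef`
+ //       + `&doorType=INBOUND&maxTrailerLength=53&restraintSystem=true`
+ //       + `&startTime=2024-11-27T09:00:00Z&endTime=2024-11-27T17:00:00Z`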
meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/utilization": { + get: { + tags: [ + "WMS" + ], + summary: "Get dock door utilization metrics", + description: "\n## Get WMS Dock Door Utilization Metrics\n\nRetrieve comprehensive utilization analytics for dock doors within a warehouse, including status distribution, type breakdown, and operational efficiency metrics.\n\n### Features\n- **Comprehensive Metrics**: Total doors, status distribution, and utilization percentages\n- **Type Analysis**: Breakdown by door type (INBOUND, OUTBOUND, CROSS_DOCK)\n- **Time-Based Filtering**: Historical utilization analysis for specified date ranges\n- **Operational Insights**: Real-time efficiency and capacity utilization metrics\n- **Performance Tracking**: Monitor dock door productivity and identify optimization opportunities\n\n### Business Logic\n- warehouseId scopes utilization analysis to specific facility\n- Status aggregation provides real-time capacity management insights\n- doorType filtering enables type-specific utilization analysis\n- Date range filtering supports historical trend analysis and reporting\n- Utilization percentages calculated as (occupied doors / total doors) * 100\n- Type-specific breakdown enables targeted operational improvements\n\n### Path Parameters\n- **warehouseId**: Required - Unique identifier for the warehouse (in route context)\n\n### Query Parameters\n- **doorType**: Optional - Filter by specific door type(s)\n- **dateStart**: Optional - Start date for historical utilization analysis\n- **dateEnd**: Optional - End date for historical utilization analysis\n\n### Business Metrics\n- **Total Doors**: Complete inventory of dock doors in warehouse\n- **Status Distribution**: Count of doors by operational status\n- **Utilization Percentage**: Overall facility dock utilization efficiency\n- **Type Breakdown**: Door type specific utilization and productivity metrics\n\n### Use Cases\n- **Capacity Planning**: Assess current dock door utilization for expansion planning\n- **Operational Efficiency**: Monitor real-time dock door productivity\n- **Performance Analysis**: Analyze historical trends and identify improvement opportunities\n- **Resource Optimization**: Balance door assignments across types and zones\n- **Management Reporting**: Generate utilization reports for stakeholder analysis\n ", + operationId: "getWMSDockDoorUtilization", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "query", + required: true, + description: "Warehouse identifier for utilization analysis scope", + schema: { + type: "string", + example: "wms_warehouse_674565c1234567890abcdef" + } + }, + { + name: "doorType", + in: "query", + required: false, + description: "Filter by door type(s) for type-specific analysis", + schema: { + type: "array", + items: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "CROSS_DOCK" + ] + } + }, + style: "form", + explode: true, + example: [ + "INBOUND", + "OUTBOUND" + ] + }, + { + name: "dateStart", + in: "query", + required: false, + description: "Start date for historical utilization analysis", + schema: { + type: "string", + format: "date-time", + example: "2024-11-20T00:00:00Z" + } + }, + { + name: "dateEnd", + in: "query", + required: false, + description: "End date for historical 
utilization analysis", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T23:59:59Z" + } + } + ], + responses: { + "200": { + description: "Dock door utilization metrics retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "object", + properties: { + totalDoors: { + type: "number", + description: "Total number of dock doors in warehouse", + example: 12 + }, + availableDoors: { + type: "number", + description: "Number of doors currently available", + example: 7 + }, + occupiedDoors: { + type: "number", + description: "Number of doors currently occupied", + example: 4 + }, + maintenanceDoors: { + type: "number", + description: "Number of doors under maintenance", + example: 1 + }, + utilizationPercentage: { + type: "number", + description: "Overall utilization percentage", + example: 33.33 + }, + utilizationByType: { + type: "array", + description: "Utilization breakdown by door type", + items: { + type: "object", + properties: { + doorType: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "CROSS_DOCK" + ], + example: "INBOUND" + }, + totalDoors: { + type: "number", + description: "Total doors of this type", + example: 8 + }, + occupiedDoors: { + type: "number", + description: "Occupied doors of this type", + example: 3 + }, + utilizationPercentage: { + type: "number", + description: "Type-specific utilization percentage", + example: 37.5 + } + } + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId and warehouseId are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/dock-doors/schedule": { + get: { + tags: [ + "WMS" + ], + summary: "Get dock door schedule", + description: "\n## Get WMS Dock Door Schedule\n\nRetrieve appointment schedule for a specific dock door within a date range, providing comprehensive scheduling information for operational coordination.\n\n### Features\n- **Schedule Visibility**: Complete appointment schedule for specified dock door\n- **Date Range Filtering**: Appointments within specified time periods\n- **Appointment Details**: Carrier, trailer, timing, and status information\n- **Operational Coordination**: Support for appointment management and resource planning\n- **Schedule Conflicts**: Identify potential scheduling conflicts and overlaps\n\n### Business Logic\n- doorId must reference an existing dock door within the world\n- from and to parameters define the schedule query date range\n- Returns appointments with start times falling within the specified range\n- Includes appointment type classification and current status\n- Handles cases where doors have no appointments (empty array)\n- Future implementation will support complex appointment scheduling systems\n\n### Path Parameters\n- **doorId**: Required - Unique identifier for the dock door (in route context)\n\n### Query Parameters\n- **from**: Required - Schedule start date and time (ISO 8601)\n- **to**: Required - Schedule end date and time (ISO 8601)\n\n### Response Fields\n- **appointmentId**: Unique appointment identifier for tracking\n- **carrierName**: Carrier company responsible for the appointment\n- **trailerNumber**: Trailer identification for 
operational coordination\n- **scheduledArrival**: Planned arrival time for resource preparation\n- **appointmentType**: Classification of appointment (SCHEDULED, EMERGENCY, etc.)\n- **status**: Current appointment status for operational awareness\n\n### Use Cases\n- **Schedule Management**: View comprehensive appointment schedules for planning\n- **Resource Coordination**: Prepare resources based on scheduled appointments\n- **Conflict Resolution**: Identify and resolve scheduling conflicts\n- **Operational Planning**: Coordinate warehouse activities with appointment timing\n- **Performance Monitoring**: Track appointment adherence and operational efficiency\n ", + operationId: "getWMSDockDoorSchedule", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "dockDoorId", + in: "query", + required: true, + description: "Unique identifier for the dock door", + schema: { + type: "string", + example: "wms_dock-door_674565c1234567890abcdef" + } + }, + { + name: "from", + in: "query", + required: true, + description: "Schedule start date and time (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T00:00:00Z" + } + }, + { + name: "to", + in: "query", + required: true, + description: "Schedule end date and time (ISO 8601)", + schema: { + type: "string", + format: "date-time", + example: "2024-11-27T23:59:59Z" + } + } + ], + responses: { + "200": { + description: "Dock door schedule retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + data: { + type: "array", + items: { + type: "object", + properties: { + appointmentId: { + type: "string", + description: "Unique appointment identifier", + example: "tms_appointment_674565c1234567890abcdef" + }, + carrierName: { + type: "string", + description: "Carrier company name", + example: "Swift Transportation" + }, + trailerNumber: { + type: "string", + description: "Trailer identification number", + example: "TRL-98765" + }, + scheduledArrival: { + type: "string", + format: "date-time", + description: "Scheduled appointment arrival time", + example: "2024-11-27T09:00:00Z" + }, + appointmentType: { + type: "string", + description: "Type of appointment", + example: "SCHEDULED" + }, + status: { + type: "string", + description: "Current appointment status", + example: "OCCUPIED" + } + } + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Missing required parameters", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "from and to date parameters are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones": { + post: { + tags: [ + "WMS" + ], + summary: "Create new warehouse zone", + description: "\nCreate a new warehouse zone with specified configuration and capacity settings.\n\n**Core Features**:\n- **Zone Organization**: Define zone type, capacity, and temperature controls\n- **Aisle Management**: Configure initial aisle assignments for zone layout\n- **Auto-Generated Codes**: Automatic zoneId and zoneCode generation from zoneName\n\n**Use Cases**:\n- **Warehouse Setup**: Initial zone configuration during warehouse setup\n- **Zone Expansion**: Add new zones 
to accommodate growth\n- **Specialized Areas**: Create temperature-controlled or purpose-specific zones\n ", + operationId: "createZone", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "warehouseId", + "zoneName" + ], + properties: { + warehouseId: { + type: "string", + description: "Warehouse identifier", + example: "WH_ATL_001" + }, + zoneName: { + type: "string", + description: "Human readable zone name", + example: "Picking Zone A" + }, + zoneType: { + type: "string", + enum: [ + "RECEIVING", + "STORAGE", + "PICKING", + "PACKING", + "SHIPPING", + "STAGING", + "QC", + "RETURNS" + ], + description: "Zone type classification", + example: "PICKING" + }, + temperatureControlled: { + type: "boolean", + description: "Temperature control flag", + example: false + }, + temperatureRange: { + type: "object", + description: "Temperature configuration for controlled zones", + properties: { + min: { + type: "number", + example: 32 + }, + max: { + type: "number", + example: 40 + }, + unit: { + type: "string", + example: "Fahrenheit" + } + } + }, + capacityCubicFeet: { + type: "number", + description: "Zone storage capacity", + example: 50000 + }, + aisles: { + type: "array", + description: "Initial aisle configuration", + items: { + type: "object", + properties: { + aisleId: { + type: "string", + example: "AISLE_001" + }, + aisleCode: { + type: "string", + example: "A1" + }, + aisleType: { + type: "string", + example: "STANDARD" + } + } + } + }, + customFields: { + type: "object", + description: "Additional custom data" + } + } + } + } + } + }, + responses: { + "201": { + description: "Zone created successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 201 + }, + message: { + type: "string", + example: "Zone created successfully" + }, + data: { + $ref: "#/components/schemas/WMSZone" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid zone data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "Zone name and warehouse ID are required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones/warehouse/{warehouseId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get zones by warehouse", + description: "\nRetrieve all zones within a specific warehouse with optional filtering capabilities.\n\n**Core Features**:\n- **Warehouse Scoping**: Get all zones within specified warehouse\n- **Type Filtering**: Filter by specific zone types\n- **Temperature Filtering**: Filter by temperature control requirements\n- **Paginated Results**: Efficient handling of large zone datasets\n\n**Use Cases**:\n- **Zone Overview**: Get complete zone listing for warehouse management\n- **Type-Specific Operations**: Find zones for specific operational needs \n- **Temperature Management**: Identify climate-controlled storage areas\n ", + operationId: "getZonesByWarehouse", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + 
type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse identifier", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "zoneType", + in: "query", + required: false, + description: "Filter by zone types (comma-separated)", + schema: { + type: "array", + items: { + type: "string" + }, + example: [ + "PICKING", + "STORAGE" + ] + } + }, + { + name: "temperatureControlled", + in: "query", + required: false, + description: "Filter by temperature control requirement", + schema: { + type: "boolean", + example: false + } + } + ], + responses: { + "200": { + description: "Zones retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Zones retrieved successfully" + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSZone" + } + }, + totalCount: { + type: "integer", + example: 25 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + example: null + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones/warehouse/{warehouseId}/code/{zoneCode}": { + get: { + tags: [ + "WMS" + ], + summary: "Get zone by code", + description: "\n## Get Zone by Code\n\nRetrieve zone by unique zone code within warehouse scope for direct zone access.\n\n**Core Features**:\n- **Direct Access**: Get zone by auto-generated zone code\n- **Code-based Lookup**: Fast retrieval using slugified zone name\n- **Complete Data**: Returns full zone configuration\n- **Scoped Lookup**: Ensures zone belongs to specified warehouse\n\n**Use Cases**:\n- **Code Integration**: Access zones via human-readable codes\n- **System Integration**: External system references using zone codes\n- **URL-friendly Access**: Use slugified codes in user interfaces\n ", + operationId: "getZoneByCode", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "warehouseId", + in: "path", + required: true, + description: "Warehouse identifier", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "zoneCode", + in: "path", + required: true, + description: "Zone code identifier", + schema: { + type: "string", + example: "picking-zone-a" + } + } + ], + responses: { + "200": { + description: "Zone retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Zone retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSZone" + } + } + } + } + } + }, + "404": { + description: "Not Found - Zone not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Zone picking-zone-a not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones/{zoneId}": { + get: { + tags: [ + "WMS" + ], + summary: "Get zone by ID", + description: "\nRetrieve 
zone by unique zone identifier for direct zone access and management.\n\n**Core Features**:\n- **Direct Access**: Get zone by unique zoneId\n- **Complete Data**: Returns full zone configuration including aisles\n- **Fast Lookup**: Optimized query using indexed zoneId field\n\n**Use Cases**:\n- **Zone Details**: Get complete zone information for management\n- **Reference Resolution**: Resolve zone references from other operations\n- **Configuration Review**: Access zone settings for updates\n ", + operationId: "getZoneById", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneId", + in: "path", + required: true, + description: "Zone unique identifier", + schema: { + type: "string", + example: "ZNE_507f1f77bcf86cd799439012" + } + } + ], + responses: { + "200": { + description: "Zone retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Zone retrieved successfully" + }, + data: { + $ref: "#/components/schemas/WMSZone" + } + } + } + } + } + }, + "404": { + description: "Not Found - Zone not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Zone ZNE_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + }, + put: { + tags: [ + "WMS" + ], + summary: "Update zone configuration", + description: "\nUpdate zone configuration with partial data for operational adjustments.\n\n**Core Features**:\n- **Partial Updates**: Update specific zone fields without replacing entire record\n- **Configuration Changes**: Modify capacity, temperature settings, and type\n- **Validation**: Ensures data consistency during updates\n\n**Use Cases**:\n- **Capacity Adjustments**: Update storage capacity based on operational changes\n- **Type Changes**: Convert zone purposes based on operational needs\n- **Temperature Updates**: Modify climate control settings\n ", + operationId: "updateZone", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneId", + in: "path", + required: true, + description: "Zone unique identifier", + schema: { + type: "string", + example: "ZNE_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + zoneName: { + type: "string", + description: "Updated zone name", + example: "Enhanced Picking Zone A" + }, + zoneType: { + type: "string", + enum: [ + "RECEIVING", + "STORAGE", + "PICKING", + "PACKING", + "SHIPPING", + "STAGING", + "QC", + "RETURNS" + ], + description: "Updated zone type", + example: "STORAGE" + }, + temperatureControlled: { + type: "boolean", + description: "Updated temperature control flag", + example: true + }, + temperatureRange: { + type: "object", + description: "Updated temperature configuration", + properties: { + min: { + type: "number", + example: 35 + }, + max: { + type: "number", + example: 40 + }, + unit: { + type: "string", + example: "Fahrenheit" + } + 
} + }, + capacityCubicFeet: { + type: "number", + description: "Updated zone capacity", + example: 75000 + }, + customFields: { + type: "object", + description: "Updated custom data" + } + } + } + } + } + }, + responses: { + "200": { + description: "Zone updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Zone updated successfully" + }, + data: { + $ref: "#/components/schemas/WMSZone" + } + } + } + } + } + }, + "404": { + description: "Not Found - Zone not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "Zone ZNE_507f1f77bcf86cd799439012 not found", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones/type/{zoneType}": { + get: { + tags: [ + "WMS" + ], + summary: "Get zones by type", + description: "\nRetrieve zones filtered by specific zone type across warehouse environments.\n\n**Core Features**:\n- **Type Filtering**: Get all zones of specific operational type\n- **Cross-Warehouse Search**: Optional warehouse filtering for targeted results \n- **Paginated Results**: Efficient handling of large zone datasets\n\n**Use Cases**:\n- **Operational Planning**: Find all zones for specific operational activities\n- **Resource Allocation**: Identify zones available for particular functions\n- **Capacity Planning**: Assess type-specific storage capabilities\n ", + operationId: "getZonesByType", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneType", + in: "path", + required: true, + description: "Zone type filter", + schema: { + type: "string", + enum: [ + "RECEIVING", + "STORAGE", + "PICKING", + "PACKING", + "SHIPPING", + "STAGING", + "QC", + "RETURNS" + ], + example: "PICKING" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Optional warehouse filter", + schema: { + type: "string", + example: "WH_ATL_001" + } + } + ], + responses: { + "200": { + description: "Zones retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Zones retrieved successfully" + }, + data: { + type: "object", + properties: { + items: { + type: "array", + items: { + $ref: "#/components/schemas/WMSZone" + } + }, + totalCount: { + type: "integer", + example: 8 + }, + limit: { + type: "integer", + example: 50 + }, + hasMore: { + type: "boolean", + example: false + }, + nextCursor: { + type: "string", + example: null + } + } + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones/{zoneId}/aisles": { + post: { + tags: [ + "WMS" + ], + summary: "Add aisle to zone", + description: "\nAdd aisle configuration to existing zone for layout management and organization.\n\n**Core Features**:\n- **Aisle Management**: Add new aisles to zone layout\n- **Configuration Tracking**: Maintain aisle type and identification\n- **Array Operations**: Uses MongoDB $push for aisle array management\n\n**Use Cases**:\n- **Zone Expansion**: Add aisles as zone grows\n- 
**Layout Updates**: Modify zone organization structure\n- **Configuration Management**: Maintain accurate zone layout data\n ", + operationId: "addAisleToZone", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneId", + in: "path", + required: true, + description: "Zone unique identifier", + schema: { + type: "string", + example: "ZNE_507f1f77bcf86cd799439012" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + required: [ + "aisleId", + "aisleCode", + "aisleType" + ], + properties: { + aisleId: { + type: "string", + description: "Aisle identifier", + example: "AISLE_A1_001" + }, + aisleCode: { + type: "string", + description: "Aisle code", + example: "A1" + }, + aisleType: { + type: "string", + description: "Aisle type", + example: "STANDARD" + } + } + } + } + } + }, + responses: { + "200": { + description: "Aisle added successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Aisle added successfully" + }, + data: { + $ref: "#/components/schemas/WMSZone" + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid aisle data provided", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "aisle data with aisleId is required", + meta: { event: "message", timestamp: "2024-01-15T10:30:00.123Z" } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones/{zoneId}/aisles/{aisleId}": { + delete: { + tags: [ + "WMS" + ], + summary: "Remove aisle from zone", + description: "\nRemove specific aisle from zone configuration for layout management.\n\n**Core Features**:\n- **Aisle Removal**: Remove specific aisles from zone layout\n- **Configuration Cleanup**: Maintain accurate zone organization\n- **Array Operations**: Uses MongoDB $pull for aisle array management\n\n**Use Cases**:\n- **Layout Optimization**: Remove unused or inefficient aisles\n- **Zone Restructuring**: Modify zone organization for operational efficiency\n- **Configuration Maintenance**: Keep zone layout data current\n ", + operationId: "removeAisleFromZone", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneId", + in: "path", + required: true, + description: "Zone unique identifier", + schema: { + type: "string", + example: "ZNE_507f1f77bcf86cd799439012" + } + }, + { + name: "aisleId", + in: "path", + required: true, + description: "Aisle identifier to remove", + schema: { + type: "string", + example: "AISLE_A1_001" + } + } + ], + responses: { + "200": { + description: "Aisle removed successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Aisle removed successfully" + }, + data: { + $ref: "#/components/schemas/WMSZone" + } + } + } + } + } + } + } + } + }, + "/{worldId}/wms/zones/{zoneId}/capacity-utilization": { + get: { + tags: [ + "WMS" + ], + 
summary: "Get zone capacity utilization", + description: "\n## Get Zone Capacity Utilization\n\nGet capacity utilization metrics for specific zone(s) including bin counts and capacity calculations.\n\n**Core Features**:\n- **Utilization Metrics**: Calculate used vs total capacity\n- **Bin Statistics**: Count available and occupied bins\n- **Zone Filtering**: Filter by specific zone ID or zone type\n- **Warehouse Scope**: Calculate metrics within specific warehouse\n\n**Use Cases**:\n- **Capacity Planning**: Identify zones nearing full capacity\n- **Storage Optimization**: Find underutilized zones for consolidation\n- **Operational Reporting**: Track storage efficiency metrics\n ", + operationId: "getZoneCapacityUtilization", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "zoneId", + in: "path", + required: true, + description: "⚠️ Zone identifier - currently ignored by controller", + schema: { + type: "string", + example: "ZNE_507f1f77bcf86cd799439012" + } + }, + { + name: "warehouseId", + in: "query", + required: false, + description: "Warehouse filter", + schema: { + type: "string", + example: "WH_ATL_001" + } + }, + { + name: "zoneIds", + in: "query", + required: false, + description: "Multiple zone filter (comma-separated)", + schema: { + type: "array", + items: { + type: "string" + }, + example: [ + "ZNE_001", + "ZNE_002" + ] + } + }, + { + name: "zoneType", + in: "query", + required: false, + description: "Zone type filter (comma-separated)", + schema: { + type: "array", + items: { + type: "string" + }, + example: [ + "PICKING", + "STORAGE" + ] + } + } + ], + responses: { + "200": { + description: "⚠️ Returns utilization for ALL zones, not specific zone", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + message: { + type: "string", + example: "Zone capacity utilization retrieved successfully" + }, + data: { + type: "array", + items: { + type: "object", + properties: { + zoneId: { + type: "string", + example: "ZNE_507f1f77bcf86cd799439012" + }, + zoneCode: { + type: "string", + example: "picking-zone-a" + }, + zoneName: { + type: "string", + example: "Picking Zone A" + }, + zoneType: { + type: "string", + example: "PICKING" + }, + totalCapacity: { + type: "number", + example: 0, + description: "⚠️ Hardcoded to 0" + }, + usedCapacity: { + type: "number", + example: 0, + description: "⚠️ Hardcoded to 0" + }, + utilizationPercentage: { + type: "number", + example: 0, + description: "⚠️ Hardcoded to 0" + }, + binCount: { + type: "number", + example: 0, + description: "⚠️ Hardcoded to 0" + }, + availableBinCount: { + type: "number", + example: 0, + description: "⚠️ Hardcoded to 0" + } + } + } + } + } + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/paths/world.paths.ts b/packages/controlmart/src/docs/paths/world.paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..b5c8ad02f1e5f99292ea0a896ccef567d85b0015 --- /dev/null +++ b/packages/controlmart/src/docs/paths/world.paths.ts @@ -0,0 +1,2063 @@ +export const worldPaths = { + "/world": { + get: { + tags: [ + "World" + ], + summary: "List all worlds with filtering and pagination", + description: "\n## List All Worlds\n\nRetrieve a paginated list of all world environments with optional 
filtering capabilities.\n\n### Features\n- **Pagination**: Cursor-based pagination for efficient data retrieval\n- **Search**: Text search across world names\n- **Default Filter**: Filter for default worlds\n- **Company Filter**: Filter by associated MPC company\n\n### Use Cases\n- Admin dashboard world management\n- World selection interfaces\n- System monitoring and analytics\n- Multi-tenant environment oversight\n- Default world identification\n\n### Pagination\nUses cursor-based pagination for optimal performance:\n- **cursor**: Use the _id from the last item in the previous page\n- **limit**: Control the number of results per page (max 20)\n- **nextCursor**: Provided in response for subsequent pages\n ", + operationId: "listWorlds", + parameters: [ + { + name: "is_default", + in: "query", + required: false, + description: "Filter worlds by default status", + schema: { + type: "boolean", + example: true + } + }, + { + name: "mpcCompany", + in: "query", + required: false, + description: "Filter worlds by MPC company identifier", + schema: { + type: "string", + example: "company_skyfall_123" + } + }, + { + name: "search", + in: "query", + required: false, + description: "Search worlds by name (case-insensitive)", + schema: { + type: "string", + example: "production" + } + }, + { + name: "cursor", + in: "query", + required: false, + description: "Pagination cursor from previous page", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + }, + { + name: "limit", + in: "query", + required: false, + description: "Maximum number of worlds to return (max: 20)", + schema: { + type: "integer", + minimum: 1, + maximum: 20, + default: 20, + example: 10 + } + } + ], + responses: { + "200": { + description: "Successfully retrieved worlds", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + worlds: { + type: "array", + items: { + $ref: "#/components/schemas/World" + }, + description: "Array of world objects" + }, + nextCursor: { + type: "string", + nullable: true, + description: "Cursor for next page of results", + example: "507f1f77bcf86cd799439012" + } + }, + required: [ + "worlds" + ] + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + examples: { + allWorlds: { + summary: "List all worlds", + value: { + success: true, + status: 200, + data: { + worlds: [ + { + _id: "507f1f77bcf86cd799439011", + name: "Production Environment", + url: "production-environment", + apiKey: "prod_api_key_123", + apiSecret: "prod_secret_456", + is_default: true, + description: "Main production environment for live operations", + mpcCompany: "company_skyfall_123", + createdAt: "2024-01-15T09:00:00.000Z", + updatedAt: "2024-01-15T10:30:00.000Z" + }, + { + _id: "507f1f77bcf86cd799439012", + name: "Development Environment", + url: "development-environment", + apiKey: "dev_api_key_789", + apiSecret: "dev_secret_abc", + is_default: false, + description: "Development environment for testing new features", + mpcCompany: "company_skyfall_dev_456", + createdAt: "2024-01-15T08:00:00.000Z", + updatedAt: "2024-01-15T09:15:00.000Z" + } + ], + nextCursor: "507f1f77bcf86cd799439013" + }, + meta: { + event: 
"message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + defaultOnly: { + summary: "List default worlds only", + value: { + success: true, + status: 200, + data: { + worlds: [ + { + _id: "507f1f77bcf86cd799439011", + name: "Production Environment", + url: "production-environment", + apiKey: "prod_api_key_123", + apiSecret: "prod_secret_456", + is_default: true, + description: "Main production environment for live operations", + mpcCompany: "company_skyfall_123", + createdAt: "2024-01-15T09:00:00.000Z", + updatedAt: "2024-01-15T10:30:00.000Z" + } + ], + nextCursor: null + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + }, + post: { + tags: [ + "World" + ], + summary: "Create a new world environment", + description: "\n## Create New World\n\nCreate a new isolated world environment with automatic setup of companies and products.\n\n### Features\n- **Isolated Environment**: Each world is completely isolated from others\n- **Auto-Generated Content**: Automatically creates MPC company and NPC companies \n- **Product Catalog**: Auto-generates initial product catalog\n- **Streaming Support**: Optional real-time progress updates via Server-Sent Events\n- **URL Generation**: Automatically generates URL slug from name if not provided\n- **Default Management**: Automatically manages default world status\n\n### Auto-Generated Content\nWhen a world is created, the system automatically generates:\n- **1 MPC Company**: Your main company for this world\n- **5 NPC Companies**: Trading partner companies\n- **20 Products**: Initial product catalog\n\n### Streaming Mode\nSet `?stream=true` to receive real-time progress updates:\n- **connected**: Stream connection established\n- **progress**: Step-by-step creation updates\n- **complete**: Final result with all generated data\n- **error**: Any errors during creation\n\n### Default World Management\n- Only one world can be default at a time\n- Setting `is_default: true` automatically unsets other defaults\n- Default worlds are used for system-wide operations\n ", + operationId: "createWorld", + parameters: [ + { + name: "stream", + in: "query", + required: false, + description: "Enable streaming mode for real-time progress updates", + schema: { + type: "boolean", + default: false, + example: true + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + name: { + type: "string", + description: "Unique name for the world environment", + example: "Production Environment" + }, + url: { + type: "string", + description: "URL slug for the world (auto-generated from name if not provided)", + example: "production-environment" + }, + description: { + type: "string", + description: "Detailed description of the world's purpose", + example: "Main production environment for live customer operations" + }, + layout: { + type: "string", + description: "ID of the layout template to seed the world with", + example: "perishables-food-manufacturer" + }, + is_default: { + type: "boolean", + description: "Whether this should be the default world", + default: false, + example: true + }, + apiKey: { + type: "string", + description: "API key for world authentication (optional)", + example: "prod_api_key_123456" + }, + apiSecret: { + type: "string", + description: "API secret for world 
authentication (optional)", + example: "prod_secret_789012" + }, + mpcCompany: { + type: "string", + description: "MPC company identifier (optional)", + example: "company_skyfall_main" + }, + realHoursPerSimDay: { + type: "number", + description: "Number of real-world hours that equal one simulation day", + default: 2, + example: 2 + }, + chaos: { + $ref: "#/components/schemas/ChaosConfig" + } + }, + required: [ + "name", + "layout" + ] + }, + examples: { + production: { + summary: "Create production world", + value: { + name: "Production Environment", + description: "Main production environment for live customer operations", + layout: "perishables-food-manufacturer", + is_default: true, + apiKey: "prod_api_key_123456", + apiSecret: "prod_secret_789012", + realHoursPerSimDay: 2 + } + }, + development: { + summary: "Create development world", + value: { + name: "Development Environment", + description: "Development environment for testing new features and integrations", + layout: "perishables-food-manufacturer", + is_default: false + } + }, + minimal: { + summary: "Create minimal world", + value: { + name: "Testing Environment", + layout: "perishables-food-manufacturer" + } + } + } + } + } + }, + responses: { + "200": { + description: "World created successfully (non-streaming mode)", + content: { + "application/json": { + schema: { + type: "object", + properties: { + world: { + $ref: "#/components/schemas/World" + }, + mainCompany: { + description: "The automatically created MPC company", + $ref: "#/components/schemas/ERPCompany" + }, + npcCompanies: { + type: "array", + description: "Array of NPC companies created", + items: { + $ref: "#/components/schemas/ERPCompany" + } + }, + productsForMpc: { + type: "array", + description: "Array of products created for MPC", + items: { + $ref: "#/components/schemas/ERPProduct" + } + }, + seedResult: { + type: "object", + nullable: true, + description: "Result of the world seeding process (if layout seeder was run)" + }, + capabilities: { + type: "object", + description: "Capability sampling results (only present when samplingStrategy is provided)", + properties: { + samplingType: { + type: "string", + description: "Type of sampling used", + example: "random" + }, + count: { + type: "integer", + description: "Number of capabilities sampled", + example: 10 + }, + ids: { + type: "array", + items: { + type: "string" + }, + description: "IDs of sampled capabilities" + }, + validationWarnings: { + type: "array", + items: { + type: "object" + }, + description: "Any validation warnings from capability validation" + } + } + } + } + }, + example: { + world: { + _id: "507f1f77bcf86cd799439025", + name: "Production Environment", + url: "production-environment", + apiKey: "prod_api_key_123456", + apiSecret: "prod_secret_789012", + is_default: true, + description: "Main production environment for live customer operations", + mpcCompany: "company_skyfall_main", + createdAt: "2024-01-15T11:30:00.000Z", + updatedAt: "2024-01-15T11:30:00.000Z" + }, + mainCompany: { + _id: "507f1f77bcf86cd799439026", + name: "Skyfall Technologies", + duns: "123456789" + }, + npcCompanies: [ + { _id: "507f1f77bcf86cd799439027", name: "NPC Company 1" } + ], + productsForMpc: [ + { _id: "507f1f77bcf86cd799439028", name: "Product 1" } + ], + seedResult: null + } + } + } + }, + "400": { + description: "Bad Request - Invalid input", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + examples: { + missingName: { + summary: "Missing required 
name field", + value: { + success: false, + status: 400, + error: "Missing required field: name", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + duplicateName: { + summary: "World name already exists", + value: { + success: false, + status: 400, + error: "World \"Production Environment\" already exists", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "200 (Streaming)": { + description: "Streaming response with real-time updates", + content: { + "text/event-stream": { + schema: { + type: "string", + description: "Server-sent events stream" + }, + examples: { + streamingEvents: { + summary: "Example streaming events", + value: "event: connected\\ndata: {\"message\":\"Stream connected\"}\\n\\nevent: progress\\ndata: {\"step\":\"creating_world\"}\\n\\nevent: progress\\ndata: {\"step\":\"world_created\",\"worldId\":\"507f1f77bcf86cd799439025\"}\\n\\nevent: progress\\ndata: {\"step\":\"seeding_data\",\"layout\":\"Skyfoods Manufacturer\"}\\n\\nevent: progress\\ndata: {\"step\":\"seeding_ods\",\"layout\":\"Skyfoods Manufacturer\"}\\n\\nevent: complete\\ndata: {\"world\":{...},\"mainCompany\":{...},\"npcCompanies\":5,\"productsForMpc\":20}\\n\\n" + } + } + } + } + } + } + } + }, + "/world/act": { + post: { + tags: [ + "World" + ], + summary: "Dynamic internal API call execution", + description: "\n## Execute Internal API Call\n\nDynamically execute any internal API endpoint via a unified interface. This endpoint acts as a loopback proxy, allowing orchestration of complex workflows or testing internal routes.\n\n### Features\n- **Dynamic Routing**: Call any internal endpoint by path\n- **Method Support**: Supports all HTTP methods (GET, POST, PUT, DELETE, etc.)\n- **Parameter Substitution**: Automatically substitutes path parameters\n- **Query String Construction**: Automatically builds query strings from objects\n- **Response Forwarding**: Returns the exact response from the internal call\n\n### Use Cases\n- **Workflow Orchestration**: Chain multiple internal calls\n- **Testing**: Verify internal endpoints without external clients\n- **Proxying**: Unified access point for internal services\n ", + operationId: "act", + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + method: { + type: "string", + example: "GET", + description: "HTTP method to use (default: GET)" + }, + path: { + type: "string", + example: "/:worldId/erp/orders", + description: "Target URL path (can include params like :id)" + }, + params: { + type: "object", + example: { + worldId: "123" + }, + description: "Path parameters to substitute in the path" + }, + query: { + type: "object", + example: { + status: "open" + }, + description: "Query parameters to append to the URL" + }, + body: { + type: "object", + example: { + customerId: "abc" + }, + description: "Request body for POST/PUT/PATCH methods" + } + }, + required: [ + "path" + ] + } + } + } + }, + responses: { + "200": { + description: "Internal call executed successfully", + content: { + "application/json": { + schema: { + type: "object", + description: "Response from the internal endpoint" + } + } + } + }, + "400": { + description: "Bad Request - Missing path or invalid parameters" + }, + "500": { + description: "Internal Server Error" + } + } + } + }, + 
"/world/layouts": { + get: { + tags: [ + "World" + ], + summary: "List available world layouts", + description: "Retrieve a list of all available world layout templates that can be used to seed new worlds.", + operationId: "getWorldLayouts", + responses: { + "200": { + description: "Successfully retrieved layouts", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "array", + items: { + type: "object", + properties: { + id: { + type: "string", + example: "perishables-food-manufacturer" + }, + name: { + type: "string", + example: "Skyfoods Manufacturer" + }, + description: { + type: "string" + }, + shortDescription: { + type: "string" + } + } + } + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time" + } + } + } + } + } + } + } + } + } + } + }, + "/world/layouts/{layoutId}": { + get: { + tags: [ + "World" + ], + summary: "Get specific world layout details", + operationId: "getWorldLayoutById", + parameters: [ + { + name: "layoutId", + in: "path", + required: true, + description: "ID of the layout to retrieve", + schema: { + type: "string" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved layout details", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + id: { + type: "string" + }, + name: { + type: "string" + }, + description: { + type: "string" + }, + shortDescription: { + type: "string" + }, + docs: { + type: "object" + } + } + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time" + } + } + } + } + } + } + } + } + } + } + }, + "/world/{worldId}": { + get: { + tags: [ + "World" + ], + summary: "Get a specific world by ID", + description: "\n## Get World Details\n\nRetrieve detailed information about a specific world environment.\n\n### Use Cases\n- World dashboard displays\n- Environment configuration views\n- System administration interfaces\n- API client world validation\n- Multi-tenant routing decisions\n\n### Response Data\nReturns complete world information including:\n- Basic world metadata (name, description, URLs)\n- Authentication credentials (API keys)\n- Default status and configuration\n- Creation and update timestamps\n- Associated company references\n ", + operationId: "getWorld", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "World retrieved successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/World" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + example: { + success: true, + status: 200, + 
data: { + _id: "507f1f77bcf86cd799439011", + name: "Production Environment", + url: "production-environment", + apiKey: "prod_api_key_123456", + apiSecret: "prod_secret_789012", + is_default: true, + description: "Main production environment for live customer operations", + mpcCompany: "company_skyfall_main", + createdAt: "2024-01-15T09:00:00.000Z", + updatedAt: "2024-01-15T10:30:00.000Z" + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid worldId", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "World not found", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + }, + delete: { + tags: [ + "World" + ], + summary: "Delete a world environment", + description: "\n## Delete World\n\nPermanently delete a world environment and all associated data.\n\n### ⚠️ Warning\nThis action is **irreversible** and will delete:\n- The world environment and all its metadata\n- All companies, products, and business data within the world\n- All EDI transactions, logs, and audit records\n- All ITSM tickets and work notes\n- All WMS and TMS data\n- All configuration and settings\n\n### When to Delete Worlds\n- **Test environments** that are no longer needed\n- **Development worlds** after project completion \n- **Duplicate worlds** created by mistake\n- **Deprecated environments** during cleanup\n- **Data migration scenarios** after successful migration\n\n### Before Deletion\nConsider these actions before deleting:\n- **Export important data** for archival\n- **Notify stakeholders** who may be using the world\n- **Verify no active integrations** depend on this world\n- **Check for dependent worlds** that reference this one\n- **Review audit logs** for compliance requirements\n\n### Alternative Actions\nInstead of deletion, consider:\n- **Archiving**: Mark the world as inactive\n- **Backup**: Export data before deletion\n- **Migration**: Move data to another world\n- **Suspension**: Temporarily disable access\n\n### Best Practices\n- Use deletion sparingly in production environments\n- Always backup critical data before deletion\n- Document the reason for deletion\n- Verify the correct worldId before confirming deletion\n- Implement approval workflows for production world deletions\n ", + operationId: "deleteWorld", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment to delete", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "World deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + success: { + type: "boolean", + example: true, + description: "Confirms 
successful deletion" + } + } + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + example: { + success: true, + status: 200, + data: { + success: true + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid worldId", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "World not found", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + }, + patch: { + tags: [ + "World" + ], + summary: "Partially update a world environment", + description: ` +## Update World + +Partially update a world environment's configuration. Only the fields provided in the request body will be updated. + +### Updatable Fields +- **name**: Change the world's display name +- **description**: Update the world description +- **is_default**: Set or unset as default world +- **layout**: Change the layout template (does not re-seed data) +- **realHoursPerSimDay**: Adjust simulation speed +- **samplingStrategy**: Update capability sampling configuration +- **capabilityIds**: Directly set capability IDs +- **personas**: Update persona access configuration +- **chaos**: Update chaos engineering policy +- **ticketCreationEnabled**: Enable/disable ITSM ticket creation + +### Use Cases +- Adjust simulation parameters without recreating the world +- Enable/disable chaos engineering for testing +- Update persona access permissions +- Change default world assignment +- Fine-tune capability assignments + +### Notes +- Updating \`samplingStrategy\` will re-sample capabilities based on the new strategy +- Changing \`is_default\` to true will automatically unset the previous default world + `, + operationId: "updateWorld", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment to update", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + type: "object", + properties: { + name: { + type: "string", + description: "Updated name for the world", + example: "Updated Production Environment" + }, + description: { + type: "string", + description: "Updated description", + example: "Updated description for the production environment" + }, + is_default: { + type: "boolean", + description: "Set as default world", + example: true + }, + layout: { + type: "string", + description: "Layout template ID", + example: "perishables-food-manufacturer" + }, + realHoursPerSimDay: { + type: "number", + description: "Simulation speed ratio", + example: 4 + }, + samplingStrategy: { + $ref: "#/components/schemas/SamplingStrategy" + }, + capabilityIds: 
{ + type: "array", + items: { + type: "string" + }, + description: "Direct capability ID assignment", + example: ["cap_order_processing", "cap_inventory_check"] + }, + personas: { + $ref: "#/components/schemas/PersonaConfig" + }, + chaos: { + $ref: "#/components/schemas/ChaosPolicy" + }, + ticketCreationEnabled: { + type: "boolean", + description: "Enable/disable ITSM ticket creation", + example: false + } + } + }, + examples: { + updateDescription: { + summary: "Update world description", + value: { + description: "Updated description for better clarity" + } + }, + enableChaos: { + summary: "Enable chaos engineering", + value: { + chaos: { + enabled: true, + probability: 0.1, + scenarios: [ + { + type: "missing_data", + weight: 1.0, + description: "Simulate missing fields", + config: { + missingFields: ["quantity"], + throwError: false + } + } + ] + } + } + }, + setDefault: { + summary: "Set as default world", + value: { + is_default: true + } + } + } + } + } + }, + responses: { + "200": { + description: "World updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + $ref: "#/components/schemas/World" + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + example: { + success: true, + status: 200, + data: { + _id: "507f1f77bcf86cd799439011", + name: "Updated Production Environment", + url: "production-environment", + apiKey: "prod_api_key_123456", + apiSecret: "prod_secret_789012", + is_default: true, + description: "Updated description for the production environment", + layout: "perishables-food-manufacturer", + mpcCompany: "company_skyfall_main", + realHoursPerSimDay: 2, + ticketCreationEnabled: true, + createdAt: "2024-01-15T09:00:00.000Z", + updatedAt: "2024-01-15T11:00:00.000Z" + }, + meta: { + event: "message", + timestamp: "2024-01-15T11:00:00.123Z" + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid worldId or invalid update data", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "World not found", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + }, + "/world/{worldId}/reset": { + post: { + tags: [ + "World" + ], + summary: "Reset a world environment", + description: "\n## Reset World Environment\n\nReset a world environment to its initial state, clearing all data while preserving the world definition and configuration.\n\n### What happens during reset:\n1. 
**Data Cleanup**: All existing data associated with the world is permanently deleted:\n - Companies, products, and customers\n - Orders, invoices, and shipments\n - EDI transactions and logs\n - WMS inventory and tasks\n - ITSM tickets\n2. **Re-Seeding**: The world is automatically re-populated with fresh seed data:\n - Main company (MPC) regeneration\n - Partner/Customer generation based on world layout\n - Product catalog regeneration\n - Operational Descriptor (OD) re-initialization\n\n### Use Cases\n- **Simulation Restart**: Restarting a simulation scenario from scratch\n- **Test Automation**: Cleaning up state between test runs\n- **Development Loop**: Rapidly iterating on world configurations\n- **Demo Resets**: Resetting a demo environment for a new audience\n\n### ⚠️ Warning\nThis action deletes all transactional data. Ensure you have backed up any critical information before proceeding.\n ", + operationId: "resetWorld", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment to reset", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "World reset successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + message: { + type: "string", + example: "World reset and re-seeded" + }, + seedResult: { + type: "object", + description: "Result of the re-seeding process" + } + } + } + }, + required: [ + "success", + "status", + "data" + ] + } + } + } + }, + "400": { + description: "Bad Request", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + }, + "/world/{worldId}/capabilities": { + get: { + tags: [ + "World" + ], + summary: "Get capabilities assigned to a world", + description: ` +## Get World Capabilities + +Retrieve the list of capabilities assigned to a specific world, including sampling strategy information and full capability details. 
+ +### Response Data +- **worldId**: The world's unique identifier +- **worldName**: Human-readable world name +- **samplingStrategy**: The strategy used to select capabilities +- **count**: Total number of assigned capabilities +- **capabilityIds**: Array of capability ID strings +- **capabilities**: Full capability objects with metadata + +### Capability Information +Each capability includes: +- **id**: Unique capability identifier +- **name**: Human-readable name +- **description**: What the capability does +- **domain**: Business domain (e.g., order-management, inventory) +- **odId**: Associated Operational Descriptor ID +- **tags**: Metadata including personas, services, complexity + +### Use Cases +- Display available capabilities for a world +- Verify capability sampling results +- Debug persona-capability mappings +- Audit world configurations + `, + operationId: "getWorldCapabilities", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved world capabilities", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + worldId: { + type: "string", + description: "World unique identifier", + example: "507f1f77bcf86cd799439011" + }, + worldName: { + type: "string", + description: "World display name", + example: "Production Environment" + }, + samplingStrategy: { + $ref: "#/components/schemas/SamplingStrategy" + }, + count: { + type: "integer", + description: "Number of assigned capabilities", + example: 15 + }, + capabilityIds: { + type: "array", + items: { + type: "string" + }, + description: "Array of capability IDs", + example: ["cap_order_processing", "cap_inventory_check", "cap_shipment_tracking"] + }, + capabilities: { + type: "array", + items: { + type: "object", + properties: { + id: { + type: "string", + description: "Capability ID" + }, + name: { + type: "string", + description: "Capability name" + }, + description: { + type: "string", + description: "Capability description" + }, + domain: { + type: "string", + description: "Business domain" + }, + odId: { + type: "string", + description: "Associated Operational Descriptor ID" + }, + tags: { + type: "object", + description: "Capability metadata tags" + } + } + }, + description: "Full capability objects" + } + }, + required: [ + "worldId", + "worldName", + "samplingStrategy", + "count", + "capabilityIds", + "capabilities" + ] + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + example: { + success: true, + status: 200, + data: { + worldId: "507f1f77bcf86cd799439011", + worldName: "Production Environment", + samplingStrategy: { + type: "random", + count: 10, + seed: 42 + }, + count: 10, + capabilityIds: [ + "cap_order_processing", + "cap_inventory_check" + ], + capabilities: [ + { + id: "cap_order_processing", + name: "Order Processing", + description: "Process incoming purchase orders", + domain: "order-management", + odId: "od_process_order", + tags: { + personas: ["sales-rep", "customer-service"], + 
complexity: "medium" + } + } + ] + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid worldId", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 400, + error: "worldId is required", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + }, + example: { + success: false, + status: 404, + error: "World not found", + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + }, + "/world/{worldId}/chaos": { + get: { + tags: [ + "World" + ], + summary: "Get chaos configuration for a world", + description: ` +## Get World Chaos Configuration + +Retrieve the chaos engineering configuration for a specific world environment. + +### Response Data +- **worldId**: The world's unique identifier +- **worldName**: Human-readable world name +- **chaos**: The chaos configuration settings + +### Chaos Configuration +The chaos configuration controls fault injection: +- **processChaosEnabled**: Enable chaos for process execution (ODs) +- **infraChaosEnabled**: Enable chaos for infrastructure components + +### Use Cases +- View current chaos settings before testing +- Debug unexpected failures in workflows +- Audit chaos configurations across environments + `, + operationId: "getWorldChaos", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Successfully retrieved chaos configuration", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + worldId: { + type: "string", + description: "World unique identifier", + example: "507f1f77bcf86cd799439011" + }, + worldName: { + type: "string", + description: "World display name", + example: "Production Environment" + }, + chaos: { + $ref: "#/components/schemas/ChaosConfig" + } + }, + required: [ + "worldId", + "worldName", + "chaos" + ] + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + examples: { + chaosEnabled: { + summary: "Chaos enabled configuration", + value: { + success: true, + status: 200, + data: { + worldId: "507f1f77bcf86cd799439011", + worldName: "Testing Environment", + chaos: { + processChaosEnabled: true, + infraChaosEnabled: true + } + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + }, + chaosDisabled: { + summary: "Chaos disabled (default)", + value: { + success: true, + status: 200, + data: { + worldId: "507f1f77bcf86cd799439011", + worldName: "Production Environment", + chaos: { + processChaosEnabled: false, + infraChaosEnabled: false + } + }, + meta: { + 
event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid worldId", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + }, + put: { + tags: [ + "World" + ], + summary: "Update chaos configuration for a world", + description: ` +## Update World Chaos Configuration + +Replace the chaos engineering configuration for a specific world environment. + +### Request Body +Provide a complete chaos configuration: +- **processChaosEnabled**: Enable chaos for process execution (ODs) +- **infraChaosEnabled**: Enable chaos for infrastructure components + +### Use Cases +- Enable process chaos testing for a development environment +- Enable infrastructure chaos for resilience testing +- Disable all chaos for production stability + `, + operationId: "updateWorldChaos", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + requestBody: { + required: true, + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ChaosConfig" + }, + examples: { + enableAllChaos: { + summary: "Enable all chaos", + value: { + processChaosEnabled: true, + infraChaosEnabled: true + } + }, + enableProcessChaosOnly: { + summary: "Enable process chaos only", + value: { + processChaosEnabled: true, + infraChaosEnabled: false + } + }, + disableChaos: { + summary: "Disable chaos", + value: { + processChaosEnabled: false, + infraChaosEnabled: false + } + } + } + } + } + }, + responses: { + "200": { + description: "Chaos configuration updated successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + worldId: { + type: "string", + example: "507f1f77bcf86cd799439011" + }, + chaos: { + $ref: "#/components/schemas/ChaosConfig" + }, + message: { + type: "string", + example: "World chaos configuration updated successfully" + } + }, + required: [ + "worldId", + "chaos", + "message" + ] + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + example: { + success: true, + status: 200, + data: { + worldId: "507f1f77bcf86cd799439011", + chaos: { + processChaosEnabled: true, + infraChaosEnabled: true + }, + message: "World chaos configuration updated successfully" + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid worldId or chaos configuration", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + 
"500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + }, + delete: { + tags: [ + "World" + ], + summary: "Delete chaos configuration from a world", + description: ` +## Delete World Chaos Configuration + +Remove the chaos engineering configuration from a world, resetting it to the disabled default state. + +### What Happens +- Removes the chaos policy from the world document +- Resets the chaos registry entry to disabled state +- Future workflow executions will not have chaos injection + +### Default State After Deletion +\`\`\`json +{ + "processChaosEnabled": false, + "infraChaosEnabled": false +} +\`\`\` + +### Use Cases +- Disable chaos after testing is complete +- Reset a world to clean state before production use +- Remove misconfigured chaos settings +- Prepare a world for stable demonstration + `, + operationId: "deleteWorldChaos", + parameters: [ + { + name: "worldId", + in: "path", + required: true, + description: "Unique identifier for the world environment", + schema: { + type: "string", + example: "507f1f77bcf86cd799439011" + } + } + ], + responses: { + "200": { + description: "Chaos configuration deleted successfully", + content: { + "application/json": { + schema: { + type: "object", + properties: { + success: { + type: "boolean", + example: true + }, + status: { + type: "integer", + example: 200 + }, + data: { + type: "object", + properties: { + worldId: { + type: "string", + example: "507f1f77bcf86cd799439011" + }, + message: { + type: "string", + example: "World chaos configuration deleted successfully" + } + }, + required: [ + "worldId", + "message" + ] + }, + meta: { + type: "object", + properties: { + event: { + type: "string", + example: "message" + }, + timestamp: { + type: "string", + format: "date-time", + example: "2024-01-15T10:30:00.123Z" + } + } + } + }, + required: [ + "success", + "status", + "data", + "meta" + ] + }, + example: { + success: true, + status: 200, + data: { + worldId: "507f1f77bcf86cd799439011", + message: "World chaos configuration deleted successfully" + }, + meta: { + event: "message", + timestamp: "2024-01-15T10:30:00.123Z" + } + } + } + } + }, + "400": { + description: "Bad Request - Invalid worldId", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "404": { + description: "World not found", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + }, + "500": { + description: "Internal Server Error", + content: { + "application/json": { + schema: { + $ref: "#/components/schemas/ErrorResponse" + } + } + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/schemas/common.schema.ts b/packages/controlmart/src/docs/schemas/common.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..03279914c4c7beb283a850e6e1bb6712d6d88992 --- /dev/null +++ b/packages/controlmart/src/docs/schemas/common.schema.ts @@ -0,0 +1,122 @@ +export const commonSchemas = { + ErrorResponse: { + type: "object", + description: "Standard error response format", + properties: { + success: { + type: "boolean", + example: false, + description: "Always false for error responses" + }, + status: { + type: "integer", + description: "HTTP status code", + example: 400 + }, + error: { + type: "string", + description: "Human-readable error message", + example: "worldId is required" + }, + meta: { + type: "object", + properties: { 
+ event: { + type: "string", + example: "message", + description: "Event type for streaming responses" + }, + timestamp: { + type: "string", + format: "date-time", + description: "Error response generation timestamp", + example: "2024-01-15T10:30:00.123Z" + } + }, + required: [ + "event", + "timestamp" + ] + } + }, + required: [ + "success", + "status", + "error", + "meta" + ] + }, + AuditLog: { + type: "object", + description: "A single audit log entry tracking data changes within a world environment", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + model: { + type: "string", + description: "The type of data model that was changed", + example: "EdiTransaction" + }, + documentId: { + type: "string", + description: "Unique identifier of the document that was changed", + example: "edi_txn_123456789" + }, + changedBy: { + type: "string", + nullable: true, + description: "User ID or system identifier that made the change", + example: "user_john_doe" + }, + before: { + type: "object", + nullable: true, + description: "Complete state of the document before the change (null for new documents)", + additionalProperties: true, + example: { + status: "PENDING", + lastUpdated: "2024-01-15T09:00:00.000Z" + } + }, + after: { + type: "object", + nullable: true, + description: "Complete state of the document after the change (null for deleted documents)", + additionalProperties: true, + example: { + status: "PROCESSED", + lastUpdated: "2024-01-15T10:25:30.123Z", + processedBy: "edi-processor-v2" + } + }, + reason: { + type: "string", + nullable: true, + description: "Optional reason or context for the change", + example: "Automated EDI processing completed" + }, + createdAt: { + type: "string", + format: "date-time", + description: "When this audit log entry was created", + example: "2024-01-15T10:25:30.123Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "When this audit log entry was last updated", + example: "2024-01-15T10:25:30.123Z" + } + }, + required: [ + "_id", + "model", + "documentId", + "createdAt", + "updatedAt" + ] + } +}; diff --git a/packages/controlmart/src/docs/schemas/edi.schema.ts b/packages/controlmart/src/docs/schemas/edi.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..877d5941c1ca5b43975f7227e060e48d6ed90b34 --- /dev/null +++ b/packages/controlmart/src/docs/schemas/edi.schema.ts @@ -0,0 +1,165 @@ +export const ediSchemas = { + EdiTransaction: { + type: "object", + description: "Electronic Data Interchange transaction for B2B document exchange", + properties: { + _id: { + type: "string", + description: "Unique database identifier for the EDI transaction", + example: "507f1f77bcf86cd799439015" + }, + transactionId: { + type: "string", + description: "System-generated unique transaction identifier", + example: "edi_edi_2024_001" + }, + partnerId: { + type: "string", + description: "Trading partner identifier", + example: "PARTNER_WALMART_001" + }, + customerId: { + type: "string", + description: "Customer identifier (optional, used for simulation)", + example: "CUSTOMER_AMAZON_123" + }, + companyId: { + type: "string", + description: "Portal company identifier (optional)", + example: "COMPANY_SKYFALL_MAIN" + }, + docType: { + type: "string", + enum: [ + "850", + "855", + "856", + "810", + "820", + "997", + "999" + ], + description: "EDI document type code (850=PO, 810=Invoice, 856=ASN, etc.)", + example: "810" + }, + dollarValue: { + type: 
"number", + description: "Monetary value extracted from the EDI document", + example: 1250 + }, + direction: { + type: "string", + enum: [ + "IN", + "OUT" + ], + description: "Transaction direction - IN=received, OUT=sending", + example: "IN" + }, + timestamp: { + type: "string", + format: "date-time", + description: "When the EDI transaction was processed", + example: "2024-01-15T09:30:00.000Z" + }, + interchangeControlNumber: { + type: "string", + description: "ISA13 - X12 Interchange Control Number for correlation", + example: "000000001" + }, + groupControlNumber: { + type: "string", + description: "GS06 - X12 Functional Group Control Number", + example: "1" + }, + transactionSetControlNumber: { + type: "string", + description: "ST02 - X12 Transaction Set Control Number", + example: "0001" + }, + businessDocumentNumber: { + type: "string", + description: "Business-level document identifier (PO#, Invoice#, ASN#)", + example: "INV-2024-001" + }, + status: { + type: "string", + enum: [ + "RECEIVED", + "QUEUED", + "PROCESSING", + "DELIVERED", + "ERRORED", + "ARCHIVED" + ], + description: "Current processing status of the transaction", + example: "ERRORED" + }, + errorReason: { + type: "string", + description: "Reason for error if status is ERRORED", + example: "Validation failed: Missing required field TXN02" + }, + errorDetails: { + type: "object", + description: "Detailed error information for troubleshooting", + example: { + segment: "TXN", + position: "02", + field: "Total Amount", + error: "Required field missing" + } + }, + fileName: { + type: "string", + description: "Original filename if transaction originated from file", + example: "invoice_walmart_20240115.edi" + }, + flowId: { + type: "string", + description: "Business flow identifier for transaction correlation", + example: "FLOW_PO_2024_001" + }, + rawEdi: { + type: "string", + description: "Complete raw EDI X12/EDIFACT document text", + example: "ISA*00* *00* *ZZ*WALMART *ZZ*SKYFALL *240115*1030*U*00401*000000001*0*P*>~GS*IN*WALMART*SKYFALL*20240115*1030*1*X*004010~ST*810*0001~BIG*20240115*INV-2024-001*PO-2024-5678~..." 
+ }, + payload: { + type: "object", + description: "Structured metadata or parsed EDI business data", + example: { + invoiceNumber: "INV-2024-001", + poNumber: "PO-2024-5678", + totalAmount: 1250, + lineItems: 5, + department: "DEPT001", + requestedDeliveryDate: "2024-01-20" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "When the transaction record was created", + example: "2024-01-15T09:30:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "When the transaction record was last modified", + example: "2024-01-15T09:35:00.000Z" + } + }, + required: [ + "_id", + "transactionId", + "partnerId", + "docType", + "direction", + "timestamp", + "status", + "createdAt", + "updatedAt" + ] + } +}; diff --git a/packages/controlmart/src/docs/schemas/erp.schema.ts b/packages/controlmart/src/docs/schemas/erp.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..437b9ddb960e1e608cd86672ae55c86c1540cf0f --- /dev/null +++ b/packages/controlmart/src/docs/schemas/erp.schema.ts @@ -0,0 +1,1750 @@ +export const erpSchemas = { + ERPCompany: { + type: "object", + description: "Complete ERP company entity with comprehensive business information and operational configuration", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + worldRef: { + type: "object", + description: "Reference to the world this company belongs to", + properties: { + worldId: { + type: "string", + description: "Unique identifier of the world", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + required: [ + "worldId" + ] + }, + isMpcCompany: { + type: "boolean", + description: "Main Player Company designation (exclusive within world)", + default: false, + example: false + }, + companyId: { + type: "string", + description: "Unique company identifier within the world", + example: "COMP_507f1f77bcf86cd799439012" + }, + externalReference: { + type: "string", + description: "External system reference identifier", + example: "EXT_REF_12345" + }, + name: { + type: "string", + description: "Company name", + example: "Acme Corporation" + }, + legalName: { + type: "string", + description: "Legal business name", + example: "Acme Corporation LLC" + }, + duns: { + type: "string", + description: "DUNS (Data Universal Numbering System) number", + example: "123456789" + }, + taxId: { + type: "string", + description: "Tax identification number", + example: "TAX123456789" + }, + taxRegistrationNumbers: { + type: "array", + description: "Country-specific tax registration numbers", + items: { + type: "object", + properties: { + country: { + type: "string", + description: "Country code", + example: "USA" + }, + number: { + type: "string", + description: "Tax registration number", + example: "REG123456789" + } + }, + required: [ + "country", + "number" + ] + } + }, + currency: { + type: "string", + description: "Primary operating currency", + default: "USD", + example: "USD" + }, + paymentTerms: { + type: "string", + description: "Payment terms and conditions", + example: "NET30" + }, + creditLimit: { + type: "number", + description: "Credit limit amount", + example: 100000 + }, + creditHold: { + type: "boolean", + description: "Credit hold status", + default: false, + example: false + }, + billingAddress: { + $ref: "#/components/schemas/Address" + }, + shippingAddress: { + $ref: "#/components/schemas/Address" + }, + remitTo: { + $ref: "#/components/schemas/Address" + }, + 
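+      // billingAddress, shippingAddress and remitTo all reuse the shared Address schema defined later in this file.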
primaryContact: { + type: "object", + description: "Primary contact information", + properties: { + name: { + type: "string", + description: "Contact person name", + example: "John Smith" + }, + email: { + type: "string", + format: "email", + description: "Contact email address", + example: "john.smith@acme.com" + }, + phone: { + type: "string", + description: "Contact phone number", + example: "+1-555-123-4567" + } + } + }, + salesOrg: { + type: "string", + description: "Sales organization code", + example: "US_EAST" + }, + priceList: { + type: "string", + description: "Price list identifier", + example: "STANDARD_RETAIL" + }, + glAccount: { + type: "string", + description: "General ledger account", + example: "1200-AR-TRADE" + }, + customerClass: { + type: "string", + description: "Customer classification", + example: "PREMIUM" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "PROSPECT", + "BLOCKED" + ], + description: "Company operational status", + default: "ACTIVE", + example: "ACTIVE" + }, + companyType: { + type: "string", + enum: [ + "CUSTOMER", + "SUPPLIER", + "PARTNER", + "INTERNAL" + ], + description: "Company relationship type", + default: "CUSTOMER", + example: "CUSTOMER" + }, + customFields: { + type: "object", + description: "Additional custom fields for company-specific data", + additionalProperties: true, + example: { + erpSource: "SAP", + regionCode: "US", + salesRep: "JOHN_DOE" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Company record creation timestamp", + example: "2024-01-15T10:25:30.123Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Company record last update timestamp", + example: "2024-01-15T14:30:45.678Z" + } + }, + required: [ + "_id", + "worldRef", + "companyId", + "name", + "status", + "companyType" + ] + }, + ERPProduct: { + type: "object", + description: "Complete ERP product entity with comprehensive product information and operational configuration", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + worldRef: { + type: "object", + description: "Reference to the world this product belongs to", + properties: { + worldId: { + type: "string", + description: "Unique identifier of the world", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + required: [ + "worldId" + ] + }, + productId: { + type: "string", + description: "Unique product identifier within the world (auto-generated if not provided)", + example: "PROD_507f1f77bcf86cd799439012" + }, + upc: { + type: "string", + description: "Universal Product Code for retail identification", + example: "123456789012" + }, + ean: { + type: "string", + description: "European Article Number for international identification", + example: "1234567890123" + }, + name: { + type: "string", + description: "Product name", + example: "Premium Widget" + }, + description: { + type: "string", + description: "Detailed product description", + example: "High-quality premium widget with enhanced features for professional use" + }, + commodityCode: { + type: "string", + description: "Commodity classification code for trade and customs", + example: "8421.21.0000" + }, + taxClassification: { + type: "string", + description: "Tax classification for accounting and compliance", + example: "TAXABLE_GOODS" + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure for product quantification", + default: "EA", + example: "EA" + }, + weight: { + 
type: "object", + description: "Product weight specification", + properties: { + value: { + type: "number", + description: "Weight value", + example: 2.5 + }, + unit: { + type: "string", + description: "Weight unit", + example: "LB" + } + } + }, + dimensions: { + type: "object", + description: "Product dimensions for shipping and storage calculations", + properties: { + length: { + type: "number", + description: "Product length", + example: 12.5 + }, + width: { + type: "number", + description: "Product width", + example: 8 + }, + height: { + type: "number", + description: "Product height", + example: 3.5 + }, + unit: { + type: "string", + description: "Dimension unit", + example: "IN" + } + } + }, + inventoryTracking: { + type: "boolean", + description: "Enable inventory tracking for this product", + default: true, + example: true + }, + price: { + type: "object", + description: "Product selling price for sales operations", + properties: { + currency: { + type: "string", + description: "Price currency code", + example: "USD" + }, + amount: { + type: "number", + description: "Price amount", + example: 99.99 + } + } + }, + cost: { + type: "object", + description: "Product cost basis for margin calculations and financial reporting", + properties: { + currency: { + type: "string", + description: "Cost currency code", + example: "USD" + }, + amount: { + type: "number", + description: "Cost amount", + example: 45.5 + } + } + }, + leadTimeDays: { + type: "number", + description: "Lead time in days for procurement or manufacturing", + example: 14 + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "DISCONTINUED" + ], + description: "Product lifecycle status", + default: "ACTIVE", + example: "ACTIVE" + }, + customFields: { + type: "object", + description: "Additional custom fields for product-specific data and metadata", + additionalProperties: true, + example: { + brand: "Premium Brand", + category: "Electronics", + manufacturer: "ACME Corp", + warranty: "2 years" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Product record creation timestamp", + example: "2024-01-15T10:25:30.123Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Product record last update timestamp", + example: "2024-01-15T14:30:45.678Z" + } + }, + required: [ + "_id", + "worldRef", + "productId", + "name", + "status" + ] + }, + ERPOrder: { + type: "object", + description: "ERP purchase order with comprehensive order information and line item details", + properties: { + _id: { + type: "string", + description: "MongoDB unique identifier", + example: "507f1f77bcf86cd799439012" + }, + __v: { + type: "number", + description: "MongoDB version key", + example: 0 + }, + worldId: { + type: "string", + description: "World environment identifier", + example: "507f1f77bcf86cd799439011" + }, + orderId: { + type: "string", + description: "Unique order identifier (CRITICAL: Route parameter 'poNumber' maps to this field)", + example: "ORDER_507f1f77bcf86cd799439012" + }, + poType: { + type: "string", + enum: [ + "STANDARD", + "BLANKET", + "CONTRACT", + "DROP_SHIP" + ], + description: "Purchase order type", + example: "STANDARD" + }, + direction: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND" + ], + description: "Order direction - INBOUND for purchase orders, OUTBOUND for sales orders", + default: "INBOUND", + example: "INBOUND" + }, + customerId: { + type: "string", + description: "Customer identifier", + example: "CUST_507f1f77bcf86cd799439013" + }, + partnerId: { + 
type: "string", + description: "Partner identifier", + example: "PARTNER_507f1f77bcf86cd799439014" + }, + orderDate: { + type: "string", + format: "date", + description: "Order placement date", + example: "2024-01-15" + }, + requestedDate: { + type: "string", + format: "date", + description: "Requested delivery date", + example: "2024-01-25" + }, + dueDate: { + type: "string", + format: "date", + description: "Due date for order completion", + example: "2024-01-30" + }, + buyer: { + type: "object", + description: "Buyer information", + properties: { + id: { + type: "string", + example: "BUYER001" + }, + name: { + type: "string", + example: "John Smith" + } + } + }, + currency: { + type: "string", + description: "Order currency", + example: "USD" + }, + subtotal: { + type: "number", + description: "Order subtotal before taxes and fees", + example: 1250 + }, + discounts: { + type: "array", + description: "Order-level discounts", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Volume discount" + }, + amount: { + type: "number", + example: 50 + } + } + } + }, + taxes: { + type: "array", + description: "Order-level tax details", + items: { + $ref: "#/components/schemas/TaxDetail" + } + }, + totalAmount: { + type: "number", + description: "Total order amount", + example: 1335 + }, + status: { + type: "string", + enum: [ + "RECEIVED", + "ACKED", + "IN_PROGRESS", + "MATERIALS_PICKED", + "MANUFACTURING_COMPLETE", + "PARTIALLY_SHIPPED", + "COMPLETED", + "CANCELLED" + ], + description: "Order processing status", + example: "RECEIVED" + }, + lines: { + type: "array", + description: "Order line items", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + example: 1 + }, + sku: { + type: "string", + example: "PROD_WIDGET_001" + }, + description: { + type: "string", + example: "Premium Widget - Blue" + }, + quantityOrdered: { + type: "number", + example: 10 + }, + unitPrice: { + type: "number", + example: 99.99 + }, + lineTotal: { + type: "number", + example: 999.9 + }, + promisedDate: { + type: "string", + format: "date", + example: "2024-01-20" + }, + poLineId: { + type: "string", + description: "PO line identifier", + example: "PO_LINE_001" + }, + quantityBackordered: { + type: "number", + description: "Quantity on backorder", + default: 0, + example: 0 + }, + quantityCanceled: { + type: "number", + description: "Quantity canceled", + default: 0, + example: 0 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure", + example: "EA" + }, + shipToAddress: { + $ref: "#/components/schemas/Address" + }, + taxDetails: { + type: "array", + description: "Line-level tax details", + items: { + $ref: "#/components/schemas/TaxDetail" + } + }, + scheduleLines: { + type: "array", + description: "Delivery schedule lines", + items: { + type: "object", + properties: { + qty: { + type: "number", + example: 5 + }, + date: { + type: "string", + format: "date", + example: "2024-01-20" + } + } + } + }, + customFields: { + type: "object", + description: "Line-specific custom fields", + additionalProperties: true + } + } + } + }, + attachments: { + type: "array", + description: "Order attachments and documents", + items: { + type: "object", + properties: { + filename: { + type: "string", + example: "po_document.pdf" + }, + url: { + type: "string", + example: "https://storage.example.com/po_document.pdf" + } + } + } + }, + ediTransactionId: { + type: "string", + description: "Reference to EDI transaction ID", + example: 
"507f1f77bcf86cd799439021" + }, + flowId: { + type: "string", + description: "Business flow identifier for workflow tracking", + example: "FLOW_PO_001" + }, + notes: { + type: "string", + description: "Order notes and instructions", + example: "Please deliver to dock 3" + }, + customFields: { + type: "object", + description: "Additional order-specific fields", + additionalProperties: true, + example: { + salesRep: "JOHN_DOE", + priority: "HIGH" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Order creation timestamp", + example: "2024-01-15T10:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last update timestamp", + example: "2024-01-15T15:30:00.000Z" + } + }, + required: [ + "_id", + "worldId", + "orderId", + "customerId", + "orderDate", + "lines", + "status" + ] + }, + ERPInvoice: { + type: "object", + description: "ERP invoice with comprehensive billing information and line item details", + properties: { + _id: { + type: "string", + description: "MongoDB unique identifier", + example: "507f1f77bcf86cd799439015" + }, + __v: { + type: "number", + description: "MongoDB version key", + example: 0 + }, + worldId: { + type: "string", + description: "World environment identifier", + example: "507f1f77bcf86cd799439011" + }, + invoiceId: { + type: "string", + description: "Unique invoice identifier (NOTE: Route parameter 'invoiceNumber' maps to this field)", + example: "INV_507f1f77bcf86cd799439012" + }, + invoiceType: { + type: "string", + enum: [ + "STANDARD", + "CREDIT", + "DEBIT", + "CORRECTION" + ], + description: "Invoice type", + example: "STANDARD" + }, + poNumber: { + type: "string", + description: "Related purchase order number", + example: "ORDER_507f1f77bcf86cd799439013" + }, + orderId: { + type: "string", + description: "Reference to order ID", + example: "ORDER_507f1f77bcf86cd799439013" + }, + customerId: { + type: "string", + description: "Customer identifier", + example: "CUST_507f1f77bcf86cd799439014" + }, + partnerId: { + type: "string", + description: "Partner identifier", + example: "PARTNER_507f1f77bcf86cd799439015" + }, + billTo: { + $ref: "#/components/schemas/Address" + }, + remitTo: { + $ref: "#/components/schemas/Address" + }, + issueDate: { + type: "string", + format: "date", + description: "Invoice issue date", + example: "2024-01-15" + }, + dueDate: { + type: "string", + format: "date", + description: "Payment due date", + example: "2024-02-15" + }, + currency: { + type: "string", + description: "Invoice currency", + example: "USD" + }, + subtotal: { + type: "number", + description: "Invoice subtotal", + example: 1000 + }, + allowances: { + type: "array", + description: "Invoice allowances and discounts", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Early payment discount" + }, + amount: { + type: "number", + example: 25 + } + } + } + }, + charges: { + type: "array", + description: "Additional charges and fees", + items: { + type: "object", + properties: { + description: { + type: "string", + example: "Shipping charge" + }, + amount: { + type: "number", + example: 15 + } + } + } + }, + taxes: { + type: "array", + description: "Invoice-level tax details", + items: { + $ref: "#/components/schemas/TaxDetail" + } + }, + totalAmount: { + type: "number", + description: "Total invoice amount", + example: 1080 + }, + balanceDue: { + type: "number", + description: "Outstanding balance due", + example: 1080 + }, + status: { + type: "string", + enum: [ + 
"DRAFT", + "SENT", + "VALIDATED", + "REJECTED", + "PAID", + "PARTIALLY_PAID" + ], + description: "Invoice status", + example: "DRAFT" + }, + lines: { + type: "array", + description: "Invoice line items", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + example: 1 + }, + sku: { + type: "string", + example: "PROD_WIDGET_001" + }, + description: { + type: "string", + example: "Premium Widget - Blue" + }, + quantity: { + type: "number", + example: 10 + }, + unitPrice: { + type: "number", + example: 99.99 + }, + lineAmount: { + type: "number", + example: 999.9 + }, + discount: { + type: "object", + description: "Line-level discount", + properties: { + amount: { + type: "number", + example: 10 + }, + percent: { + type: "number", + example: 5 + } + } + }, + taxDetails: { + type: "array", + description: "Line-level tax details", + items: { + $ref: "#/components/schemas/TaxDetail" + } + }, + accounting: { + type: "object", + description: "Line-level accounting info", + properties: { + costCenter: { + type: "string", + example: "CC001" + }, + glAccount: { + type: "string", + example: "4100-REVENUE" + } + } + }, + customFields: { + type: "object", + description: "Line-specific custom fields", + additionalProperties: true + } + } + } + }, + references: { + type: "array", + description: "Document references", + items: { + type: "object", + properties: { + docType: { + type: "string", + example: "SHIPMENT" + }, + docId: { + type: "string", + example: "SHIP_507f1f77bcf86cd799439012" + } + } + } + }, + ediTransactionId: { + type: "string", + description: "Reference to EDI transaction ID", + example: "507f1f77bcf86cd799439021" + }, + taxSummary: { + type: "object", + description: "Tax summary data", + additionalProperties: true, + example: { + totalTax: 80, + taxBreakdown: [ + { taxType: "STATE", amount: 60 }, + { taxType: "LOCAL", amount: 20 } + ] + } + }, + accounting: { + type: "object", + description: "Accounting configuration", + properties: { + arAccount: { + type: "string", + example: "1200-AR-TRADE" + }, + period: { + type: "string", + example: "2024-01" + }, + postingDate: { + type: "string", + format: "date-time", + example: "2024-01-15T00:00:00.000Z" + } + } + }, + disputes: { + type: "array", + description: "Invoice disputes", + items: { + type: "object", + properties: { + ticketId: { + type: "string", + example: "TICKET_001" + }, + reason: { + type: "string", + example: "Price discrepancy" + }, + createdAt: { + type: "string", + format: "date-time", + example: "2024-01-20T10:00:00.000Z" + } + } + } + }, + correctionHistory: { + type: "array", + description: "Invoice correction history", + items: { + type: "object", + properties: { + previous: { + type: "object", + description: "Previous invoice state", + additionalProperties: true + }, + correctedAt: { + type: "string", + format: "date-time", + example: "2024-01-18T14:00:00.000Z" + }, + correctedBy: { + type: "string", + example: "user@example.com" + } + } + } + }, + attachments: { + type: "array", + description: "Invoice attachments and documents", + items: { + type: "object", + properties: { + filename: { + type: "string", + example: "invoice.pdf" + }, + url: { + type: "string", + example: "https://storage.example.com/invoice.pdf" + } + } + } + }, + flowId: { + type: "string", + description: "Business flow identifier for workflow tracking", + example: "FLOW_INV_001" + }, + paymentTerms: { + type: "string", + description: "Payment terms", + example: "NET30" + }, + customFields: { + type: "object", + 
description: "Additional invoice fields", + additionalProperties: true, + example: { + salesRep: "JANE_DOE", + region: "NORTHEAST" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Invoice creation timestamp", + example: "2024-01-15T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last update timestamp", + example: "2024-01-15T14:30:00.000Z" + } + }, + required: [ + "_id", + "worldId", + "invoiceId", + "customerId", + "issueDate", + "totalAmount", + "lines", + "status" + ] + }, + ERPShipment: { + type: "object", + description: "ERP shipment with comprehensive logistics and tracking information", + properties: { + _id: { + type: "string", + description: "MongoDB unique identifier", + example: "507f1f77bcf86cd799439020" + }, + __v: { + type: "number", + description: "MongoDB version key", + example: 0 + }, + worldId: { + type: "string", + description: "World environment identifier", + example: "507f1f77bcf86cd799439011" + }, + shipmentId: { + type: "string", + description: "Unique shipment identifier (auto-generated)", + example: "SHIP_507f1f77bcf86cd799439012" + }, + poNumber: { + type: "string", + description: "Related purchase order number", + example: "ORDER_507f1f77bcf86cd799439013" + }, + carrier: { + type: "object", + description: "Carrier information", + properties: { + name: { + type: "string", + example: "FedEx" + }, + scac: { + type: "string", + example: "FDXE" + }, + mode: { + type: "string", + example: "Ground" + } + } + }, + trackingNumber: { + type: "string", + description: "Carrier tracking number", + example: "1Z999AA1234567890" + }, + shipDate: { + type: "string", + format: "date", + description: "Shipment date", + example: "2024-01-15" + }, + estimatedArrival: { + type: "string", + format: "date", + description: "Estimated arrival date", + example: "2024-01-17" + }, + actualArrival: { + type: "string", + format: "date", + description: "Actual arrival date", + example: "2024-01-17" + }, + status: { + type: "string", + enum: [ + "CREATED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ], + description: "Shipment status", + example: "CREATED" + }, + fromAddress: { + $ref: "#/components/schemas/Address" + }, + toAddress: { + $ref: "#/components/schemas/Address" + }, + packaging: { + type: "object", + description: "Packaging information", + properties: { + palletCount: { + type: "number", + example: 2 + }, + totalPackages: { + type: "number", + example: 10 + }, + packagingType: { + type: "string", + example: "BOX" + } + } + }, + lines: { + type: "array", + description: "Shipment line items", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + example: 1 + }, + sku: { + type: "string", + example: "PROD_WIDGET_001" + }, + quantityShipped: { + type: "number", + example: 10 + }, + quantityOrdered: { + type: "number", + example: 15 + }, + unitOfMeasure: { + type: "string", + example: "EA" + }, + lotNumber: { + type: "string", + example: "LOT_2024_001" + }, + serialNumbers: { + type: "array", + items: { + type: "string" + }, + example: [ + "SN001", + "SN002" + ] + }, + palletId: { + type: "string", + example: "PALLET_001" + }, + packageCount: { + type: "number", + example: 2 + }, + weight: { + type: "number", + example: 25.5 + }, + customs: { + type: "object", + description: "Customs information", + additionalProperties: true + }, + customFields: { + type: "object", + description: "Line-specific custom fields", + additionalProperties: true + } + } + } + }, + ediTransactionId: { + 
type: "string", + description: "Related EDI transaction ID", + example: "507f1f77bcf86cd799439021" + }, + documents: { + type: "array", + description: "Document URLs for bills of lading, labels, etc.", + items: { + type: "string" + }, + example: [ + "https://storage.example.com/bill-of-lading.pdf", + "https://storage.example.com/shipping-label.pdf" + ] + }, + events: { + type: "array", + description: "Shipment tracking events", + items: { + type: "object", + properties: { + ts: { + type: "string", + format: "date-time", + example: "2024-01-15T14:30:00Z" + }, + location: { + type: "string", + example: "Memphis, TN Hub" + }, + status: { + type: "string", + example: "Package scanned at facility" + }, + note: { + type: "string", + example: "Package processed through automated sorting" + } + } + } + }, + flowId: { + type: "string", + description: "Business flow identifier", + example: "FLOW_OUTBOUND_001" + }, + customFields: { + type: "object", + description: "Additional shipment fields", + additionalProperties: true, + example: { + expedited: true, + specialHandling: "FRAGILE" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Shipment creation timestamp", + example: "2024-01-15T08:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last update timestamp", + example: "2024-01-15T16:45:00.000Z" + } + }, + required: [ + "_id", + "worldId", + "shipmentId", + "status", + "toAddress", + "lines" + ] + }, + ERPPayment: { + type: "object", + description: "ERP payment with comprehensive financial and allocation management", + properties: { + _id: { + type: "string", + description: "MongoDB unique identifier", + example: "507f1f77bcf86cd799439020" + }, + __v: { + type: "number", + description: "MongoDB version key", + example: 0 + }, + worldId: { + type: "string", + description: "World environment identifier", + example: "507f1f77bcf86cd799439011" + }, + paymentId: { + type: "string", + description: "Unique payment identifier (auto-generated via generateIdByService)", + example: "PAY_507f1f77bcf86cd799439012" + }, + remittanceId: { + type: "string", + description: "Remittance advice identifier for payment tracking", + example: "REM_507f1f77bcf86cd799439013" + }, + customerId: { + type: "string", + description: "Customer identifier (required for payment processing)", + example: "CUST_507f1f77bcf86cd799439014" + }, + partnerId: { + type: "string", + description: "Partner identifier for B2B payment relationships", + example: "PARTNER_507f1f77bcf86cd799439015" + }, + paymentDate: { + type: "string", + format: "date", + description: "Date when payment was received (required)", + example: "2024-01-15" + }, + currency: { + type: "string", + description: "Payment currency code", + default: "USD", + example: "USD" + }, + totalAmount: { + type: "number", + description: "Total payment amount (required)", + example: 1500 + }, + method: { + type: "string", + enum: [ + "ACH", + "WIRE", + "CHECK", + "CREDIT_CARD", + "OTHER" + ], + description: "Payment method used", + default: "ACH", + example: "ACH" + }, + bankDetails: { + type: "object", + description: "Banking information for payment processing", + properties: { + bankName: { + type: "string", + description: "Name of the bank", + example: "Wells Fargo" + }, + accountNumber: { + type: "string", + description: "Bank account number (masked for security)", + example: "****1234" + }, + routingNumber: { + type: "string", + description: "Bank routing number", + example: "121000248" + }, + swift: { + type: 
"string", + description: "SWIFT code for international payments", + example: "WFBIUS6S" + } + } + }, + allocations: { + type: "array", + description: "Payment allocations to invoices with comprehensive tracking", + items: { + $ref: "#/components/schemas/PaymentAllocation" + } + }, + status: { + type: "string", + enum: [ + "RECEIVED", + "APPLIED", + "UNMATCHED", + "REVERSAL" + ], + description: "Current payment processing status", + default: "RECEIVED", + example: "RECEIVED" + }, + ediTransactionId: { + type: "string", + description: "Reference to inbound EDI 820 transaction ID", + example: "507f1f77bcf86cd799439021" + }, + referenceNumbers: { + type: "array", + description: "Payment reference numbers for tracking", + items: { + type: "string" + }, + example: [ + "REF001", + "CHECK12345" + ] + }, + notes: { + type: "string", + description: "Payment notes and additional information", + example: "Customer payment for invoices INV_001 and INV_002" + }, + flowId: { + type: "string", + description: "Business flow identifier for payment processing", + example: "FLOW_AR_PROCESSING" + }, + customFields: { + type: "object", + description: "Additional payment-specific fields", + additionalProperties: true, + example: { + processingFee: 5, + customerReference: "CUST_PAY_001" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Payment creation timestamp", + example: "2024-01-15T08:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last update timestamp", + example: "2024-01-15T16:45:00.000Z" + } + }, + required: [ + "_id", + "worldId", + "paymentId", + "customerId", + "paymentDate", + "totalAmount" + ] + }, + PaymentAllocation: { + type: "object", + description: "Individual payment allocation to invoice with tracking details", + properties: { + invoiceNumber: { + type: "string", + description: "Invoice number for allocation (required)", + example: "INV_507f1f77bcf86cd799439016" + }, + appliedAmount: { + type: "number", + description: "Amount applied to this invoice (required)", + example: 750 + }, + discountTaken: { + type: "number", + description: "Early payment discount amount taken", + example: 15 + }, + unappliedAmount: { + type: "number", + description: "Amount remaining unapplied for this allocation", + example: 0 + }, + allocationMethod: { + type: "string", + description: "Method used for this allocation", + enum: [ + "FIFO", + "LIFO", + "MANUAL", + "AUTOMATIC" + ], + example: "FIFO" + } + }, + required: [ + "invoiceNumber", + "appliedAmount" + ] + }, + Address: { + type: "object", + description: "Physical address for billing, shipping, or remittance", + properties: { + street1: { type: "string", description: "Street address", example: "123 Main St" }, + street2: { type: "string", description: "Additional address line", example: "Suite 100" }, + city: { type: "string", description: "City name", example: "Atlanta" }, + state: { type: "string", description: "State or province code", example: "GA" }, + postalCode: { type: "string", description: "Postal/ZIP code", example: "30303" }, + country: { type: "string", description: "Country code (ISO 3166-1 alpha-2)", example: "US" }, + }, + required: ["street1", "city", "state", "postalCode", "country"], + }, + TaxDetail: { + type: "object", + description: "Tax detail information for orders, invoices, and line items", + properties: { + taxType: { + type: "string", + description: "Type of tax (e.g., VAT, SALES, STATE, LOCAL)", + example: "VAT" + }, + jurisdiction: { + type: "string", + 
description: "Tax jurisdiction", + example: "US-CA" + }, + taxRate: { + type: "number", + description: "Tax rate as a decimal", + example: 0.08 + }, + taxAmount: { + type: "number", + description: "Calculated tax amount", + example: 24.00 + }, + taxableBase: { + type: "number", + description: "Base amount on which tax is calculated", + example: 300.00 + }, + taxId: { + type: "string", + description: "Tax identifier", + example: "TAX_001" + } + } + }, + ERPOperationsDashboard: { + type: "object", + description: "Aggregated metrics for the ERP Command Center dashboard", + properties: { + orders: { + type: "object", + description: "Order metrics split by purchase orders and sales orders", + properties: { + purchaseOrders: { + type: "object", + description: "Purchase order (inbound) metrics", + properties: { + total: { + type: "number", + description: "Total number of purchase orders", + example: 150 + }, + byStatus: { + type: "object", + description: "Count of orders by status", + additionalProperties: { type: "number" }, + example: { "RECEIVED": 50, "IN_PROGRESS": 30, "COMPLETED": 70 } + }, + recentOrders: { + type: "number", + description: "Orders in the last 30 days", + example: 25 + }, + totalValue: { + type: "number", + description: "Total value of all orders", + example: 125000.00 + }, + averageOrderValue: { + type: "number", + description: "Average order value", + example: 833.33 + } + } + }, + salesOrders: { + type: "object", + description: "Sales order (outbound) metrics", + properties: { + total: { + type: "number", + description: "Total number of sales orders", + example: 200 + }, + byStatus: { + type: "object", + description: "Count of orders by status", + additionalProperties: { type: "number" }, + example: { "RECEIVED": 40, "IN_PROGRESS": 60, "COMPLETED": 100 } + }, + recentOrders: { + type: "number", + description: "Orders in the last 30 days", + example: 45 + }, + totalValue: { + type: "number", + description: "Total value of all orders", + example: 250000.00 + }, + averageOrderValue: { + type: "number", + description: "Average order value", + example: 1250.00 + } + } + } + } + }, + invoices: { + type: "object", + description: "Invoice metrics", + properties: { + total: { + type: "number", + description: "Total number of invoices", + example: 300 + }, + byStatus: { + type: "object", + description: "Count of invoices by status", + additionalProperties: { type: "number" }, + example: { "DRAFT": 20, "SENT": 50, "PAID": 200, "PARTIALLY_PAID": 30 } + }, + totalOutstanding: { + type: "number", + description: "Total outstanding balance", + example: 45000.00 + }, + overdueCount: { + type: "number", + description: "Number of overdue invoices", + example: 12 + }, + paidThisMonth: { + type: "number", + description: "Number of invoices paid this month", + example: 35 + } + } + }, + companies: { + type: "object", + description: "Company metrics", + properties: { + total: { + type: "number", + description: "Total number of companies", + example: 500 + }, + byType: { + type: "object", + description: "Count of companies by type", + additionalProperties: { type: "number" }, + example: { "CUSTOMER": 350, "SUPPLIER": 120, "PARTNER": 30 } + }, + activeCustomers: { + type: "number", + description: "Number of active customers", + example: 300 + }, + activeSuppliers: { + type: "number", + description: "Number of active suppliers", + example: 100 + }, + activeCompanies: { + type: "number", + description: "Total active companies", + example: 420 + } + } + }, + products: { + type: "object", + 
description: "Product metrics", + properties: { + total: { + type: "number", + description: "Total number of products", + example: 1000 + }, + activeProducts: { + type: "number", + description: "Number of active products", + example: 850 + }, + discontinuedProducts: { + type: "number", + description: "Number of discontinued products", + example: 150 + } + } + } + } + } +}; diff --git a/packages/controlmart/src/docs/schemas/finance.schema.ts b/packages/controlmart/src/docs/schemas/finance.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..2f97ca3163e4a091373ee4a5288fd4d5e0922cf0 --- /dev/null +++ b/packages/controlmart/src/docs/schemas/finance.schema.ts @@ -0,0 +1,181 @@ +export const financeSchemas = { + FinanceTransaction: { + type: "object", + description: "Finance transaction with comprehensive financial tracking and business intelligence", + properties: { + _id: { + type: "string", + description: "MongoDB unique identifier", + example: "507f1f77bcf86cd799439020" + }, + __v: { + type: "number", + description: "MongoDB version key", + example: 0 + }, + worldRef: { + type: "object", + description: "World reference for multi-tenant isolation", + properties: { + worldId: { + type: "string", + description: "World environment identifier", + example: "507f1f77bcf86cd799439011" + } + }, + required: [ + "worldId" + ] + }, + transactionId: { + type: "string", + description: "Unique transaction identifier (auto-generated via generateIdByService)", + example: "TRANS_507f1f77bcf86cd799439012" + }, + partnerId: { + type: "string", + description: "Business partner identifier for relationship tracking", + example: "PARTNER_507f1f77bcf86cd799439013" + }, + type: { + type: "string", + enum: [ + "payment_in", + "payment_out" + ], + description: "Transaction direction - incoming or outgoing payment (required)", + example: "payment_in" + }, + amount: { + type: "number", + description: "Transaction amount - must be greater than 0 (required)", + minimum: 0.01, + example: 1500 + }, + sourceType: { + type: "string", + enum: [ + "invoice", + "bill", + "manual", + "interest", + "payment" + ], + description: "Source document type for transaction linkage (required)", + example: "invoice" + }, + sourceId: { + type: "string", + description: "Source document identifier for audit trail (required)", + example: "INV_507f1f77bcf86cd799439014" + }, + metadata: { + type: "object", + description: "Additional transaction-specific data for flexible extensions", + additionalProperties: true, + example: { + paymentMethod: "ACH", + bankReference: "REF123456", + customerReference: "CUST_PAY_001", + exchangeRate: 1, + originalCurrency: "USD" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Transaction creation timestamp", + example: "2024-01-15T08:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last update timestamp", + example: "2024-01-15T16:45:00.000Z" + } + }, + required: [ + "_id", + "worldRef", + "transactionId", + "type", + "amount", + "sourceType", + "sourceId" + ] + }, + CompanyLedger: { + type: "object", + description: "Company ledger with comprehensive financial position tracking and automatic net position calculation", + properties: { + _id: { + type: "string", + description: "MongoDB unique identifier", + example: "507f1f77bcf86cd799439020" + }, + __v: { + type: "number", + description: "MongoDB version key", + example: 0 + }, + worldRef: { + type: "object", + description: "World reference for multi-tenant isolation", + 
properties: { + worldId: { + type: "string", + description: "World environment identifier (unique constraint)", + example: "507f1f77bcf86cd799439011" + } + }, + required: [ + "worldId" + ] + }, + cash: { + type: "number", + description: "Company cash position", + default: 0, + example: 25000 + }, + totalReceivables: { + type: "number", + description: "Total accounts receivable outstanding", + default: 0, + example: 45000 + }, + totalPayables: { + type: "number", + description: "Total accounts payable outstanding", + default: 0, + example: 18000 + }, + netPosition: { + type: "number", + description: "Calculated net financial position (cash + receivables - payables) - Auto-calculated, cannot be manually set", + readOnly: true, + example: 52000 + }, + createdAt: { + type: "string", + format: "date-time", + description: "Ledger creation timestamp", + example: "2024-01-15T08:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last update timestamp", + example: "2024-01-15T16:45:00.000Z" + } + }, + required: [ + "_id", + "worldRef", + "cash", + "totalReceivables", + "totalPayables", + "netPosition" + ] + } +}; diff --git a/packages/controlmart/src/docs/schemas/index.ts b/packages/controlmart/src/docs/schemas/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..22467d12a6fc2f455a5cc6e0b1848eb01fa0a957 --- /dev/null +++ b/packages/controlmart/src/docs/schemas/index.ts @@ -0,0 +1,20 @@ + +import { commonSchemas } from './common.schema'; +import { worldSchemas } from './world.schema'; +import { ediSchemas } from './edi.schema'; +import { erpSchemas } from './erp.schema'; +import { financeSchemas } from './finance.schema'; +import { ticketsSchemas } from './tickets.schema'; +import { tmsSchemas } from './tms.schema'; +import { wmsSchemas } from './wms.schema'; + +export const allSchemas = { + ...commonSchemas, + ...worldSchemas, + ...ediSchemas, + ...erpSchemas, + ...financeSchemas, + ...ticketsSchemas, + ...tmsSchemas, + ...wmsSchemas, +}; diff --git a/packages/controlmart/src/docs/schemas/tickets.schema.ts b/packages/controlmart/src/docs/schemas/tickets.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..6ab337d3b725c23f23e04d6da9358634899c46c7 --- /dev/null +++ b/packages/controlmart/src/docs/schemas/tickets.schema.ts @@ -0,0 +1,418 @@ +export const ticketsSchemas = { + ItsmTicket: { + type: "object", + description: "ITSM ticket for managing incidents, service requests, problems, and changes", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + worldRef: { + type: "object", + properties: { + worldId: { + type: "string", + description: "Reference to the world this ticket belongs to", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + required: [ + "worldId" + ] + }, + title: { + type: "string", + description: "Brief descriptive title of the ticket", + example: "Email server not responding" + }, + description: { + type: "string", + description: "Detailed description of the issue or request", + example: "Users unable to access email. Server appears to be down since 9:00 AM." 
+ }, + requester: { + type: "string", + description: "User ID who created the ticket", + example: "507f1f77bcf86cd799439020" + }, + assignedTo: { + type: "string", + nullable: true, + description: "User ID assigned to handle the ticket", + example: "507f1f77bcf86cd799439021" + }, + status: { + type: "string", + enum: [ + "new", + "open", + "in_progress", + "on_hold", + "resolved", + "closed" + ], + description: "Current status of the ticket", + example: "in_progress" + }, + priority: { + type: "string", + enum: [ + "low", + "medium", + "high", + "critical" + ], + description: "Priority level of the ticket", + example: "high" + }, + impact: { + type: "string", + enum: [ + "low", + "medium", + "high" + ], + description: "Business impact level", + example: "high" + }, + urgency: { + type: "string", + enum: [ + "low", + "medium", + "high" + ], + description: "Time sensitivity level", + example: "medium" + }, + category: { + type: "string", + nullable: true, + description: "Department or category classification", + example: "Infrastructure" + }, + type: { + type: "string", + enum: [ + "incident", + "service_request", + "problem", + "change" + ], + description: "Type of ITSM ticket", + example: "incident" + }, + attachments: { + type: "array", + items: { + type: "object", + properties: { + url: { + type: "string", + description: "URL to the attached file", + example: "https://storage.example.com/attachments/error_logs.txt" + }, + filename: { + type: "string", + description: "Original filename of the attachment", + example: "error_logs.txt" + }, + uploadedAt: { + type: "string", + format: "date-time", + description: "When the file was uploaded", + example: "2024-01-15T10:30:00.000Z" + } + } + }, + description: "File attachments related to the ticket" + }, + resolutionNotes: { + type: "string", + nullable: true, + description: "Notes describing how the ticket was resolved", + example: "Cleared disk space on mail server. Service restored at 11:45 AM." 
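+      // resolutionNotes holds the final outcome summary; step-by-step progress belongs in the workNotes array defined below.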
+ }, + workNotes: { + type: "array", + items: { + $ref: "#/components/schemas/WorkNote" + }, + description: "Array of work notes documenting progress and actions" + }, + createdAt: { + type: "string", + format: "date-time", + description: "When the ticket was created", + example: "2024-01-15T09:15:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "When the ticket was last updated", + example: "2024-01-15T11:45:00.000Z" + }, + metadata: { + type: "object", + description: "Additional context and system metadata", + properties: { + odId: { type: "string", description: "Operational Descriptor ID" }, + odName: { type: "string", description: "Operational Descriptor Name" }, + runId: { type: "string", description: "Unique execution run ID" }, + failedStepId: { type: "string", description: "ID of the step where failure occurred", nullable: true }, + failureType: { type: "string", description: "Type of chaos or error that caused failure", nullable: true }, + contextSnapshots: { + type: "array", + description: "Snapshots of context data at the time of failure", + items: { type: "object" } + } + } + } + }, + required: [ + "_id", + "worldRef", + "title", + "description", + "requester", + "status", + "priority", + "impact", + "urgency", + "type", + "createdAt", + "updatedAt" + ] + }, + WorkNote: { + type: "object", + description: "Work note entry for tracking progress and communication", + properties: { + author: { + type: "string", + description: "User ID of the note author", + example: "tech_support_1" + }, + note: { + type: "string", + description: "Text content of the note", + example: "Investigating server logs. Found disk space issue on mail server." + }, + isPublic: { + type: "boolean", + description: "Whether the note is visible to the requester", + example: false, + default: false + }, + createdAt: { + type: "string", + format: "date-time", + description: "When the note was created", + example: "2024-01-15T10:25:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "When the note was last updated", + example: "2024-01-15T10:25:00.000Z" + } + }, + required: [ + "author", + "note", + "isPublic", + "createdAt", + "updatedAt" + ] + }, + InvariantCheckResult: { + type: "object", + description: "Result of an invariant check", + properties: { + invariantId: { + type: "string", + description: "Unique identifier for the invariant", + example: "INV_001" + }, + description: { + type: "string", + description: "Description of the invariant", + example: "Shipment must belong to the correct warehouse" + }, + passed: { + type: "boolean", + description: "Whether the check passed", + example: true + }, + actual: { + type: "object", + description: "Actual value found", + nullable: true + }, + expected: { + type: "object", + description: "Expected value", + nullable: true + }, + error: { + type: "string", + description: "Error message if applicable", + nullable: true, + example: "Mismatch in warehouse ID" + } + }, + required: ["invariantId", "description", "passed"] + }, + EntityStateCheckResult: { + type: "object", + description: "Result of an entity state check", + properties: { + entityType: { + type: "string", + description: "Type of the entity", + example: "shipment" + }, + entityId: { + type: "string", + description: "ID of the entity", + example: "SHP_12345" + }, + expectedStates: { + type: "array", + items: { + type: "string" + }, + description: "List of allowed states", + example: ["SHIPPED", "DELIVERED"] + }, + actualState: { + type: 
"string", + description: "Actual state of the entity", + example: "PENDING" + }, + passed: { + type: "boolean", + description: "Whether the check passed", + example: false + } + }, + required: ["entityType", "entityId", "expectedStates", "actualState", "passed"] + }, + ConstraintCheckResult: { + type: "object", + description: "Result of a constraint check", + properties: { + constraintId: { + type: "string", + description: "Unique identifier for the constraint", + example: "CONST_001" + }, + description: { + type: "string", + description: "Description of the constraint", + example: "Total weight must not exceed limit" + }, + passed: { + type: "boolean", + description: "Whether the check passed", + example: true + }, + details: { + type: "object", + description: "Additional details about the check", + nullable: true + } + }, + required: ["constraintId", "description", "passed"] + }, + VerificationResult: { + type: "object", + description: "Comprehensive result of the verification process", + properties: { + passed: { + type: "boolean", + description: "Overall pass/fail status of the verification", + example: false + }, + ticketId: { + type: "string", + description: "ID of the ticket being verified", + example: "507f1f77bcf86cd799439011" + }, + worldId: { + type: "string", + description: "ID of the world where verification ran", + example: "550e8400-e29b-41d4-a716-446655440000" + }, + timestamp: { + type: "string", + format: "date-time", + description: "When the verification was performed", + example: "2024-01-15T10:30:00.000Z" + }, + invariantChecks: { + type: "array", + items: { + $ref: "#/components/schemas/InvariantCheckResult" + }, + description: "Results of invariant checks" + }, + entityStateChecks: { + type: "array", + items: { + $ref: "#/components/schemas/EntityStateCheckResult" + }, + description: "Results of entity state checks" + }, + constraintChecks: { + type: "array", + items: { + $ref: "#/components/schemas/ConstraintCheckResult" + }, + description: "Results of constraint checks" + }, + totalChecks: { + type: "integer", + description: "Total number of checks performed", + example: 10 + }, + passedChecks: { + type: "integer", + description: "Number of passed checks", + example: 8 + }, + failedChecks: { + type: "integer", + description: "Number of failed checks", + example: 2 + }, + failureReason: { + type: "string", + description: "High-level reason for failure if applicable", + nullable: true, + example: "Entity state mismatch" + }, + failureDetails: { + type: "object", + description: "Detailed failure information", + nullable: true + }, + verificationDurationMs: { + type: "number", + description: "Duration of verification in milliseconds", + example: 150.5 + } + }, + required: [ + "passed", + "ticketId", + "worldId", + "timestamp", + "totalChecks", + "passedChecks", + "failedChecks", + "verificationDurationMs" + ] + } +}; diff --git a/packages/controlmart/src/docs/schemas/tms.schema.ts b/packages/controlmart/src/docs/schemas/tms.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..9a5e3b1f7b7b47cc9de582e48511d9003b97c99f --- /dev/null +++ b/packages/controlmart/src/docs/schemas/tms.schema.ts @@ -0,0 +1,1716 @@ +export const tmsSchemas = { + TMSShipment: { + type: "object", + description: "Complete TMS shipment record with all tracking and logistics information", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + description: "Formatted ID 
for client use (same as _id)", + example: "507f1f77bcf86cd799439011" + }, + shipmentId: { + type: "string", + description: "Unique shipment identifier", + example: "TMS_SHIP_674565c1234567890abcdef" + }, + shipmentNumber: { + type: "string", + description: "Business shipment number", + example: "SHIP-2024-001234" + }, + status: { + type: "string", + enum: [ + "PLANNED", + "TENDERED", + "ACCEPTED", + "PICKED_UP", + "IN_TRANSIT", + "OUT_FOR_DELIVERY", + "DELIVERED", + "CANCELLED", + "DELAYED", + "EXCEPTION" + ], + description: "Current shipment status", + example: "IN_TRANSIT" + }, + shipmentType: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "TRANSFER", + "RETURN" + ], + description: "Type of shipment movement", + example: "OUTBOUND" + }, + serviceLevel: { + type: "string", + enum: [ + "STANDARD", + "EXPEDITED", + "NEXT_DAY", + "TWO_DAY", + "ECONOMY" + ], + description: "Service level for delivery", + example: "STANDARD" + }, + carrierInfo: { + type: "object", + description: "Carrier and transportation details", + properties: { + carrierId: { + type: "string", + example: "CARRIER_FEDEX_001" + }, + carrierName: { + type: "string", + example: "FedEx Freight" + }, + carrierCode: { + type: "string", + example: "FDXF" + }, + scacCode: { + type: "string", + example: "FXFE" + }, + proNumber: { + type: "string", + example: "PRO123456789" + }, + trackingNumber: { + type: "string", + example: "TRK987654321" + } + } + }, + origin: { + $ref: "#/components/schemas/TMSLocation" + }, + destination: { + $ref: "#/components/schemas/TMSLocation" + }, + dates: { + type: "object", + description: "Important shipment dates", + properties: { + plannedPickupDate: { + type: "string", + format: "date-time", + example: "2024-11-27T08:00:00.000Z" + }, + actualPickupDate: { + type: "string", + format: "date-time", + nullable: true, + description: "Actual pickup timestamp (null until pickup occurs)", + example: "2024-11-27T08:15:00.000Z" + }, + plannedDeliveryDate: { + type: "string", + format: "date-time", + example: "2024-11-29T17:00:00.000Z" + }, + estimatedDeliveryDate: { + type: "string", + format: "date-time", + example: "2024-11-29T16:30:00.000Z" + }, + actualDeliveryDate: { + type: "string", + format: "date-time", + nullable: true, + description: "Actual delivery timestamp (null until delivery occurs)", + example: "2024-11-29T16:45:00.000Z" + } + } + }, + cargo: { + $ref: "#/components/schemas/TMSCargo" + }, + costs: { + type: "object", + nullable: true, + description: "Shipment cost breakdown (optional, may be null if costs not yet calculated)", + properties: { + baseCost: { + type: "number", + nullable: true, + description: "Base freight cost", + example: 1250 + }, + fuelSurcharge: { + type: "number", + nullable: true, + description: "Fuel surcharge amount", + example: 125.5 + }, + accessorialCharges: { + type: "number", + nullable: true, + description: "Additional accessorial charges", + example: 75.25 + }, + totalCost: { + type: "number", + nullable: true, + description: "Total shipment cost", + example: 1450.75 + }, + currency: { + type: "string", + description: "Currency code (ISO 4217)", + example: "USD" + }, + costPerMile: { + type: "number", + nullable: true, + description: "Cost per mile", + example: 2.15 + } + } + }, + currentLocation: { + type: "object", + nullable: true, + description: "Current shipment location (null until tracking begins)", + properties: { + lastKnownPosition: { + type: "object", + properties: { + latitude: { + type: "number", + example: 35.1495 + }, + longitude: { + 
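+              // Assumption: coordinates are decimal degrees; the example values
+              // (35.1495, -90.049) correspond to Memphis, TN, matching the
+              // currentCity/currentState examples below.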
type: "number", + example: -90.049 + } + } + }, + lastUpdateTime: { + type: "string", + format: "date-time", + example: "2024-11-26T14:30:00.000Z" + }, + currentCity: { + type: "string", + example: "Memphis" + }, + currentState: { + type: "string", + example: "TN" + } + } + }, + delays: { + type: "array", + description: "Recorded delays for this shipment", + items: { + $ref: "#/components/schemas/TMSDelay" + } + }, + routeInfo: { + type: "object", + nullable: true, + description: "Route planning and distance information", + properties: { + plannedRoute: { + type: "array", + description: "Planned stops along the route", + items: { + type: "object", + properties: { + stopNumber: { + type: "integer", + description: "Sequential stop number", + example: 1 + }, + locationName: { + type: "string", + description: "Name of the stop location", + example: "Memphis Hub" + }, + arrivalTime: { + type: "string", + format: "date-time", + nullable: true, + description: "Planned arrival time at the stop", + example: "2024-11-27T14:00:00.000Z" + }, + departureTime: { + type: "string", + format: "date-time", + nullable: true, + description: "Planned departure time from the stop", + example: "2024-11-27T16:00:00.000Z" + } + } + } + }, + estimatedDistance: { + type: "number", + nullable: true, + description: "Estimated total distance in miles", + example: 675.5 + }, + estimatedDuration: { + type: "number", + nullable: true, + description: "Estimated total duration in minutes", + example: 720 + } + } + }, + externalEvents: { + type: "array", + description: "External events related to this shipment from external systems", + items: { + type: "object", + properties: { + eventType: { + type: "string", + description: "Type of external event", + example: "EDI_214" + }, + eventDescription: { + type: "string", + description: "Detailed description of the event", + example: "Shipment arrived at cross-dock facility" + }, + eventTime: { + type: "string", + format: "date-time", + description: "Timestamp when the event occurred", + example: "2024-11-27T10:30:00.000Z" + }, + source: { + type: "string", + description: "Source system that generated the event", + example: "CARRIER_EDI" + } + } + } + }, + references: { + type: "object", + nullable: true, + description: "Business reference numbers (optional)", + properties: { + orderId: { + type: "string", + nullable: true, + description: "Associated order ID", + example: "ORD-2024-5678" + }, + purchaseOrderNumber: { + type: "string", + nullable: true, + description: "Purchase order number", + example: "PO-2024-9012" + }, + invoiceNumber: { + type: "string", + nullable: true, + description: "Invoice number", + example: "INV-2024-3456" + }, + customerReference: { + type: "string", + nullable: true, + description: "Customer reference number", + example: "CUST-REF-789" + }, + loadNumber: { + type: "string", + nullable: true, + description: "Load number", + example: "LOAD-2024-012" + } + } + }, + createdAt: { + type: "string", + format: "date-time", + example: "2024-11-26T08:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + example: "2024-11-26T14:30:00.000Z" + } + }, + required: [ + "_id", + "id", + "shipmentId", + "shipmentNumber", + "status", + "origin", + "destination" + ] + }, + TMSShipmentInput: { + type: "object", + description: "Input data for creating a new TMS shipment", + properties: { + shipmentNumber: { + type: "string", + description: "Business shipment number (required)", + example: "SHIP-2024-001234" + }, + shipmentType: { + type: "string", + 
enum: [ + "INBOUND", + "OUTBOUND", + "TRANSFER", + "RETURN" + ], + description: "Type of shipment movement", + example: "OUTBOUND" + }, + serviceLevel: { + type: "string", + enum: [ + "STANDARD", + "EXPEDITED", + "NEXT_DAY", + "TWO_DAY", + "ECONOMY" + ], + description: "Service level for delivery", + example: "STANDARD" + }, + origin: { + $ref: "#/components/schemas/TMSLocationInput" + }, + destination: { + $ref: "#/components/schemas/TMSLocationInput" + }, + dates: { + type: "object", + description: "Planned shipment dates", + properties: { + plannedPickupDate: { + type: "string", + format: "date-time", + example: "2024-11-27T08:00:00.000Z" + }, + plannedDeliveryDate: { + type: "string", + format: "date-time", + example: "2024-11-29T17:00:00.000Z" + } + } + }, + cargo: { + $ref: "#/components/schemas/TMSCargoInput" + }, + references: { + type: "object", + description: "Business reference numbers", + properties: { + orderId: { + type: "string", + example: "ORD-2024-5678" + }, + purchaseOrderNumber: { + type: "string", + example: "PO-2024-9012" + }, + customerReference: { + type: "string", + example: "CUST-REF-789" + } + } + }, + laneId: { + type: "string", + example: "LANE_ATL_MEM_001" + } + }, + required: [ + "shipmentNumber", + "origin", + "destination" + ] + }, + TMSLocation: { + type: "object", + description: "Complete location information for TMS operations", + properties: { + locationId: { + type: "string", + example: "DC_001" + }, + locationName: { + type: "string", + example: "Main Distribution Center" + }, + address: { + type: "object", + description: "Physical address of the location", + properties: { + street1: { + type: "string", + description: "Street address including number and street name", + example: "1000 Industrial Blvd" + }, + city: { + type: "string", + description: "City name", + example: "Atlanta" + }, + state: { + type: "string", + description: "State or province code", + example: "GA" + }, + postalCode: { + type: "string", + description: "Postal or ZIP code", + example: "30309" + }, + country: { + type: "string", + description: "Country code (ISO 3166-1 alpha-2)", + example: "US" + } + } + }, + coordinates: { + type: "object", + description: "Geographic coordinates of the location", + properties: { + latitude: { + type: "number", + description: "Latitude coordinate", + example: 33.749 + }, + longitude: { + type: "number", + description: "Longitude coordinate", + example: -84.388 + } + } + }, + contactName: { + type: "string", + description: "Name of the primary contact at this location", + example: "Shipping Manager" + }, + contactPhone: { + type: "string", + description: "Phone number for the primary contact", + example: "555-0123" + } + }, + required: [ + "locationName", + "address" + ] + }, + TMSLocationInput: { + type: "object", + description: "Input data for TMS location", + properties: { + locationId: { + type: "string", + description: "Unique identifier for the location", + example: "DC_001" + }, + locationName: { + type: "string", + description: "Human-readable name for the location", + example: "Main Distribution Center" + }, + address: { + type: "object", + description: "Physical address of the location", + properties: { + street1: { + type: "string", + description: "Street address including number and street name", + example: "1000 Industrial Blvd" + }, + city: { + type: "string", + description: "City name", + example: "Atlanta" + }, + state: { + type: "string", + description: "State or province code", + example: "GA" + }, + postalCode: { + type: 
"string", + description: "Postal or ZIP code", + example: "30309" + }, + country: { + type: "string", + description: "Country code (ISO 3166-1 alpha-2)", + example: "US" + } + }, + required: [ + "street", + "city", + "state", + "zipCode" + ] + }, + coordinates: { + type: "object", + description: "Geographic coordinates of the location", + properties: { + latitude: { + type: "number", + description: "Latitude coordinate", + example: 33.749 + }, + longitude: { + type: "number", + description: "Longitude coordinate", + example: -84.388 + } + } + }, + contactName: { + type: "string", + description: "Name of the primary contact at this location", + example: "Shipping Manager" + }, + contactPhone: { + type: "string", + description: "Phone number for the primary contact", + example: "555-0123" + } + }, + required: [ + "locationName", + "address" + ] + }, + TMSCargo: { + type: "object", + description: "Cargo and freight information", + properties: { + totalWeight: { + type: "number", + example: 15000 + }, + totalWeightUOM: { + type: "string", + example: "LBS" + }, + totalVolume: { + type: "number", + example: 800 + }, + totalVolumeUOM: { + type: "string", + example: "CUFT" + }, + palletCount: { + type: "integer", + example: 20 + }, + packageCount: { + type: "integer", + example: 100 + }, + commodityType: { + type: "string", + example: "General Merchandise" + }, + hazmat: { + type: "boolean", + example: false + }, + temperatureControlled: { + type: "boolean", + example: false + }, + temperatureRange: { + type: "object", + properties: { + min: { + type: "number", + example: 32 + }, + max: { + type: "number", + example: 45 + }, + uom: { + type: "string", + enum: [ + "F", + "C" + ], + example: "F" + } + } + } + } + }, + TMSCargoInput: { + type: "object", + description: "Input data for TMS cargo", + properties: { + totalWeight: { + type: "number", + example: 15000 + }, + totalWeightUOM: { + type: "string", + example: "LBS" + }, + totalVolume: { + type: "number", + example: 800 + }, + totalVolumeUOM: { + type: "string", + example: "CUFT" + }, + palletCount: { + type: "integer", + example: 20 + }, + packageCount: { + type: "integer", + example: 100 + }, + commodityType: { + type: "string", + example: "General Merchandise" + }, + hazmat: { + type: "boolean", + example: false + }, + temperatureControlled: { + type: "boolean", + example: false + } + } + }, + TMSDelay: { + type: "object", + description: "Delay information for shipments", + properties: { + delayType: { + type: "string", + enum: [ + "WEATHER", + "TRAFFIC", + "MECHANICAL", + "CARRIER", + "CUSTOMS", + "OTHER" + ], + example: "WEATHER" + }, + reason: { + type: "string", + example: "Severe thunderstorms causing safety delays" + }, + startTime: { + type: "string", + format: "date-time", + example: "2024-11-26T16:00:00.000Z" + }, + endTime: { + type: "string", + format: "date-time", + example: "2024-11-26T18:00:00.000Z" + }, + estimatedDelay: { + type: "number", + example: 120 + } + }, + required: [ + "delayType", + "reason", + "startTime" + ] + }, + TMSShipmentStatusEvent: { + type: "object", + description: "Status event record for shipment tracking", + properties: { + _id: { + type: "string", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + example: "507f1f77bcf86cd799439011", + description: "Formatted ID for client use (same as _id)" + }, + eventId: { + type: "string", + example: "TMS_EVENT_674565c1234567890abcdef" + }, + shipmentId: { + type: "string", + example: "SHIP-2024-001234" + }, + eventType: { + type: "string", + 
enum: [ + "STATUS_CHANGE", + "LOCATION_UPDATE", + "ETA_UPDATE", + "DELAY", + "EXCEPTION", + "MILESTONE" + ], + example: "STATUS_CHANGE" + }, + eventTime: { + type: "string", + format: "date-time", + example: "2024-11-26T14:30:00.000Z" + }, + statusInfo: { + type: "object", + description: "Status change details for STATUS_CHANGE events", + properties: { + previousStatus: { + type: "string", + description: "Previous shipment status before the change", + example: "ACCEPTED" + }, + newStatus: { + type: "string", + description: "New shipment status after the change", + example: "PICKED_UP" + } + } + }, + locationInfo: { + type: "object", + description: "Location details for LOCATION_UPDATE events", + properties: { + latitude: { + type: "number", + description: "Latitude coordinate of the current position", + example: 35.1495 + }, + longitude: { + type: "number", + description: "Longitude coordinate of the current position", + example: -90.049 + }, + city: { + type: "string", + description: "City name at the current location", + example: "Memphis" + }, + state: { + type: "string", + description: "State or province code", + example: "TN" + }, + zipCode: { + type: "string", + description: "Postal or ZIP code at the current location", + example: "38103" + }, + facility: { + type: "string", + description: "Name of the facility if at a known location", + example: "Memphis Hub" + } + } + }, + etaInfo: { + type: "object", + description: "ETA update details for ETA_UPDATE events", + properties: { + previousETA: { + type: "string", + format: "date-time", + description: "Previous estimated time of arrival", + example: "2024-11-29T17:00:00.000Z" + }, + newETA: { + type: "string", + format: "date-time", + description: "Updated estimated time of arrival", + example: "2024-11-29T16:30:00.000Z" + }, + delayMinutes: { + type: "number", + description: "Delay in minutes (negative values indicate earlier arrival)", + example: -30 + } + } + }, + exceptionInfo: { + type: "object", + description: "Exception details for EXCEPTION events", + properties: { + exceptionType: { + type: "string", + description: "Category or type of the exception", + example: "WEATHER_DELAY" + }, + severity: { + type: "string", + enum: [ + "LOW", + "MEDIUM", + "HIGH", + "CRITICAL" + ], + description: "Severity level of the exception", + example: "MEDIUM" + }, + description: { + type: "string", + description: "Detailed description of the exception", + example: "Severe weather causing transit delays" + }, + resolution: { + type: "string", + description: "Current resolution or mitigation action", + example: "Monitoring weather for safe continuation" + } + } + }, + source: { + type: "string", + enum: [ + "API", + "EDI", + "MANUAL", + "GPS", + "CARRIER_PORTAL" + ], + example: "EDI" + }, + createdAt: { + type: "string", + format: "date-time", + example: "2024-11-26T14:30:00.000Z" + } + }, + required: [ + "_id", + "id", + "eventId", + "shipmentId", + "eventType", + "eventTime" + ] + }, + TMSShipmentStatusEventInput: { + type: "object", + description: "Input data for creating a shipment status event", + properties: { + eventType: { + type: "string", + enum: [ + "STATUS_CHANGE", + "LOCATION_UPDATE", + "ETA_UPDATE", + "DELAY", + "EXCEPTION", + "MILESTONE" + ], + description: "Type of event being recorded", + example: "STATUS_CHANGE" + }, + eventTime: { + type: "string", + format: "date-time", + description: "When the event occurred", + example: "2024-11-26T14:30:00.000Z" + }, + statusInfo: { + type: "object", + description: "Status change information 
(for STATUS_CHANGE events)", + properties: { + previousStatus: { + type: "string", + description: "Previous shipment status before the change", + example: "ACCEPTED" + }, + newStatus: { + type: "string", + description: "New shipment status after the change", + example: "PICKED_UP" + } + } + }, + locationInfo: { + type: "object", + description: "Location information (for LOCATION_UPDATE events)", + properties: { + latitude: { + type: "number", + description: "Latitude coordinate of the current position", + example: 35.1495 + }, + longitude: { + type: "number", + description: "Longitude coordinate of the current position", + example: -90.049 + }, + city: { + type: "string", + description: "City name at the current location", + example: "Memphis" + }, + state: { + type: "string", + description: "State or province code", + example: "TN" + }, + zipCode: { + type: "string", + description: "Postal or ZIP code at the current location", + example: "38103" + }, + facility: { + type: "string", + description: "Name of the facility if at a known location", + example: "Memphis Hub" + } + } + }, + etaInfo: { + type: "object", + description: "ETA information (for ETA_UPDATE events)", + properties: { + previousETA: { + type: "string", + format: "date-time", + description: "Previous estimated time of arrival" + }, + newETA: { + type: "string", + format: "date-time", + description: "Updated estimated time of arrival" + }, + delayMinutes: { + type: "number", + description: "Delay in minutes (negative values indicate earlier arrival)" + } + } + }, + exceptionInfo: { + type: "object", + description: "Exception information (for EXCEPTION events)", + properties: { + exceptionType: { + type: "string", + description: "Category or type of the exception", + example: "WEATHER_DELAY" + }, + severity: { + type: "string", + enum: [ + "LOW", + "MEDIUM", + "HIGH", + "CRITICAL" + ], + description: "Severity level of the exception" + }, + description: { + type: "string", + description: "Detailed description of the exception", + example: "Severe weather causing transit delays" + }, + resolution: { + type: "string", + description: "Current resolution or mitigation action", + example: "Monitoring weather conditions" + } + } + }, + source: { + type: "string", + enum: [ + "API", + "EDI", + "MANUAL", + "GPS", + "CARRIER_PORTAL" + ], + description: "Source of the event data", + example: "API" + }, + rawData: { + type: "object", + description: "Raw event data for audit purposes", + example: {} + } + }, + required: [ + "eventType", + "eventTime" + ] + }, + TMSInboundTrailer: { + type: "object", + description: "Complete TMS inbound trailer record for dock scheduling and operations", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + description: "Formatted document identifier for API responses", + example: "507f1f77bcf86cd799439011" + }, + trailerId: { + type: "string", + description: "Unique trailer identifier", + example: "TRAILER_001" + }, + trailerNumber: { + type: "string", + description: "Physical trailer number/license plate", + example: "TR-12345" + }, + status: { + type: "string", + enum: [ + "SCHEDULED", + "EN_ROUTE", + "CHECKED_IN", + "AT_DOCK", + "UNLOADING", + "UNLOADED", + "DEPARTED", + "CANCELLED", + "DELAYED" + ], + description: "Current operational status of the trailer", + example: "CHECKED_IN" + }, + carrierInfo: { + type: "object", + description: "Carrier and driver information", + properties: { + carrierId: 
{ + type: "string", + description: "Associated carrier identifier", + example: "CARRIER_FEDEX_001" + }, + carrierName: { + type: "string", + description: "Carrier company name", + example: "FedEx Corporation" + }, + driverName: { + type: "string", + description: "Driver full name", + example: "John Smith" + }, + driverPhone: { + type: "string", + description: "Driver contact phone number", + example: "+1-555-123-4567" + } + } + }, + appointmentInfo: { + type: "object", + description: "Appointment scheduling and timing details", + properties: { + appointmentId: { + type: "string", + description: "Unique appointment identifier", + example: "APPT-ATL-001" + }, + scheduledArrival: { + type: "string", + format: "date-time", + description: "Scheduled arrival date and time", + example: "2024-01-20T08:00:00.000Z" + }, + scheduledDeparture: { + type: "string", + format: "date-time", + description: "Scheduled departure time", + example: "2024-01-20T16:00:00.000Z" + }, + estimatedArrival: { + type: "string", + format: "date-time", + description: "Current estimated arrival time", + example: "2024-01-20T08:15:00.000Z" + }, + actualArrival: { + type: "string", + format: "date-time", + description: "Actual arrival timestamp", + example: "2024-01-20T08:12:00.000Z" + }, + actualDeparture: { + type: "string", + format: "date-time", + description: "Actual departure timestamp", + example: "2024-01-20T17:30:00.000Z" + }, + dockDoor: { + type: "string", + description: "Assigned dock door", + example: "DOCK-A-001" + } + } + }, + facilityInfo: { + type: "object", + description: "Distribution center and facility details", + properties: { + dcId: { + type: "string", + description: "Distribution center identifier", + example: "DC_ATL_001" + }, + facilityName: { + type: "string", + description: "Facility name", + example: "Atlanta Distribution Center" + }, + address: { + type: "object", + description: "Facility address information", + properties: { + street: { + type: "string", + example: "123 Industrial Blvd" + }, + city: { + type: "string", + example: "Atlanta" + }, + state: { + type: "string", + example: "GA" + }, + zipCode: { + type: "string", + example: "30309" + } + } + } + } + }, + shipmentIds: { + type: "array", + items: { + type: "string" + }, + description: "Associated shipment identifiers", + example: [ + "SHIPMENT_001", + "SHIPMENT_002" + ] + }, + cargo: { + type: "object", + description: "Cargo and freight information", + properties: { + purchaseOrders: { + type: "array", + items: { + type: "string" + }, + description: "Associated purchase order numbers", + example: [ + "PO-2024-001", + "PO-2024-002" + ] + }, + expectedPallets: { + type: "integer", + description: "Expected number of pallets", + example: 20 + }, + actualPallets: { + type: "integer", + description: "Actual number of pallets received", + example: 18 + }, + trailerType: { + type: "string", + enum: [ + "DRY_VAN", + "REEFER", + "FLATBED", + "TANKER", + "INTERMODAL" + ], + description: "Type of trailer equipment", + example: "DRY_VAN" + }, + sealNumber: { + type: "string", + description: "Trailer seal number for security", + example: "SEAL-789456" + } + } + }, + delays: { + type: "array", + description: "Delay events and disruptions", + items: { + type: "object", + properties: { + delayType: { + type: "string", + enum: [ + "TRAFFIC", + "WEATHER", + "CARRIER", + "DOCK_AVAILABILITY", + "OTHER" + ], + description: "Type of delay encountered", + example: "TRAFFIC" + }, + reason: { + type: "string", + description: "Detailed reason for the delay", + 
example: "Heavy traffic on I-75 due to construction" + }, + reportedAt: { + type: "string", + format: "date-time", + description: "When the delay was first reported", + example: "2024-01-20T07:15:00.000Z" + }, + estimatedDelay: { + type: "integer", + description: "Estimated delay duration in minutes", + example: 45 + } + } + } + }, + worldRef: { + type: "object", + description: "World reference information", + properties: { + worldId: { + type: "string", + description: "World identifier", + example: "507f1f77bcf86cd799439011" + }, + dcId: { + type: "string", + description: "Distribution center context", + example: "DC_ATL_001" + } + } + }, + customFields: { + type: "object", + description: "Additional custom fields for extensibility", + additionalProperties: true, + example: { + specialInstructions: "Fragile items - handle with care", + priority: "HIGH" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the trailer record was created", + example: "2024-01-19T10:30:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the trailer record was last updated", + example: "2024-01-20T08:15:00.000Z" + } + }, + required: [ + "_id", + "trailerId", + "trailerNumber", + "status", + "appointmentInfo", + "facilityInfo", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + TMSCarrier: { + type: "object", + description: "Complete TMS carrier record with profile, compliance, and performance information", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + description: "Formatted ID for client use (same as _id)", + example: "507f1f77bcf86cd799439011" + }, + carrierId: { + type: "string", + description: "Unique carrier identifier", + example: "TMS_CARRIER_674565c1234567890abcdef" + }, + carrierCode: { + type: "string", + description: "Business carrier code (SCAC or internal)", + example: "FDXF" + }, + carrierName: { + type: "string", + description: "Official carrier company name", + example: "FedEx Freight" + }, + carrierType: { + type: "string", + enum: ["LTL", "FTL", "PARCEL", "INTERMODAL", "COURIER", "RAIL", "AIR", "OCEAN"], + description: "Transportation mode and service type", + example: "FTL" + }, + status: { + type: "string", + enum: ["ACTIVE", "INACTIVE", "SUSPENDED", "PENDING_APPROVAL"], + description: "Current operational status of the carrier", + example: "ACTIVE" + }, + contact: { + type: "object", + description: "Carrier contact information", + properties: { + primaryContactName: { + type: "string", + description: "Name of the primary contact person", + example: "John Smith" + }, + email: { + type: "string", + description: "Contact email address", + example: "dispatch@acmetransport.com" + }, + phone: { + type: "string", + description: "Contact phone number", + example: "555-0199" + }, + address: { + type: "object", + description: "Carrier business address", + properties: { + street: { + type: "string", + example: "100 Logistics Blvd" + }, + city: { + type: "string", + example: "Atlanta" + }, + state: { + type: "string", + example: "GA" + }, + zipCode: { + type: "string", + example: "30309" + }, + country: { + type: "string", + example: "US" + } + } + } + } + }, + compliance: { + type: "object", + description: "Carrier compliance and certification information", + properties: { + dotNumber: { + type: "string", + description: "US DOT number", + example: "12345678" + }, + mcNumber: { + type: "string", 
+ description: "Motor carrier number", + example: "MC-987654" + }, + scacCode: { + type: "string", + description: "Standard Carrier Alpha Code", + example: "ACME" + }, + smartWayCertified: { + type: "boolean", + description: "EPA SmartWay certification status", + example: true + }, + insuranceExpiry: { + type: "string", + format: "date-time", + description: "Insurance policy expiration date", + example: "2025-12-31T23:59:59.999Z" + }, + safetyRating: { + type: "string", + enum: ["SATISFACTORY", "CONDITIONAL", "UNSATISFACTORY", "NOT_RATED"], + description: "FMCSA safety rating", + example: "SATISFACTORY" + } + } + }, + performance: { + type: "object", + description: "Carrier performance metrics", + properties: { + onTimeDeliveryRate: { + type: "number", + description: "Percentage of on-time deliveries (0-1)", + example: 0.95 + }, + damageClaimRate: { + type: "number", + description: "Percentage of shipments with damage claims (0-1)", + example: 0.002 + }, + averageTransitTime: { + type: "number", + description: "Average transit time in hours", + example: 48 + }, + totalShipmentsCompleted: { + type: "integer", + description: "Total number of completed shipments", + example: 1250 + }, + lastPerformanceUpdate: { + type: "string", + format: "date-time", + description: "Timestamp of last performance metrics update", + example: "2024-01-15T10:30:00.000Z" + } + } + }, + serviceRegions: { + type: "array", + items: { + type: "string" + }, + description: "List of service regions/states", + example: ["GA", "FL", "SC", "NC", "TN"] + }, + worldRef: { + $ref: "#/components/schemas/WorldRef" + }, + customFields: { + type: "object", + description: "Additional custom fields for extensibility", + additionalProperties: true, + example: { + preferredLanes: ["ATL-MEM", "ATL-BNA"], + contractType: "DEDICATED" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the carrier record was created", + example: "2024-01-10T08:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the carrier record was last updated", + example: "2024-01-15T10:30:00.000Z" + } + }, + required: [ + "_id", + "id", + "carrierId", + "carrierCode", + "carrierName", + "status" + ] + }, + TMSCarrierInput: { + type: "object", + description: "Input data for creating or updating a TMS carrier", + properties: { + carrierCode: { + type: "string", + description: "Business carrier code (SCAC or internal) - must be unique", + example: "ACME" + }, + carrierName: { + type: "string", + description: "Official carrier company name", + example: "ACME Transportation" + }, + carrierType: { + type: "string", + enum: ["LTL", "FTL", "PARCEL", "INTERMODAL", "COURIER", "RAIL", "AIR", "OCEAN"], + description: "Transportation mode and service type", + example: "FTL" + }, + status: { + type: "string", + enum: ["ACTIVE", "INACTIVE", "SUSPENDED", "PENDING_APPROVAL"], + description: "Initial carrier status (defaults to ACTIVE)", + example: "ACTIVE" + }, + contact: { + type: "object", + description: "Carrier contact information", + properties: { + primaryContactName: { + type: "string", + description: "Name of the primary contact person", + example: "John Smith" + }, + email: { + type: "string", + description: "Contact email address", + example: "dispatch@acmetransport.com" + }, + phone: { + type: "string", + description: "Contact phone number", + example: "555-0199" + }, + address: { + type: "object", + description: "Carrier business address", + properties: { + street: { + type: 
"string", + example: "100 Logistics Blvd" + }, + city: { + type: "string", + example: "Atlanta" + }, + state: { + type: "string", + example: "GA" + }, + zipCode: { + type: "string", + example: "30309" + }, + country: { + type: "string", + example: "US" + } + } + } + } + }, + compliance: { + type: "object", + description: "Carrier compliance and certification information", + properties: { + dotNumber: { + type: "string", + description: "US DOT number", + example: "12345678" + }, + mcNumber: { + type: "string", + description: "Motor carrier number", + example: "MC-987654" + }, + scacCode: { + type: "string", + description: "Standard Carrier Alpha Code", + example: "ACME" + }, + smartWayCertified: { + type: "boolean", + description: "EPA SmartWay certification status", + example: true + }, + insuranceExpiry: { + type: "string", + format: "date-time", + description: "Insurance policy expiration date", + example: "2025-12-31T23:59:59.999Z" + }, + safetyRating: { + type: "string", + enum: ["SATISFACTORY", "CONDITIONAL", "UNSATISFACTORY", "NOT_RATED"], + description: "FMCSA safety rating", + example: "SATISFACTORY" + } + } + }, + performance: { + type: "object", + description: "Initial performance metrics (optional)", + properties: { + onTimeDeliveryRate: { + type: "number", + description: "Percentage of on-time deliveries (0-1)", + example: 0.95 + }, + damageClaimRate: { + type: "number", + description: "Percentage of shipments with damage claims (0-1)", + example: 0.002 + }, + totalShipmentsCompleted: { + type: "integer", + description: "Total number of completed shipments", + example: 0 + } + } + }, + serviceRegions: { + type: "array", + items: { + type: "string" + }, + description: "List of service regions/states", + example: ["GA", "FL", "SC", "NC", "TN"] + }, + customFields: { + type: "object", + description: "Additional custom fields for extensibility", + additionalProperties: true + } + }, + required: [ + "carrierCode", + "carrierName", + "carrierType" + ] + } +}; diff --git a/packages/controlmart/src/docs/schemas/wms.schema.ts b/packages/controlmart/src/docs/schemas/wms.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..22742df97fa28d1d6e1565289ad65f205fae6e3a --- /dev/null +++ b/packages/controlmart/src/docs/schemas/wms.schema.ts @@ -0,0 +1,4153 @@ +export const wmsSchemas = { + WMSBin: { + type: "object", + description: "Complete WMS warehouse bin record for inventory storage and location management", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + description: "Formatted document identifier for API responses", + example: "507f1f77bcf86cd799439011" + }, + binId: { + type: "string", + description: "Unique bin identifier", + example: "BIN_ATL_A01_001" + }, + binCode: { + type: "string", + description: "Human-readable bin code for operational identification", + example: "A01-B05-L02-P03" + }, + warehouseId: { + type: "string", + description: "Parent warehouse identifier", + example: "WH_ATL_001" + }, + zoneId: { + type: "string", + description: "Zone identifier within warehouse hierarchy", + example: "ZONE_PICK_A" + }, + aisleId: { + type: "string", + description: "Aisle identifier for location organization", + example: "AISLE_A01" + }, + location: { + type: "object", + description: "Precise physical location within warehouse", + properties: { + aisle: { + type: "string", + description: "Aisle designation", + example: "A01" + }, + bay: { + type: 
"string", + description: "Bay designation within aisle", + example: "B05" + }, + level: { + type: "integer", + description: "Level/tier number", + example: 2 + }, + position: { + type: "string", + description: "Position within bay", + example: "P03" + } + } + }, + binType: { + type: "string", + enum: [ + "PALLET", + "SHELF", + "FLOOR", + "CASE_FLOW", + "RESERVE", + "PICK_FACE" + ], + description: "Type of bin for operational classification and workflow", + example: "PICK_FACE" + }, + locationType: { + type: "string", + enum: [ + "STORAGE", + "STAGING", + "DOCK", + "QC", + "RETURN" + ], + description: "Functional location type for operational context", + example: "STORAGE" + }, + capacity: { + type: "object", + description: "Storage capacity constraints and limits", + properties: { + maxWeightLbs: { + type: "number", + description: "Maximum weight capacity in pounds", + example: 2000 + }, + maxCubicFeet: { + type: "number", + description: "Maximum volume capacity in cubic feet", + example: 50.5 + }, + maxPallets: { + type: "integer", + description: "Maximum pallet capacity", + example: 1 + } + } + }, + status: { + type: "string", + enum: [ + "AVAILABLE", + "OCCUPIED", + "RESERVED", + "DAMAGED", + "BLOCKED" + ], + description: "Current operational status of the bin", + example: "AVAILABLE" + }, + abcClassification: { + type: "string", + enum: [ + "A", + "B", + "C" + ], + description: "ABC velocity classification for inventory management optimization", + example: "A" + }, + pickable: { + type: "boolean", + description: "Whether bin is available for picking operations", + example: true + }, + lastInventoryCheck: { + type: "string", + format: "date-time", + description: "Timestamp of last inventory verification", + example: "2024-01-20T10:30:00.000Z" + }, + worldRef: { + type: "object", + description: "World reference information for multi-tenant context", + properties: { + worldId: { + type: "string", + description: "World identifier", + example: "507f1f77bcf86cd799439011" + } + } + }, + customFields: { + type: "object", + description: "Additional warehouse-specific bin attributes and metadata", + additionalProperties: true, + example: { + temperatureControlled: true, + hazMatApproved: false, + cleaningRequired: false, + equipmentType: "FORKLIFT_ACCESSIBLE" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the bin record was created", + example: "2024-01-15T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the bin record was last updated", + example: "2024-01-20T14:30:00.000Z" + } + }, + required: [ + "_id", + "binId", + "binCode", + "warehouseId", + "zoneId", + "status", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSInboundOrder: { + type: "object", + description: "Complete inbound order for receiving inventory into the warehouse with vendor coordination and line-item tracking", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + inboundOrderId: { + type: "string", + description: "Unique identifier for the inbound order, auto-generated using WMS service prefix", + example: "wms_inbound-order_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Warehouse identifier where inventory will be received", + example: "wms_warehouse_674565c1234567890abcdef" + }, + poNumber: { + type: "string", + nullable: true, + description: "Purchase order number for ERP integration and vendor 
coordination", + example: "PO-2024-001234" + }, + asnNumber: { + type: "string", + nullable: true, + description: "Advanced Ship Notice number from vendor for delivery coordination", + example: "ASN-VND001-20241127" + }, + vendor: { + type: "object", + nullable: true, + description: "Complete vendor information and contact details for coordination", + properties: { + vendorId: { + type: "string", + description: "Unique vendor identifier from ERP system", + example: "VND-SWIFT-001" + }, + vendorName: { + type: "string", + description: "Vendor company name for identification", + example: "Swift Manufacturing Co." + }, + contactEmail: { + type: "string", + format: "email", + description: "Primary vendor contact email for communication", + example: "receiving@swift-mfg.com" + }, + contactPhone: { + type: "string", + description: "Primary vendor contact phone for coordination", + example: "+1-555-0123" + } + } + }, + orderType: { + type: "string", + enum: [ + "PO", + "RETURN", + "TRANSFER", + "SAMPLE" + ], + nullable: true, + description: "Type of inbound order determining processing workflow", + example: "PO" + }, + orderStatus: { + type: "string", + enum: [ + "EXPECTED", + "IN_TRANSIT", + "RECEIVING", + "RECEIVED", + "CLOSED", + "CANCELLED" + ], + description: "Current operational status affecting receiving operations", + example: "EXPECTED" + }, + dates: { + type: "object", + nullable: true, + description: "Important dates for receiving coordination and timeline tracking", + properties: { + expectedArrival: { + type: "string", + format: "date-time", + description: "Expected delivery date and time for planning", + example: "2024-11-28T10:00:00Z" + }, + actualArrival: { + type: "string", + format: "date-time", + nullable: true, + description: "Actual delivery date and time for performance tracking", + example: "2024-11-28T09:45:00Z" + }, + receivingStarted: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp when receiving operations began", + example: "2024-11-28T10:15:00Z" + }, + receivingCompleted: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp when receiving operations completed", + example: "2024-11-28T14:30:00Z" + } + } + }, + appointmentId: { + type: "string", + nullable: true, + description: "Associated appointment ID for dock scheduling coordination", + example: "tms_appointment_674565c1234567890abcdef" + }, + totals: { + type: "object", + nullable: true, + description: "Order totals for capacity planning and resource allocation", + properties: { + pallets: { + type: "number", + description: "Total number of pallets expected for space planning", + example: 5 + }, + cases: { + type: "number", + description: "Total number of cases expected for handling planning", + example: 120 + }, + units: { + type: "number", + description: "Total number of individual units for inventory planning", + example: 2400 + }, + expectedLines: { + type: "number", + description: "Number of different product lines for processing planning", + example: 8 + }, + receivedLines: { + type: "number", + description: "Number of product lines completely received", + example: 6 + } + } + }, + lines: { + type: "array", + description: "Product line items with receiving specifications and progress tracking", + items: { + type: "object", + properties: { + lineNumber: { + type: "number", + description: "Sequential line number for tracking and identification", + example: 1 + }, + productId: { + type: "string", + description: "Product identifier from 
catalog for inventory management", + example: "PROD-WIDGET-001" + }, + sku: { + type: "string", + description: "Stock keeping unit identifier for operational reference", + example: "SKU-WDG-BLU-SM" + }, + productName: { + type: "string", + description: "Human-readable product name for identification", + example: "Blue Widget Small" + }, + expectedQuantity: { + type: "number", + description: "Expected quantity to receive for planning and verification", + example: 300 + }, + receivedQuantity: { + type: "number", + description: "Actual quantity received for progress tracking", + example: 300 + }, + uom: { + type: "string", + description: "Unit of measure (EA, CS, PLT, etc.) for quantity specification", + example: "EA" + }, + lotNumber: { + type: "string", + nullable: true, + description: "Lot number for batch tracking and traceability compliance", + example: "LOT-2024-W47" + }, + expirationDate: { + type: "string", + format: "date-time", + nullable: true, + description: "Product expiration date for perishable item management", + example: "2025-11-27T00:00:00Z" + }, + lineStatus: { + type: "string", + enum: [ + "EXPECTED", + "RECEIVING", + "RECEIVED", + "CLOSED" + ], + description: "Current status of this specific product line", + example: "RECEIVED" + } + } + } + }, + receivingNotes: { + type: "string", + nullable: true, + description: "Special instructions and notes for receiving team coordination", + example: "Handle with care - fragile items. Check lot numbers carefully." + }, + damageReport: { + type: "string", + nullable: true, + description: "Documentation of any damage found during receiving", + example: "Minor packaging damage on 3 units, product integrity maintained." + }, + worldRef: { + type: "object", + description: "Reference to the world environment containing this inbound order", + properties: { + worldId: { + type: "string", + description: "World identifier for data isolation and security", + example: "507f1f77bcf86cd799439011" + } + } + }, + customFields: { + type: "object", + description: "Additional order-specific configuration and operational data", + additionalProperties: true, + example: { + priority: "HIGH", + specialHandling: "FRAGILE", + temperatureRequired: "AMBIENT", + qualityInspectionRequired: true, + crossDockCandidate: false, + carrierInstructions: "Call 30 minutes before arrival" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the inbound order record was created", + example: "2024-11-27T10:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the inbound order record was last updated", + example: "2024-11-27T15:30:00.000Z" + } + }, + required: [ + "_id", + "inboundOrderId", + "warehouseId", + "orderStatus", + "lines", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSReceivingTransaction: { + type: "object", + description: "Complete receiving transaction record for goods receipt with quality control, damage assessment, and putaway management", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + receivingId: { + type: "string", + description: "Unique identifier for the receiving transaction (auto-generated with wms_receiving-transaction prefix)", + example: "wms_receiving-transaction_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Warehouse facility where goods are received", + example: "wms_warehouse_674565c1234567890abcdef" + }, + 
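+      // Illustrative only: a receipt references one inbound order (and optionally a
+      // specific line) and then moves through the receivingStatus values defined below,
+      // e.g. RECEIVED -> QC_HOLD (if inspection is needed) -> PUTAWAY_PENDING -> COMPLETED,
+      // with REJECTED as the failure path.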
inboundOrderId: { + type: "string", + description: "Reference to the inbound order being received", + example: "wms_inbound-order_674565c1234567890abcdef" + }, + inboundLineId: { + type: "string", + nullable: true, + description: "Specific line item within the inbound order", + example: "line_001" + }, + productId: { + type: "string", + description: "Product identifier for the received goods", + example: "prod_12345" + }, + sku: { + type: "string", + nullable: true, + description: "Stock keeping unit code for operational reference", + example: "ABC-123-XL" + }, + productName: { + type: "string", + nullable: true, + description: "Human-readable product name for identification", + example: "Premium Wireless Headphones" + }, + licensePlateNumber: { + type: "string", + nullable: true, + description: "Container or pallet identifier for tracking", + example: "LP-20241201-001" + }, + lotNumber: { + type: "string", + nullable: true, + description: "Lot number for batch tracking and traceability", + example: "LOT-2024-Q4-001" + }, + receivedQuantity: { + type: "number", + description: "Quantity actually received and documented", + example: 50, + minimum: 0 + }, + uom: { + type: "string", + nullable: true, + description: "Unit of measure for received quantities (EA, CS, PLT, etc.)", + example: "EA" + }, + dockDoorId: { + type: "string", + nullable: true, + description: "Dock door identifier where goods were received", + example: "wms_dock-door_674565c1234567890abcdef" + }, + receivingStatus: { + type: "string", + enum: [ + "RECEIVED", + "QC_HOLD", + "PUTAWAY_PENDING", + "COMPLETED", + "REJECTED" + ], + description: "Current status of the receiving transaction workflow", + example: "RECEIVED" + }, + quality: { + type: "object", + nullable: true, + description: "Quality control and inspection information", + properties: { + status: { + type: "string", + enum: [ + "PASS", + "FAIL", + "PENDING" + ], + description: "Quality inspection result status", + example: "PENDING" + }, + inspectedBy: { + type: "string", + nullable: true, + description: "User identifier of the quality inspector", + example: "user_qc_inspector_001" + }, + notes: { + type: "string", + nullable: true, + description: "Quality inspection notes and observations", + example: "Visual inspection completed, minor packaging damage noted but product integrity maintained" + } + } + }, + damage: { + type: "object", + nullable: true, + description: "Damage assessment and documentation", + properties: { + hasDamage: { + type: "boolean", + description: "Flag indicating whether damage was observed", + example: false + }, + description: { + type: "string", + nullable: true, + description: "Detailed description of observed damage", + example: "Dented corner on 3 units, functionality unaffected" + }, + quantity: { + type: "number", + nullable: true, + description: "Number of damaged units", + example: 3, + minimum: 0 + }, + reportedBy: { + type: "string", + nullable: true, + description: "User who reported the damage", + example: "user_receiver_002" + } + } + }, + putaway: { + type: "object", + nullable: true, + description: "Putaway location assignment and management", + properties: { + assignedLocation: { + type: "string", + nullable: true, + description: "Designated storage location for received goods", + example: "A-01-01" + }, + assignedBy: { + type: "string", + nullable: true, + description: "User who assigned the putaway location", + example: "user_warehouse_manager_001" + }, + notes: { + type: "string", + nullable: true, + description: "Special 
putaway instructions and notes", + example: "Stack carefully - fragile items" + } + } + }, + items: { + type: "array", + items: { + type: "object", + description: "Individual item details within the receiving transaction", + properties: { + sku: { + type: "string", + description: "Stock keeping unit for this item", + example: "ABC-123-XL" + }, + productName: { + type: "string", + description: "Product name for this item", + example: "Premium Wireless Headphones" + }, + expectedQuantity: { + type: "number", + description: "Expected quantity to receive", + example: 50 + }, + receivedQuantity: { + type: "number", + description: "Actual quantity received", + example: 48 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure for this item", + example: "EA" + }, + lotNumber: { + type: "string", + nullable: true, + description: "Lot number for batch tracking", + example: "LOT-2024-Q4-001" + }, + serialNumbers: { + type: "array", + items: { + type: "string" + }, + description: "Serial numbers for individual item tracking", + example: [ + "SN001", + "SN002", + "SN003" + ] + }, + condition: { + type: "string", + nullable: true, + description: "Condition assessment of the item", + example: "GOOD" + }, + location: { + type: "object", + nullable: true, + description: "Storage location assignment for this item", + properties: { + binId: { + type: "string", + nullable: true, + description: "Specific bin location", + example: "BIN-A-01-01" + }, + zoneId: { + type: "string", + nullable: true, + description: "Zone identifier", + example: "ZONE-A" + } + } + } + } + }, + description: "Array of individual items within this receiving transaction" + }, + worldRef: { + type: "object", + description: "Reference to the world environment for data isolation", + properties: { + worldId: { + type: "string", + description: "World identifier for multi-tenant context", + example: "507f1f77bcf86cd799439011" + } + }, + required: [ + "worldId" + ] + }, + customFields: { + type: "object", + description: "Additional warehouse-specific receiving transaction data", + additionalProperties: true, + example: { + temperatureZone: "ambient", + vendorRefNumber: "VEN-REF-12345", + priority: "HIGH", + specialHandling: "FRAGILE" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the receiving transaction was created", + example: "2024-12-01T10:30:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the receiving transaction was last updated", + example: "2024-12-01T11:15:00.000Z" + }, + statusUpdatedAt: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp when the status was last updated", + example: "2024-12-01T11:15:00.000Z" + } + }, + required: [ + "_id", + "receivingId", + "warehouseId", + "inboundOrderId", + "productId", + "receivedQuantity", + "receivingStatus", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSInventoryTransaction: { + type: "object", + description: "Complete inventory transaction record for warehouse inventory movements, adjustments, and operations tracking", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + transactionId: { + type: "string", + description: "Unique identifier for the inventory transaction (auto-generated with wms_inventory-transaction prefix)", + example: "wms_inventory-transaction_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: 
"Warehouse facility where the transaction occurred", + example: "wms_warehouse_674565c1234567890abcdef" + }, + transactionType: { + type: "string", + enum: [ + "RECEIVE", + "PUTAWAY", + "PICK", + "MOVE", + "ADJUST", + "CYCLE_COUNT", + "RETURN", + "DAMAGE", + "SHIP" + ], + description: "Type of inventory transaction operation", + example: "PUTAWAY" + }, + productId: { + type: "string", + description: "Product identifier for the transaction", + example: "prod_12345" + }, + sku: { + type: "string", + nullable: true, + description: "Stock keeping unit code for operational reference", + example: "ABC-123-XL" + }, + fromBinId: { + type: "string", + nullable: true, + description: "Source bin identifier for movement transactions", + example: "BIN-RECV-001" + }, + toBinId: { + type: "string", + nullable: true, + description: "Destination bin identifier for movement transactions", + example: "BIN-A-01-01" + }, + lotNumber: { + type: "string", + nullable: true, + description: "Lot number for batch tracking and traceability", + example: "LOT-2024-Q4-001" + }, + licensePlateNumber: { + type: "string", + nullable: true, + description: "Container or pallet identifier for tracking", + example: "LP-20241201-001" + }, + quantity: { + type: "number", + description: "Transaction quantity (positive for additions, negative for reductions)", + example: 25 + }, + uom: { + type: "string", + nullable: true, + description: "Unit of measure for the transaction quantity", + example: "EA" + }, + referenceType: { + type: "string", + enum: [ + "PO", + "ORDER", + "TASK", + "CYCLE_COUNT" + ], + nullable: true, + description: "Type of reference document that triggered the transaction", + example: "ORDER" + }, + referenceId: { + type: "string", + nullable: true, + description: "Reference document identifier", + example: "wms_outbound-order_674565c1234567890abcdef" + }, + transactionDate: { + type: "string", + format: "date-time", + description: "Timestamp when the transaction occurred (defaults to creation time)", + example: "2024-12-01T14:30:00.000Z" + }, + userId: { + type: "string", + nullable: true, + description: "User who performed the transaction", + example: "user_warehouse_worker_001" + }, + userName: { + type: "string", + nullable: true, + description: "Human-readable name of the user who performed the transaction", + example: "John Smith" + }, + reasonCode: { + type: "string", + nullable: true, + description: "Reason code for adjustments or special transactions", + example: "CYCLE_COUNT_ADJUSTMENT" + }, + notes: { + type: "string", + nullable: true, + description: "Additional notes about the transaction", + example: "Putaway completed after quality inspection" + }, + worldRef: { + type: "object", + description: "Reference to the world environment for data isolation", + properties: { + worldId: { + type: "string", + description: "World identifier for multi-tenant context", + example: "507f1f77bcf86cd799439011" + } + }, + required: [ + "worldId" + ] + }, + customFields: { + type: "object", + description: "Additional warehouse-specific transaction data", + additionalProperties: true, + example: { + priority: "HIGH", + equipment: "Forklift-002", + temperatureZone: "ambient", + handlingInstructions: "Handle with care" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the transaction record was created", + example: "2024-12-01T14:30:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the transaction record was last updated", + 
example: "2024-12-01T14:30:00.000Z" + } + }, + required: [ + "_id", + "transactionId", + "warehouseId", + "transactionType", + "productId", + "quantity", + "transactionDate", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSOutboundOrder: { + type: "object", + description: "\n**Complete WMS Outbound Order Schema**\n\nComprehensive outbound order management with multi-line support, customer integration, and workflow tracking.\n\n**Key Features:**\n- Complex nested line item structure with allocation tracking\n- Customer and shipping address management\n- Priority-based order classification \n- Comprehensive timing workflow with automatic status updates\n- Warehouse-scoped order processing\n- Integrated carrier and tracking information\n\n**Field Consistency Verified:**\n- Primary identifier: `orderId` (consistent across model, controller, repository)\n- Business identifier: `orderNumber` (unique per world)\n- All repository methods align with controller parameter expectations\n\n**Status Workflow:**\nPENDING → RELEASED → ALLOCATED → PICKING → PICKED → PACKED → SHIPPED\n ", + properties: { + _id: { + type: "string", + description: "MongoDB auto-generated primary key", + example: "674565c1234567890abcdef0" + }, + orderId: { + type: "string", + description: "Business primary identifier (consistent naming verified across all components)", + example: "ORD-2024-001234" + }, + orderNumber: { + type: "string", + description: "Human-readable order number (unique per world, enforced by repository)", + example: "WO-20241201-001" + }, + worldRef: { + type: "object", + description: "Multi-tenant world reference for data isolation", + properties: { + worldId: { + type: "string", + description: "World scope identifier", + example: "550e8400-e29b-41d4-a716-446655440000" + }, + worldName: { + type: "string", + description: "Optional world display name", + example: "Production Environment" + } + }, + required: [ + "worldId" + ] + }, + warehouseId: { + type: "string", + description: "Required source warehouse identifier (validated in repository)", + example: "WH-MAIN-001" + }, + customerId: { + type: "string", + description: "Customer account identifier", + example: "CUST-ABC-123" + }, + customerName: { + type: "string", + description: "Customer display name", + example: "ABC Corporation" + }, + orderType: { + type: "string", + enum: [ + "STANDARD", + "EXPRESS", + "BULK", + "RETURNS" + ], + description: "Order classification affecting processing workflow", + example: "STANDARD" + }, + priority: { + type: "string", + enum: [ + "URGENT", + "HIGH", + "NORMAL", + "LOW" + ], + description: "Processing priority (affects sorting in ready-for-picking queries)", + example: "HIGH" + }, + orderStatus: { + type: "string", + enum: [ + "PENDING", + "RELEASED", + "ALLOCATED", + "PICKING", + "PICKED", + "PACKED", + "SHIPPED", + "CANCELLED" + ], + description: "Current order state in fulfillment workflow", + example: "ALLOCATED" + }, + orderDate: { + type: "string", + format: "date-time", + description: "Order creation timestamp", + example: "2024-12-01T09:00:00.000Z" + }, + requestedShipDate: { + type: "string", + format: "date-time", + description: "Customer delivery requirement (used for on-time metrics)", + example: "2024-12-03T17:00:00.000Z" + }, + lines: { + type: "array", + description: "Order line items (required, validated length > 0 in repository)", + items: { + type: "object", + description: "Individual line item with quantity tracking and allocation details", + properties: { + lineNumber: { + type: 
"number", + description: "Sequential line identifier within order (used for array updates)", + example: 1 + }, + itemId: { + type: "string", + description: "SKU/product code from inventory system", + example: "SKU-WIDGET-001" + }, + itemDescription: { + type: "string", + description: "Product display name", + example: "Premium Widget Assembly" + }, + orderedQuantity: { + type: "number", + description: "Customer requested amount", + example: 25 + }, + allocatedQuantity: { + type: "number", + description: "System allocated quantity (set during allocation process)", + example: 25 + }, + pickedQuantity: { + type: "number", + description: "Actually picked amount (updated during picking)", + example: 23 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure code", + enum: [ + "EA", + "CS", + "LB", + "KG", + "FT", + "M", + "GAL", + "L" + ], + example: "EA" + }, + unitPrice: { + type: "number", + description: "Price per unit (optional)", + example: 49.99 + }, + lineStatus: { + type: "string", + enum: [ + "PENDING", + "ALLOCATED", + "PICKING", + "PICKED", + "SHIPPED" + ], + description: "Individual line status (updated automatically during allocation and picking)", + example: "PICKED" + }, + allocations: { + type: "array", + description: "Optional bin-level allocation details", + items: { + type: "object", + properties: { + binId: { + type: "string", + description: "Source bin location identifier", + example: "BIN-A1-001" + }, + quantity: { + type: "number", + description: "Allocated quantity from this bin", + example: 15 + }, + lotNumber: { + type: "string", + description: "Optional lot/batch tracking", + example: "LOT-20241201-A" + } + }, + required: [ + "binId", + "quantity" + ] + } + }, + specialInstructions: { + type: "string", + description: "Line-specific handling notes", + example: "Handle with care - fragile items" + } + }, + required: [ + "lineNumber", + "itemId", + "itemDescription", + "orderedQuantity", + "unitOfMeasure", + "lineStatus" + ] + } + }, + shippingAddress: { + type: "object", + description: "Required delivery destination", + properties: { + street1: { + type: "string", + description: "Primary address line", + example: "123 Main Street" + }, + street2: { + type: "string", + description: "Secondary address line (optional)", + example: "Suite 456" + }, + city: { + type: "string", + description: "City name", + example: "Anytown" + }, + state: { + type: "string", + description: "State/province code", + example: "CA" + }, + zipCode: { + type: "string", + description: "Postal code", + example: "90210" + }, + country: { + type: "string", + description: "Country code", + example: "USA" + } + }, + required: [ + "street1", + "city", + "state", + "zipCode", + "country" + ] + }, + carrierInfo: { + type: "object", + description: "Optional shipping carrier details", + properties: { + carrierId: { + type: "string", + description: "Carrier account identifier", + example: "CARRIER-UPS" + }, + carrierName: { + type: "string", + description: "Carrier display name", + example: "UPS" + }, + serviceLevel: { + type: "string", + description: "Service type (GROUND, EXPRESS, etc.)", + example: "GROUND" + }, + trackingNumber: { + type: "string", + description: "Shipment tracking number", + example: "1Z999AA1234567890" + } + }, + required: [ + "carrierId", + "carrierName", + "serviceLevel" + ] + }, + specialInstructions: { + type: "string", + description: "Order-level handling and delivery notes", + example: "Deliver to loading dock, notify receiving department" + }, + timing: { + 
type: "object", + description: "Workflow timing tracking (automatically updated during status changes)", + properties: { + releasedAt: { + type: "string", + format: "date-time", + description: "Timestamp when order released to WMS (status: RELEASED)", + example: "2024-12-01T10:15:00.000Z" + }, + allocatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when inventory allocated (status: ALLOCATED)", + example: "2024-12-01T11:30:00.000Z" + }, + pickingStartedAt: { + type: "string", + format: "date-time", + description: "Timestamp when picking began (status: PICKING)", + example: "2024-12-01T13:00:00.000Z" + }, + pickedAt: { + type: "string", + format: "date-time", + description: "Timestamp when picking completed (status: PICKED)", + example: "2024-12-01T14:45:00.000Z" + }, + packedAt: { + type: "string", + format: "date-time", + description: "Timestamp when packing completed (status: PACKED)", + example: "2024-12-01T15:30:00.000Z" + }, + shippedAt: { + type: "string", + format: "date-time", + description: "Timestamp when shipment dispatched (status: SHIPPED)", + example: "2024-12-01T16:00:00.000Z" + } + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Document creation timestamp", + example: "2024-12-01T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last modification timestamp", + example: "2024-12-01T14:45:00.000Z" + } + }, + required: [ + "_id", + "orderId", + "orderNumber", + "worldRef", + "warehouseId", + "customerId", + "customerName", + "orderType", + "priority", + "orderStatus", + "orderDate", + "requestedShipDate", + "lines", + "shippingAddress", + "createdAt", + "updatedAt" + ] + }, + WMSOutboundShipment: { + type: "object", + description: "\n**Complete WMS Outbound Shipment Schema**\n\nComprehensive outbound shipment management with multi-carrier support, tracking integration, and logistics workflow.\n\n**Key Features:**\n- Multi-line shipment support with order references and line-level details\n- Carrier integration with SCAC codes, modes, and service levels\n- Comprehensive address management for origin and destination\n- Status workflow tracking from creation to delivery\n- Event-driven tracking with timestamps and location data\n- Document management for shipping documentation\n- Performance analytics and metrics tracking\n\n**Field Consistency Verified:**\n- Primary identifier: `shipmentId` (consistent across model, controller, repository)\n- Status field: `shipmentStatus` (enum-driven workflow)\n- All repository methods align with controller parameter expectations\n\n**🚨 CRITICAL BUGS DOCUMENTED:**\n1. Route parameter missing in ready-to-ship endpoint\n2. 
Field mapping issue in warehouse filtering (status vs shipmentStatus)\n\n**Status Workflow:**\nCREATED → MANIFESTED → LOADING → LOADED → SHIPPED → IN_TRANSIT → DELIVERED\n ", + properties: { + _id: { + type: "string", + description: "MongoDB auto-generated primary key", + example: "674565c1234567890abcdef0" + }, + shipmentId: { + type: "string", + description: "Business shipment identifier (consistent naming verified across all components)", + example: "SHIP-2024-001234" + }, + worldRef: { + type: "object", + description: "Multi-tenant world reference for data isolation", + properties: { + worldId: { + type: "string", + description: "World scope identifier", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + required: [ + "worldId" + ] + }, + warehouseId: { + type: "string", + description: "Required source warehouse identifier (validated in repository)", + example: "WH-MAIN-001" + }, + carrier: { + type: "object", + description: "Carrier information for transportation", + properties: { + name: { + type: "string", + description: "Carrier company name (used in repository queries as carrier.name)", + example: "UPS" + }, + scac: { + type: "string", + description: "Standard Carrier Alpha Code", + example: "UPSN" + }, + mode: { + type: "string", + enum: [ + "PARCEL", + "LTL", + "TL" + ], + description: "Transportation mode", + example: "PARCEL" + } + } + }, + serviceLevel: { + type: "string", + description: "Carrier service level (GROUND, EXPRESS, etc.)", + example: "GROUND" + }, + trackingNumber: { + type: "string", + description: "Carrier tracking number for customer visibility", + example: "1Z999AA1234567890" + }, + trailerNumber: { + type: "string", + description: "Trailer identifier for LTL/TL shipments", + example: "TRL-001" + }, + dockDoorId: { + type: "string", + description: "Assigned dock door for loading operations", + example: "DOCK-A-001" + }, + shipmentStatus: { + type: "string", + enum: [ + "CREATED", + "MANIFESTED", + "LOADING", + "LOADED", + "SHIPPED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION" + ], + description: "Current shipment state in logistics workflow", + example: "SHIPPED" + }, + orders: { + type: "array", + description: "Orders consolidated in this shipment", + items: { + type: "object", + properties: { + orderId: { + type: "string", + description: "Source order identifier", + example: "ORD-2024-001234" + } + }, + required: [ + "orderId" + ] + } + }, + totals: { + type: "object", + description: "Shipment totals and logistics metrics", + properties: { + packages: { + type: "number", + description: "Total package count", + example: 5 + }, + pallets: { + type: "number", + description: "Total pallet count", + example: 2 + }, + weight: { + type: "number", + description: "Total shipment weight", + example: 150.5 + }, + cube: { + type: "number", + description: "Total cubic volume", + example: 12.3 + }, + value: { + type: "number", + description: "Total shipment value", + example: 1249.99 + } + } + }, + dates: { + type: "object", + description: "Logistics timeline and delivery requirements", + properties: { + shipDate: { + type: "string", + format: "date-time", + description: "Planned ship date", + example: "2024-12-02T09:00:00.000Z" + }, + manifestDate: { + type: "string", + format: "date-time", + description: "Carrier manifest date (set on MANIFESTED status)", + example: "2024-12-02T08:30:00.000Z" + }, + loadStartTime: { + type: "string", + format: "date-time", + description: "Loading start timestamp", + example: "2024-12-02T10:00:00.000Z" + }, + loadEndTime: { + 
type: "string", + format: "date-time", + description: "Loading completion timestamp", + example: "2024-12-02T11:30:00.000Z" + }, + actualShipTime: { + type: "string", + format: "date-time", + description: "Actual dispatch timestamp (set on SHIPPED status)", + example: "2024-12-02T12:00:00.000Z" + }, + estimatedDeliveryDate: { + type: "string", + format: "date-time", + description: "Carrier estimated delivery date", + example: "2024-12-04T17:00:00.000Z" + }, + actualDeliveryDate: { + type: "string", + format: "date-time", + description: "Actual delivery timestamp (set on DELIVERED status)", + example: "2024-12-04T15:45:00.000Z" + } + } + }, + lines: { + type: "array", + description: "Shipment line items with order references (required, validated length > 0 in repository)", + items: { + type: "object", + description: "Individual line item with order and product details", + properties: { + lineNumber: { + type: "number", + description: "Sequential line identifier within shipment", + example: 1 + }, + orderId: { + type: "string", + description: "Source order identifier", + example: "ORD-2024-001234" + }, + orderLineId: { + type: "string", + description: "Source order line identifier", + example: "LINE-001" + }, + sku: { + type: "string", + description: "Product SKU/item code", + example: "SKU-WIDGET-001" + }, + productName: { + type: "string", + description: "Product display name", + example: "Premium Widget Assembly" + }, + quantityShipped: { + type: "number", + description: "Quantity included in this shipment", + example: 25 + }, + quantityOrdered: { + type: "number", + description: "Originally ordered quantity", + example: 25 + }, + unitOfMeasure: { + type: "string", + description: "Unit of measure code", + example: "EA" + }, + lotNumber: { + type: "string", + description: "Lot/batch number for traceability", + example: "LOT-20241201-A" + }, + serialNumbers: { + type: "array", + items: { + type: "string" + }, + description: "Serial numbers for serialized items", + example: [ + "SN123456", + "SN123457" + ] + }, + palletId: { + type: "string", + description: "Pallet identifier for logistics", + example: "PLT-001" + }, + packageCount: { + type: "number", + description: "Number of packages for this line", + example: 2 + }, + weight: { + type: "number", + description: "Line weight contribution", + example: 15.5 + }, + customFields: { + type: "object", + description: "Line-specific custom attributes", + additionalProperties: true + } + }, + required: [ + "lineNumber", + "sku", + "quantityShipped" + ] + } + }, + fromAddress: { + type: "object", + description: "Origin address (typically warehouse)", + properties: { + street1: { + type: "string", + description: "Street address", + example: "100 Warehouse Blvd" + }, + city: { + type: "string", + description: "City name", + example: "Atlanta" + }, + state: { + type: "string", + description: "State/province", + example: "GA" + }, + postalCode: { + type: "string", + description: "Postal code", + example: "30309" + }, + country: { + type: "string", + description: "Country code", + example: "USA" + } + } + }, + toAddress: { + type: "object", + description: "Required destination address (validated in repository)", + properties: { + street1: { + type: "string", + description: "Street address", + example: "123 Customer Ave" + }, + city: { + type: "string", + description: "City name", + example: "Miami" + }, + state: { + type: "string", + description: "State/province", + example: "FL" + }, + postalCode: { + type: "string", + description: "Postal code", + 
example: "33101" + }, + country: { + type: "string", + description: "Country code", + example: "USA" + } + }, + required: [ + "street1", + "city", + "state", + "postalCode" + ] + }, + documents: { + type: "array", + description: "Shipment documentation and attachments", + items: { + type: "object", + properties: { + type: { + type: "string", + description: "Document type identifier", + example: "BOL" + }, + url: { + type: "string", + description: "Document URL or file path", + example: "https://docs.example.com/bol123.pdf" + }, + documentType: { + type: "string", + description: "File format", + example: "PDF" + } + }, + required: [ + "type", + "url" + ] + } + }, + events: { + type: "array", + description: "Tracking event history (updated via addTrackingEvent)", + items: { + type: "object", + properties: { + timestamp: { + type: "string", + format: "date-time", + description: "Event timestamp", + example: "2024-12-01T18:30:00.000Z" + }, + location: { + type: "string", + description: "Event location", + example: "Atlanta, GA" + }, + status: { + type: "string", + description: "Status at time of event", + example: "IN_TRANSIT" + }, + note: { + type: "string", + description: "Event description", + example: "Package departed Atlanta facility" + }, + source: { + type: "string", + description: "Event source system", + example: "CARRIER_API" + } + } + } + }, + customFields: { + type: "object", + description: "Additional shipment-specific attributes", + additionalProperties: true, + example: { + expedited: true, + specialHandling: "FRAGILE", + customerReference: "PO-12345" + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Document creation timestamp", + example: "2024-12-01T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Last modification timestamp", + example: "2024-12-01T16:00:00.000Z" + } + }, + required: [ + "_id", + "shipmentId", + "worldRef", + "warehouseId", + "shipmentStatus", + "lines", + "toAddress", + "createdAt", + "updatedAt" + ] + }, + WMSCycleCount: { + type: "object", + description: "Complete WMS cycle count record for inventory accuracy verification and variance analysis", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + description: "Formatted document identifier for API responses", + example: "507f1f77bcf86cd799439011" + }, + cycleCountId: { + type: "string", + description: "Unique cycle count identifier", + example: "CC_ATL_2024_001" + }, + warehouseId: { + type: "string", + description: "Target warehouse identifier for count execution", + example: "WH_ATL_001" + }, + countType: { + type: "string", + enum: [ + "DAILY", + "WEEKLY", + "MONTHLY", + "ABC", + "FULL", + "SPOT", + "BLIND" + ], + description: "Type of cycle count for methodology determination", + example: "ABC" + }, + countStatus: { + type: "string", + enum: [ + "SCHEDULED", + "IN_PROGRESS", + "COMPLETED", + "APPROVED", + "REJECTED", + "CANCELLED" + ], + description: "Current operational status of the cycle count", + example: "IN_PROGRESS" + }, + schedule: { + type: "object", + description: "Scheduling information and execution timeline", + properties: { + scheduledDate: { + type: "string", + format: "date-time", + description: "Date and time when count is scheduled to begin", + example: "2024-01-25T08:00:00.000Z" + }, + startDate: { + type: "string", + format: "date-time", + description: "Actual start date/time when count execution 
began", + example: "2024-01-25T08:15:00.000Z" + }, + completedDate: { + type: "string", + format: "date-time", + description: "Date/time when count execution was completed", + example: "2024-01-25T16:30:00.000Z" + } + } + }, + scope: { + type: "object", + description: "Count scope definition and targeting criteria", + properties: { + zoneId: { + type: "string", + description: "Specific zone identifier for targeted counting", + example: "ZONE_PICK_A" + }, + binIds: { + type: "array", + items: { + type: "string" + }, + description: "Specific bins included in count scope", + example: [ + "BIN_ATL_A01_001", + "BIN_ATL_A01_002", + "BIN_ATL_A01_003" + ] + }, + productIds: { + type: "array", + items: { + type: "string" + }, + description: "Specific products targeted for counting across warehouse", + example: [ + "PROD_12345", + "PROD_67890" + ] + }, + abcClassification: { + type: "string", + enum: [ + "A", + "B", + "C" + ], + description: "ABC classification for velocity-based counting", + example: "A" + } + } + }, + assignments: { + type: "array", + items: { + type: "object", + properties: { + userId: { + type: "string", + description: "User identifier for count assignment", + example: "USER_001" + }, + userName: { + type: "string", + description: "User display name for assignment tracking", + example: "John Smith" + }, + assignedBins: { + type: "array", + items: { + type: "string" + }, + description: "Bins assigned to this user for counting", + example: [ + "BIN_ATL_A01_001", + "BIN_ATL_A01_002" + ] + }, + status: { + type: "string", + enum: [ + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED" + ], + description: "Status of individual user assignment", + example: "IN_PROGRESS" + } + } + }, + description: "User assignments and workload distribution" + }, + counts: { + type: "array", + items: { + type: "object", + properties: { + binId: { + type: "string", + description: "Bin identifier where count was performed", + example: "BIN_ATL_A01_001" + }, + productId: { + type: "string", + description: "Product identifier for counted item", + example: "PROD_12345" + }, + sku: { + type: "string", + description: "SKU identifier for product", + example: "SKU-WIDGET-001" + }, + lotNumber: { + type: "string", + description: "Lot number for batch tracking (if applicable)", + example: "LOT-2024-001" + }, + systemQuantity: { + type: "number", + description: "Expected quantity from system records", + example: 150 + }, + countedQuantity: { + type: "number", + description: "Actual counted quantity", + example: 148 + }, + variance: { + type: "number", + description: "Calculated variance (countedQuantity - systemQuantity)", + example: -2 + }, + variancePercent: { + type: "number", + description: "Variance as percentage of system quantity", + example: -1.33 + }, + countedBy: { + type: "string", + description: "User who performed the count", + example: "USER_001" + }, + countedAt: { + type: "string", + format: "date-time", + description: "Timestamp when count was performed", + example: "2024-01-25T14:30:00.000Z" + }, + reconciledBy: { + type: "string", + description: "User who reconciled variance (if applicable)", + example: "SUPERVISOR_001" + }, + reconciledAt: { + type: "string", + format: "date-time", + description: "Timestamp when variance was reconciled", + example: "2024-01-25T16:15:00.000Z" + }, + notes: { + type: "string", + description: "Notes about count or variance explanation", + example: "Found damaged units, excluded from count" + } + } + }, + description: "Individual count results and variance details" + }, + 
summary: { + type: "object", + description: "Count summary and accuracy metrics", + properties: { + totalBins: { + type: "integer", + description: "Total bins included in count", + example: 25 + }, + totalProducts: { + type: "integer", + description: "Total products counted", + example: 47 + }, + itemsMatched: { + type: "integer", + description: "Number of items with exact quantity match", + example: 42 + }, + itemsVariance: { + type: "integer", + description: "Number of items with quantity variance", + example: 5 + }, + accuracyPercent: { + type: "number", + description: "Overall count accuracy percentage", + example: 89.36 + }, + totalVarianceValue: { + type: "number", + description: "Total monetary value of variances", + example: 127.85 + } + } + }, + approvedBy: { + type: "string", + description: "User who approved the count results", + example: "MANAGER_001" + }, + approvedAt: { + type: "string", + format: "date-time", + description: "Timestamp when count was approved", + example: "2024-01-25T17:00:00.000Z" + }, + notes: { + type: "string", + description: "General notes about the count", + example: "Focus on high-value items in A classification. Some damaged inventory excluded." + }, + worldRef: { + type: "object", + description: "World reference information for multi-tenant context", + properties: { + worldId: { + type: "string", + description: "World identifier", + example: "507f1f77bcf86cd799439011" + } + } + }, + customFields: { + type: "object", + description: "Additional warehouse-specific count attributes and metadata", + additionalProperties: true, + example: { + priority: "HIGH", + countReason: "Quarterly ABC Analysis", + requiresApproval: true, + auditRequired: false + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the cycle count record was created", + example: "2024-01-24T16:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the cycle count record was last updated", + example: "2024-01-25T17:00:00.000Z" + } + }, + required: [ + "_id", + "cycleCountId", + "warehouseId", + "countType", + "countStatus", + "schedule", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSDailyMetrics: { + type: "object", + description: "Complete WMS daily metrics record for comprehensive warehouse performance tracking and operational analysis", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + description: "Formatted document identifier for API responses", + example: "507f1f77bcf86cd799439011" + }, + metricId: { + type: "string", + description: "Unique daily metrics identifier", + example: "wms_daily-metrics_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Warehouse identifier for metrics recording", + example: "WH_ATL_001" + }, + date: { + type: "string", + format: "date", + description: "Date for metrics recording", + example: "2024-11-27" + }, + shift: { + type: "string", + description: "Shift identifier for shift-based metrics (optional)", + example: "DAY_SHIFT_1" + }, + zoneId: { + type: "string", + description: "Zone identifier for zone-based metrics (optional)", + example: "ZONE_PICK_A" + }, + inbound: { + type: "object", + description: "Inbound receiving operation metrics", + properties: { + poReceived: { + type: "number", + description: "Number of purchase orders received", + example: 45 + }, + linesReceived: { + type: "number", + description: 
"Number of purchase order lines received", + example: 320 + }, + unitsReceived: { + type: "number", + description: "Total units received", + example: 2450 + }, + palletsReceived: { + type: "number", + description: "Number of pallets received", + example: 28 + }, + receivingHours: { + type: "number", + description: "Total receiving labor hours", + example: 32.5 + }, + unitsPerHour: { + type: "number", + description: "Receiving productivity - units processed per hour", + example: 75.4 + } + } + }, + putaway: { + type: "object", + description: "Putaway operation metrics", + properties: { + putawayTasks: { + type: "number", + description: "Number of putaway tasks completed", + example: 28 + }, + palletsPutaway: { + type: "number", + description: "Number of pallets put away", + example: 26 + }, + putawayHours: { + type: "number", + description: "Total putaway labor hours", + example: 18.5 + }, + palletsPerHour: { + type: "number", + description: "Putaway productivity - pallets per hour", + example: 1.4 + } + } + }, + picking: { + type: "object", + description: "Picking operation metrics", + properties: { + ordersShipped: { + type: "number", + description: "Number of orders shipped", + example: 125 + }, + linesPicked: { + type: "number", + description: "Number of order lines picked", + example: 890 + }, + unitsPicked: { + type: "number", + description: "Total units picked", + example: 2240 + }, + pickingHours: { + type: "number", + description: "Total picking labor hours", + example: 45.5 + }, + linesPerHour: { + type: "number", + description: "Picking productivity - lines per hour", + example: 19.6 + }, + unitsPerHour: { + type: "number", + description: "Picking productivity - units per hour", + example: 49.2 + }, + pickAccuracy: { + type: "number", + description: "Pick accuracy percentage", + example: 99.2 + } + } + }, + packing: { + type: "object", + description: "Packing operation metrics", + properties: { + ordersPacked: { + type: "number", + description: "Number of orders packed", + example: 120 + }, + packagesPacked: { + type: "number", + description: "Number of packages packed", + example: 98 + }, + packingHours: { + type: "number", + description: "Total packing labor hours", + example: 24 + }, + ordersPerHour: { + type: "number", + description: "Packing productivity - orders per hour", + example: 5 + } + } + }, + shipping: { + type: "object", + description: "Shipping operation metrics", + properties: { + shipmentsCreated: { + type: "number", + description: "Number of shipments created", + example: 85 + }, + carriersDispatched: { + type: "number", + description: "Number of carriers dispatched", + example: 12 + }, + packagesShipped: { + type: "number", + description: "Total packages shipped", + example: 96 + } + } + }, + labor: { + type: "object", + description: "Labor and workforce metrics", + properties: { + totalWorkers: { + type: "number", + description: "Total workers on shift", + example: 24 + }, + totalHours: { + type: "number", + description: "Total labor hours worked", + example: 192 + }, + productiveHours: { + type: "number", + description: "Direct productive labor hours", + example: 165.5 + }, + indirectHours: { + type: "number", + description: "Indirect labor hours (breaks, meetings, training)", + example: 26.5 + }, + utilizationPercent: { + type: "number", + description: "Labor utilization percentage", + example: 86.2 + } + } + }, + inventory: { + type: "object", + description: "Inventory management metrics", + properties: { + onHandUnits: { + type: "number", + description: 
"Total units on hand", + example: 45890 + }, + inventoryValue: { + type: "number", + description: "Total inventory value in dollars", + example: 2456780.5 + }, + turnoverRate: { + type: "number", + description: "Inventory turnover rate", + example: 8.4 + } + } + }, + quality: { + type: "object", + description: "Quality and error tracking metrics", + properties: { + pickErrors: { + type: "number", + description: "Number of pick errors recorded", + example: 7 + }, + packErrors: { + type: "number", + description: "Number of pack errors recorded", + example: 3 + }, + damageReports: { + type: "number", + description: "Number of damage reports filed", + example: 2 + }, + returnsProcessed: { + type: "number", + description: "Number of returns processed", + example: 15 + } + } + }, + worldRef: { + type: "object", + description: "World reference information for multi-tenant context", + properties: { + worldId: { + type: "string", + description: "World identifier", + example: "507f1f77bcf86cd799439011" + } + } + }, + customFields: { + type: "object", + description: "Additional warehouse-specific metrics and operational data", + additionalProperties: true, + example: { + temperatureControlledZones: 4, + hazMatHandling: true, + specialEquipmentUsage: 12.5, + sustainabilityMetrics: { + energyUsageKwh: 2450.5, + wasteReductionPercent: 15.2 + } + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the daily metrics record was created", + example: "2024-11-27T18:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the daily metrics record was last updated", + example: "2024-11-27T20:30:00.000Z" + } + }, + required: [ + "_id", + "metricId", + "warehouseId", + "date", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSDistributionCenter: { + type: "object", + description: "Complete WMS distribution center record for comprehensive facility management and operational coordination", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + id: { + type: "string", + description: "Formatted document identifier for API responses", + example: "507f1f77bcf86cd799439011" + }, + dcId: { + type: "string", + description: "Unique distribution center identifier", + example: "wms_distribution-center_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Parent warehouse identifier", + example: "WH_ATL_001" + }, + dcName: { + type: "string", + description: "Distribution center name", + example: "Atlanta Fulfillment Center East" + }, + dcType: { + type: "string", + enum: [ + "FULFILLMENT", + "CROSS_DOCK", + "COLD_STORAGE", + "3PL" + ], + description: "Type of distribution center operation", + example: "FULFILLMENT" + }, + address: { + type: "object", + description: "Physical address of the distribution center", + properties: { + street: { + type: "string", + description: "Street address", + example: "1234 Industrial Blvd" + }, + city: { + type: "string", + description: "City name", + example: "Atlanta" + }, + state: { + type: "string", + description: "State or province code", + example: "GA" + }, + zipCode: { + type: "string", + description: "Postal/ZIP code", + example: "30309" + }, + country: { + type: "string", + description: "Country code", + example: "US" + } + } + }, + timezone: { + type: "string", + description: "Timezone for facility operations", + example: "America/New_York" + }, + totalSqFootage: { + type: "number", 
+ description: "Total square footage of the facility", + example: 250000 + }, + operationalStatus: { + type: "string", + enum: [ + "ACTIVE", + "INACTIVE", + "MAINTENANCE" + ], + description: "Current operational status of the facility", + example: "ACTIVE" + }, + operatingHours: { + type: "object", + description: "Weekly operating schedule for the facility", + properties: { + monday: { + type: "object", + description: "Monday operating hours", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "06:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "22:00" + } + } + }, + tuesday: { + type: "object", + description: "Tuesday operating hours", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + wednesday: { + type: "object", + description: "Wednesday operating hours", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + thursday: { + type: "object", + description: "Thursday operating hours", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + friday: { + type: "object", + description: "Friday operating hours", + properties: { + open: { + type: "string", + example: "06:00" + }, + close: { + type: "string", + example: "22:00" + } + } + }, + saturday: { + type: "object", + description: "Saturday operating hours", + properties: { + open: { + type: "string", + example: "08:00" + }, + close: { + type: "string", + example: "18:00" + } + } + }, + sunday: { + type: "object", + description: "Sunday operating hours", + properties: { + open: { + type: "string", + example: "10:00" + }, + close: { + type: "string", + example: "16:00" + } + } + } + } + }, + contactInfo: { + type: "object", + description: "Contact information for the facility", + properties: { + phone: { + type: "string", + description: "Primary phone number", + example: "+1-404-555-0123" + }, + email: { + type: "string", + description: "Primary email address", + example: "ops@atlanta-east.company.com" + }, + manager: { + type: "string", + description: "Facility manager name", + example: "John Smith" + } + } + }, + worldRef: { + type: "object", + description: "World reference information for multi-tenant context", + properties: { + worldId: { + type: "string", + description: "World identifier", + example: "507f1f77bcf86cd799439011" + } + } + }, + customFields: { + type: "object", + description: "Additional facility-specific configuration and operational data", + additionalProperties: true, + example: { + hazmatCertified: true, + securityLevel: "HIGH", + temperatureControlZones: 4, + dockDoors: 24, + certification: [ + "ISO9001", + "SOC2" + ], + sustainabilityMetrics: { + energyEfficiencyRating: "A+", + solarPowerCapacity: "500kW", + wasteRecyclingRate: 95.5 + } + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the distribution center record was created", + example: "2024-11-27T10:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the distribution center record was last updated", + example: "2024-11-27T15:30:00.000Z" + } + }, + required: [ + "_id", + "dcId", + "warehouseId", + "dcName", + "operationalStatus", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSDockDoor: { + type: "object", + description: "Warehouse 
dock door for trailer loading/unloading operations and appointment scheduling", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + dockDoorId: { + type: "string", + description: "Unique identifier for the dock door, auto-generated using WMS service prefix", + example: "wms_dock-door_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Warehouse identifier where dock door is located", + example: "wms_warehouse_674565c1234567890abcdef" + }, + doorNumber: { + type: "string", + description: "Physical door number or identifier for operational reference", + example: "DOCK-01" + }, + doorType: { + type: "string", + enum: [ + "INBOUND", + "OUTBOUND", + "CROSS_DOCK" + ], + description: "Operational type of dock door determining traffic flow and usage", + example: "INBOUND" + }, + status: { + type: "string", + enum: [ + "AVAILABLE", + "OCCUPIED", + "MAINTENANCE", + "CLOSED" + ], + description: "Current operational status affecting appointment scheduling availability", + example: "AVAILABLE" + }, + zoneId: { + type: "string", + nullable: true, + description: "Zone identifier for dock door location within warehouse facility", + example: "wms_zone_674565c1234567890abcdef" + }, + capabilities: { + type: "object", + description: "Physical capabilities and specifications for trailer compatibility", + properties: { + maxTrailerLength: { + type: "number", + nullable: true, + description: "Maximum supported trailer length in feet", + example: 53 + }, + maxTrailerHeight: { + type: "number", + nullable: true, + description: "Maximum supported trailer height in feet", + example: 13.5 + }, + levelingDock: { + type: "boolean", + description: "Whether dock has leveling capability for trailer height adjustment", + example: true + }, + hydraulicLeveler: { + type: "boolean", + description: "Hydraulic leveling system availability for automated adjustments", + example: true + }, + restraintSystem: { + type: "boolean", + description: "Trailer restraint system for safety during operations", + example: true + }, + weatherSeal: { + type: "boolean", + description: "Weather sealing capability for environmental protection", + example: true + } + } + }, + equipment: { + type: "object", + description: "Available equipment and systems supporting dock operations", + properties: { + forkliftAccess: { + type: "boolean", + description: "Forklift accessibility for material handling operations", + example: true + }, + conveyorSystem: { + type: "boolean", + description: "Conveyor system availability for automated material movement", + example: false + }, + scales: { + type: "boolean", + description: "Weighing scales availability for freight verification", + example: true + }, + lightSystem: { + type: "boolean", + description: "Lighting system for operational visibility and safety", + example: true + } + } + }, + currentAppointment: { + type: "object", + nullable: true, + description: "Current active appointment assigned to dock door", + properties: { + appointmentId: { + type: "string", + description: "Unique appointment identifier from scheduling system", + example: "tms_appointment_674565c1234567890abcdef" + }, + carrier: { + type: "string", + description: "Carrier company name responsible for the appointment", + example: "Swift Transportation" + }, + trailerNumber: { + type: "string", + description: "Trailer identification number for operational tracking", + example: "TRL-98765" + }, + startTime: { + type: "string", + 
format: "date-time", + description: "Scheduled appointment start time", + example: "2024-11-27T09:00:00Z" + }, + expectedEndTime: { + type: "string", + format: "date-time", + description: "Expected completion time for resource planning", + example: "2024-11-27T13:00:00Z" + } + } + }, + currentTrailer: { + type: "object", + nullable: true, + description: "Current trailer positioned at dock door for operations", + properties: { + trailerId: { + type: "string", + description: "Unique trailer identifier from TMS system", + example: "tms_inbound-trailer_674565c1234567890abcdef" + }, + trailerNumber: { + type: "string", + description: "Physical trailer identification number", + example: "TRL-98765" + }, + sealNumbers: { + type: "array", + items: { + type: "string" + }, + description: "Security seal numbers for cargo verification", + example: [ + "SEAL-12345", + "SEAL-67890" + ] + }, + arrivalTime: { + type: "string", + format: "date-time", + description: "Actual trailer arrival time at dock", + example: "2024-11-27T08:45:00Z" + } + } + }, + operatingHours: { + type: "object", + description: "Daily operating hours schedule for appointment planning", + properties: { + monday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "06:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "22:00" + } + } + }, + tuesday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "06:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "22:00" + } + } + }, + wednesday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "06:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "22:00" + } + } + }, + thursday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "06:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "22:00" + } + } + }, + friday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "06:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "22:00" + } + } + }, + saturday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "08:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "18:00" + } + } + }, + sunday: { + type: "object", + properties: { + open: { + type: "string", + description: "Opening time in HH:MM format", + example: "10:00" + }, + close: { + type: "string", + description: "Closing time in HH:MM format", + example: "16:00" + } + } + } + } + }, + safety: { + type: "object", + description: "Safety equipment and inspection schedules for compliance management", + properties: { + emergencyStop: { + type: "boolean", + description: "Emergency stop system availability", + example: true + }, + safetyLights: { + type: "boolean", + description: "Safety lighting system operational status", + example: true + }, + lastSafetyInspection: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp of last completed safety inspection", + example: "2024-11-20T10:00:00Z" + }, + nextSafetyInspection: { + type: 
"string", + format: "date-time", + nullable: true, + description: "Scheduled next safety inspection timestamp", + example: "2024-12-20T10:00:00Z" + } + } + }, + maintenance: { + type: "object", + description: "Maintenance schedules and documentation for operational continuity", + properties: { + lastMaintenance: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp of last completed maintenance", + example: "2024-11-15T14:30:00Z" + }, + nextMaintenance: { + type: "string", + format: "date-time", + nullable: true, + description: "Scheduled next maintenance timestamp", + example: "2024-12-15T14:30:00Z" + }, + maintenanceNotes: { + type: "string", + nullable: true, + description: "Notes from last maintenance activity", + example: "Hydraulic system serviced, leveling dock calibrated" + } + } + }, + worldRef: { + type: "object", + description: "Reference to the world environment containing this dock door", + properties: { + worldId: { + type: "string", + description: "World identifier", + example: "507f1f77bcf86cd799439011" + } + } + }, + customFields: { + type: "object", + description: "Additional facility-specific configuration and operational data", + additionalProperties: true, + example: { + doorCode: "DOCK-INBOUND-01", + operatorCertificationRequired: true, + maxWeight: 80000, + priorityLevel: "HIGH", + associatedWarehouseZones: [ + "RECEIVING", + "STAGING" + ] + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the dock door record was created", + example: "2024-11-27T10:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the dock door record was last updated", + example: "2024-11-27T15:30:00.000Z" + } + }, + required: [ + "_id", + "dockDoorId", + "warehouseId", + "doorNumber", + "doorType", + "status", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSReplenishment: { + type: "object", + description: "\n**WMS Inventory Replenishment Management**\n\nComplete replenishment operation tracking from suggestion through completion, managing bin-to-bin inventory movement with approval workflows.\n\n**⚠️ CRITICAL IMPLEMENTATION BUGS:**\n\n> **BUG #5**: Repository approval method sets `approvedQuantity` field but model expects `quantity.approved` structure\n> \n> **BUG #6**: Metrics aggregation references `suggestedQuantity` field but model stores `quantity.suggested`\n>\n> **Impact**: Approval functionality and metrics will fail or return incorrect data\n> **Required Fixes**: \n> - Update `approveReplenishment` method to set `quantity.approved` not `approvedQuantity`\n> - Update metrics aggregation to use `\"$quantity.suggested\"` not `\"$suggestedQuantity\"`\n\n**Business Process Flow:**\n1. **SUGGESTED** - Initial replenishment request created\n2. **APPROVED** - Management approval with quantity validation \n3. **TASK_CREATED** - Work order generated for execution\n4. **IN_PROGRESS** - Active execution by warehouse staff\n5. **COMPLETED** - Successfully finished with actual quantities\n6. 
**CANCELLED** - Process terminated with reason\n\n**Key Features:**\n- Complex bin-to-bin movement tracking\n- Multi-level quantity management (suggested/approved/actual)\n- Priority-based processing with workflow integration\n- Comprehensive audit trail with approval/cancellation metadata\n ", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + replenishmentId: { + type: "string", + description: "Unique business identifier for the replenishment request, auto-generated using WMS service prefix", + example: "wms_replenishment_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Source warehouse identifier where replenishment occurs", + example: "WH001" + }, + productId: { + type: "string", + description: "Product identifier being replenished", + example: "PROD-12345" + }, + sku: { + type: "string", + description: "Product SKU for operational reference and identification", + example: "ABC-XYZ-001" + }, + fromBin: { + type: "object", + description: "Source bin details with current availability information", + properties: { + binId: { + type: "string", + description: "Source bin identifier", + example: "BIN-A001" + }, + binCode: { + type: "string", + description: "Human-readable bin location code", + example: "A-001" + }, + availableQuantity: { + type: "number", + description: "Currently available quantity in source bin", + example: 500 + }, + currentQuantity: { + type: "number", + description: "Total quantity currently in source bin", + example: 1000 + } + }, + required: [ + "binId" + ] + }, + toBin: { + type: "object", + description: "Destination bin details with capacity constraints", + properties: { + binId: { + type: "string", + description: "Destination bin identifier", + example: "BIN-P001" + }, + binCode: { + type: "string", + description: "Human-readable bin location code", + example: "P-001" + }, + currentQuantity: { + type: "number", + description: "Current quantity in destination bin", + example: 50 + }, + minQuantity: { + type: "number", + description: "Minimum quantity threshold triggering replenishment", + example: 100 + }, + maxQuantity: { + type: "number", + description: "Maximum capacity of destination bin", + example: 200 + } + }, + required: [ + "binId" + ] + }, + quantity: { + type: "object", + description: "Multi-level quantity tracking throughout replenishment lifecycle", + properties: { + suggested: { + type: "number", + description: "Initially suggested quantity for replenishment", + example: 150 + }, + approved: { + type: "number", + nullable: true, + description: "Management-approved quantity (may differ from suggested)", + example: 120 + }, + actual: { + type: "number", + nullable: true, + description: "Actual quantity moved during execution", + example: 118 + }, + uom: { + type: "string", + description: "Unit of measure for all quantities", + example: "EA" + } + }, + required: [ + "suggested", + "uom" + ] + }, + replenishmentType: { + type: "string", + enum: [ + "MIN_MAX", + "DEMAND", + "CYCLE", + "MANUAL" + ], + description: "Type of replenishment strategy triggering this request", + example: "MIN_MAX" + }, + priority: { + type: "number", + description: "Priority level for processing order (1-10, 10 being highest priority)", + example: 5 + }, + status: { + type: "string", + enum: [ + "SUGGESTED", + "APPROVED", + "TASK_CREATED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED" + ], + description: "Current status in the replenishment workflow", + example: 
"SUGGESTED" + }, + dueDate: { + type: "string", + format: "date-time", + nullable: true, + description: "Target completion date for the replenishment", + example: "2024-11-28T10:00:00Z" + }, + taskId: { + type: "string", + nullable: true, + description: "Associated task identifier when status becomes TASK_CREATED", + example: "TASK-12345" + }, + approvedBy: { + type: "string", + nullable: true, + description: "User identifier of the approving manager", + example: "MGR-001" + }, + approvedDate: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp when replenishment was approved", + example: "2024-11-27T14:30:00Z" + }, + completedBy: { + type: "string", + nullable: true, + description: "User identifier of the completing operator", + example: "OP-007" + }, + completedDate: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp when replenishment execution was completed", + example: "2024-11-27T16:45:00Z" + }, + cancelReason: { + type: "string", + nullable: true, + description: "Reason provided when replenishment was cancelled", + example: "Product discontinued" + }, + cancelledBy: { + type: "string", + nullable: true, + description: "User identifier who cancelled the replenishment", + example: "MGR-002" + }, + cancelledDate: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp when replenishment was cancelled", + example: "2024-11-27T15:00:00Z" + }, + notes: { + type: "string", + nullable: true, + description: "Additional operational notes and instructions", + example: "Handle with care - fragile items" + }, + worldRef: { + type: "object", + description: "Reference to the world environment containing this replenishment", + properties: { + worldId: { + type: "string", + description: "World identifier for multi-tenant context", + example: "507f1f77bcf86cd799439011" + } + }, + required: [ + "worldId" + ] + }, + customFields: { + type: "object", + description: "Additional business-specific data and configuration", + additionalProperties: true, + example: { + urgencyLevel: "HIGH", + costCenter: "DC001", + shiftAssignment: "DAY", + equipmentRequired: [ + "FORKLIFT", + "SCANNER" + ] + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the replenishment record was created", + example: "2024-11-27T10:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the replenishment record was last updated", + example: "2024-11-27T15:30:00.000Z" + } + }, + required: [ + "_id", + "replenishmentId", + "warehouseId", + "productId", + "fromBin", + "toBin", + "quantity", + "replenishmentType", + "priority", + "status", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSTask: { + type: "object", + description: "\n**WMS Task Management System**\n\nComprehensive warehouse task orchestration with detailed tracking, performance measurement, and workflow automation.\n\n**⚠️ CRITICAL IMPLEMENTATION BUG:**\n\n> **BUG #7**: Aggregation pipeline uses `avgDuration` but return value expects `averageDuration`\n>\n> **Impact**: Inconsistent field naming in metrics response causing client-side failures\n> **Required Fix**: Align aggregation field name with expected return value or update interface\n\n**Task Types Supported:**\n- **PICK**: Order fulfillment picking operations\n- **PUTAWAY**: Inbound inventory storage tasks\n- **REPLENISHMENT**: Stock movement for bin replenishment\n- **CYCLE_COUNT**: Inventory counting and verification\n- 
**MOVE**: General inventory relocation\n- **LOAD/UNLOAD**: Dock and trailer operations\n- **PACK**: Order packaging and preparation\n- **SORT**: Product and order sorting operations\n\n**Status Workflow:**\n1. **CREATED** - Task generated but not yet available\n2. **RELEASED** - Task available for assignment\n3. **ASSIGNED** - Task assigned to specific user\n4. **IN_PROGRESS** - Task execution in progress\n5. **COMPLETED** - Task successfully finished\n6. **CANCELLED/SUSPENDED** - Task terminated or paused\n\n**Key Features:**\n- Priority-based task sequencing and assignment\n- Comprehensive timing and performance tracking\n- Detailed scan validation and audit trails\n- Flexible task detail and product tracking\n- Equipment and resource assignment\n- Real-time status monitoring and reporting\n ", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + taskId: { + type: "string", + description: "Unique business identifier for the task, auto-generated using WMS service prefix", + example: "wms_task_674565c1234567890abcdef" + }, + warehouseId: { + type: "string", + description: "Warehouse identifier where task occurs", + example: "WH001" + }, + taskType: { + type: "string", + enum: [ + "PICK", + "PUTAWAY", + "REPLENISHMENT", + "CYCLE_COUNT", + "MOVE", + "LOAD", + "UNLOAD", + "PACK", + "SORT" + ], + description: "Type of warehouse operation to be performed", + example: "PICK" + }, + taskSubtype: { + type: "string", + enum: [ + "DISCRETE", + "BATCH", + "CLUSTER", + "ZONE" + ], + nullable: true, + description: "Task execution methodology and grouping strategy", + example: "DISCRETE" + }, + priority: { + type: "number", + description: "Task priority level for sequencing (higher values = higher priority)", + default: 50, + example: 75 + }, + taskStatus: { + type: "string", + enum: [ + "CREATED", + "RELEASED", + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED", + "SUSPENDED" + ], + description: "Current status in the task lifecycle", + default: "CREATED", + example: "IN_PROGRESS" + }, + reference: { + type: "object", + nullable: true, + description: "Reference to originating business document", + properties: { + type: { + type: "string", + enum: [ + "PO", + "ORDER", + "WAVE", + "INBOUND", + "REPLENISHMENT" + ], + description: "Type of originating document", + example: "ORDER" + }, + id: { + type: "string", + description: "Identifier of originating document", + example: "ORD-12345" + } + } + }, + product: { + type: "object", + nullable: true, + description: "Product information associated with the task", + properties: { + productId: { + type: "string", + description: "Product identifier", + example: "PROD-12345" + }, + sku: { + type: "string", + description: "Stock keeping unit code", + example: "ABC-XYZ-001" + }, + productName: { + type: "string", + description: "Human-readable product name", + example: "Widget Premium" + } + } + }, + from: { + type: "object", + nullable: true, + description: "Source location for the task", + properties: { + binId: { + type: "string", + description: "Source bin identifier", + example: "BIN-A001" + }, + binCode: { + type: "string", + description: "Human-readable source bin code", + example: "A-001" + }, + zoneId: { + type: "string", + description: "Source zone identifier", + example: "ZONE-PICK" + } + } + }, + to: { + type: "object", + nullable: true, + description: "Destination location for the task", + properties: { + binId: { + type: "string", + description: "Destination bin 
identifier", + example: "BIN-SHIP-001" + }, + binCode: { + type: "string", + description: "Human-readable destination bin code", + example: "SHIP-001" + }, + zoneId: { + type: "string", + description: "Destination zone identifier", + example: "ZONE-SHIPPING" + } + } + }, + lotNumber: { + type: "string", + nullable: true, + description: "Lot or batch number for traceability", + example: "LOT-20241127-001" + }, + licensePlateNumber: { + type: "string", + nullable: true, + description: "License plate number for pallet tracking", + example: "LPN-987654321" + }, + quantity: { + type: "object", + nullable: true, + description: "Quantity requirements and tracking", + properties: { + requested: { + type: "number", + description: "Originally requested quantity", + example: 24 + }, + actual: { + type: "number", + nullable: true, + description: "Actual quantity handled upon completion", + example: 22 + }, + uom: { + type: "string", + description: "Unit of measure for quantities", + example: "EA" + } + } + }, + assignment: { + type: "object", + nullable: true, + description: "Task assignment and resource allocation", + properties: { + userId: { + type: "string", + description: "Assigned user identifier", + example: "USER-001" + }, + userName: { + type: "string", + description: "Assigned user name for display", + example: "John Smith" + }, + equipmentId: { + type: "string", + nullable: true, + description: "Assigned equipment identifier", + example: "FORK-001" + }, + assignedAt: { + type: "string", + format: "date-time", + nullable: true, + description: "Timestamp when task was assigned", + example: "2024-11-27T09:15:00Z" + } + } + }, + zoneId: { + type: "string", + nullable: true, + description: "Primary zone identifier for the task", + example: "ZONE-PICK" + }, + timing: { + type: "object", + nullable: true, + description: "Comprehensive timing and performance tracking", + properties: { + createdAt: { + type: "string", + format: "date-time", + nullable: true, + description: "Task creation timestamp", + example: "2024-11-27T09:00:00Z" + }, + releasedAt: { + type: "string", + format: "date-time", + nullable: true, + description: "Task release timestamp (becomes available)", + example: "2024-11-27T09:05:00Z" + }, + assignedAt: { + type: "string", + format: "date-time", + nullable: true, + description: "Task assignment timestamp", + example: "2024-11-27T09:15:00Z" + }, + startedAt: { + type: "string", + format: "date-time", + nullable: true, + description: "Task execution start timestamp", + example: "2024-11-27T09:20:00Z" + }, + completedAt: { + type: "string", + format: "date-time", + nullable: true, + description: "Task completion timestamp", + example: "2024-11-27T09:35:00Z" + }, + estimatedDuration: { + type: "number", + nullable: true, + description: "Estimated duration in minutes", + example: 15 + }, + actualDuration: { + type: "number", + nullable: true, + description: "Actual duration in minutes (calculated on completion)", + example: 12 + } + } + }, + completedBy: { + type: "string", + nullable: true, + description: "User identifier who completed the task", + example: "USER-001" + }, + details: { + type: "array", + description: "Task-specific detail items and sub-operations", + items: { + type: "object", + properties: { + detailId: { + type: "string", + description: "Unique detail line identifier", + example: "DTL-001" + }, + productId: { + type: "string", + description: "Product identifier for this detail line", + example: "PROD-12345" + }, + sku: { + type: "string", + description: "SKU for 
this detail line", + example: "ABC-XYZ-001" + }, + binId: { + type: "string", + description: "Bin identifier for this detail line", + example: "BIN-A001" + }, + lotNumber: { + type: "string", + nullable: true, + description: "Lot number for this detail line", + example: "LOT-20241127-001" + }, + quantity: { + type: "number", + description: "Required quantity for this detail line", + example: 24 + }, + pickedQuantity: { + type: "number", + nullable: true, + description: "Actual picked quantity", + example: 22 + }, + uom: { + type: "string", + description: "Unit of measure", + example: "EA" + }, + sequenceNumber: { + type: "number", + nullable: true, + description: "Execution sequence number", + example: 1 + }, + detailStatus: { + type: "string", + enum: [ + "PENDING", + "IN_PROGRESS", + "COMPLETED", + "SHORT", + "SKIPPED" + ], + description: "Status of this detail line", + example: "COMPLETED" + } + } + } + }, + scans: { + type: "array", + description: "Scan validation history and audit trail", + items: { + type: "object", + properties: { + scanType: { + type: "string", + enum: [ + "BIN", + "PRODUCT", + "LPN", + "DESTINATION" + ], + description: "Type of scan performed", + example: "PRODUCT" + }, + scannedValue: { + type: "string", + description: "Actual scanned value", + example: "ABC-XYZ-001" + }, + expectedValue: { + type: "string", + description: "Expected value for validation", + example: "ABC-XYZ-001" + }, + scanResult: { + type: "string", + enum: [ + "MATCH", + "MISMATCH", + "OVERRIDE" + ], + description: "Result of scan validation", + example: "MATCH" + }, + scannedAt: { + type: "string", + format: "date-time", + description: "Timestamp of the scan", + example: "2024-11-27T09:25:00Z" + } + } + } + }, + performance: { + type: "object", + nullable: true, + description: "Performance metrics and productivity data", + properties: { + unitsPerHour: { + type: "number", + nullable: true, + description: "Units processed per hour", + example: 125.5 + }, + accuracy: { + type: "number", + nullable: true, + description: "Task accuracy percentage", + example: 98.5 + } + } + }, + notes: { + type: "string", + nullable: true, + description: "Additional operational notes and instructions", + example: "Customer requested expedited processing" + }, + worldRef: { + type: "object", + description: "Reference to the world environment containing this task", + properties: { + worldId: { + type: "string", + description: "World identifier for multi-tenant context", + example: "507f1f77bcf86cd799439011" + } + }, + required: [ + "worldId" + ] + }, + customFields: { + type: "object", + nullable: true, + description: "Additional business-specific data and configuration", + additionalProperties: true, + example: { + shiftCode: "DAY-1", + supervisorId: "SUP-001", + customerPriority: "HIGH", + specialInstructions: [ + "FRAGILE", + "HEAVY_LIFT" + ] + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the task record was created", + example: "2024-11-27T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the task record was last updated", + example: "2024-11-27T09:35:00.000Z" + } + }, + required: [ + "_id", + "taskId", + "warehouseId", + "taskType", + "priority", + "taskStatus", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSWarehouse: { + type: "object", + description: "Warehouse facility configuration and management data", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", 
+ example: "507f1f77bcf86cd799439011" + }, + worldRef: { + type: "object", + description: "Reference to the world environment", + properties: { + worldId: { + type: "string", + description: "World environment identifier", + example: "507f1f77bcf86cd799439011" + } + } + }, + warehouseId: { + type: "string", + description: "Unique auto-generated warehouse identifier", + example: "WH_507f1f77bcf86cd799439012" + }, + warehouseCode: { + type: "string", + description: "Auto-generated warehouse code from warehouse name (slugified)", + example: "atlanta-distribution-center" + }, + warehouseName: { + type: "string", + description: "Human readable warehouse name", + example: "Atlanta Distribution Center" + }, + address: { + type: "object", + description: "Physical warehouse address information", + properties: { + street: { + type: "string", + description: "Street address", + example: "1234 Industrial Blvd" + }, + city: { + type: "string", + description: "City name", + example: "Atlanta" + }, + state: { + type: "string", + description: "State or province", + example: "GA" + }, + postalCode: { + type: "string", + description: "Postal or ZIP code", + example: "30309" + }, + country: { + type: "string", + description: "Country name", + example: "USA" + }, + latitude: { + type: "number", + nullable: true, + description: "Geographic latitude coordinate", + example: 33.749 + }, + longitude: { + type: "number", + nullable: true, + description: "Geographic longitude coordinate", + example: -84.388 + } + }, + required: [ + "street", + "city", + "state", + "postalCode", + "country" + ] + }, + timezone: { + type: "string", + description: "IANA timezone identifier for warehouse operations", + example: "America/New_York" + }, + warehouseType: { + type: "string", + enum: [ + "FULFILLMENT", + "STAGING", + "RETURNS", + "3PL", + "VIRTUAL" + ], + description: "Warehouse operational type classification", + example: "FULFILLMENT" + }, + status: { + type: "string", + enum: [ + "ACTIVE", + "DISABLED", + "ARCHIVED" + ], + description: "Warehouse operational status", + default: "ACTIVE", + example: "ACTIVE" + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the warehouse record was created", + example: "2024-11-27T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the warehouse record was last updated", + example: "2024-11-27T09:35:00.000Z" + } + }, + required: [ + "_id", + "warehouseId", + "warehouseCode", + "warehouseName", + "address", + "timezone", + "status", + "worldRef", + "createdAt", + "updatedAt" + ] + }, + WMSZone: { + type: "object", + description: "Warehouse zone configuration and management data", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" + }, + worldRef: { + type: "object", + description: "Reference to the world environment", + properties: { + worldId: { + type: "string", + description: "World environment identifier", + example: "507f1f77bcf86cd799439011" + } + } + }, + zoneId: { + type: "string", + description: "Unique auto-generated zone identifier", + example: "ZNE_507f1f77bcf86cd799439012" + }, + warehouseId: { + type: "string", + description: "Associated warehouse identifier", + example: "WH_ATL_001" + }, + zoneCode: { + type: "string", + description: "Auto-generated zone code from zone name (slugified)", + example: "picking-zone-a" + }, + zoneName: { + type: "string", + description: "Human readable zone name", + example:
"Picking Zone A" + }, + zoneType: { + type: "string", + enum: [ + "RECEIVING", + "STORAGE", + "PICKING", + "PACKING", + "SHIPPING", + "STAGING", + "QC", + "RETURNS" + ], + description: "Zone operational type classification", + example: "PICKING" + }, + temperatureControlled: { + type: "boolean", + description: "Temperature control requirement flag", + default: false, + example: false + }, + temperatureRange: { + type: "object", + nullable: true, + description: "Temperature configuration for controlled zones", + properties: { + min: { + type: "number", + description: "Minimum temperature", + example: 32 + }, + max: { + type: "number", + description: "Maximum temperature", + example: 40 + }, + unit: { + type: "string", + description: "Temperature unit of measure", + example: "Fahrenheit" + } + } + }, + capacityCubicFeet: { + type: "number", + nullable: true, + description: "Zone storage capacity in cubic feet", + example: 50000 + }, + aisles: { + type: "array", + description: "Aisle configuration within the zone", + items: { + type: "object", + properties: { + aisleId: { + type: "string", + description: "Aisle identifier", + example: "AISLE_A1_001" + }, + aisleCode: { + type: "string", + description: "Human-readable aisle code", + example: "A1" + }, + aisleType: { + type: "string", + description: "Aisle type classification", + example: "STANDARD" + } + } + } + }, + customFields: { + type: "object", + nullable: true, + description: "Additional custom data fields", + additionalProperties: true, + example: { + shiftCode: "DAY-1", + supervisor: "SUP-001", + specialHandling: [ + "FRAGILE" + ] + } + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the zone record was created", + example: "2024-11-27T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the zone record was last updated", + example: "2024-11-27T09:35:00.000Z" + } + }, + required: [ + "_id", + "zoneId", + "warehouseId", + "zoneCode", + "zoneName", + "temperatureControlled", + "worldRef", + "createdAt", + "updatedAt" + ] + } +}; diff --git a/packages/controlmart/src/docs/schemas/world.schema.ts b/packages/controlmart/src/docs/schemas/world.schema.ts new file mode 100644 index 0000000000000000000000000000000000000000..8e359f215aad2c0b0cffde0b605412d5456755bf --- /dev/null +++ b/packages/controlmart/src/docs/schemas/world.schema.ts @@ -0,0 +1,443 @@ +export const worldSchemas = { + World: { + type: "object", + description: "A world environment representing an isolated business context with its own data and configurations", + properties: { + _id: { + type: "string", + description: "Unique identifier for the world", + example: "507f1f77bcf86cd799439011" + }, + name: { + type: "string", + description: "Human-readable name for the world environment", + example: "Production Environment" + }, + url: { + type: "string", + description: "URL slug derived from the world name", + example: "production-environment" + }, + apiKey: { + type: "string", + description: "API key for authenticating with this world's services", + example: "prod_api_key_123456" + }, + apiSecret: { + type: "string", + description: "API secret for secure authentication", + example: "prod_secret_789012" + }, + is_default: { + type: "boolean", + description: "Whether this world is the default environment", + example: true, + default: false + }, + description: { + type: "string", + description: "Detailed description of the world's purpose and usage", + example: "Main production 
environment for live customer operations and critical business processes" + }, + layout: { + type: "string", + description: "ID of the layout template used to seed this world", + example: "perishables-food-manufacturer" + }, + mpcCompany: { + type: "string", + description: "Identifier of the main MPC company associated with this world", + example: "company_skyfall_main_123" + }, + realHoursPerSimDay: { + type: "number", + description: "Number of real-world hours that equal one simulation day", + default: 2, + example: 2 + }, + samplingStrategy: { + $ref: "#/components/schemas/SamplingStrategy" + }, + capabilityIds: { + type: "array", + items: { + type: "string" + }, + description: "Array of capability IDs assigned to this world", + example: ["cap_order_processing", "cap_inventory_check"] + }, + personas: { + $ref: "#/components/schemas/PersonaConfig" + }, + chaos: { + $ref: "#/components/schemas/ChaosConfig" + }, + ticketCreationEnabled: { + type: "boolean", + description: "Whether ITSM ticket creation is enabled for this world", + default: true, + example: true + }, + createdAt: { + type: "string", + format: "date-time", + description: "Timestamp when the world was created", + example: "2024-01-15T09:00:00.000Z" + }, + updatedAt: { + type: "string", + format: "date-time", + description: "Timestamp when the world was last modified", + example: "2024-01-15T10:30:00.000Z" + } + }, + required: [ + "_id", + "name", + "url", + "is_default", + "createdAt", + "updatedAt" + ] + }, + SamplingStrategy: { + type: "object", + description: "Strategy for sampling capabilities to assign to a world", + oneOf: [ + { + type: "object", + properties: { + type: { + type: "string", + enum: ["all"], + description: "Include all available capabilities" + } + }, + required: ["type"] + }, + { + type: "object", + properties: { + type: { + type: "string", + enum: ["filter"], + description: "Filter capabilities by criteria" + }, + filter: { + $ref: "#/components/schemas/CapabilityFilter" + } + }, + required: ["type", "filter"] + }, + { + type: "object", + properties: { + type: { + type: "string", + enum: ["random"], + description: "Randomly select a number of capabilities" + }, + count: { + type: "integer", + description: "Number of capabilities to randomly select", + example: 10 + }, + seed: { + type: "integer", + description: "Optional seed for reproducible random selection" + } + }, + required: ["type", "count"] + }, + { + type: "object", + properties: { + type: { + type: "string", + enum: ["seeded"], + description: "Select capabilities with a required seed for reproducibility" + }, + count: { + type: "integer", + description: "Number of capabilities to select", + example: 10 + }, + seed: { + type: "integer", + description: "Seed for reproducible selection", + example: 42 + } + }, + required: ["type", "count", "seed"] + } + ], + example: { + type: "random", + count: 10, + seed: 42 + } + }, + CapabilityFilter: { + type: "object", + description: "Filter criteria for capability sampling", + properties: { + domain: { + type: "array", + items: { + type: "string" + }, + description: "Filter by domain names", + example: ["order-management", "inventory"] + }, + complexity: { + type: "string", + enum: ["simple", "medium", "complex"], + description: "Filter by complexity level" + }, + services: { + type: "array", + items: { + type: "string" + }, + description: "Filter by required services", + example: ["erp", "wms"] + }, + personas: { + type: "array", + items: { + type: "string" + }, + description: "Filter by associated 
personas", + example: ["warehouse-manager", "procurement-officer"] + }, + patterns: { + type: "array", + items: { + type: "string" + }, + description: "Filter by capability patterns", + example: ["crud", "workflow"] + } + } + }, + PersonaConfig: { + type: "object", + description: "Configuration for personas allowed to access this world", + properties: { + allowedPersonas: { + type: "array", + items: { + type: "string" + }, + description: "List of persona IDs that can access this world. If not specified, all personas have access.", + example: ["warehouse-manager", "procurement-officer", "sales-rep"] + }, + personaOverrides: { + type: "object", + description: "Custom persona-to-capability mappings that override defaults", + additionalProperties: { + type: "object", + properties: { + capabilityIds: { + type: "array", + items: { + type: "string" + }, + description: "Capability IDs assigned to this persona" + } + } + }, + example: { + "warehouse-manager": { + capabilityIds: ["cap_inventory_check", "cap_stock_transfer"] + } + } + } + } + }, + ChaosConfig: { + type: "object", + description: "Configuration for per-world chaos engineering settings", + properties: { + processChaosEnabled: { + type: "boolean", + description: "Enable chaos for process execution (ODs)", + default: false, + example: false + }, + infraChaosEnabled: { + type: "boolean", + description: "Enable chaos for infrastructure components (tools/DB)", + default: false, + example: false + } + }, + required: ["processChaosEnabled", "infraChaosEnabled"] + }, + ChaosPolicy: { + type: "object", + description: "Legacy/Resolved Chaos engineering policy (internal use)", + properties: { + enabled: { + type: "boolean", + description: "Whether chaos injection is enabled", + example: true + }, + probability: { + type: "number", + format: "float", + minimum: 0, + maximum: 1, + description: "Overall probability (0.0 to 1.0) that chaos will occur", + example: 0.1 + }, + scenarios: { + type: "array", + items: { + $ref: "#/components/schemas/ChaosScenario" + }, + description: "List of chaos scenarios that can be triggered" + }, + seed: { + type: "string", + description: "Optional seed for reproducible chaos", + example: "test-seed-123" + }, + persistCorruptedData: { + type: "boolean", + description: "When true, persist corrupted data instead of throwing errors", + default: false + } + }, + required: ["enabled", "probability", "scenarios"] + }, + ChaosScenario: { + type: "object", + description: "A specific chaos scenario that can be triggered", + properties: { + type: { + type: "string", + enum: [ + "data_corruption", + "missing_data", + "stale_data", + "format_change", + "permission_denied", + "rate_limit", + "partial_data", + "duplicate_data", + "invalid_state", + "dependency_failure", + "timing_issue" + ], + description: "Type of chaos to inject" + }, + weight: { + type: "number", + description: "Relative weight for scenario selection (higher = more likely)", + example: 1.0 + }, + description: { + type: "string", + description: "Human-readable description of this scenario", + example: "Simulate missing required fields in API response" + }, + config: { + type: "object", + description: "Scenario-specific configuration", + additionalProperties: true + } + }, + required: ["type", "weight", "description", "config"] + }, + WorldLog: { + type: "object", + description: "A single operational log entry within a world environment", + properties: { + _id: { + type: "string", + description: "MongoDB document identifier", + example: "507f1f77bcf86cd799439011" 
+ }, + worldRef: { + type: "object", + description: "Reference to the world this log belongs to", + properties: { + worldId: { + type: "string", + description: "Unique identifier of the world", + example: "550e8400-e29b-41d4-a716-446655440000" + } + }, + required: [ + "worldId" + ] + }, + logId: { + type: "string", + description: "Unique identifier for this log entry", + example: "log_123456789" + }, + timestamp: { + type: "string", + format: "date-time", + description: "When this log entry was created", + example: "2024-01-15T10:25:30.123Z" + }, + serviceType: { + type: "string", + enum: [ + "edi", + "erp", + "as2", + "translator", + "validator", + "gateway", + "infra", + "other" + ], + description: "Type of service that generated this log", + example: "edi" + }, + level: { + type: "string", + enum: [ + "trace", + "debug", + "info", + "warn", + "error", + "fatal" + ], + description: "Severity level of the log entry", + example: "info" + }, + msg: { + type: "string", + description: "Human-readable log message", + example: "EDI 850 Purchase Order processed successfully" + }, + metadata: { + type: "object", + description: "Additional structured data related to this log entry", + additionalProperties: true, + example: { + transactionId: "txn_789abc", + partnerId: "partner_456", + docType: "850", + direction: "IN", + processingTime: 1250 + } + } + }, + required: [ + "_id", + "worldRef", + "logId", + "timestamp", + "serviceType", + "level", + "msg" + ] + } +}; diff --git a/packages/controlmart/src/edi-demos/chaos-edi-demo.ts b/packages/controlmart/src/edi-demos/chaos-edi-demo.ts new file mode 100644 index 0000000000000000000000000000000000000000..678bf108728329c4b58368e9111a0f68b068fccd --- /dev/null +++ b/packages/controlmart/src/edi-demos/chaos-edi-demo.ts @@ -0,0 +1,554 @@ +import { pino, destination } from "pino"; + +import { ODSetExecutor } from "../operational-descriptor/od-set-executor"; +import type { OperationalDescriptor } from "../types/od.type"; +import { initOperationalDescriptor } from "../operational-descriptor/init.od"; +import { EService, EServices } from "../utils/service-mesh.util"; + +const logger = pino( + { + level: "info", + formatters: { + level: (label) => ({ level: label }), + }, + timestamp: pino.stdTimeFunctions.isoTime, + }, + destination({ dest: "chaos-edi-demo.log", mkdir: true }), +); + +const CHAOS_EDI_DEMO: OperationalDescriptor = { + id: "chaos-edi-demo", + name: "Chaos Engineering EDI Demo", + version: "1.0.0", + description: "EDI demo with comprehensive chaos engineering scenarios", + chaos: { + enabled: true, + probability: 0.8, + scenarios: [ + { + type: "data_corruption", + weight: 20, + description: "Corrupt payment data during processing", + config: { + corruptFields: ["amount", "orderId", "recipient.accountNumber"], + corruptionType: "random_value", + }, + }, + { + type: "missing_data", + weight: 15, + description: "Missing critical payment information", + config: { + missingFields: ["recipient", "currency"], + throwError: true, + }, + }, + { + type: "format_change", + weight: 15, + description: "Unexpected EDI format changes", + config: { + schemaChanges: [ + { + field: "ediFormat", + change: "change_type", + newType: "object", + }, + ], + }, + }, + { + type: "rate_limit", + weight: 15, + description: "EDI processing rate limits", + config: { + rateLimitDelay: 3000, + rateLimitMessage: "EDI processing rate limit exceeded", + }, + }, + { + type: "dependency_failure", + weight: 15, + description: "External service failures", + config: { + 
dependencyService: "edi-validator", + cascadeFailure: true, + }, + }, + { + type: "partial_data", + weight: 10, + description: "Incomplete processing results", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "timing_issue", + weight: 10, + description: "Processing timing issues", + config: { + rateLimitDelay: 5000, + }, + }, + ], + }, + steps: [ + { + id: "processPayment", + name: "Process Payment with Chaos", + type: "mcp" as const, + service: "edi", + tool: "processPayment", + input: { + type: "literal" as const, + value: { + orderId: "CHAOS-DEMO-001", + amount: 2500.0, + currency: "USD", + recipient: { + name: "Chaos Test Corp", + accountNumber: "CHAOS123456", + bankCode: "CHAOSBANK01", + }, + }, + }, + output: { storeAs: "paymentResult" }, + retry: { + maxRetries: 3, + backoff: "exponential", + baseMs: 200, + maxBackoffMs: 2000, + jitter: true, + }, + }, + { + id: "createEdiWithResilience", + name: "Create EDI Document with Resilience", + type: "mcp" as const, + service: "edi", + tool: "convert_json_to_edi", + input: { + type: "template" as const, + value: { + json: "{{paymentResult}}", + resilience: { + enableBackup: true, + validateStructure: true, + }, + }, + }, + output: { storeAs: "ediDocument" }, + retry: { + maxRetries: 5, + backoff: "linear", + baseMs: 300, + }, + }, + { + id: "validateWithFallback", + name: "Validate EDI with Fallback", + type: "mcp" as const, + service: "edi", + tool: "validate_edi_resilient", + input: { + type: "template" as const, + value: { + edi: "{{ediDocument}}", + fallbackValidation: true, + strictMode: false, + }, + }, + output: { storeAs: "validation" }, + retry: { + maxRetries: 2, + backoff: "fixed", + baseMs: 100, + }, + }, + { + id: "circuitBreakerTest", + name: "Test Circuit Breaker Pattern", + type: "mcp" as const, + service: "resilience", + tool: "circuitBreakerTest", + input: { + type: "template" as const, + value: { + previousResults: { + payment: "{{paymentResult}}", + edi: "{{ediDocument}}", + validation: "{{validation}}", + }, + }, + }, + output: { storeAs: "circuitBreakerResult" }, + }, + ], + assertions: [ + { + id: "payment_processed", + expression: "paymentResult != null", + description: "Payment must be processed despite chaos", + continueOnFailure: true, + }, + { + id: "edi_created", + expression: "ediDocument != null", + description: "EDI document must be created with resilience", + continueOnFailure: true, + }, + ], +}; + +/** + * Resilient mock tools with chaos handling + */ +const createResilientMockTools = () => ({ + edi: { + processPayment: async (input: any) => { + const baseDelay = 200; + const chaosDelay = Math.random() < 0.3 ? Math.random() * 2000 : 0; + await new Promise((resolve) => setTimeout(resolve, baseDelay + chaosDelay)); + + const chaosRoll = Math.random(); + + if (chaosRoll < 0.1) { + throw new Error("Payment gateway connection failed"); + } + + if (chaosRoll < 0.2) { + throw new Error("Insufficient funds or account validation failed"); + } + + if (chaosRoll < 0.3) { + return { + ...input, + amount: "CORRUPTED_AMOUNT", + status: "unknown", + reference: null, + }; + } + + const result = { + ...input, + status: "processed", + reference: `REF-CHAOS-${Date.now()}`, + processedAt: new Date().toISOString(), + processingTime: baseDelay + chaosDelay, + chaosRecovery: chaosDelay > 0 ? 
"recovered_from_delay" : "normal", + }; + + if (Math.random() < 0.2) { + delete result.reference; + result.status = "partial"; + } + + return result; + }, + + convert_json_to_edi: async (input: any) => { + let json; + try { + json = typeof input.json === "string" ? JSON.parse(input.json) : input.json; + } catch (error) { + if (input.resilience?.enableBackup) { + console.warn(" Using backup processing for corrupted input"); + json = { + orderId: "BACKUP-ORDER", + amount: 0, + currency: "USD", + status: "backup_processed", + }; + } else { + throw new Error("Failed to parse payment JSON and no backup enabled"); + } + } + + if (Math.random() < 0.25) { + return "CHAOS*EDI*MALFORMED*DATA~"; + } + + const controlNumber = Date.now().toString().substr(-9); + const timeStamp = new Date().toTimeString().substr(0, 4).replace(":", ""); + + const ediDocument = `ISA*00* *00* *ZZ*CHAOS_SENDER *ZZ*CHAOS_RECEIVER*${controlNumber}*${timeStamp}*U*00401*${controlNumber}*0*P*>~ +GS*HC*SENDER*RECEIVER*${new Date().toISOString().substr(0, 10).replace(/-/g, "")}*${timeStamp}*${controlNumber}*X*004010~ +ST*820*${controlNumber}~ +BPR*C*${json.amount || 0}*C*ACH*CCP*01*${json.recipient?.accountNumber || "UNKNOWN"}*DA*${json.recipient?.bankCode || "UNKNOWN"}~ +TRN*1*${json.orderId || "UNKNOWN"}*CHAOS_CORP~ +SE*5*${controlNumber}~ +GE*1*${controlNumber}~ +IEA*1*${controlNumber}~`; + + return { + ediDocument, + format: "X12_004010", + generatedAt: new Date().toISOString(), + chaosResistant: input.resilience?.enableBackup || false, + }; + }, + + validate_edi_resilient: async (input: any) => { + const ediData = typeof input.edi === "string" ? input.edi : input.edi?.ediDocument; + + if (!ediData) { + if (input.fallbackValidation) { + console.warn(" Using fallback validation for missing EDI data"); + return { + isValid: true, + errors: [], + warnings: ["Fallback validation used - limited validation performed"], + fallbackUsed: true, + validatedAt: new Date().toISOString(), + }; + } + throw new Error("No EDI data to validate"); + } + + const chaosRoll = Math.random(); + + if (chaosRoll < 0.15 && !input.fallbackValidation) { + throw new Error("EDI validator service unavailable"); + } + + const isValidBasic = ediData.includes("ISA*") && ediData.includes("IEA*"); + const isChaosDoc = ediData.includes("CHAOS*"); + + let isValid = isValidBasic && !isChaosDoc; + let errors: string[] = []; + let warnings: string[] = []; + + if (isChaosDoc) { + if (input.strictMode === false) { + warnings.push("Chaos-generated EDI detected but allowed in non-strict mode"); + isValid = true; + } else { + errors.push("Invalid EDI format detected"); + isValid = false; + } + } + + if (!isValidBasic) { + errors.push("Missing required EDI envelope segments"); + } + + if (Math.random() < 0.3) { + warnings.push("Non-standard segment ordering detected"); + } + + if (Math.random() < 0.1) { + errors.push("Control number mismatch"); + isValid = false; + } + + return { + isValid, + errors, + warnings, + validatedAt: new Date().toISOString(), + chaosDetected: isChaosDoc, + fallbackUsed: false, + }; + }, + }, + + resilience: { + circuitBreakerTest: async (input: any) => { + await new Promise((resolve) => setTimeout(resolve, 100)); + + const { previousResults } = input; + + const hasFailures = + !previousResults.payment?.reference || + !previousResults.edi || + !previousResults.validation?.isValid; + + const circuitState = hasFailures ? 
"OPEN" : "CLOSED"; + + if (circuitState === "OPEN") { + console.warn(" Circuit breaker OPEN - implementing fallback logic"); + + return { + circuitState: "OPEN", + fallbackActivated: true, + fallbackResult: { + orderId: previousResults.payment?.orderId || "FALLBACK-ORDER", + status: "processed_via_fallback", + processedAt: new Date().toISOString(), + note: "Processed through backup pathway due to service degradation", + }, + recommendedAction: "retry_later", + retryAfterSeconds: 30, + }; + } + + return { + circuitState: "CLOSED", + fallbackActivated: false, + systemHealth: "healthy", + allServicesOperational: true, + lastCheckedAt: new Date().toISOString(), + }; + }, + }, +}); + +/** + * Chaos demo scenarios + */ +const chaosScenarios = [ + { + name: "Baseline Test", + description: "Normal processing without chaos", + chaosLevel: 0, + iterations: 3, + }, + { + name: "Light Chaos", + description: "Minimal chaos injection for resilience testing", + chaosLevel: 0.2, + iterations: 5, + }, + { + name: "Moderate Chaos", + description: "Significant chaos to test recovery mechanisms", + chaosLevel: 0.5, + iterations: 7, + }, + { + name: "Extreme Chaos", + description: "High chaos to validate circuit breaker patterns", + chaosLevel: 0.8, + iterations: 10, + }, +]; + +async function runChaosEDIDemo() { + console.log(" Chaos Engineering EDI Demo"); + console.log("=".repeat(60)); + + const executor = new ODSetExecutor(logger); + const results: any[] = []; + + for (const scenario of chaosScenarios) { + console.log(`\n Running Scenario: ${scenario.name}`); + console.log(` Description: ${scenario.description}`); + console.log(` Chaos Level: ${scenario.chaosLevel * 100}%`); + console.log(` Iterations: ${scenario.iterations}`); + + const scenarioResults = []; + + for (let i = 0; i < scenario.iterations; i++) { + const initContext = await initOperationalDescriptor("skyfoods-edi", EService.EDI); + + const context = { + world: initContext.world, + tools: createResilientMockTools(), + logger, + globalState: {}, + }; + + const dynamicOD = { + ...CHAOS_EDI_DEMO, + id: `${CHAOS_EDI_DEMO.id}-${scenario.name.toLowerCase().replace(" ", "-")}-${i}`, + chaos: { + ...CHAOS_EDI_DEMO.chaos!, + probability: scenario.chaosLevel, + }, + }; + + try { + console.log(` Iteration ${i + 1}/${scenario.iterations}`); + const result = await executor.executeOD(dynamicOD, context); + scenarioResults.push(result); + + const statusIcon = result.success ? "" : ""; + console.log( + ` ${statusIcon} ${result.success ? "SUCCESS" : "FAILED"} (${result.executionTime}ms)`, + ); + } catch (error) { + console.log(` EXCEPTION: ${error instanceof Error ? 
error.message : String(error)}`); + scenarioResults.push({ + odId: dynamicOD.id, + success: false, + error: error as Error, + executionTime: 0, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 100)); + } + + const successCount = scenarioResults.filter((r) => r.success).length; + const successRate = (successCount / scenario.iterations) * 100; + const avgExecutionTime = + scenarioResults + .filter((r) => r.executionTime > 0) + .reduce((sum, r) => sum + r.executionTime, 0) / + Math.max(1, scenarioResults.filter((r) => r.executionTime > 0).length); + + console.log( + ` Success Rate: ${successRate.toFixed(1)}% (${successCount}/${scenario.iterations})`, + ); + console.log(` Avg Execution Time: ${Math.round(avgExecutionTime)}ms`); + + results.push({ + scenario: scenario.name, + chaosLevel: scenario.chaosLevel, + iterations: scenario.iterations, + successCount, + successRate, + avgExecutionTime, + results: scenarioResults, + }); + } + + console.log("\n" + "=".repeat(60)); + console.log(" Chaos Engineering Analysis"); + console.log("=".repeat(60)); + + results.forEach((scenario) => { + console.log(`${scenario.scenario}:`); + console.log(` Chaos Level: ${scenario.chaosLevel * 100}%`); + console.log(` Success Rate: ${scenario.successRate.toFixed(1)}%`); + console.log(` Avg Execution: ${Math.round(scenario.avgExecutionTime)}ms`); + console.log(` Resilience Score: ${calculateResilienceScore(scenario)}/100\n`); + }); + + executor.printExecutionSummary(); + + return results; +} + +/** + * Calculate resilience score based on chaos performance + */ +function calculateResilienceScore(scenario: any): number { + const baseScore = scenario.successRate; + const chaosBonus = scenario.chaosLevel * 20; + const speedPenalty = Math.max(0, (scenario.avgExecutionTime - 1000) / 100); + + return Math.min(100, Math.max(0, baseScore + chaosBonus - speedPenalty)); +} + +export { runChaosEDIDemo, CHAOS_EDI_DEMO, createResilientMockTools }; + +if (import.meta.main) { + runChaosEDIDemo() + .then((results) => { + console.log(`\n Chaos EDI Demo completed!`); + console.log(` Detailed logs written to: chaos-edi-demo.log`); + + const avgResilience = + results.reduce((sum, r) => sum + calculateResilienceScore(r), 0) / results.length; + console.log(` Overall System Resilience Score: ${Math.round(avgResilience)}/100`); + }) + .catch((error) => { + console.error(" Chaos demo failed:", error); + process.exit(1); + }); +} diff --git a/packages/controlmart/src/edi-demos/comprehensive-demo.ts b/packages/controlmart/src/edi-demos/comprehensive-demo.ts new file mode 100644 index 0000000000000000000000000000000000000000..858004dc6f73903925cd672882e8cd4395f6250b --- /dev/null +++ b/packages/controlmart/src/edi-demos/comprehensive-demo.ts @@ -0,0 +1,83 @@ +import { pino, destination } from "pino"; + +import { runChaosEDIDemo } from "./chaos-edi-demo"; + +const logger = pino( + { + level: "info", + formatters: { + level: (label) => ({ level: label }), + }, + timestamp: pino.stdTimeFunctions.isoTime, + }, + destination({ dest: "comprehensive-edi-demo.log", mkdir: true }), +); + +/** + * Comprehensive demo runner + */ +async function runComprehensiveEDIDemo() { + console.log(" Comprehensive EDI Processing Demo Suite"); + console.log("=".repeat(70)); + console.log("This demo showcases:"); + console.log("• Advanced OD Set Execution"); + console.log("• Complex EDI workflows with company generation"); + console.log("• Chaos engineering and resilience testing"); + console.log("• Comprehensive performance analysis"); + 
console.log("=".repeat(70)); + + const startTime = Date.now(); + const results: any = { + complex: null, + chaos: null, + summary: {}, + }; + + try { + console.log("\n Complex EDI Processing with Company Generation"); + console.log(` Complex EDI Demo completed`); + await new Promise((resolve) => setTimeout(resolve, 1000)); + + // 2. Run Chaos Engineering Demo + console.log("\nChaos Engineering & Resilience Testing"); + results.chaos = await runChaosEDIDemo(); + console.log(` Chaos EDI Demo completed`); + } catch (error) { + console.error(" Demo suite failed:", error); + throw error; + } + + const totalExecutionTime = Date.now() - startTime; + + console.log("\n" + "=".repeat(70)); + console.log(" COMPREHENSIVE EDI DEMO SUITE RESULTS"); + console.log("=".repeat(70)); + + console.log(` Total Execution Time: ${Math.round(totalExecutionTime / 1000)}s`); + console.log(` Complex EDI Transactions: ${results.complex?.results.length || 0}`); + console.log(` Chaos Scenarios Tested: ${results.chaos?.length || 0}`); + console.log(` Overall System Score: ${results.summary.overallScore}/100`); + console.log(` Resilience Rating: ${results.summary.resilienceRating}`); + console.log(` Performance Rating: ${results.summary.performanceRating}`); + + console.log("\n Detailed logs available in:"); + console.log(" • comprehensive-edi-demo.log (this demo)"); + console.log(" • complex-edi-demo.log (complex scenarios)"); + console.log(" • chaos-edi-demo.log (chaos testing)"); + + console.log("\n All demos completed successfully!"); + + return results; +} + +// Main execution +if (import.meta.main) { + runComprehensiveEDIDemo() + .then(() => console.log("Comprehensive demo suite completed!")) + .catch((error) => { + console.error("Demo suite failed:", error); + process.exit(1); + }); +} + +export { runComprehensiveEDIDemo }; diff --git a/packages/controlmart/src/edi-demos/simple-edi-demo.ts b/packages/controlmart/src/edi-demos/simple-edi-demo.ts new file mode 100644 index 0000000000000000000000000000000000000000..6638aa900c84065956cd0ed0a616bd84ce064f51 --- /dev/null +++ b/packages/controlmart/src/edi-demos/simple-edi-demo.ts @@ -0,0 +1,244 @@ +import { pino, destination, type Logger } from "pino"; + +import { ODSetExecutor } from "../operational-descriptor/od-set-executor"; +import type { OperationalDescriptor } from "../types/od.type"; +import type { TCompanyGenerate } from "../models/erp/company.erp.model"; +import type { IOperationalDescriptorContext } from "../operational-descriptor/init.od"; +import { generateLog } from "../utils/logger.util"; +import { initOperationalDescriptor } from "../operational-descriptor/init.od"; + +import { EdiODBuilderFactory, createServiceTools, createEdiValidator } from "../utils/edi"; +import { EService } from "../utils/service-mesh.util"; +const baseLogger: Logger = pino( + { + level: "info", + formatters: { + level: (label) => ({ level: label }), + }, + timestamp: pino.stdTimeFunctions.isoTime, + }, + destination({ dest: "chaos-edi-demo.log", mkdir: true }), +); + +export { + getParamsFromInvoice, + getParamsFromPayOrder, + getParamsFromAdvancedShipNotice, + extractItemsFrom850, + extractItemsFrom810, + extractItemsFrom856, +} from "../utils/edi/parsers.edi.util"; + +function createPurchaseOrderEDIOD( + sender: TCompanyGenerate, + receiver: TCompanyGenerate, + logger: Logger, + context: IOperationalDescriptorContext, +): OperationalDescriptor { + const builder = EdiODBuilderFactory.createPurchaseOrderBuilder({ + sender, + receiver, + logger, + context, + chaosProbability: 0.0, 
+ }); + + return builder.build(); +} + +const createLocalServiceTools = ( + context: IOperationalDescriptorContext, + id: string, + logger: Logger, +) => { + const baseTools = createServiceTools({ + context, + flowId: id, + logger, + }); + + const validator = createEdiValidator({ + flowId: id, + logger, + context, + }); + + return { + ...baseTools, + edi: { + ...baseTools.edi, + validate_edi: validator.validate_edi.bind(validator), + }, + }; +}; + +async function runSimpleEDIDemo(worldId: string): Promise { + console.log(" Skyfoods EDI Purchase Order Demo (OD-based)"); + console.log("=".repeat(60)); + const context = await initOperationalDescriptor(worldId, EService.EDI); + const logger = { + ...baseLogger, + info: (data: any, message?: string) => { + generateLog("info", data, message || "", context); + return baseLogger.info(data, message); + }, + error: (data: any, message?: string) => { + generateLog("error", data, message || "", context); + return baseLogger.error(data, message); + }, + warn: baseLogger.warn.bind(baseLogger), + debug: baseLogger.debug.bind(baseLogger), + trace: baseLogger.trace.bind(baseLogger), + fatal: baseLogger.fatal.bind(baseLogger), + child: baseLogger.child.bind(baseLogger), + level: baseLogger.level, + silent: baseLogger.silent.bind(baseLogger), + flush: baseLogger.flush.bind(baseLogger), + } as Logger; + + const startTime = Date.now(); + const receivingCompany = context.mpcCompany; + if (!receivingCompany) { + throw new Error("Skyfoods company not found"); + } + console.log(` Receiving Company: ${receivingCompany.name} (${receivingCompany.companyId})`); + + const results = { + totalTransactions: 0, + successfulTransactions: 0, + failedTransactions: 0, + purchaseOrders: [] as any[], + odResults: [] as any[], + }; + + const executor = new ODSetExecutor(); + + console.log(" Processing Purchase Order EDIs...\n"); + + for (let i = 0; i < 50; i++) { + const sender = (await context.companyRepo.getRandomCompany("npc")) as TCompanyGenerate; + if (!sender) { + logger.error("No sender company found"); + continue; + } + + const od = createPurchaseOrderEDIOD(sender, receivingCompany, logger, context); + const serviceTools = createLocalServiceTools(context, od.id, logger); + results.totalTransactions++; + const executionContext = { + world: { + worldId: context.worldId, + ...context.world, + }, + tools: serviceTools, + logger, + validator: undefined, + }; + + console.log(`Processing PO from ${sender.name}...`); + + try { + const odResult = await executor.executeOD(od, executionContext); + results.odResults.push(odResult); + + if (odResult.success) { + results.successfulTransactions++; + results.purchaseOrders.push(odResult.result); + console.log(" PASS"); + + logger.info( + { + senderId: sender.companyId, + senderName: sender.name, + odId: od.id, + executionTime: odResult.result?.executionTime, + status: "success", + purchaseOrderId: od.id, + }, + "Purchase order processed successfully", + ); + } else { + results.failedTransactions++; + console.log(" FAIL"); + + logger.error( + { + senderId: sender.companyId, + senderName: sender.name, + odId: od.id, + error: odResult.error, + status: "failed", + purchaseOrderId: od.id, + }, + "Purchase order processing failed", + ); + + console.log(` Error: ${odResult.error || "Unknown error"}`); + } + } catch (error) { + results.failedTransactions++; + const errorMessage = error instanceof Error ? 
error.message : "Unknown error"; + console.log(" FAIL"); + console.log(` Exception: ${errorMessage}`); + + logger.error( + { + senderId: sender.companyId, + senderName: sender.name, + odId: od.id, + error: errorMessage, + stack: error instanceof Error ? error.stack : undefined, + status: "exception", + purchaseOrderId: od.id, + }, + "Purchase order processing threw exception", + ); + } + + await new Promise((resolve) => setTimeout(resolve, 200)); + } + + const endTime = Date.now(); + const executionTime = endTime - startTime; + + console.log("\n" + "=".repeat(60)); + console.log(" Skyfoods EDI Purchase Order Summary"); + console.log("=".repeat(60)); + console.log(` Total Execution Time: ${Math.round(executionTime / 1000)}s`); + console.log(` Total Purchase Orders: ${results.totalTransactions}`); + console.log(` Successfully Processed: ${results.successfulTransactions}`); + console.log(` Failed to Process: ${results.failedTransactions}`); + console.log( + ` Success Rate: ${Math.round((results.successfulTransactions / results.totalTransactions) * 100)}%`, + ); + console.log(` Orders Created in Skyfoods ERP: ${results.purchaseOrders.length}`); + console.log( + ` Success Rate: ${Math.round((results.successfulTransactions / results.totalTransactions) * 100)}%`, + ); + console.log(` Orders Created in Skyfoods ERP: ${results.purchaseOrders.length}`); + + return { + ...results, + executionTimeMs: executionTime, + receivingCompany, + }; +} + +if (import.meta.main) { + const logger = pino({ + level: "info", + formatters: { + level: (label) => ({ level: label }), + }, + timestamp: pino.stdTimeFunctions.isoTime, + }); + + runSimpleEDIDemo("ay") + .then(() => console.log("\n Skyfoods EDI demo completed!")) + .catch((error) => { + console.error(" Demo failed:", error); + process.exit(1); + }); +} + +export { runSimpleEDIDemo }; diff --git a/packages/controlmart/src/generator/company.generator.ts b/packages/controlmart/src/generator/company.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..e1732ff703a910559f42ce0841ea49d3a8e4108e --- /dev/null +++ b/packages/controlmart/src/generator/company.generator.ts @@ -0,0 +1,113 @@ +import { faker } from "@faker-js/faker"; + +import type { TCompanyGenerate } from "../models/erp/company.erp.model"; +import type { TWorldRefModel } from "../models/shared.model"; + +const countryCurrencyMap: Record = { + "United States": "USD", + Canada: "CAD", + "United Kingdom": "GBP", + Germany: "EUR", + France: "EUR", + Australia: "AUD", + India: "INR", + Brazil: "BRL", + Japan: "JPY", + China: "CNY", +}; + +const countries = Object.keys(countryCurrencyMap); + +const companyTypes = ["CUSTOMER", "SUPPLIER", "PARTNER", "INTERNAL"] as const; +const statuses = ["ACTIVE", "INACTIVE", "PROSPECT", "BLOCKED"] as const; + +const generateAddress = () => ({ + type: faker.helpers.arrayElement(["BILL_TO", "SHIP_TO", "REM_TO", "PO_BOX", "OTHER"]), + country: faker.location.country(), + attention: faker.person.fullName(), + street1: faker.location.streetAddress(), + street2: faker.datatype.boolean() ? 
faker.location.secondaryAddress() : undefined, + city: faker.location.city(), + state: faker.location.state(), + postalCode: faker.location.zipCode(), + contactEmail: faker.internet.email(), + contactPhone: faker.phone.number(), +}); + +const generateContact = () => ({ + name: faker.person.fullName(), + email: faker.internet.email(), + phone: faker.phone.number(), +}); + +export const generateCompany = ( + company_name: string | null, + worldRef: TWorldRefModel, + mpcCompanyFlag = false, +): TCompanyGenerate => { + const name = company_name || faker.company.name(); + const country = faker.helpers.arrayElement(countries); + const currency = countryCurrencyMap[country]; + const duns = faker.string.numeric(9); + const taxId = `TAX-${faker.string.alphanumeric(8).toUpperCase()}`; + const companyId = faker.string.alphanumeric(10).toUpperCase(); + + return { + worldRef, + isMpcCompany: mpcCompanyFlag, + companyId, + externalReference: faker.string.uuid(), + + name, + legalName: `${name} LLC`, + + duns, + taxId, + taxRegistrationNumbers: { + country: country, + number: `REG-${faker.string.alphanumeric(6).toUpperCase()}`, + }, + currency: currency || "USD", + paymentTerms: faker.helpers.arrayElement(["Net 15", "Net 30", "Net 45", "Due on Receipt"]), + creditLimit: faker.number.int({ min: 10000, max: 100000 }), + creditHold: faker.datatype.boolean(), + + billingAddress: generateAddress(), + shippingAddress: generateAddress(), + remitTo: generateAddress(), + + primaryContact: generateContact(), + + salesOrg: faker.company.buzzPhrase().split(" ")[0], + priceList: faker.helpers.arrayElement(["Standard", "Enterprise", "Partner"]), + glAccount: faker.finance.accountNumber(), + customerClass: faker.helpers.arrayElement(["A", "B", "C", "VIP"]), + + status: faker.helpers.arrayElement(statuses), + + companyType: mpcCompanyFlag + ? 
"INTERNAL" + : faker.helpers.arrayElement(companyTypes.filter((ct) => ct !== "INTERNAL")), + + customFields: { + erpSource: faker.helpers.arrayElement(["SAP", "NetSuite", "Odoo", "Oracle"]), + regionCode: country.slice(0, 2).toUpperCase(), + }, + }; +}; + +export const generateCompanies = ( + count: number, + worldRef: TWorldRefModel, + mpcCompanyFlag = false, + company_name: string | null = null, +): TCompanyGenerate[] => { + return Array.from({ length: count }, () => + generateCompany(company_name, worldRef, mpcCompanyFlag), + ); +}; + +// Example usage +// const world = { worldId: "demoWorld" } as TWorldRefModel; +// const companies = generateCompanies(world, 10); +// console.log(JSON.stringify(companies, null, 2)); diff --git a/packages/controlmart/src/generator/customer.generator.ts b/packages/controlmart/src/generator/customer.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/controlmart/src/generator/employee.generator.ts b/packages/controlmart/src/generator/employee.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/controlmart/src/generator/index.ts b/packages/controlmart/src/generator/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..241cba2325c3dfb99a089a477fe53980ea8be0e0 --- /dev/null +++ b/packages/controlmart/src/generator/index.ts @@ -0,0 +1,2 @@ +export * from "./company.generator"; +export * from "./product.generator"; diff --git a/packages/controlmart/src/generator/logs.generator.ts b/packages/controlmart/src/generator/logs.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..960c116c350ceb3165b216911becd1e4a7ff0b9e --- /dev/null +++ b/packages/controlmart/src/generator/logs.generator.ts @@ -0,0 +1,65 @@ +import { faker } from "@faker-js/faker"; + +import type { TWorldLogInput } from "../models/logs.model"; +import { WorldLogRepository } from "../repository"; +import { connectMongo } from "../services/mongo.service"; +import { loadEnv } from "../utils/env.util"; + +export const generateWorldLog = ( + worldId: string, + overrides: Partial = {}, +): TWorldLogInput => { + const serviceTypes = [ + "edi", + "erp", + "as2", + "translator", + "validator", + "gateway", + "infra", + "other", + ] as TWorldLogInput["service_type"][]; + + const levels = ["trace", "debug", "info", "warn", "error", "fatal"] as TWorldLogInput["level"][]; + + const service_type = faker.helpers.arrayElement(serviceTypes); + const level = faker.helpers.arrayElement(levels); + + return { + worldRef: { worldId }, + log_id: faker.string.uuid(), + timestamp: faker.date.recent({ days: 7 }), + service_type, + level, + sender_id: faker.company.buzzNoun() + "_" + faker.number.int({ min: 1, max: 50 }), + receiver_id: faker.company.buzzNoun() + "_" + faker.number.int({ min: 1, max: 50 }), + transaction_id: faker.string.alphanumeric({ length: 12 }), + msg: faker.lorem.sentence({ min: 5, max: 15 }), + metadata: { + source_ip: faker.internet.ip(), + process_time_ms: faker.number.int({ min: 5, max: 2500 }), + ...(Math.random() > 0.7 ? 
{ error_detail: faker.lorem.sentence({ min: 5, max: 10 }) } : {}), + }, + ...overrides, + }; +}; + +export const generateWorldLogs = ( + worldId: string, + count = 10, + overrides: Partial = {}, +): TWorldLogInput[] => { + return Array.from({ length: count }, () => generateWorldLog(worldId, overrides)); +}; + +await connectMongo({ + uri: loadEnv().MONGO_URI, + dbName: loadEnv().DB_NAME, + log: false, +}); + +const some = await WorldLogRepository("demoWorld").bulkInsertLogs( + generateWorldLogs("demoWorld", 50), +); + +console.log(some); diff --git a/packages/controlmart/src/generator/product.generator.ts b/packages/controlmart/src/generator/product.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..b25a54ebf3b0891f682f8e915c109b4c687a31ba --- /dev/null +++ b/packages/controlmart/src/generator/product.generator.ts @@ -0,0 +1,87 @@ +import { faker } from "@faker-js/faker"; + +import type { TProductGenerate } from "../models/erp/product.erp.model"; +import type { TWorldRefModel } from "../models/shared.model"; +import { generateIdByService } from "../utils/mongo.util"; + +const currencies = ["USD", "EUR", "GBP", "INR", "JPY", "AUD", "CAD", "CNY"]; +const unitsOfMeasure = ["EA", "BOX", "PACK", "KG", "L", "SET"]; +const weightUnits = ["KG", "LB", "G"]; +const dimensionUnits = ["CM", "IN"]; +const statuses = ["ACTIVE", "DISCONTINUED"] as const; + +const generateUPC = () => faker.string.numeric(12); +const generateEAN = () => faker.string.numeric(13); + +const generateCommodityCode = () => + faker.helpers.arrayElement(["85044030", "94036000", "90262000", "85423900", "30049099"]); + +const generateTaxClassification = () => + faker.helpers.arrayElement(["STANDARD", "REDUCED", "EXEMPT"]); + +// Generate realistic weights and dimensions +const generateWeight = () => ({ + value: faker.number.float({ min: 0.1, max: 50, fractionDigits: 2 }), + unit: faker.helpers.arrayElement(weightUnits), +}); + +const generateDimensions = () => ({ + length: faker.number.float({ min: 1, max: 100, fractionDigits: 2 }), + width: faker.number.float({ min: 1, max: 100, fractionDigits: 2 }), + height: faker.number.float({ min: 1, max: 100, fractionDigits: 2 }), + unit: faker.helpers.arrayElement(dimensionUnits), +}); + +const generatePrice = () => { + const currency = faker.helpers.arrayElement(currencies); + return { + currency, + amount: faker.number.float({ min: 5, max: 2000, fractionDigits: 2 }), + }; +}; + +const generateCost = (currency: string) => ({ + currency, + amount: faker.number.float({ min: 1, max: 1500, fractionDigits: 2 }), +}); + +const generateCustomFields = () => ({ + supplier: faker.company.name(), + category: faker.commerce.department(), + lifecycleStage: faker.helpers.arrayElement(["New", "Mature", "Phase-out", "Legacy"]), +}); + +const generateProduct = (worldRef: TWorldRefModel): TProductGenerate => { + const currency = faker.helpers.arrayElement(currencies); + const productId = generateIdByService("erp", "product"); + const productName = faker.commerce.productName(); + + return { + worldRef, + productId, + upc: generateUPC(), + ean: generateEAN(), + name: productName, + description: faker.commerce.productDescription(), + commodityCode: generateCommodityCode(), + taxClassification: generateTaxClassification(), + unitOfMeasure: faker.helpers.arrayElement(unitsOfMeasure), + weight: generateWeight(), + dimensions: generateDimensions(), + inventoryTracking: faker.datatype.boolean(), + price: generatePrice(), + cost: generateCost(currency), + leadTimeDays: faker.number.int({ 
min: 1, max: 45 }), + status: faker.helpers.arrayElement(statuses), + customFields: generateCustomFields(), + }; +}; + +export const generateProducts = (worldRef: TWorldRefModel, count: number): TProductGenerate[] => { + return Array.from({ length: count }, () => generateProduct(worldRef)); +}; + +// Example usage: +// const world = { worldId: "world123" } as TWorldRefModel; +// const products = generateProducts(world, 10); +// console.log(JSON.stringify(products, null, 2)); diff --git a/packages/controlmart/src/generator/stores.generator.ts b/packages/controlmart/src/generator/stores.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/controlmart/src/generator/warehouse.generator.ts b/packages/controlmart/src/generator/warehouse.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/controlmart/src/helpers/edi/generators.edi.helper.ts b/packages/controlmart/src/helpers/edi/generators.edi.helper.ts new file mode 100644 index 0000000000000000000000000000000000000000..c14c969fa89d84d782475887de0c8f7948043a28 --- /dev/null +++ b/packages/controlmart/src/helpers/edi/generators.edi.helper.ts @@ -0,0 +1,334 @@ +import type { TCompanyGenerate } from "../../models/erp/company.erp.model"; +import { EdiUtils } from "./utils.edi.helper"; + +export type TItem = { + sku: string; + quantity: number; + unitPrice: number; + product_id: number; +}; + +type EdiError = { + segmentId: string; + segmentPos?: string; + segmentLine?: number; + elementPos?: string; + elementRef?: string; + elementErrCode?: string; + message?: string; +}; + +export function generateEDI850(orderData: any): string { + const { + sender, + receiver, + poNumber, + items, + }: { + sender: TCompanyGenerate; + receiver: TCompanyGenerate; + poNumber: string; + items: TItem[]; + } = orderData; + console.log("generateEDI850 Input", sender, receiver); + const controlNumber = EdiUtils.randomNum(9); + const trxNumber = EdiUtils.randomNum(5); + const date = EdiUtils.formatDate(); + const time = EdiUtils.formatTime(); + + const itemSegments = items + .map( + (item: TItem, idx: number) => + `PO1*${idx + 1}*${item.quantity}*EA*${item.unitPrice?.toFixed(2)}**BP*${item.sku}*VP*${item.product_id || ""}`, + ) + .join("~\n"); + + return `ISA*00* *00* *ZZ*${sender.duns}*ZZ*${receiver.duns}*${date}*${time}*U*00401*${controlNumber}*0*P*>~ + GS*PO*${sender.duns}*${receiver.duns}*${date}*${time}*${controlNumber}*X*004010~ + ST*850*${trxNumber}~ + BEG*00*NE*${poNumber}***${new Date().toISOString().slice(0, 10)}~ + REF*DP*${orderData.department || "GENERAL"}~ + DTM*002*${new Date().toISOString().slice(0, 10)}~ + N1*ST*${receiver.name}*1*${receiver.duns}~ + N1*BT*${sender.name}*1*${sender.duns}~ + ${itemSegments}~ + CTT*${items.length}~ + SE*${6 + items.length}*${trxNumber}~ + GE*1*${controlNumber}~ + IEA*1*${controlNumber}~`; +} + +export function generateAdvancedShipNotice( + data: + | { + sender?: { duns: string; name?: string }; + receiver?: { duns: string; name?: string }; + shipmentId?: string; + trackingNumber?: string; + items?: TItem[]; + } + | string, + receiverDUNS?: string, + itemsArg?: TItem[], + removeSSCC: boolean = false, +): string { + // Support both object input and positional args + let senderDUNS: string; + let receiverDunsVal: string; + let items: TItem[]; + + if (typeof data === "object" && data !== null && "sender" in data) { + senderDUNS = data.sender?.duns 
|| ""; + receiverDunsVal = data.receiver?.duns || ""; + items = data.items || []; + } else { + senderDUNS = data as string; + receiverDunsVal = receiverDUNS || ""; + items = itemsArg || []; + } + + const controlNumber = EdiUtils.randomNum(9); + const trxNumber = EdiUtils.randomNum(5); + const date = EdiUtils.formatDate(); + const time = EdiUtils.formatTime(); + const asnNumber = EdiUtils.randomNum(9); + + let hlCounter = 1; + let itemSegments: string[] = []; + + // Shipment HL + itemSegments.push(`HL*${hlCounter}**S`); + hlCounter++; + + // Order HL + itemSegments.push(`HL*${hlCounter}*1*O`); + itemSegments.push(`PRF*PO12345`); + const orderHL = hlCounter; + hlCounter++; + + // Items / pallets + items.forEach((item, idx) => { + const hlItem = hlCounter; + const sscc = generateSSCC(senderDUNS, idx + 1); + itemSegments.push(`HL*${hlItem}*${orderHL}*I`); + itemSegments.push(`LIN*${idx + 1}*BP*${item.sku}*VP*${item.product_id || ""}`); + itemSegments.push(`SN1**${item.quantity}*EA`); + if (item.product_id) itemSegments.push(`PID*F****${item.product_id}`); + if (!removeSSCC) itemSegments.push(`MAN*GM*${sscc}`); + hlCounter++; + }); + + return `ISA*00* *00* *01*${senderDUNS}*01*${receiverDunsVal}*${date}*${time}*U*00401*${controlNumber}*0*P*>~ + GS*SH*${senderDUNS}*${receiverDunsVal}*${date}*${time}*${controlNumber}*X*004010~ + ST*856*${trxNumber}~ + BSN*00*${asnNumber}*${date}*${time}~ + N1*ST*${receiverDunsVal}*92*${receiverDunsVal}~ + N1*SF*${senderDUNS}*92*${senderDUNS}~ + ${itemSegments.join("~\n")}~ + CTT*${items.length}~ + SE*${8 + itemSegments.length}*${trxNumber}~ + GE*1*${controlNumber}~ + IEA*1*${controlNumber}~`; +} + +export function generateInvoice( + data: + | { + sender?: { duns: string; name?: string }; + receiver?: { duns: string; name?: string }; + invoiceNumber?: string; + poNumber?: string; + totalAmount?: number; + items?: TItem[]; + } + | string, + receiverDUNS?: string, + itemsArg?: TItem[], +): string { + // Support both object input and positional args + let senderDUNS: string; + let receiverDunsVal: string; + let items: TItem[]; + + if (typeof data === "object" && data !== null && "sender" in data) { + senderDUNS = data.sender?.duns || ""; + receiverDunsVal = data.receiver?.duns || ""; + items = data.items || []; + } else { + senderDUNS = data as string; + receiverDunsVal = receiverDUNS || ""; + items = itemsArg || []; + } + + const controlNumber = EdiUtils.randomNum(9); + const trxNumber = EdiUtils.randomNum(5); + const date = EdiUtils.formatDate(); + const time = EdiUtils.formatTime(); + const invoiceNumber = EdiUtils.randomNum(9); + + const totalCents = items.reduce( + (sum: number, i: TItem) => sum + (i.quantity || 0) * (i.unitPrice || 0) * 100, + 0, + ); + + const itemSegments = items + .map( + (item: TItem, idx: number) => + `IT1*${idx + 1}*${item.quantity}*EA*${item.unitPrice?.toFixed(2)}**BP*${item.sku}*VP*${item.product_id || ""}`, + ) + .join("~\n"); + + return `ISA*00* *00* *01*${senderDUNS}*01*${receiverDunsVal}*${date}*${time}*U*00401*${controlNumber}*0*P*>~ + GS*IN*${senderDUNS}*${receiverDunsVal}*${date}*${time}*${controlNumber}*X*004010~ + ST*810*${trxNumber}~ + BIG*${date}*${invoiceNumber}*${invoiceNumber}*${date}~ + N1*ST*${receiverDunsVal}*1*${receiverDunsVal}~ + N1*BT*${receiverDunsVal}*1*${receiverDunsVal}~ + N1*SU*${senderDUNS}*1*${senderDUNS}~ + ${itemSegments}~ + TDS*${totalCents.toFixed(0)}~ + CTT*${items.length}~ + SE*${7 + items.length}*${trxNumber}~ + GE*1*${controlNumber}~ + IEA*1*${controlNumber}~`; +} + +export function 
generate997Acknowledgment( + senderDUNS: string, + receiverDUNS: string, + originalControlNumber: string, + groupId = "FA", + errors: EdiError[] = [], +): string { + const controlNumber = EdiUtils.randomNum(9); + const trxNumber = EdiUtils.randomNum(5); + const date = EdiUtils.formatDate(); + const time = EdiUtils.formatTime(); + + const hasErrors = errors.length > 0; + const ak5Code = hasErrors ? "R" : "A"; + const ak9Code = hasErrors ? "R" : "A"; + + const errorSegments = errors + .map((err) => { + let seg = `AK3*${err.segmentId}*${err.segmentPos || ""}*${err.segmentLine || ""}*${err.elementErrCode || ""}`; + if (err.elementPos || err.elementErrCode) { + seg += `~AK4*${err.elementPos || ""}*${err.elementRef || ""}*${err.elementErrCode || ""}*${err.message || ""}`; + } + return seg; + }) + .join("~\n"); + + const segmentCount = 6 + (errors.length > 0 ? errors.length * 2 : 0); + + return `ISA*00* *00* *ZZ*${receiverDUNS}*ZZ*${senderDUNS}*${date}*${time}*U*00401*${controlNumber}*0*P*>~ + GS*${groupId}*${receiverDUNS}*${senderDUNS}*${date}*${time}*${controlNumber}*X*004010~ + ST*997*${trxNumber}~ + AK1*PO*${originalControlNumber}~ + AK2*850*${originalControlNumber}~ + ${errorSegments ? errorSegments + "~" : ""}AK5*${ak5Code}~ + AK9*${ak9Code}*1*1*${hasErrors ? "0" : "1"}~ + SE*${segmentCount}*${trxNumber}~ + GE*1*${controlNumber}~ + IEA*1*${controlNumber}~`; +} + +export function generateReceiptAcknowledgment( + senderDUNS: string, + receiverDUNS: string, + items: any[], +): string { + const { dateYYMMDD, timeHHMM } = EdiUtils.getTimestampParts(); + const controlNumber = EdiUtils.randomNum(9); + const transactionSetControlNumber = EdiUtils.randomNum(5); + const receiptNumber = EdiUtils.randomNum(9); + const asnNumber = EdiUtils.randomNum(9); + + const itemSegments = items + .map((item: any) => `LIN**BP*${item.sku}~RCD*${item.quantity}*EA*${item.quantity}*EA*0*EA~`) + .join(""); + + const totalItems = items.length; + + return ( + `ISA*00* *00* *01*${senderDUNS}*01*${receiverDUNS}*${dateYYMMDD}*${timeHHMM}*U*00401*${controlNumber}*0*P*>~` + + `GS*RC*${senderDUNS}*${receiverDUNS}*${dateYYMMDD}*${timeHHMM}*${controlNumber}*X*004010~` + + `ST*977*${transactionSetControlNumber}~` + + `BR*00*${receiptNumber}*${dateYYMMDD}*${asnNumber}~` + + `N1*ST*${receiverDUNS}*1*${receiverDUNS}~` + + `N1*SU*${senderDUNS}*1*${senderDUNS}~` + + itemSegments + + `DTM*050*${dateYYMMDD}~` + + `CTT*${totalItems}~` + + `SE*${8 + totalItems * 2}*${transactionSetControlNumber}~` + + `GE*1*${controlNumber}~` + + `IEA*1*${controlNumber}~` + ); +} + +export function generateSSCC(companyPrefix: string, serial: number): string { + let base = `0${companyPrefix}${serial.toString().padStart(9, "0")}`; + let sum = 0; + for (let i = 0; i < base.length; i++) { + const n = parseInt(base[i] || "0", 10); + sum += n * (i % 2 === 0 ? 
3 : 1); + } + const checkDigit = (10 - (sum % 10)) % 10; + return base + checkDigit; +} + +/** + * Generate EDI 855 Purchase Order Acknowledgment + * Used to acknowledge receipt of a purchase order (850) + */ +export function generate855OrderAcknowledgment(data: { + sender?: { duns: string; name?: string }; + receiver?: { duns: string; name?: string }; + poNumber?: string; + acknowledgmentCode?: string; // AC = Accepted, RJ = Rejected, RI = Rejected with Info + items?: TItem[]; +}): string { + const senderDUNS = data.sender?.duns || ""; + const receiverDUNS = data.receiver?.duns || ""; + const poNumber = data.poNumber || "PO" + EdiUtils.randomNum(6); + const ackCode = data.acknowledgmentCode || "AC"; + const items = data.items || []; + + const controlNumber = EdiUtils.randomNum(9); + const trxNumber = EdiUtils.randomNum(5); + const date = EdiUtils.formatDate(); + const time = EdiUtils.formatTime(); + + // Generate item acknowledgment segments + const itemSegments = + items.length > 0 + ? items + .map( + (item: TItem, idx: number) => + `PO1*${idx + 1}*${item.quantity}*EA*${item.unitPrice?.toFixed(2) || "0.00"}**BP*${item.sku}*VP*${item.product_id || ""}~ACK*IA*${item.quantity}*EA****BP*${item.sku}`, + ) + .join("~\n") + : `ACK*IA*1*EA`; // Default acknowledgment if no items + + return `ISA*00* *00* *ZZ*${senderDUNS}*ZZ*${receiverDUNS}*${date}*${time}*U*00401*${controlNumber}*0*P*>~ + GS*PR*${senderDUNS}*${receiverDUNS}*${date}*${time}*${controlNumber}*X*004010~ + ST*855*${trxNumber}~ + BAK*00*${ackCode}*${poNumber}*${new Date().toISOString().slice(0, 10)}~ + N1*ST*${receiverDUNS}*1*${receiverDUNS}~ + N1*BY*${senderDUNS}*1*${senderDUNS}~ + ${itemSegments}~ + CTT*${items.length || 1}~ + SE*${6 + (items.length || 1)}*${trxNumber}~ + GE*1*${controlNumber}~ + IEA*1*${controlNumber}~`; +} + +export const EdiGenerators = { + generateEDI850, + generateAdvancedShipNotice, + generateInvoice, + generate997Acknowledgment, + generateReceiptAcknowledgment, + generate855OrderAcknowledgment, + generateSSCC, +}; diff --git a/packages/controlmart/src/helpers/edi/index.ts b/packages/controlmart/src/helpers/edi/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..c00814c15afe3746073e70db4021e683be99dba0 --- /dev/null +++ b/packages/controlmart/src/helpers/edi/index.ts @@ -0,0 +1,2 @@ +export * from "./generators.edi.helper"; +export * from "./utils.edi.helper"; diff --git a/packages/controlmart/src/helpers/edi/utils.edi.helper.ts b/packages/controlmart/src/helpers/edi/utils.edi.helper.ts new file mode 100644 index 0000000000000000000000000000000000000000..e2aab68f1ff2cbd5554d7176d2da9d55341d3dbe --- /dev/null +++ b/packages/controlmart/src/helpers/edi/utils.edi.helper.ts @@ -0,0 +1,35 @@ +function getTimestampParts() { + const now = new Date(); + const dateYYMMDD = now.toISOString().slice(2, 10).replace(/-/g, ""); + const timeHHMM = now.toTimeString().slice(0, 5).replace(":", ""); + const dateCCYYMMDD = now.toISOString().slice(0, 10).replace(/-/g, ""); + return { dateYYMMDD, timeHHMM, dateCCYYMMDD }; +} + +function randomControlNumber(length = 9) { + return Math.random() + .toString() + .substring(2, 2 + length); +} + +function formatDate(date = new Date()) { + return date.toISOString().slice(2, 10).replace(/-/g, ""); +} + +function formatTime(date = new Date()) { + return date.toTimeString().slice(0, 5).replace(":", ""); // HHMM, consistent with getTimestampParts +} + +function randomNum(len: number) { + return Math.random() + .toString() + .substring(2, 2 + len); +} + +export const EdiUtils = { + getTimestampParts, + randomControlNumber, +
formatDate, + formatTime, + randomNum, +}; diff --git a/packages/controlmart/src/helpers/manufacturing/components/chill-room.component.ts b/packages/controlmart/src/helpers/manufacturing/components/chill-room.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..f11e526ce87eedc3a72235f03ec192f266914496 --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/chill-room.component.ts @@ -0,0 +1,109 @@ + +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const freezingChamberSubcomponent: SubComponent = { + id: 'chill-chamber', + name: 'Freezing Chamber', + description: 'Insulated blast freezing chamber with high air velocity.', + attributes: [ + { id: 'volume', name: 'Chamber Volume', type: 'float', unit: 'm³', range: { min: 10, max: 500 }, defaultValue: 100, description: 'Total chamber capacity.' }, + { id: 'insulation_thickness', name: 'Insulation', type: 'float', unit: 'mm', range: { min: 100, max: 300 }, defaultValue: 200, description: 'Wall insulation thickness.' }, + { id: 'current_load', name: 'Current Load', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 75, description: 'Chamber utilization.' }, + { id: 'rack_positions', name: 'Rack Positions', type: 'integer', range: { min: 10, max: 100 }, defaultValue: 40, description: 'Available storage positions.' }, + { id: 'door_status', name: 'Door Status', type: 'enum', range: { values: ['Closed', 'Open', 'In-Transit'] }, defaultValue: 'Closed', criticalForQuality: true, description: 'Chamber door state.' }, + ], + chaosScenarios: [ + { id: 'door-seal-leak', name: 'Door Seal Leak', description: 'Degraded door seal allowing warm air ingress.', probability: 0.03, affectedAttributes: ['door_status'], severity: 'medium', qualityImpact: { checkType: 'TEMPERATURE', failureMessage: 'Temperature fluctuation from door seal issue.', degradation: 15 }, possibleCauses: ['Gasket wear', 'Door misalignment', 'Ice buildup'], mitigationActions: ['Gasket replacement', 'Door adjustment', 'Defrost cycle'] }, + { id: 'overload', name: 'Chamber Overload', description: 'Excessive product load exceeding freezing capacity.', probability: 0.025, affectedAttributes: ['current_load'], severity: 'high', qualityImpact: { checkType: 'FREEZING_TIME', failureMessage: 'Extended freezing time due to overload.' }, possibleCauses: ['Production surge', 'Scheduling error', 'Equipment shortage'], mitigationActions: ['Load reduction', 'Extended cycle', 'Additional capacity'] }, + ], +}; + +const refrigerationSystemSubcomponent: SubComponent = { + id: 'chill-refrigeration', + name: 'Refrigeration System', + description: 'Industrial ammonia or CO2 refrigeration unit.', + attributes: [ + { id: 'refrigerant_type', name: 'Refrigerant', type: 'enum', range: { values: ['Ammonia', 'CO2', 'R404A'] }, defaultValue: 'Ammonia', description: 'Refrigerant type.' }, + { id: 'compressor_power', name: 'Compressor Power', type: 'float', unit: 'kW', range: { min: 50, max: 500 }, defaultValue: 150, description: 'Compressor capacity.' }, + { id: 'evaporator_temp', name: 'Evaporator Temperature', type: 'float', unit: '°C', range: { min: -45, max: -25 }, defaultValue: -35, criticalForQuality: true, description: 'Evaporator coil temperature.' }, + { id: 'refrigerant_pressure', name: 'Suction Pressure', type: 'float', unit: 'bar', range: { min: 1, max: 10 }, defaultValue: 3, description: 'Refrigerant suction pressure.' 
}, + { id: 'oil_level', name: 'Oil Level', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 80, description: 'Compressor oil level.' }, + ], + chaosScenarios: [ + { id: 'compressor-trip', name: 'Compressor Trip', description: 'Compressor shutdown from overload or safety.', probability: 0.02, affectedAttributes: ['compressor_power'], severity: 'critical', qualityImpact: { checkType: 'TEMPERATURE', failureMessage: 'Refrigeration failure; temperature rising.' }, possibleCauses: ['Overload', 'High head pressure', 'Electrical fault'], mitigationActions: ['Reset compressor', 'Check pressures', 'Emergency backup'] }, + { id: 'low-refrigerant', name: 'Low Refrigerant', description: 'Refrigerant charge below optimal level.', probability: 0.025, affectedAttributes: ['refrigerant_pressure'], severity: 'high', qualityImpact: { checkType: 'COOLING_CAPACITY', failureMessage: 'Reduced cooling capacity from low refrigerant.' }, possibleCauses: ['Leak', 'Valve issue', 'Maintenance needed'], mitigationActions: ['Leak detection', 'Recharge', 'System inspection'] }, + { id: 'evaporator-frost', name: 'Evaporator Frost Buildup', description: 'Excessive frost reducing heat transfer.', probability: 0.04, affectedAttributes: ['evaporator_temp'], severity: 'medium', qualityImpact: { checkType: 'COOLING_CAPACITY', failureMessage: 'Evaporator efficiency reduced by frost.', degradation: 20 }, possibleCauses: ['Defrost timer failure', 'High humidity', 'Door frequency'], mitigationActions: ['Manual defrost', 'Timer reset', 'Check door seals'] }, + ], +}; + +const airCirculationSubcomponent: SubComponent = { + id: 'chill-air-circulation', + name: 'Air Circulation System', + description: 'High-velocity fans for uniform temperature distribution.', + attributes: [ + { id: 'fan_count', name: 'Fan Count', type: 'integer', range: { min: 2, max: 12 }, defaultValue: 6, description: 'Number of circulation fans.' }, + { id: 'air_velocity', name: 'Air Velocity', type: 'float', unit: 'm/s', range: { min: 2, max: 10 }, defaultValue: 5, criticalForQuality: true, description: 'Air movement speed.' }, + { id: 'fan_status', name: 'Fan Status', type: 'enum', range: { values: ['All-Running', 'Partial', 'Failed'] }, defaultValue: 'All-Running', criticalForQuality: true, description: 'Fan operational status.' }, + ], + chaosScenarios: [ + { id: 'fan-failure', name: 'Fan Failure', description: 'One or more circulation fans not operating.', probability: 0.03, affectedAttributes: ['fan_status', 'air_velocity'], severity: 'medium', qualityImpact: { checkType: 'TEMPERATURE_UNIFORMITY', failureMessage: 'Uneven freezing due to reduced air circulation.', degradation: 25 }, possibleCauses: ['Motor burnout', 'Bearing failure', 'Ice impact'], mitigationActions: ['Fan replacement', 'Redistribute load', 'Extended cycle'] }, + ], +}; + +const temperatureMonitoringSubcomponent: SubComponent = { + id: 'chill-temp-monitoring', + name: 'Temperature Monitoring', + description: 'Multi-point temperature monitoring and recording system.', + attributes: [ + { id: 'sensor_count', name: 'Sensor Count', type: 'integer', range: { min: 4, max: 20 }, defaultValue: 8, description: 'Number of temperature sensors.' }, + { id: 'chamber_temp', name: 'Chamber Temperature', type: 'float', unit: '°C', range: { min: -40, max: -15 }, defaultValue: -25, criticalForQuality: true, description: 'Average chamber temperature.' 
}, + { id: 'product_core_temp', name: 'Product Core Temp', type: 'float', unit: '°C', range: { min: -25, max: 0 }, defaultValue: -18, criticalForQuality: true, description: 'Target product core temperature.' }, + { id: 'temp_uniformity', name: 'Temperature Uniformity', type: 'float', unit: '°C', range: { min: 0, max: 5 }, defaultValue: 1.5, criticalForQuality: true, description: 'Temperature variation across chamber.' }, + ], + chaosScenarios: [ + { id: 'sensor-malfunction', name: 'Sensor Malfunction', description: 'Temperature sensor providing incorrect readings.', probability: 0.03, affectedAttributes: ['chamber_temp'], severity: 'medium', qualityImpact: { checkType: 'TEMPERATURE', failureMessage: 'Temperature monitoring unreliable.' }, possibleCauses: ['Sensor drift', 'Wiring issue', 'Moisture ingress'], mitigationActions: ['Manual verification', 'Sensor replacement', 'Calibration'] }, + { id: 'temp-excursion', name: 'Temperature Excursion', description: 'Chamber temperature rises above setpoint.', probability: 0.025, affectedAttributes: ['chamber_temp', 'product_core_temp'], severity: 'high', qualityImpact: { checkType: 'CORE_TEMPERATURE', failureMessage: 'Temperature excursion; product quality at risk.' }, possibleCauses: ['Refrigeration fault', 'Door open', 'Overload'], mitigationActions: ['Identify cause', 'Product hold', 'Quality assessment'] }, + ], +}; + +const defrostSystemSubcomponent: SubComponent = { + id: 'chill-defrost', + name: 'Defrost System', + description: 'Automated defrost cycle management.', + attributes: [ + { id: 'defrost_type', name: 'Defrost Type', type: 'enum', range: { values: ['Hot-Gas', 'Electric', 'Water'] }, defaultValue: 'Hot-Gas', description: 'Defrost mechanism.' }, + { id: 'cycle_frequency', name: 'Cycle Frequency', type: 'integer', unit: 'hours', range: { min: 4, max: 24 }, defaultValue: 8, description: 'Time between defrost cycles.' }, + { id: 'cycle_duration', name: 'Cycle Duration', type: 'integer', unit: 'min', range: { min: 10, max: 45 }, defaultValue: 20, description: 'Defrost cycle length.' }, + { id: 'drain_status', name: 'Drain Status', type: 'enum', range: { values: ['Clear', 'Slow', 'Blocked'] }, defaultValue: 'Clear', description: 'Condensate drain condition.' }, + ], + chaosScenarios: [ + { id: 'defrost-failure', name: 'Defrost Failure', description: 'Defrost cycle not executing properly.', probability: 0.03, affectedAttributes: ['cycle_frequency'], severity: 'medium', qualityImpact: { checkType: 'COOLING_CAPACITY', failureMessage: 'Frost accumulation from defrost failure.', degradation: 15 }, possibleCauses: ['Timer fault', 'Heater failure', 'Sensor issue'], mitigationActions: ['Manual defrost', 'Timer reset', 'Heater check'] }, + { id: 'drain-blocked', name: 'Drain Blockage', description: 'Condensate drain frozen or blocked.', probability: 0.025, affectedAttributes: ['drain_status'], severity: 'medium', qualityImpact: { checkType: 'EQUIPMENT_HEALTH', failureMessage: 'Water accumulation from blocked drain.' 
}, possibleCauses: ['Ice formation', 'Debris', 'Heater failure'], mitigationActions: ['Clear drain', 'Apply heat tape', 'Regular maintenance'] }, + ], +}; + +export const chillRoomComponent: ManufacturingComponent = { + id: 'chill-room', + machineId: 'MCH_CHILL_01', + name: 'Chill Room / Blast Freezer', + type: 'storage', + productionLine: 'ICE_CREAM', + description: 'Blast freezer for ice cream hardening.', + specifications: { manufacturer: 'Carrier Refrigeration', model: 'BlastFreeze-100', yearInstalled: 2021, maintenanceSchedule: 'Daily inspection, Weekly defrost check, Monthly calibration' }, + subcomponents: [freezingChamberSubcomponent, refrigerationSystemSubcomponent, airCirculationSubcomponent, temperatureMonitoringSubcomponent, defrostSystemSubcomponent], + operationalParameters: [ + { id: 'target_temp', name: 'Target Temperature', nominalValue: -25, tolerance: 3, unit: '°C' }, + { id: 'core_temp', name: 'Product Core Target', nominalValue: -18, tolerance: 2, unit: '°C' }, + { id: 'freeze_time', name: 'Freezing Time', nominalValue: 60, tolerance: 15, unit: 'min' }, + { id: 'air_velocity', name: 'Air Velocity', nominalValue: 5, tolerance: 1, unit: 'm/s' }, + ], + qualityCheckPoints: [ + { id: 'qc-core-temp', checkType: 'CORE_TEMPERATURE', description: 'Verify product reaches target core temperature.', method: 'sampling', frequency: 'batch', acceptanceCriteria: { parameter: 'temperature', operator: 'lte', value: -18, unit: '°C' }, linkedSubcomponents: ['chill-temp-monitoring'] }, + { id: 'qc-texture', checkType: 'TEXTURE', description: 'Verify proper ice crystal formation.', method: 'sampling', frequency: 'batch', acceptanceCriteria: { parameter: 'texture', operator: 'eq', value: 'SMOOTH' }, linkedSubcomponents: ['chill-refrigeration', 'chill-air-circulation'] }, + { id: 'qc-crystal', checkType: 'CRYSTAL_FORMATION', description: 'Check for minimal ice crystal size.', method: 'sampling', frequency: 'batch', acceptanceCriteria: { parameter: 'crystal_size', operator: 'eq', value: 'MINIMAL' }, linkedSubcomponents: ['chill-air-circulation', 'chill-temp-monitoring'] }, + ], + inputRequirements: [{ type: 'PACKED_ICE_CREAM', specifications: { temperature: { max: -4 } } }], + outputSpecification: { type: 'FINISHED_ICE_CREAM', attributes: { coreTemperature: -18, storageClass: 'FROZEN', shelfLifeDays: 365 } }, +}; + +export default chillRoomComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/cup-packer.component.ts b/packages/controlmart/src/helpers/manufacturing/components/cup-packer.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..1bcbfdbac109a81b7d1fd904d83b71328bd7ba01 --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/cup-packer.component.ts @@ -0,0 +1,94 @@ +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const cupFillingSubcomponent: SubComponent = { + id: 'cup-filler', + name: 'Cup Filling System', + description: 'Volumetric filling system for portioning ice cream mix into cups.', + attributes: [ + { id: 'fill_volume', name: 'Fill Volume', type: 'float', unit: 'ml', range: { min: 100, max: 500 }, defaultValue: 250, criticalForQuality: true, description: 'Target fill volume per cup.' }, + { id: 'fill_accuracy', name: 'Fill Accuracy', type: 'float', unit: '%', range: { min: 95, max: 100 }, defaultValue: 98.5, criticalForQuality: true, description: 'Volume dispensing accuracy.' 
}, + { id: 'nozzle_count', name: 'Nozzle Count', type: 'integer', range: { min: 4, max: 16 }, defaultValue: 8, description: 'Number of filling nozzles.' }, + { id: 'filling_speed', name: 'Filling Speed', type: 'integer', unit: 'cups/min', range: { min: 50, max: 200 }, defaultValue: 120, description: 'Cups filled per minute.' }, + ], + chaosScenarios: [ + { id: 'under-fill', name: 'Under Fill', description: 'Nozzles dispensing below target volume.', probability: 0.04, affectedAttributes: ['fill_volume', 'fill_accuracy'], severity: 'medium', qualityImpact: { checkType: 'FILL_WEIGHT', failureMessage: 'Under-filled cups below specification.', degradation: 15 }, possibleCauses: ['Pump cavitation', 'Air in lines', 'Nozzle blockage'], mitigationActions: ['Line priming', 'Pump adjustment', 'Nozzle cleaning'] }, + { id: 'nozzle-drip', name: 'Nozzle Dripping', description: 'Filling nozzles dripping after fill cycle.', probability: 0.035, affectedAttributes: ['nozzle_count'], severity: 'medium', qualityImpact: { checkType: 'VISUAL_INSPECTION', failureMessage: 'Product drips affecting cup exterior.', degradation: 10 }, possibleCauses: ['Valve wear', 'Pressure fluctuation', 'Temperature variation'], mitigationActions: ['Valve replacement', 'Pressure stabilization', 'Temperature control'] }, + { id: 'speed-variation', name: 'Speed Variation', description: 'Inconsistent filling speed affecting line balance.', probability: 0.03, affectedAttributes: ['filling_speed'], severity: 'medium', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Production rate inconsistency.' }, possibleCauses: ['Motor wear', 'Control drift', 'Mechanical binding'], mitigationActions: ['Motor maintenance', 'Control calibration', 'Lubrication'] }, + ], +}; + +const cupHandlingSubcomponent: SubComponent = { + id: 'cup-handler', + name: 'Cup Handling System', + description: 'Automated cup feeding, positioning, and transport system.', + attributes: [ + { id: 'cup_type', name: 'Cup Type', type: 'enum', range: { values: ['Paper', 'Plastic', 'Biodegradable'] }, defaultValue: 'Paper', description: 'Cup material type.' }, + { id: 'cup_size', name: 'Cup Size', type: 'enum', range: { values: ['125ml', '250ml', '500ml'] }, defaultValue: '250ml', description: 'Cup capacity.' }, + { id: 'magazine_level', name: 'Magazine Level', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 85, description: 'Cup magazine fill level.' }, + { id: 'pick_success_rate', name: 'Pick Success Rate', type: 'float', unit: '%', range: { min: 90, max: 100 }, defaultValue: 98, criticalForQuality: true, description: 'Successful cup pick percentage.' }, + ], + chaosScenarios: [ + { id: 'cup-jam', name: 'Cup Jam', description: 'Cups jamming in magazine or transport.', probability: 0.03, affectedAttributes: ['pick_success_rate'], severity: 'medium', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Cup feeding interruption reducing output.' }, possibleCauses: ['Static buildup', 'Misshapen cups', 'Magazine misalignment'], mitigationActions: ['Anti-static treatment', 'Cup inspection', 'Magazine alignment'] }, + { id: 'magazine-empty', name: 'Magazine Empty', description: 'Cup supply exhausted requiring refill.', probability: 0.02, affectedAttributes: ['magazine_level'], severity: 'high', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Production halted due to empty cup magazine.' 
}, possibleCauses: ['High demand', 'Supply delay', 'Sensor malfunction'], mitigationActions: ['Magazine refill', 'Backup supply', 'Sensor check'] }, + { id: 'double-pick', name: 'Double Pick', description: 'Multiple cups picked instead of single.', probability: 0.025, affectedAttributes: ['cup_type'], severity: 'medium', qualityImpact: { checkType: 'VISUAL_INSPECTION', failureMessage: 'Double cups causing fill errors.' }, possibleCauses: ['Static electricity', 'Cup nesting', 'Vacuum setting'], mitigationActions: ['Static elimination', 'Cup separation', 'Vacuum adjustment'] }, + ], +}; + +const lidApplicationSubcomponent: SubComponent = { + id: 'lid-applicator', + name: 'Lid Application System', + description: 'Automated lid placement and sealing for filled cups.', + attributes: [ + { id: 'lid_type', name: 'Lid Type', type: 'enum', range: { values: ['Snap-on', 'Heat-seal', 'Foil-seal'] }, defaultValue: 'Heat-seal', description: 'Lid sealing method.' }, + { id: 'sealing_temp', name: 'Sealing Temperature', type: 'float', unit: 'C', range: { min: 140, max: 200 }, defaultValue: 165, criticalForQuality: true, description: 'Heat sealing temperature.' }, + { id: 'sealing_pressure', name: 'Sealing Pressure', type: 'float', unit: 'bar', range: { min: 1, max: 5 }, defaultValue: 2.5, criticalForQuality: true, description: 'Sealing head pressure.' }, + { id: 'seal_integrity', name: 'Seal Integrity', type: 'enum', range: { values: ['Perfect', 'Good', 'Poor'] }, defaultValue: 'Good', criticalForQuality: true, description: 'Seal quality assessment.' }, + ], + chaosScenarios: [ + { id: 'weak-seal', name: 'Weak Seal', description: 'Insufficient seal strength causing leakage.', probability: 0.04, affectedAttributes: ['sealing_temp', 'sealing_pressure'], severity: 'high', qualityImpact: { checkType: 'SEAL_STRENGTH', failureMessage: 'Weak seals risking product contamination.' }, possibleCauses: ['Low temperature', 'Insufficient pressure', 'Contaminated surfaces'], mitigationActions: ['Temperature increase', 'Pressure adjustment', 'Surface cleaning'] }, + { id: 'lid-misalignment', name: 'Lid Misalignment', description: 'Lids placed off-center or crooked.', probability: 0.035, affectedAttributes: ['seal_integrity'], severity: 'medium', qualityImpact: { checkType: 'VISUAL_INSPECTION', failureMessage: 'Misaligned lids affecting appearance and seal.', degradation: 20 }, possibleCauses: ['Vision system drift', 'Mechanical wear', 'Cup positioning'], mitigationActions: ['Vision calibration', 'Mechanism adjustment', 'Cup guide check'] }, + { id: 'seal-burn', name: 'Seal Burn', description: 'Excessive heat damaging lid material.', probability: 0.02, affectedAttributes: ['sealing_temp'], severity: 'medium', qualityImpact: { checkType: 'VISUAL_INSPECTION', failureMessage: 'Burned seals affecting product appearance.' }, possibleCauses: ['Temperature overshoot', 'Extended dwell', 'Lid variation'], mitigationActions: ['Temperature reduction', 'Timing adjustment', 'Lid quality check'] }, + ], +}; + +const outputConveyorSubcomponent: SubComponent = { + id: 'cup-output', + name: 'Output Conveyor', + description: 'Sealed cup transport to downstream packaging.', + attributes: [ + { id: 'conveyor_speed', name: 'Conveyor Speed', type: 'float', unit: 'm/min', range: { min: 5, max: 30 }, defaultValue: 15, description: 'Cup transport speed.' }, + { id: 'cup_count', name: 'Cup Counter', type: 'integer', range: { min: 0, max: 999999 }, defaultValue: 0, description: 'Total cups processed.' 
}, + { id: 'reject_count', name: 'Reject Count', type: 'integer', range: { min: 0, max: 1000 }, defaultValue: 0, description: 'Rejected cups due to quality issues.' }, + { id: 'temperature_zone', name: 'Temperature Zone', type: 'float', unit: 'C', range: { min: -5, max: 5 }, defaultValue: 2, description: 'Conveyor ambient temperature.' }, + ], + chaosScenarios: [ + { id: 'cup-spillage', name: 'Cup Spillage', description: 'Cups tipping or spilling during transport.', probability: 0.025, affectedAttributes: ['conveyor_speed'], severity: 'medium', qualityImpact: { checkType: 'VISUAL_INSPECTION', failureMessage: 'Product spills creating waste and contamination.' }, possibleCauses: ['Excessive speed', 'Belt vibration', 'Cup instability'], mitigationActions: ['Speed reduction', 'Belt tension check', 'Guide adjustment'] }, + { id: 'temp-excursion', name: 'Temperature Excursion', description: 'Conveyor temperature outside specification.', probability: 0.03, affectedAttributes: ['temperature_zone'], severity: 'medium', qualityImpact: { checkType: 'TEMPERATURE', failureMessage: 'Product temperature affecting quality.' }, possibleCauses: ['HVAC malfunction', 'Ambient conditions', 'Insulation failure'], mitigationActions: ['HVAC adjustment', 'Insulation repair', 'Speed increase'] }, + ], +}; + +export const cupPackerComponent: ManufacturingComponent = { + id: 'cup-packer', + machineId: 'MCH_PACKER_01', + name: 'Cup Packer', + type: 'packaging', + productionLine: 'ICE_CREAM', + description: 'Automated cup filling and sealing system for ice cream products with integrated quality control.', + specifications: { manufacturer: 'Tetra Pak', model: 'PackMaster-Cup-300', yearInstalled: 2022, maintenanceSchedule: 'Daily cleaning, Weekly calibration, Monthly seal inspection' }, + subcomponents: [cupFillingSubcomponent, cupHandlingSubcomponent, lidApplicationSubcomponent, outputConveyorSubcomponent], + operationalParameters: [ + { id: 'fill_volume', name: 'Fill Volume', nominalValue: 250, tolerance: 10, unit: 'ml' }, + { id: 'production_rate', name: 'Production Rate', nominalValue: 120, tolerance: 10, unit: 'cups/min' }, + { id: 'seal_temp', name: 'Seal Temperature', nominalValue: 165, tolerance: 5, unit: 'C' }, + { id: 'product_temp', name: 'Product Temperature', nominalValue: -6, tolerance: 2, unit: 'C' }, + ], + qualityCheckPoints: [ + { id: 'qc-fill', checkType: 'FILL_WEIGHT', description: 'Verify cup fill volume meets specification.', method: 'automatic', frequency: 'per_unit', acceptanceCriteria: { parameter: 'volume', operator: 'between', value: [240, 260], unit: 'ml' }, linkedSubcomponents: ['cup-filler'] }, + { id: 'qc-seal', checkType: 'SEAL_STRENGTH', description: 'Verify lid seal integrity.', method: 'sampling', frequency: 'hourly', acceptanceCriteria: { parameter: 'strength', operator: 'gt', value: 12, unit: 'N' }, linkedSubcomponents: ['lid-applicator'] }, + { id: 'qc-appearance', checkType: 'VISUAL_INSPECTION', description: 'Check cup appearance and lid alignment.', method: 'automatic', frequency: 'continuous', acceptanceCriteria: { parameter: 'appearance', operator: 'eq', value: 'PASS' }, linkedSubcomponents: ['lid-applicator', 'cup-handler'] }, + ], + inputRequirements: [{ type: 'ICE_CREAM_MIX', specifications: { temperature: { max: -4, min: -8 }, viscosity: 'MEDIUM' } }], + outputSpecification: { type: 'SEALED_ICE_CREAM_CUP', attributes: { sealed: true, fillLevel: 'NOMINAL', temperature: -6 } }, +}; + +export default cupPackerComponent; \ No newline at end of file diff --git 
a/packages/controlmart/src/helpers/manufacturing/components/fermenter.component.ts b/packages/controlmart/src/helpers/manufacturing/components/fermenter.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..0428194dbb50062c5c3ed1d572337f3950a8fde8 --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/fermenter.component.ts @@ -0,0 +1,516 @@ +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const vesselSubcomponent: SubComponent = { + id: 'fermenter-vessel', + name: 'Vessel', + description: 'The physical containment unit of the fermenter that holds the culture and media.', + attributes: [ + { + id: 'material', + name: 'Material', + type: 'enum', + description: 'Material of construction; affects corrosion resistance.', + range: { values: ['SS304', 'SS316', 'Glass-lined'] }, + defaultValue: 'SS316', + criticalForQuality: false, + }, + { + id: 'capacity', + name: 'Capacity', + type: 'float', + unit: 'L', + description: 'Total volumetric capacity.', + range: { min: 5, max: 200000 }, + defaultValue: 5000, + }, + { + id: 'working_volume', + name: 'Working Volume', + type: 'float', + unit: 'L', + description: 'Maximum usable volume during operation (60-85% of capacity).', + range: { min: 3, max: 170000 }, + defaultValue: 4000, + criticalForQuality: true, + }, + { + id: 'pressure_rating', + name: 'Pressure Rating', + type: 'float', + unit: 'bar', + description: 'Maximum allowable operating pressure.', + range: { min: 0, max: 6 }, + defaultValue: 2, + }, + { + id: 'jacket_type', + name: 'Jacket Type', + type: 'enum', + description: 'Type of thermal jacket for heating/cooling.', + range: { values: ['Single', 'Dimple', 'Half-pipe'] }, + defaultValue: 'Dimple', + }, + ], + chaosScenarios: [ + { + id: 'vessel-leak', + name: 'Vessel Seal Leak', + description: 'Micro-leak in vessel seal allowing contamination or pressure loss.', + probability: 0.02, + affectedAttributes: ['pressure_rating', 'working_volume'], + severity: 'high', + qualityImpact: { + checkType: 'CONTAMINATION_TEST', + failureMessage: 'Potential contamination detected due to vessel seal integrity failure.', + }, + possibleCauses: ['Worn gaskets', 'Thermal cycling stress', 'Improper cleaning'], + mitigationActions: ['Emergency seal replacement', 'Batch quarantine', 'Sterility retest'], + }, + { + id: 'vessel-overpressure', + name: 'Overpressure Event', + description: 'Internal pressure exceeds safe operating limits.', + probability: 0.01, + affectedAttributes: ['pressure_rating'], + severity: 'critical', + qualityImpact: { + checkType: 'PRESSURE_SAFETY', + failureMessage: 'Overpressure event triggered safety relief valve.', + }, + possibleCauses: ['Blocked vent', 'Excessive gas production', 'Temperature spike'], + mitigationActions: ['Emergency pressure release', 'Process halt', 'Safety inspection'], + }, + ], +}; + +const agitatorSubcomponent: SubComponent = { + id: 'fermenter-agitator', + name: 'Agitator', + description: 'Mechanical system responsible for mixing contents uniformly and maintaining mass/heat transfer.', + attributes: [ + { + id: 'type', + name: 'Type', + type: 'enum', + description: 'Design type of agitator.', + range: { values: ['Impeller', 'Turbine', 'Anchor'] }, + defaultValue: 'Impeller', + }, + { + id: 'rpm', + name: 'RPM', + type: 'integer', + unit: 'RPM', + description: 'Current operating speed.', + range: { min: 20, max: 500 }, + defaultValue: 150, + criticalForQuality: true, + }, + { + id: 'motor_power', + name: 'Motor 
Power', + type: 'float', + unit: 'kW', + description: 'Motor capacity driving the agitator.', + range: { min: 0.25, max: 500 }, + defaultValue: 15, + }, + { + id: 'shaft_length', + name: 'Shaft Length', + type: 'float', + unit: 'm', + description: 'Length of agitator shaft.', + range: { min: 0.5, max: 12 }, + defaultValue: 2.5, + }, + { + id: 'seal_type', + name: 'Seal Type', + type: 'enum', + description: 'Sealing mechanism preventing leaks.', + range: { values: ['Mechanical', 'Magnetic'] }, + defaultValue: 'Mechanical', + }, + ], + chaosScenarios: [ + { + id: 'agitator-speed-drift', + name: 'Agitator Speed Drift', + description: 'RPM deviates from setpoint affecting mixing uniformity.', + probability: 0.05, + affectedAttributes: ['rpm'], + severity: 'medium', + qualityImpact: { + checkType: 'MIXING_UNIFORMITY', + failureMessage: 'Inconsistent mixing detected; product homogeneity compromised.', + degradation: 15, + }, + possibleCauses: ['Motor wear', 'VFD malfunction', 'Power fluctuation'], + mitigationActions: ['Recalibrate speed controller', 'Extend mixing time', 'Motor inspection'], + }, + { + id: 'agitator-seal-failure', + name: 'Agitator Seal Failure', + description: 'Mechanical seal leak allowing lubricant contamination.', + probability: 0.015, + affectedAttributes: ['seal_type'], + severity: 'critical', + qualityImpact: { + checkType: 'CONTAMINATION_TEST', + failureMessage: 'Lubricant contamination detected from agitator seal failure.', + }, + possibleCauses: ['Seal wear', 'Misalignment', 'Excessive vibration'], + mitigationActions: ['Immediate shutdown', 'Batch rejection', 'Seal replacement'], + }, + { + id: 'agitator-motor-overheat', + name: 'Motor Overheating', + description: 'Agitator motor temperature exceeds safe limits.', + probability: 0.03, + affectedAttributes: ['motor_power'], + severity: 'high', + qualityImpact: { + checkType: 'EQUIPMENT_HEALTH', + failureMessage: 'Agitator motor thermal protection triggered.', + }, + possibleCauses: ['Overload', 'Cooling failure', 'High viscosity'], + mitigationActions: ['Reduce speed', 'Check cooling system', 'Process pause'], + }, + ], +}; + +const cultureMediaSubcomponent: SubComponent = { + id: 'fermenter-culture-media', + name: 'Culture / Media', + description: 'Biological material and nutrients used for fermentation.', + attributes: [ + { + id: 'culture_id', + name: 'Culture ID', + type: 'string', + description: 'Unique identifier for the culture batch.', + defaultValue: '', + }, + { + id: 'strain', + name: 'Strain', + type: 'string', + description: 'Microbial or cell strain name.', + defaultValue: 'Lactobacillus bulgaricus', + criticalForQuality: true, + }, + { + id: 'inoculation_date', + name: 'Inoculation Date', + type: 'datetime', + description: 'Date and time of culture inoculation.', + }, + { + id: 'batch_id', + name: 'Batch ID', + type: 'string', + description: 'Production batch identifier.', + }, + { + id: 'volume', + name: 'Volume', + type: 'float', + unit: 'L', + description: 'Current culture volume.', + range: { min: 1, max: 170000 }, + criticalForQuality: true, + }, + ], + chaosScenarios: [ + { + id: 'culture-contamination', + name: 'Culture Contamination', + description: 'Foreign microorganism contamination in the culture.', + probability: 0.02, + affectedAttributes: ['strain', 'culture_id'], + severity: 'critical', + qualityImpact: { + checkType: 'PATHOGEN_TEST', + failureMessage: 'Microbial contamination detected; batch must be rejected.', + }, + possibleCauses: ['Non-sterile inoculation', 'Air ingress', 
'Contaminated media'], + mitigationActions: ['Batch rejection', 'CIP cycle', 'Source investigation'], + }, + { + id: 'culture-viability-loss', + name: 'Culture Viability Loss', + description: 'Culture cells losing viability affecting fermentation efficiency.', + probability: 0.04, + affectedAttributes: ['strain'], + severity: 'high', + qualityImpact: { + checkType: 'FERMENTATION_EFFICIENCY', + failureMessage: 'Low culture viability; fermentation incomplete.', + degradation: 40, + }, + possibleCauses: ['Old culture', 'Temperature shock', 'Nutrient depletion'], + mitigationActions: ['Fresh inoculation', 'Nutrient supplementation', 'Extended fermentation'], + }, + ], +}; + +const sensorsSubcomponent: SubComponent = { + id: 'fermenter-sensors', + name: 'Sensors', + description: 'Instrumentation used to monitor and control fermentation conditions.', + attributes: [ + { + id: 'temperature', + name: 'Temperature', + type: 'float', + unit: '°C', + description: 'Measures broth temperature.', + range: { min: 0, max: 150 }, + defaultValue: 42, + criticalForQuality: true, + }, + { + id: 'pH', + name: 'pH', + type: 'float', + description: 'Measures acidity/alkalinity.', + range: { min: 2.0, max: 12.0 }, + defaultValue: 6.5, + criticalForQuality: true, + }, + { + id: 'dissolved_oxygen', + name: 'Dissolved Oxygen', + type: 'float', + unit: '%', + description: 'Measures oxygen availability.', + range: { min: 0, max: 100 }, + defaultValue: 30, + criticalForQuality: true, + }, + { + id: 'pressure', + name: 'Pressure', + type: 'float', + unit: 'bar', + description: 'Measures internal pressure.', + range: { min: 0, max: 6 }, + defaultValue: 0.5, + }, + { + id: 'foam_detected', + name: 'Foam Detected', + type: 'boolean', + description: 'Detects foam formation.', + defaultValue: false, + }, + ], + chaosScenarios: [ + { + id: 'sensor-drift', + name: 'Sensor Calibration Drift', + description: 'Sensor readings drifting from true values.', + probability: 0.06, + affectedAttributes: ['temperature', 'pH', 'dissolved_oxygen'], + severity: 'medium', + qualityImpact: { + checkType: 'PROCESS_CONTROL', + failureMessage: 'Sensor drift detected; process parameters may be inaccurate.', + degradation: 10, + }, + possibleCauses: ['Sensor aging', 'Biofilm buildup', 'Calibration overdue'], + mitigationActions: ['Sensor recalibration', 'Cross-check with portable instruments'], + }, + { + id: 'ph-sensor-failure', + name: 'pH Sensor Failure', + description: 'pH sensor malfunction causing incorrect readings.', + probability: 0.025, + affectedAttributes: ['pH'], + severity: 'high', + qualityImpact: { + checkType: 'PH_LEVEL', + failureMessage: 'pH sensor failure; unable to verify acidity levels.', + }, + possibleCauses: ['Electrode fouling', 'Reference junction blockage', 'Cable damage'], + mitigationActions: ['Manual pH measurement', 'Sensor replacement', 'Process hold'], + }, + { + id: 'temperature-spike', + name: 'Temperature Spike', + description: 'Unexpected temperature increase due to sensor or control failure.', + probability: 0.03, + affectedAttributes: ['temperature'], + severity: 'high', + qualityImpact: { + checkType: 'TEMPERATURE', + failureMessage: 'Temperature exceeded safe limits; culture viability at risk.', + }, + possibleCauses: ['Cooling system failure', 'Exothermic reaction', 'Control loop error'], + mitigationActions: ['Emergency cooling', 'Manual override', 'Process evaluation'], + }, + ], +}; + +const valvesThermalSubcomponent: SubComponent = { + id: 'fermenter-valves-thermal', + name: 'Valves & Thermal 
Control', + description: 'Valve systems and heating/cooling infrastructure.', + attributes: [ + { + id: 'inlet_valves', + name: 'Inlet Valves', + type: 'integer', + description: 'Number of inlet valves.', + range: { min: 1, max: 10 }, + defaultValue: 4, + }, + { + id: 'outlet_valves', + name: 'Outlet Valves', + type: 'integer', + description: 'Number of outlet valves.', + range: { min: 1, max: 5 }, + defaultValue: 2, + }, + { + id: 'sampling_valve', + name: 'Sampling Valve', + type: 'boolean', + description: 'Presence of aseptic sampling valve.', + defaultValue: true, + }, + { + id: 'safety_relief_valve', + name: 'Safety Relief Valve', + type: 'boolean', + description: 'Presence of pressure safety relief valve.', + defaultValue: true, + }, + { + id: 'jacket_temperature', + name: 'Jacket Temperature', + type: 'float', + unit: '°C', + description: 'Current jacket temperature.', + range: { min: 5, max: 120 }, + defaultValue: 40, + criticalForQuality: true, + }, + { + id: 'heat_transfer_medium', + name: 'Heat Transfer Medium', + type: 'enum', + description: 'Medium used for heat transfer.', + range: { values: ['Steam', 'Water', 'Oil'] }, + defaultValue: 'Water', + }, + { + id: 'flow_rate', + name: 'Flow Rate', + type: 'float', + unit: 'L/min', + description: 'Heat transfer medium flow rate.', + range: { min: 0, max: 5000 }, + defaultValue: 500, + }, + ], + chaosScenarios: [ + { + id: 'valve-stuck', + name: 'Valve Stuck', + description: 'Valve fails to open or close properly.', + probability: 0.03, + affectedAttributes: ['inlet_valves', 'outlet_valves'], + severity: 'high', + qualityImpact: { + checkType: 'FLOW_CONTROL', + failureMessage: 'Valve malfunction affecting material flow.', + }, + possibleCauses: ['Actuator failure', 'Debris in valve', 'Corrosion'], + mitigationActions: ['Manual valve operation', 'Bypass activation', 'Maintenance'], + }, + { + id: 'cooling-failure', + name: 'Cooling System Failure', + description: 'Jacket cooling unable to maintain temperature setpoint.', + probability: 0.025, + affectedAttributes: ['jacket_temperature', 'flow_rate'], + severity: 'high', + qualityImpact: { + checkType: 'TEMPERATURE', + failureMessage: 'Cooling failure; fermentation temperature rising.', + }, + possibleCauses: ['Chiller malfunction', 'Low coolant', 'Pump failure'], + mitigationActions: ['Backup chiller', 'Ice bath emergency cooling', 'Process pause'], + }, + ], +}; + + +export const fermenterComponent: ManufacturingComponent = { + id: 'fermenter', + machineId: 'MCH_FERMENT_01', + name: 'Fermenter', + type: 'processing', + productionLine: 'ICE_CREAM', + description: 'Fermentation vessel for culturing dairy products.', + specifications: { + manufacturer: 'BioTech Systems', + model: 'FermPro-5000', + yearInstalled: 2022, + maintenanceSchedule: 'Weekly inspection, Monthly calibration', + }, + subcomponents: [ + vesselSubcomponent, + agitatorSubcomponent, + cultureMediaSubcomponent, + sensorsSubcomponent, + valvesThermalSubcomponent, + ], + operationalParameters: [ + { id: 'fermentation_temp', name: 'Fermentation Temperature', nominalValue: 42, tolerance: 2, unit: '°C' }, + { id: 'target_ph', name: 'Target pH', nominalValue: 4.5, tolerance: 0.3, unit: '' }, + { id: 'agitation_speed', name: 'Agitation Speed', nominalValue: 150, tolerance: 20, unit: 'RPM' }, + { id: 'fermentation_time', name: 'Fermentation Time', nominalValue: 4, tolerance: 1, unit: 'hours' }, + ], + qualityCheckPoints: [ + { + id: 'qc-ph-level', + checkType: 'PH_LEVEL', + description: 'Verify final pH is within acceptable 
range for cultured products.', + method: 'automatic', + frequency: 'continuous', + acceptanceCriteria: { parameter: 'pH', operator: 'between', value: [4.2, 4.8] }, + linkedSubcomponents: ['fermenter-sensors', 'fermenter-culture-media'], + }, + { + id: 'qc-pathogen-test', + checkType: 'PATHOGEN_TEST', + description: 'Microbiological testing for harmful pathogens.', + method: 'sampling', + frequency: 'batch', + acceptanceCriteria: { parameter: 'pathogen_presence', operator: 'eq', value: 'NEGATIVE' }, + linkedSubcomponents: ['fermenter-culture-media', 'fermenter-vessel'], + }, + { + id: 'qc-temp-control', + checkType: 'TEMPERATURE', + description: 'Verify temperature maintained within setpoint.', + method: 'automatic', + frequency: 'continuous', + acceptanceCriteria: { parameter: 'temperature', operator: 'between', value: [40, 44], unit: '°C' }, + linkedSubcomponents: ['fermenter-sensors', 'fermenter-valves-thermal'], + }, + ], + inputRequirements: [ + { type: 'PASTEURIZED_BASE', specifications: { temperature: { max: 10 }, volume: { min: 100 } } }, + ], + outputSpecification: { + type: 'FERMENTED_BASE', + attributes: { pH: 4.5, temperature: 42, consistency: 'SMOOTH' }, + }, +}; + +export default fermenterComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/fryer.component.ts b/packages/controlmart/src/helpers/manufacturing/components/fryer.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..3dd2f46fd20b0547431947ab269555aca3ae1d2f --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/fryer.component.ts @@ -0,0 +1,110 @@ +/** + * MCH_FRYER_01 - Fryer Component Definition + * Continuous deep fryer for potato chip production. + */ + +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const fryingVesselSubcomponent: SubComponent = { + id: 'fryer-vessel', + name: 'Frying Vessel', + description: 'Stainless steel continuous frying pan with oil circulation.', + attributes: [ + { id: 'oil_capacity', name: 'Oil Capacity', type: 'float', unit: 'L', range: { min: 500, max: 10000 }, defaultValue: 3000, description: 'Total oil volume in system.' }, + { id: 'vessel_material', name: 'Material', type: 'enum', range: { values: ['SS304', 'SS316'] }, defaultValue: 'SS316', description: 'Construction material.' }, + { id: 'frying_length', name: 'Frying Length', type: 'float', unit: 'm', range: { min: 5, max: 30 }, defaultValue: 15, description: 'Product travel distance in fryer.' }, + { id: 'oil_level', name: 'Oil Level', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 90, criticalForQuality: true, description: 'Current oil level.' }, + ], + chaosScenarios: [ + { id: 'oil-low', name: 'Low Oil Level', description: 'Oil level below safe operating threshold.', probability: 0.03, affectedAttributes: ['oil_level'], severity: 'high', qualityImpact: { checkType: 'FRYING_QUALITY', failureMessage: 'Inconsistent frying from low oil level.' }, possibleCauses: ['Carry-out loss', 'Evaporation', 'Leak'], mitigationActions: ['Oil top-up', 'Leak inspection', 'Level sensor check'] }, + ], +}; + +const heatingSystemSubcomponent: SubComponent = { + id: 'fryer-heating', + name: 'Heating System', + description: 'Thermal fluid or direct-fire heating for oil temperature control.', + attributes: [ + { id: 'heating_type', name: 'Heating Type', type: 'enum', range: { values: ['Thermal-Fluid', 'Direct-Fire', 'Electric'] }, defaultValue: 'Thermal-Fluid', description: 'Heat source type.' 
}, + { id: 'oil_temp', name: 'Oil Temperature', type: 'float', unit: '°C', range: { min: 160, max: 200 }, defaultValue: 180, criticalForQuality: true, description: 'Frying oil temperature.' }, + { id: 'heat_exchanger_efficiency', name: 'HX Efficiency', type: 'float', unit: '%', range: { min: 80, max: 100 }, defaultValue: 95, description: 'Heat transfer efficiency.' }, + { id: 'burner_status', name: 'Burner Status', type: 'enum', range: { values: ['On', 'Off', 'Modulating', 'Fault'] }, defaultValue: 'Modulating', description: 'Burner operational state.' }, + ], + chaosScenarios: [ + { id: 'temp-undershoot', name: 'Temperature Undershoot', description: 'Oil temperature falls below setpoint.', probability: 0.04, affectedAttributes: ['oil_temp'], severity: 'high', qualityImpact: { checkType: 'OIL_TEMPERATURE', failureMessage: 'Oil temperature too low; chips absorbing excess oil.', degradation: 25 }, possibleCauses: ['High throughput', 'Burner issue', 'Cold product'], mitigationActions: ['Reduce throughput', 'Burner check', 'Preheat product'] }, + { id: 'temp-overshoot', name: 'Temperature Overshoot', description: 'Oil temperature exceeds safe limit.', probability: 0.025, affectedAttributes: ['oil_temp'], severity: 'high', qualityImpact: { checkType: 'ACRYLAMIDE_LEVEL', failureMessage: 'Excessive temperature increasing acrylamide formation.' }, possibleCauses: ['Control fault', 'Low throughput', 'Sensor error'], mitigationActions: ['Temperature reduction', 'Control check', 'Increase throughput'] }, + { id: 'burner-fault', name: 'Burner Fault', description: 'Heating system malfunction.', probability: 0.02, affectedAttributes: ['burner_status'], severity: 'critical', qualityImpact: { checkType: 'OIL_TEMPERATURE', failureMessage: 'Heating failure; production must stop.' }, possibleCauses: ['Ignition failure', 'Gas supply', 'Safety lockout'], mitigationActions: ['Burner reset', 'Gas check', 'Safety inspection'] }, + ], +}; + +const oilQualitySubcomponent: SubComponent = { + id: 'fryer-oil-quality', + name: 'Oil Quality Management', + description: 'Oil filtration, monitoring, and conditioning system.', + attributes: [ + { id: 'oil_type', name: 'Oil Type', type: 'enum', range: { values: ['Sunflower', 'Palm', 'Canola', 'Blend'] }, defaultValue: 'Sunflower', description: 'Frying oil type.' }, + { id: 'ffa_level', name: 'FFA Level', type: 'float', unit: '%', range: { min: 0, max: 5 }, defaultValue: 0.3, criticalForQuality: true, description: 'Free fatty acid content.' }, + { id: 'polar_compounds', name: 'Polar Compounds', type: 'float', unit: '%', range: { min: 0, max: 30 }, defaultValue: 8, criticalForQuality: true, description: 'Total polar materials (oil degradation).' }, + { id: 'filter_status', name: 'Filter Status', type: 'enum', range: { values: ['Clean', 'Normal', 'Saturated'] }, defaultValue: 'Normal', description: 'Filtration system condition.' }, + { id: 'oil_age_hours', name: 'Oil Age', type: 'float', unit: 'hours', range: { min: 0, max: 200 }, defaultValue: 48, description: 'Time since last oil change.' }, + ], + chaosScenarios: [ + { id: 'oil-degraded', name: 'Oil Degradation', description: 'Oil quality exceeding acceptable limits.', probability: 0.04, affectedAttributes: ['ffa_level', 'polar_compounds'], severity: 'high', qualityImpact: { checkType: 'OIL_QUALITY', failureMessage: 'Degraded oil affecting product taste and safety.' 
}, possibleCauses: ['Extended use', 'High temperature', 'Contamination'], mitigationActions: ['Partial oil change', 'Temperature reduction', 'Increase filtration'] }, + { id: 'filter-clogged', name: 'Filter Clogged', description: 'Oil filter saturated with debris.', probability: 0.035, affectedAttributes: ['filter_status'], severity: 'medium', qualityImpact: { checkType: 'OIL_QUALITY', failureMessage: 'Reduced filtration causing oil quality decline.', degradation: 15 }, possibleCauses: ['High fines', 'Extended cycle', 'Poor quality potatoes'], mitigationActions: ['Filter change', 'Cycle adjustment', 'Slicer check'] }, + ], +}; + +const productConveyorSubcomponent: SubComponent = { + id: 'fryer-conveyor', + name: 'Product Conveyor', + description: 'Submerging and take-out conveyor system.', + attributes: [ + { id: 'conveyor_speed', name: 'Conveyor Speed', type: 'float', unit: 'm/min', range: { min: 1, max: 10 }, defaultValue: 4, criticalForQuality: true, description: 'Product movement speed.' }, + { id: 'frying_time', name: 'Frying Time', type: 'float', unit: 's', range: { min: 60, max: 300 }, defaultValue: 180, criticalForQuality: true, description: 'Total time in fryer.' }, + { id: 'paddle_condition', name: 'Paddle Condition', type: 'enum', range: { values: ['Good', 'Worn', 'Replace'] }, defaultValue: 'Good', description: 'Submerging paddle state.' }, + ], + chaosScenarios: [ + { id: 'speed-variation', name: 'Speed Variation', description: 'Conveyor speed fluctuating.', probability: 0.03, affectedAttributes: ['conveyor_speed', 'frying_time'], severity: 'medium', qualityImpact: { checkType: 'MOISTURE_CONTENT', failureMessage: 'Inconsistent frying time affecting moisture.', degradation: 20 }, possibleCauses: ['Motor issue', 'VFD fault', 'Chain wear'], mitigationActions: ['Speed calibration', 'Drive check', 'Chain inspection'] }, + { id: 'paddle-failure', name: 'Paddle Failure', description: 'Submerging paddles not working properly.', probability: 0.02, affectedAttributes: ['paddle_condition'], severity: 'medium', qualityImpact: { checkType: 'COLOR_INDEX', failureMessage: 'Uneven color from poor submersion.' }, possibleCauses: ['Paddle wear', 'Drive issue', 'Oil buildup'], mitigationActions: ['Paddle replacement', 'Drive repair', 'Cleaning'] }, + ], +}; + +const exhaustSystemSubcomponent: SubComponent = { + id: 'fryer-exhaust', + name: 'Exhaust & Ventilation', + description: 'Fume extraction and oil mist collection.', + attributes: [ + { id: 'exhaust_flow', name: 'Exhaust Flow', type: 'float', unit: 'm³/hr', range: { min: 5000, max: 50000 }, defaultValue: 20000, description: 'Ventilation airflow.' }, + { id: 'mist_collector_status', name: 'Mist Collector', type: 'enum', range: { values: ['Clean', 'Normal', 'Maintenance'] }, defaultValue: 'Normal', description: 'Oil mist collection status.' }, + { id: 'hood_pressure', name: 'Hood Pressure', type: 'float', unit: 'Pa', range: { min: -50, max: -5 }, defaultValue: -20, description: 'Negative pressure under hood.' }, + ], + chaosScenarios: [ + { id: 'exhaust-reduced', name: 'Reduced Exhaust', description: 'Ventilation flow below requirement.', probability: 0.025, affectedAttributes: ['exhaust_flow', 'hood_pressure'], severity: 'medium', qualityImpact: { checkType: 'SAFETY_CONDITIONS', failureMessage: 'Inadequate ventilation affecting work environment.' 
}, possibleCauses: ['Fan belt worn', 'Filter blocked', 'Damper issue'], mitigationActions: ['Fan service', 'Filter cleaning', 'Damper adjustment'] }, + ], +}; + +export const fryerComponent: ManufacturingComponent = { + id: 'fryer', + machineId: 'MCH_FRYER_01', + name: 'Fryer', + type: 'processing', + productionLine: 'CHIPS', + description: 'Continuous atmospheric fryer with oil quality management for potato chip production.', + specifications: { manufacturer: 'Heat and Control', model: 'MasterFry-2500', yearInstalled: 2021, maintenanceSchedule: 'Daily oil test, Weekly filter change, Monthly deep clean' }, + subcomponents: [fryingVesselSubcomponent, heatingSystemSubcomponent, oilQualitySubcomponent, productConveyorSubcomponent, exhaustSystemSubcomponent], + operationalParameters: [ + { id: 'oil_temp', name: 'Oil Temperature', nominalValue: 180, tolerance: 5, unit: '°C' }, + { id: 'frying_time', name: 'Frying Time', nominalValue: 180, tolerance: 15, unit: 's' }, + { id: 'throughput', name: 'Throughput', nominalValue: 2000, tolerance: 100, unit: 'kg/hr' }, + ], + qualityCheckPoints: [ + { id: 'qc-oil-temp', checkType: 'OIL_TEMPERATURE', description: 'Verify oil temperature in range.', method: 'automatic', frequency: 'continuous', acceptanceCriteria: { parameter: 'temperature', operator: 'between', value: [175, 185], unit: '°C' }, linkedSubcomponents: ['fryer-heating'] }, + { id: 'qc-color', checkType: 'COLOR_INDEX', description: 'Check chip color meets specification.', method: 'automatic', frequency: 'continuous', acceptanceCriteria: { parameter: 'color', operator: 'between', value: ['L*55', 'L*65'] }, linkedSubcomponents: ['fryer-heating', 'fryer-conveyor'] }, + { id: 'qc-moisture', checkType: 'MOISTURE_CONTENT', description: 'Verify final moisture content.', method: 'sampling', frequency: 'hourly', acceptanceCriteria: { parameter: 'moisture', operator: 'lt', value: 2, unit: '%' }, linkedSubcomponents: ['fryer-conveyor'] }, + { id: 'qc-acrylamide', checkType: 'ACRYLAMIDE_LEVEL', description: 'Monitor acrylamide formation.', method: 'sampling', frequency: 'batch', acceptanceCriteria: { parameter: 'acrylamide', operator: 'lt', value: 750, unit: 'ppb' }, linkedSubcomponents: ['fryer-heating'] }, + ], + inputRequirements: [{ type: 'SLICED_POTATOES', specifications: { thickness: 1.5, starchRemoved: true } }], + outputSpecification: { type: 'FRIED_CHIPS', attributes: { color: 'GOLDEN', crispness: 'HIGH', moisture: 1.5 } }, +}; + +export default fryerComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/index.ts b/packages/controlmart/src/helpers/manufacturing/components/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..72b3a85d0edff119062a24e831f17ef5b864cc5a --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/index.ts @@ -0,0 +1,96 @@ +/** + * Manufacturing Components Index + * Exports all machine component definitions for ice cream and chips production lines. 
+ */ + +// Ice Cream Production Line +export { pasteurizerComponent } from './pasteurizer.component'; +export { fermenterComponent } from './fermenter.component'; +export { mixerComponent } from './mixer.component'; +export { cupPackerComponent } from './cup-packer.component'; +export { chillRoomComponent } from './chill-room.component'; + +// Chips Production Line +export { slicerComponent } from './slicer.component'; +export { fryerComponent } from './fryer.component'; +export { seasoningDrumComponent } from './seasoning-drum.component'; +export { pouchPackerComponent } from './pouch-packer.component'; +export { metalDetectorComponent } from './metal-detector.component'; + +// Re-export types +export type { + ManufacturingComponent, + SubComponent, + SubComponentChaosScenario, + QualityCheckPoint, + AttributeDefinition, +} from '../types.manufacturing.helper'; + +// Import all components for lookup maps +import { pasteurizerComponent } from './pasteurizer.component'; +import { fermenterComponent } from './fermenter.component'; +import { mixerComponent } from './mixer.component'; +import { cupPackerComponent } from './cup-packer.component'; +import { chillRoomComponent } from './chill-room.component'; +import { slicerComponent } from './slicer.component'; +import { fryerComponent } from './fryer.component'; +import { seasoningDrumComponent } from './seasoning-drum.component'; +import { pouchPackerComponent } from './pouch-packer.component'; +import { metalDetectorComponent } from './metal-detector.component'; +import type { ManufacturingComponent } from '../types.manufacturing.helper'; + +/** + * All manufacturing components indexed by machine ID + */ +export const componentsByMachineId: Record<string, ManufacturingComponent> = { + 'MCH_PASTEUR_01': pasteurizerComponent, + 'MCH_FERMENT_01': fermenterComponent, + 'MCH_MIXER_01': mixerComponent, + 'MCH_PACKER_01': cupPackerComponent, + 'MCH_CHILL_01': chillRoomComponent, + 'MCH_SLICER_01': slicerComponent, + 'MCH_FRYER_01': fryerComponent, + 'MCH_SEASON_01': seasoningDrumComponent, + 'MCH_PACKER_02': pouchPackerComponent, + 'MCH_MD_01': metalDetectorComponent, +}; + +/** + * Production line configurations + */ +export const productionLines = { + ICE_CREAM: { + name: 'Ice Cream Production Line', + machineSequence: ['MCH_PASTEUR_01', 'MCH_FERMENT_01', 'MCH_MIXER_01', 'MCH_PACKER_01', 'MCH_CHILL_01'], + products: ['ICE-VANILLA-001', 'ICE-CHOCO-001'], + }, + CHIPS: { + name: 'Chips Production Line', + machineSequence: ['MCH_SLICER_01', 'MCH_FRYER_01', 'MCH_SEASON_01', 'MCH_PACKER_02', 'MCH_MD_01'], + products: ['CHP-CLASSIC-001', 'CHP-BBQ-001'], + }, +}; + +/** + * Get all components for a production line + */ +export function getProductionLineComponents(lineType: 'ICE_CREAM' | 'CHIPS'): ManufacturingComponent[] { + const line = productionLines[lineType]; + return line.machineSequence.map(machineId => componentsByMachineId[machineId]!); +} + +/** + * Get component by machine ID + */ +export function getComponentByMachineId(machineId: string): ManufacturingComponent | undefined { + return componentsByMachineId[machineId]; +} + +/** + * Determine production line type from product SKU + */ +export function getProductionLineFromSku(sku: string): 'ICE_CREAM' | 'CHIPS' | null { + if (sku.startsWith('ICE-')) return 'ICE_CREAM'; + if (sku.startsWith('CHP-')) return 'CHIPS'; + return null; +} diff --git a/packages/controlmart/src/helpers/manufacturing/components/metal-detector.component.ts b/packages/controlmart/src/helpers/manufacturing/components/metal-detector.component.ts new file
mode 100644 index 0000000000000000000000000000000000000000..1ad249248ee89c9be94b56164c98f95620304eda --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/metal-detector.component.ts @@ -0,0 +1,111 @@ +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const detectorHeadSubcomponent: SubComponent = { + id: 'md-head', + name: 'Detector Head', + description: 'Multi-frequency electromagnetic detection coil system.', + attributes: [ + { id: 'aperture_width', name: 'Aperture Width', type: 'float', unit: 'mm', range: { min: 200, max: 800 }, defaultValue: 400, description: 'Detection opening width.' }, + { id: 'aperture_height', name: 'Aperture Height', type: 'float', unit: 'mm', range: { min: 100, max: 300 }, defaultValue: 150, description: 'Detection opening height.' }, + { id: 'frequency', name: 'Operating Frequency', type: 'enum', range: { values: ['Single', 'Dual', 'Multi'] }, defaultValue: 'Multi', description: 'Detection frequency mode.' }, + { id: 'sensitivity_ferrous', name: 'Ferrous Sensitivity', type: 'float', unit: 'mm', range: { min: 0.5, max: 3 }, defaultValue: 1.5, criticalForQuality: true, description: 'Minimum ferrous metal detection size.' }, + { id: 'sensitivity_nonferrous', name: 'Non-Ferrous Sensitivity', type: 'float', unit: 'mm', range: { min: 1, max: 4 }, defaultValue: 2.0, criticalForQuality: true, description: 'Minimum non-ferrous detection size.' }, + { id: 'sensitivity_stainless', name: 'Stainless Sensitivity', type: 'float', unit: 'mm', range: { min: 1.5, max: 5 }, defaultValue: 2.5, criticalForQuality: true, description: 'Minimum stainless steel detection size.' }, + ], + chaosScenarios: [ + { id: 'sensitivity-drift', name: 'Sensitivity Drift', description: 'Detection sensitivity decreasing over time.', probability: 0.03, affectedAttributes: ['sensitivity_ferrous', 'sensitivity_nonferrous', 'sensitivity_stainless'], severity: 'high', qualityImpact: { checkType: 'DETECTION_SENSITIVITY', failureMessage: 'Metal detection sensitivity below specification.' }, possibleCauses: ['Environmental interference', 'Coil degradation', 'Calibration due'], mitigationActions: ['Recalibration', 'Environment check', 'Coil inspection'] }, + { id: 'false-positives', name: 'False Positive Alarms', description: 'Excessive false rejections.', probability: 0.04, affectedAttributes: ['frequency'], severity: 'medium', qualityImpact: { checkType: 'REJECT_RATE', failureMessage: 'High false positive rate causing product waste.', degradation: 30 }, possibleCauses: ['Product effect', 'Electrical noise', 'Sensitivity too high'], mitigationActions: ['Product compensation', 'Noise filtering', 'Sensitivity tuning'] }, + { id: 'coil-contamination', name: 'Coil Contamination', description: 'Metallic debris on detector affecting performance.', probability: 0.02, affectedAttributes: ['sensitivity_ferrous'], severity: 'high', qualityImpact: { checkType: 'METAL_DETECTION', failureMessage: 'Detector performance compromised by contamination.' 
}, possibleCauses: ['Product spillage', 'Belt debris', 'Cleaning residue'], mitigationActions: ['Aperture cleaning', 'Belt inspection', 'Verification test'] }, + ], +}; + +const conveyorSystemSubcomponent: SubComponent = { + id: 'md-conveyor', + name: 'Conveyor System', + description: 'Non-metallic belt conveyor for product transport through detector.', + attributes: [ + { id: 'belt_type', name: 'Belt Type', type: 'enum', range: { values: ['Modular-Plastic', 'Food-Grade-PU', 'Silicone'] }, defaultValue: 'Food-Grade-PU', description: 'Non-metallic belt material.' }, + { id: 'belt_speed', name: 'Belt Speed', type: 'float', unit: 'm/min', range: { min: 20, max: 80 }, defaultValue: 60, description: 'Conveyor speed.' }, + { id: 'belt_width', name: 'Belt Width', type: 'float', unit: 'mm', range: { min: 200, max: 600 }, defaultValue: 350, description: 'Conveyor belt width.' }, + { id: 'belt_condition', name: 'Belt Condition', type: 'enum', range: { values: ['Good', 'Worn', 'Replace'] }, defaultValue: 'Good', description: 'Belt wear status.' }, + ], + chaosScenarios: [ + { id: 'belt-wear', name: 'Belt Wear', description: 'Conveyor belt showing wear patterns.', probability: 0.025, affectedAttributes: ['belt_condition'], severity: 'low', qualityImpact: { checkType: 'EQUIPMENT_HEALTH', failureMessage: 'Conveyor belt approaching end of life.' }, possibleCauses: ['Normal wear', 'Misalignment', 'Tension issues'], mitigationActions: ['Schedule replacement', 'Alignment check', 'Tension adjustment'] }, + { id: 'speed-variation', name: 'Speed Variation', description: 'Belt speed fluctuating affecting detection.', probability: 0.02, affectedAttributes: ['belt_speed'], severity: 'medium', qualityImpact: { checkType: 'DETECTION_RELIABILITY', failureMessage: 'Speed variation may affect detection accuracy.' }, possibleCauses: ['Motor issue', 'VFD fault', 'Load variation'], mitigationActions: ['Motor inspection', 'VFD check', 'Speed stabilization'] }, + ], +}; + +const rejectSystemSubcomponent: SubComponent = { + id: 'md-reject', + name: 'Reject System', + description: 'Automatic rejection mechanism for contaminated products.', + attributes: [ + { id: 'reject_type', name: 'Reject Type', type: 'enum', range: { values: ['Pusher', 'Air-Blast', 'Flipper', 'Drop-Away'] }, defaultValue: 'Pusher', description: 'Rejection mechanism type.' }, + { id: 'reject_confirm', name: 'Reject Confirm', type: 'boolean', defaultValue: true, criticalForQuality: true, description: 'Reject confirmation sensor active.' }, + { id: 'reject_bin_level', name: 'Reject Bin Level', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 25, description: 'Reject bin fill level.' }, + { id: 'reject_count_session', name: 'Session Rejects', type: 'integer', range: { min: 0, max: 1000 }, defaultValue: 0, description: 'Rejects this production session.' }, + ], + chaosScenarios: [ + { id: 'reject-failure', name: 'Reject Mechanism Failure', description: 'Reject system not removing detected product.', probability: 0.02, affectedAttributes: ['reject_type', 'reject_confirm'], severity: 'critical', qualityImpact: { checkType: 'METAL_DETECTION', failureMessage: 'Reject mechanism failure; contaminated product may pass.' 
}, possibleCauses: ['Actuator fault', 'Timing error', 'Mechanical jam'], mitigationActions: ['Immediate stop', 'Mechanism repair', 'Product hold'] }, + { id: 'bin-full', name: 'Reject Bin Full', description: 'Reject collection bin at capacity.', probability: 0.025, affectedAttributes: ['reject_bin_level'], severity: 'medium', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Reject bin full; line must pause.' }, possibleCauses: ['High reject rate', 'Bin not emptied'], mitigationActions: ['Empty bin', 'Investigate reject cause'] }, + { id: 'confirm-sensor-fault', name: 'Confirm Sensor Fault', description: 'Reject confirmation sensor malfunction.', probability: 0.015, affectedAttributes: ['reject_confirm'], severity: 'high', qualityImpact: { checkType: 'REJECT_VERIFICATION', failureMessage: 'Cannot verify rejects are being removed.' }, possibleCauses: ['Sensor blocked', 'Wiring issue', 'Sensor failure'], mitigationActions: ['Sensor cleaning', 'Wiring check', 'Manual verification'] }, + ], +}; + +const controlSystemSubcomponent: SubComponent = { + id: 'md-control', + name: 'Control System', + description: 'Detection processing and data logging system.', + attributes: [ + { id: 'calibration_status', name: 'Calibration Status', type: 'enum', range: { values: ['Valid', 'Due', 'Expired'] }, defaultValue: 'Valid', criticalForQuality: true, description: 'Calibration validity.' }, + { id: 'last_test_pass', name: 'Last Test Result', type: 'boolean', defaultValue: true, criticalForQuality: true, description: 'Most recent test piece result.' }, + { id: 'test_frequency', name: 'Test Frequency', type: 'integer', unit: 'min', range: { min: 15, max: 120 }, defaultValue: 30, description: 'Test piece interval.' }, + { id: 'data_logging', name: 'Data Logging', type: 'enum', range: { values: ['Active', 'Error', 'Disabled'] }, defaultValue: 'Active', description: 'Event logging status.' }, + ], + chaosScenarios: [ + { id: 'calibration-expired', name: 'Calibration Expired', description: 'Detector calibration past due date.', probability: 0.02, affectedAttributes: ['calibration_status'], severity: 'high', qualityImpact: { checkType: 'CALIBRATION_STATUS', failureMessage: 'Detector calibration expired; must recalibrate.' }, possibleCauses: ['Missed schedule', 'System error'], mitigationActions: ['Immediate calibration', 'Production hold'] }, + { id: 'test-fail', name: 'Test Piece Fail', description: 'Routine test piece not detected.', probability: 0.015, affectedAttributes: ['last_test_pass'], severity: 'critical', qualityImpact: { checkType: 'METAL_DETECTION', failureMessage: 'Test piece detection failed; system compromised.' }, possibleCauses: ['Sensitivity loss', 'Test piece issue', 'System fault'], mitigationActions: ['Immediate stop', 'Recalibration', 'Product hold since last good test'] }, + { id: 'logging-failure', name: 'Data Logging Failure', description: 'Event logging not functioning.', probability: 0.02, affectedAttributes: ['data_logging'], severity: 'medium', qualityImpact: { checkType: 'TRACEABILITY', failureMessage: 'Detection events not being recorded.' 
}, possibleCauses: ['Storage full', 'System error', 'Network issue'], mitigationActions: ['Storage cleanup', 'System restart', 'Manual logging'] }, + ], +}; + +const testPieceSubcomponent: SubComponent = { + id: 'md-test-pieces', + name: 'Test Pieces', + description: 'Standard test pieces for verification.', + attributes: [ + { id: 'ferrous_test', name: 'Ferrous Test Piece', type: 'float', unit: 'mm', range: { min: 1, max: 3 }, defaultValue: 1.5, description: 'Ferrous test sphere size.' }, + { id: 'nonferrous_test', name: 'Non-Ferrous Test Piece', type: 'float', unit: 'mm', range: { min: 1.5, max: 4 }, defaultValue: 2.0, description: 'Non-ferrous test sphere size.' }, + { id: 'stainless_test', name: 'Stainless Test Piece', type: 'float', unit: 'mm', range: { min: 2, max: 5 }, defaultValue: 2.5, description: 'Stainless test sphere size.' }, + { id: 'test_cards_available', name: 'Test Cards Available', type: 'integer', range: { min: 0, max: 20 }, defaultValue: 6, description: 'Test piece cards in use.' }, + ], + chaosScenarios: [ + { id: 'test-piece-lost', name: 'Test Piece Lost', description: 'Test piece missing or damaged.', probability: 0.02, affectedAttributes: ['test_cards_available'], severity: 'medium', qualityImpact: { checkType: 'VERIFICATION_CAPABILITY', failureMessage: 'Cannot perform routine verification tests.' }, possibleCauses: ['Misplaced', 'Damaged', 'In product stream'], mitigationActions: ['Locate test piece', 'Use backup', 'Product search if in stream'] }, + ], +}; + +export const metalDetectorComponent: ManufacturingComponent = { + id: 'metal-detector', + machineId: 'MCH_MD_01', + name: 'Metal Detector', + type: 'quality_control', + productionLine: 'CHIPS', + description: 'Multi-frequency metal detection system for final product screening.', + specifications: { manufacturer: 'Mettler Toledo', model: 'Safeline-X', yearInstalled: 2022, maintenanceSchedule: 'Hourly test, Daily calibration check, Monthly service' }, + subcomponents: [detectorHeadSubcomponent, conveyorSystemSubcomponent, rejectSystemSubcomponent, controlSystemSubcomponent, testPieceSubcomponent], + operationalParameters: [ + { id: 'line_speed', name: 'Line Speed', nominalValue: 60, tolerance: 5, unit: 'm/min' }, + { id: 'ferrous_sensitivity', name: 'Ferrous Sensitivity', nominalValue: 1.5, tolerance: 0.2, unit: 'mm' }, + { id: 'nonferrous_sensitivity', name: 'Non-Ferrous Sensitivity', nominalValue: 2.0, tolerance: 0.2, unit: 'mm' }, + { id: 'stainless_sensitivity', name: 'Stainless Sensitivity', nominalValue: 2.5, tolerance: 0.3, unit: 'mm' }, + ], + qualityCheckPoints: [ + { id: 'qc-detection', checkType: 'METAL_DETECTION', description: 'Verify no metal contamination.', method: 'automatic', frequency: 'per_unit', acceptanceCriteria: { parameter: 'metal_detected', operator: 'eq', value: 'NO_CONTAMINANTS' }, linkedSubcomponents: ['md-head', 'md-reject'] }, + { id: 'qc-reject-rate', checkType: 'REJECT_RATE', description: 'Monitor rejection statistics.', method: 'automatic', frequency: 'continuous', acceptanceCriteria: { parameter: 'reject_rate', operator: 'lt', value: 1, unit: '%' }, linkedSubcomponents: ['md-reject'] }, + { id: 'qc-verification', checkType: 'VERIFICATION_TEST', description: 'Periodic test piece verification.', method: 'manual', frequency: 'hourly', acceptanceCriteria: { parameter: 'test_result', operator: 'eq', value: 'PASS' }, linkedSubcomponents: ['md-control', 'md-test-pieces'] }, + ], + inputRequirements: [{ type: 'PACKED_CHIPS', specifications: { sealed: true } }], + outputSpecification: 
{ type: 'FINISHED_CHIPS', attributes: { qualityCleared: true, metalFree: true } }, +}; + +export default metalDetectorComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/mixer.component.ts b/packages/controlmart/src/helpers/manufacturing/components/mixer.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..c7914ddeda463a7fc3638b72cfec8ba28416f41c --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/mixer.component.ts @@ -0,0 +1,102 @@ +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const mixingVesselSubcomponent: SubComponent = { + id: 'mixer-vessel', + name: 'Mixing Vessel', + description: 'Jacketed stainless steel vessel for ingredient blending.', + attributes: [ + { id: 'capacity', name: 'Capacity', type: 'float', unit: 'L', range: { min: 100, max: 10000 }, defaultValue: 2000, description: 'Total vessel capacity.' }, + { id: 'material', name: 'Material', type: 'enum', range: { values: ['SS304', 'SS316'] }, defaultValue: 'SS316', description: 'Construction material.' }, + { id: 'jacket_type', name: 'Jacket Type', type: 'enum', range: { values: ['Single', 'Dimple', 'Half-pipe'] }, defaultValue: 'Dimple', description: 'Thermal jacket type.' }, + { id: 'current_level', name: 'Current Level', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 75, criticalForQuality: true, description: 'Current fill level.' }, + ], + chaosScenarios: [ + { id: 'vessel-overflow', name: 'Vessel Overflow', description: 'Level sensor failure causing overfill.', probability: 0.015, affectedAttributes: ['current_level'], severity: 'high', qualityImpact: { checkType: 'LEVEL_CONTROL', failureMessage: 'Vessel overflow detected; product loss.' }, possibleCauses: ['Level sensor failure', 'Valve stuck open'], mitigationActions: ['Emergency shutoff', 'Cleanup', 'Sensor replacement'] }, + ], +}; + +const agitatorSubcomponent: SubComponent = { + id: 'mixer-agitator', + name: 'High-Shear Agitator', + description: 'Multi-stage agitator for homogenization and aeration.', + attributes: [ + { id: 'type', name: 'Agitator Type', type: 'enum', range: { values: ['Propeller', 'Turbine', 'Anchor', 'Helical'] }, defaultValue: 'Turbine', description: 'Agitator blade design.' }, + { id: 'rpm', name: 'Speed', type: 'integer', unit: 'RPM', range: { min: 50, max: 3000 }, defaultValue: 1200, criticalForQuality: true, description: 'Rotation speed.' }, + { id: 'motor_power', name: 'Motor Power', type: 'float', unit: 'kW', range: { min: 5, max: 200 }, defaultValue: 30, description: 'Drive motor capacity.' }, + { id: 'torque', name: 'Torque', type: 'float', unit: 'Nm', range: { min: 10, max: 500 }, defaultValue: 150, description: 'Current torque reading.' 
}, + ], + chaosScenarios: [ + { id: 'speed-fluctuation', name: 'Speed Fluctuation', description: 'VFD causing inconsistent mixing speed.', probability: 0.04, affectedAttributes: ['rpm'], severity: 'medium', qualityImpact: { checkType: 'MIXING_UNIFORMITY', failureMessage: 'Inconsistent agitation; product texture affected.', degradation: 20 }, possibleCauses: ['VFD fault', 'Power quality', 'Motor issue'], mitigationActions: ['Speed reset', 'VFD check', 'Extended mixing'] }, + { id: 'blade-damage', name: 'Blade Damage', description: 'Foreign object or fatigue causing blade damage.', probability: 0.01, affectedAttributes: ['type'], severity: 'critical', qualityImpact: { checkType: 'CONTAMINATION_TEST', failureMessage: 'Metal particles detected from blade damage.' }, possibleCauses: ['Foreign object', 'Metal fatigue', 'Corrosion'], mitigationActions: ['Immediate stop', 'Batch rejection', 'Blade inspection'] }, + ], +}; + +const aerationSystemSubcomponent: SubComponent = { + id: 'mixer-aeration', + name: 'Aeration System', + description: 'Controlled air injection for overrun (volume increase).', + attributes: [ + { id: 'air_flow_rate', name: 'Air Flow Rate', type: 'float', unit: 'L/min', range: { min: 0, max: 500 }, defaultValue: 100, criticalForQuality: true, description: 'Injected air flow rate.' }, + { id: 'overrun_target', name: 'Overrun Target', type: 'float', unit: '%', range: { min: 50, max: 150 }, defaultValue: 100, criticalForQuality: true, description: 'Target volume increase percentage.' }, + { id: 'air_filter_status', name: 'Air Filter Status', type: 'enum', range: { values: ['Clean', 'Normal', 'Replace'] }, defaultValue: 'Normal', description: 'HEPA filter condition.' }, + ], + chaosScenarios: [ + { id: 'low-overrun', name: 'Insufficient Overrun', description: 'Air injection below target causing dense product.', probability: 0.05, affectedAttributes: ['air_flow_rate', 'overrun_target'], severity: 'medium', qualityImpact: { checkType: 'OVERRUN', failureMessage: 'Overrun below specification; product too dense.', degradation: 25 }, possibleCauses: ['Compressor issue', 'Blocked line', 'Valve fault'], mitigationActions: ['Check air supply', 'Extend aeration', 'Valve inspection'] }, + { id: 'filter-contamination', name: 'Air Filter Contamination', description: 'Compromised air filter introducing contaminants.', probability: 0.02, affectedAttributes: ['air_filter_status'], severity: 'high', qualityImpact: { checkType: 'CONTAMINATION_TEST', failureMessage: 'Airborne contamination from degraded filter.' }, possibleCauses: ['Filter age', 'Moisture ingress', 'System breach'], mitigationActions: ['Filter replacement', 'Batch hold', 'Air quality test'] }, + ], +}; + +const ingredientDosersSubcomponent: SubComponent = { + id: 'mixer-dosers', + name: 'Ingredient Dosing System', + description: 'Automated dispensing of sugar, flavorings, and additives.', + attributes: [ + { id: 'sugar_doser_accuracy', name: 'Sugar Doser Accuracy', type: 'float', unit: '%', range: { min: 98, max: 100 }, defaultValue: 99.5, criticalForQuality: true, description: 'Sugar dosing accuracy.' }, + { id: 'flavoring_doser_accuracy', name: 'Flavoring Doser Accuracy', type: 'float', unit: '%', range: { min: 97, max: 100 }, defaultValue: 99, criticalForQuality: true, description: 'Flavoring dosing accuracy.' }, + { id: 'hopper_levels', name: 'Hopper Levels', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 80, description: 'Ingredient hopper fill levels.' 
}, + ], + chaosScenarios: [ + { id: 'dosing-error', name: 'Dosing Inaccuracy', description: 'Incorrect ingredient quantities dispensed.', probability: 0.04, affectedAttributes: ['sugar_doser_accuracy', 'flavoring_doser_accuracy'], severity: 'medium', qualityImpact: { checkType: 'FLAVOR_PROFILE', failureMessage: 'Flavor profile deviation due to dosing error.', degradation: 15 }, possibleCauses: ['Calibration drift', 'Clogged nozzle', 'Load cell fault'], mitigationActions: ['Recalibration', 'Nozzle cleaning', 'Manual adjustment'] }, + { id: 'hopper-empty', name: 'Hopper Empty', description: 'Ingredient hopper runs empty during production.', probability: 0.03, affectedAttributes: ['hopper_levels'], severity: 'high', qualityImpact: { checkType: 'BATCH_COMPLETENESS', failureMessage: 'Production halted due to ingredient shortage.' }, possibleCauses: ['Supply chain issue', 'Sensor failure', 'Usage miscalculation'], mitigationActions: ['Emergency refill', 'Batch pause', 'Recipe adjustment'] }, + ], +}; + +const temperatureControlSubcomponent: SubComponent = { + id: 'mixer-temp-control', + name: 'Temperature Control', + description: 'Jacket cooling system for maintaining mix temperature.', + attributes: [ + { id: 'setpoint', name: 'Temperature Setpoint', type: 'float', unit: '°C', range: { min: -10, max: 10 }, defaultValue: -6, criticalForQuality: true, description: 'Target mix temperature.' }, + { id: 'actual_temp', name: 'Actual Temperature', type: 'float', unit: '°C', range: { min: -15, max: 15 }, defaultValue: -6, criticalForQuality: true, description: 'Current product temperature.' }, + { id: 'coolant_flow', name: 'Coolant Flow', type: 'float', unit: 'L/min', range: { min: 0, max: 1000 }, defaultValue: 300, description: 'Glycol coolant flow rate.' }, + ], + chaosScenarios: [ + { id: 'temp-rise', name: 'Temperature Rise', description: 'Cooling insufficient causing product warming.', probability: 0.035, affectedAttributes: ['actual_temp', 'coolant_flow'], severity: 'high', qualityImpact: { checkType: 'TEMPERATURE', failureMessage: 'Mix temperature above specification; texture at risk.' 
}, possibleCauses: ['Chiller overload', 'Glycol shortage', 'Fouled jacket'], mitigationActions: ['Boost cooling', 'Check chiller', 'Reduce batch size'] }, + ], +}; + +export const mixerComponent: ManufacturingComponent = { + id: 'mixer', + machineId: 'MCH_MIXER_01', + name: 'Mixer', + type: 'processing', + productionLine: 'ICE_CREAM', + description: 'High-shear mixing system for combining pasteurized base with sweeteners, flavorings, and air to create ice cream mix.', + specifications: { manufacturer: 'Tetra Pak', model: 'Hoyer MixMaster-2000', yearInstalled: 2022, maintenanceSchedule: 'Daily cleaning, Weekly inspection' }, + subcomponents: [mixingVesselSubcomponent, agitatorSubcomponent, aerationSystemSubcomponent, ingredientDosersSubcomponent, temperatureControlSubcomponent], + operationalParameters: [ + { id: 'mixing_speed', name: 'Mixing Speed', nominalValue: 1200, tolerance: 100, unit: 'RPM' }, + { id: 'mixing_time', name: 'Mixing Time', nominalValue: 15, tolerance: 2, unit: 'min' }, + { id: 'target_overrun', name: 'Target Overrun', nominalValue: 100, tolerance: 10, unit: '%' }, + { id: 'target_temp', name: 'Target Temperature', nominalValue: -6, tolerance: 2, unit: '°C' }, + ], + qualityCheckPoints: [ + { id: 'qc-overrun', checkType: 'OVERRUN', description: 'Verify air incorporation meets specification.', method: 'automatic', frequency: 'batch', acceptanceCriteria: { parameter: 'overrun', operator: 'between', value: [80, 120], unit: '%' }, linkedSubcomponents: ['mixer-aeration'] }, + { id: 'qc-viscosity', checkType: 'VISCOSITY', description: 'Check mix viscosity for proper texture.', method: 'sampling', frequency: 'batch', acceptanceCriteria: { parameter: 'viscosity', operator: 'eq', value: 'WITHIN_SPEC' }, linkedSubcomponents: ['mixer-agitator', 'mixer-temp-control'] }, + { id: 'qc-flavor', checkType: 'FLAVOR_PROFILE', description: 'Sensory evaluation of flavor balance.', method: 'sampling', frequency: 'batch', acceptanceCriteria: { parameter: 'flavor', operator: 'eq', value: 'APPROVED' }, linkedSubcomponents: ['mixer-dosers'] }, + ], + inputRequirements: [{ type: 'PASTEURIZED_BASE', specifications: { temperature: { max: 10 } } }], + outputSpecification: { type: 'ICE_CREAM_MIX', attributes: { temperature: -6, overrun: 100, consistency: 'SMOOTH' } }, +}; + +export default mixerComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/pasteurizer.component.ts b/packages/controlmart/src/helpers/manufacturing/components/pasteurizer.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..8f405bb295315a3280b925848556d688dbbad304 --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/pasteurizer.component.ts @@ -0,0 +1,431 @@ +/** + * MCH_PASTEUR_01 - Pasteurizer Component Definition + * Heat treatment system for eliminating pathogens while preserving nutritional value. 
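+ *
+ * Illustrative lookup (a documentation sketch; assumes this component is registered
+ * in componentsByMachineId via the components index):
+ * @example
+ * const pasteurizer = getComponentByMachineId('MCH_PASTEUR_01');
+ * // key operational parameters: pasteurization temperature 85 ±2 °C, hold time 15 ±2 s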
+ */ + +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +// ============================================================================ +// SUBCOMPONENTS +// ============================================================================ + +const heatExchangerSubcomponent: SubComponent = { + id: 'pasteurizer-heat-exchanger', + name: 'Heat Exchanger', + description: 'Plate or tubular heat exchanger for rapid heating and cooling of product.', + attributes: [ + { + id: 'type', + name: 'Type', + type: 'enum', + description: 'Heat exchanger design type.', + range: { values: ['Plate', 'Tubular', 'Scraped-Surface'] }, + defaultValue: 'Plate', + }, + { + id: 'surface_area', + name: 'Surface Area', + type: 'float', + unit: 'm²', + description: 'Total heat transfer surface area.', + range: { min: 5, max: 500 }, + defaultValue: 50, + }, + { + id: 'plate_count', + name: 'Plate Count', + type: 'integer', + description: 'Number of heat transfer plates.', + range: { min: 10, max: 200 }, + defaultValue: 60, + }, + { + id: 'material', + name: 'Material', + type: 'enum', + description: 'Plate material construction.', + range: { values: ['SS316', 'Titanium', 'Hastelloy'] }, + defaultValue: 'SS316', + }, + ], + chaosScenarios: [ + { + id: 'plate-fouling', + name: 'Plate Fouling', + description: 'Protein deposits reducing heat transfer efficiency.', + probability: 0.04, + affectedAttributes: ['surface_area'], + severity: 'medium', + qualityImpact: { + checkType: 'TEMPERATURE', + failureMessage: 'Heat transfer reduced; pasteurization temperature may be insufficient.', + degradation: 20, + }, + possibleCauses: ['Extended run time', 'High protein content', 'Insufficient CIP'], + mitigationActions: ['CIP cycle', 'Increase heating time', 'Manual inspection'], + }, + { + id: 'plate-leak', + name: 'Plate Gasket Leak', + description: 'Gasket failure causing product-to-heating medium cross-contamination.', + probability: 0.015, + affectedAttributes: ['plate_count'], + severity: 'critical', + qualityImpact: { + checkType: 'CONTAMINATION_TEST', + failureMessage: 'Cross-contamination detected from heat exchanger gasket failure.', + }, + possibleCauses: ['Gasket aging', 'Over-torque', 'Chemical attack'], + mitigationActions: ['Immediate shutdown', 'Batch rejection', 'Gasket replacement'], + }, + ], +}; + +const holdingTubeSubcomponent: SubComponent = { + id: 'pasteurizer-holding-tube', + name: 'Holding Tube', + description: 'Insulated tube ensuring product maintains target temperature for required duration.', + attributes: [ + { + id: 'length', + name: 'Length', + type: 'float', + unit: 'm', + description: 'Total tube length determining hold time.', + range: { min: 5, max: 100 }, + defaultValue: 20, + }, + { + id: 'diameter', + name: 'Diameter', + type: 'float', + unit: 'mm', + description: 'Internal tube diameter.', + range: { min: 25, max: 150 }, + defaultValue: 75, + }, + { + id: 'insulation_thickness', + name: 'Insulation Thickness', + type: 'float', + unit: 'mm', + description: 'Thickness of thermal insulation.', + range: { min: 25, max: 100 }, + defaultValue: 50, + }, + { + id: 'hold_time', + name: 'Hold Time', + type: 'float', + unit: 's', + description: 'Minimum residence time at pasteurization temperature.', + range: { min: 15, max: 30 }, + defaultValue: 15, + criticalForQuality: true, + }, + ], + chaosScenarios: [ + { + id: 'insufficient-hold-time', + name: 'Insufficient Hold Time', + description: 'Flow rate too high causing inadequate pasteurization.', + probability: 
0.02, + affectedAttributes: ['hold_time'], + severity: 'critical', + qualityImpact: { + checkType: 'PATHOGEN_TEST', + failureMessage: 'Hold time below minimum; pasteurization efficacy compromised.', + }, + possibleCauses: ['Flow controller malfunction', 'Pump overspeeding', 'Valve timing error'], + mitigationActions: ['Divert valve activation', 'Flow rate reduction', 'Batch reprocessing'], + }, + { + id: 'temperature-drop', + name: 'Temperature Drop in Holding', + description: 'Insulation failure causing temperature loss in holding tube.', + probability: 0.03, + affectedAttributes: ['insulation_thickness'], + severity: 'high', + qualityImpact: { + checkType: 'TEMPERATURE', + failureMessage: 'Temperature drop detected in holding tube.', + degradation: 15, + }, + possibleCauses: ['Insulation damage', 'Ambient temperature extremes', 'Steam trace failure'], + mitigationActions: ['Increase inlet temperature', 'Insulation repair', 'Steam trace check'], + }, + ], +}; + +const temperatureControlSubcomponent: SubComponent = { + id: 'pasteurizer-temp-control', + name: 'Temperature Control System', + description: 'PID-controlled heating and cooling system with safety interlocks.', + attributes: [ + { + id: 'heating_temperature', + name: 'Heating Temperature', + type: 'float', + unit: '°C', + description: 'Target pasteurization temperature.', + range: { min: 72, max: 95 }, + defaultValue: 85, + criticalForQuality: true, + }, + { + id: 'cooling_temperature', + name: 'Cooling Temperature', + type: 'float', + unit: '°C', + description: 'Target outlet cooling temperature.', + range: { min: 2, max: 10 }, + defaultValue: 4, + criticalForQuality: true, + }, + { + id: 'divert_valve_setpoint', + name: 'Divert Valve Setpoint', + type: 'float', + unit: '°C', + description: 'Temperature below which product is diverted for reprocessing.', + range: { min: 70, max: 90 }, + defaultValue: 82, + }, + { + id: 'steam_pressure', + name: 'Steam Pressure', + type: 'float', + unit: 'bar', + description: 'Heating steam supply pressure.', + range: { min: 2, max: 6 }, + defaultValue: 4, + }, + ], + chaosScenarios: [ + { + id: 'temp-undershoot', + name: 'Temperature Undershoot', + description: 'Pasteurization temperature falls below setpoint.', + probability: 0.035, + affectedAttributes: ['heating_temperature'], + severity: 'high', + qualityImpact: { + checkType: 'TEMPERATURE', + failureMessage: 'Pasteurization temperature below minimum; divert activated.', + }, + possibleCauses: ['Steam supply fluctuation', 'Control valve issue', 'Sensor lag'], + mitigationActions: ['Divert to reprocessing', 'Steam pressure check', 'Sensor verification'], + }, + { + id: 'cooling-insufficient', + name: 'Insufficient Cooling', + description: 'Product not cooled to target temperature.', + probability: 0.04, + affectedAttributes: ['cooling_temperature'], + severity: 'medium', + qualityImpact: { + checkType: 'TEMPERATURE', + failureMessage: 'Cooling incomplete; product temperature above specification.', + degradation: 10, + }, + possibleCauses: ['Chilled water shortage', 'Fouled cooling section', 'High ambient temp'], + mitigationActions: ['Reduce flow rate', 'Check chiller', 'Additional cooling stage'], + }, + ], +}; + +const sensorsSubcomponent: SubComponent = { + id: 'pasteurizer-sensors', + name: 'Sensors & Instrumentation', + description: 'Temperature, pressure, and flow sensors for process monitoring.', + attributes: [ + { + id: 'inlet_temp_sensor', + name: 'Inlet Temperature', + type: 'float', + unit: '°C', + description: 'Raw product inlet 
temperature.', + range: { min: 2, max: 20 }, + defaultValue: 6, + }, + { + id: 'heating_temp_sensor', + name: 'Post-Heating Temperature', + type: 'float', + unit: '°C', + description: 'Temperature after heating section.', + range: { min: 72, max: 95 }, + defaultValue: 85, + criticalForQuality: true, + }, + { + id: 'holding_temp_sensor', + name: 'Holding Outlet Temperature', + type: 'float', + unit: '°C', + description: 'Temperature at holding tube outlet.', + range: { min: 72, max: 95 }, + defaultValue: 84.5, + criticalForQuality: true, + }, + { + id: 'outlet_temp_sensor', + name: 'Outlet Temperature', + type: 'float', + unit: '°C', + description: 'Final product outlet temperature.', + range: { min: 2, max: 10 }, + defaultValue: 4, + }, + { + id: 'flow_rate', + name: 'Flow Rate', + type: 'float', + unit: 'L/hr', + description: 'Product flow rate through system.', + range: { min: 1000, max: 50000 }, + defaultValue: 10000, + }, + ], + chaosScenarios: [ + { + id: 'sensor-mismatch', + name: 'Sensor Reading Mismatch', + description: 'Discrepancy between redundant temperature sensors.', + probability: 0.025, + affectedAttributes: ['heating_temp_sensor', 'holding_temp_sensor'], + severity: 'medium', + qualityImpact: { + checkType: 'PROCESS_CONTROL', + failureMessage: 'Temperature sensor disagreement; manual verification required.', + }, + possibleCauses: ['Sensor drift', 'Wiring issue', 'Calibration mismatch'], + mitigationActions: ['Manual temperature check', 'Sensor swap', 'Recalibration'], + }, + { + id: 'flow-sensor-error', + name: 'Flow Sensor Error', + description: 'Flow meter providing incorrect readings.', + probability: 0.03, + affectedAttributes: ['flow_rate'], + severity: 'high', + qualityImpact: { + checkType: 'FLOW_CONTROL', + failureMessage: 'Flow rate uncertainty; hold time cannot be verified.', + }, + possibleCauses: ['Air bubbles', 'Sensor fouling', 'Electronic failure'], + mitigationActions: ['Process pause', 'Meter cleaning', 'Backup calculation'], + }, + ], +}; + +const pumpSystemSubcomponent: SubComponent = { + id: 'pasteurizer-pumps', + name: 'Pump System', + description: 'Product and utility pumps for system operation.', + attributes: [ + { + id: 'product_pump_type', + name: 'Product Pump Type', + type: 'enum', + description: 'Type of product pump.', + range: { values: ['Centrifugal', 'Positive-Displacement', 'Lobe'] }, + defaultValue: 'Centrifugal', + }, + { + id: 'pump_capacity', + name: 'Pump Capacity', + type: 'float', + unit: 'L/hr', + description: 'Maximum pumping capacity.', + range: { min: 5000, max: 100000 }, + defaultValue: 15000, + }, + { + id: 'pump_pressure', + name: 'Discharge Pressure', + type: 'float', + unit: 'bar', + description: 'Pump discharge pressure.', + range: { min: 2, max: 10 }, + defaultValue: 4, + }, + ], + chaosScenarios: [ + { + id: 'pump-cavitation', + name: 'Pump Cavitation', + description: 'Vapor bubbles forming in pump causing damage and flow issues.', + probability: 0.025, + affectedAttributes: ['pump_capacity', 'pump_pressure'], + severity: 'high', + qualityImpact: { + checkType: 'FLOW_CONTROL', + failureMessage: 'Pump cavitation detected; inconsistent product flow.', + }, + possibleCauses: ['Low inlet pressure', 'High temperature', 'Valve restriction'], + mitigationActions: ['Check inlet conditions', 'Reduce speed', 'Valve adjustment'], + }, + ], +}; + +// ============================================================================ +// PASTEURIZER COMPONENT DEFINITION +// 
============================================================================ + +export const pasteurizerComponent: ManufacturingComponent = { + id: 'pasteurizer', + machineId: 'MCH_PASTEUR_01', + name: 'Pasteurizer', + type: 'processing', + productionLine: 'ICE_CREAM', + description: 'High-Temperature Short-Time (HTST) pasteurization system for eliminating pathogens while preserving nutritional and sensory properties.', + specifications: { + manufacturer: 'APV Processing', + model: 'UltraPast-5000', + yearInstalled: 2021, + maintenanceSchedule: 'Daily CIP, Weekly inspection, Monthly calibration', + }, + subcomponents: [ + heatExchangerSubcomponent, + holdingTubeSubcomponent, + temperatureControlSubcomponent, + sensorsSubcomponent, + pumpSystemSubcomponent, + ], + operationalParameters: [ + { id: 'pasteurization_temp', name: 'Pasteurization Temperature', nominalValue: 85, tolerance: 2, unit: '°C' }, + { id: 'hold_time', name: 'Hold Time', nominalValue: 15, tolerance: 2, unit: 's' }, + { id: 'cooling_temp', name: 'Cooling Temperature', nominalValue: 4, tolerance: 2, unit: '°C' }, + { id: 'flow_rate', name: 'Flow Rate', nominalValue: 10000, tolerance: 500, unit: 'L/hr' }, + ], + qualityCheckPoints: [ + { + id: 'qc-pasteurization-temp', + checkType: 'TEMPERATURE', + description: 'Verify pasteurization temperature meets minimum requirements.', + method: 'automatic', + frequency: 'continuous', + acceptanceCriteria: { parameter: 'temperature', operator: 'gte', value: 82, unit: '°C' }, + linkedSubcomponents: ['pasteurizer-temp-control', 'pasteurizer-sensors'], + }, + { + id: 'qc-pathogen-test', + checkType: 'PATHOGEN_TEST', + description: 'Confirm elimination of pathogenic microorganisms.', + method: 'sampling', + frequency: 'batch', + acceptanceCriteria: { parameter: 'pathogen_presence', operator: 'eq', value: 'NEGATIVE' }, + linkedSubcomponents: ['pasteurizer-holding-tube', 'pasteurizer-temp-control'], + }, + ], + inputRequirements: [ + { type: 'RAW_MILK_CREAM', specifications: { temperature: { max: 10 }, quality: 'GRADE_A' } }, + ], + outputSpecification: { + type: 'PASTEURIZED_BASE', + attributes: { temperature: 4, pathogenFree: true, nutritionalValue: 'PRESERVED' }, + }, +}; + +export default pasteurizerComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/pouch-packer.component.ts b/packages/controlmart/src/helpers/manufacturing/components/pouch-packer.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..8de25c9f49f21e5f111bd05b4c280a903cfa150a --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/pouch-packer.component.ts @@ -0,0 +1,110 @@ + +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const weigherSubcomponent: SubComponent = { + id: 'packer-weigher', + name: 'Multi-Head Weigher', + description: 'Combination weigher for precise product portioning.', + attributes: [ + { id: 'head_count', name: 'Weigher Heads', type: 'integer', range: { min: 10, max: 24 }, defaultValue: 14, description: 'Number of weighing buckets.' }, + { id: 'target_weight', name: 'Target Weight', type: 'float', unit: 'g', range: { min: 25, max: 500 }, defaultValue: 150, criticalForQuality: true, description: 'Target bag fill weight.' }, + { id: 'weight_accuracy', name: 'Accuracy', type: 'float', unit: 'g', range: { min: 0.5, max: 5 }, defaultValue: 1.5, criticalForQuality: true, description: 'Weight tolerance.' 
}, + { id: 'speed', name: 'Weighing Speed', type: 'integer', unit: 'weighs/min', range: { min: 30, max: 150 }, defaultValue: 80, description: 'Weighing cycle rate.' }, + ], + chaosScenarios: [ + { id: 'weight-drift', name: 'Weight Drift', description: 'Weigher calibration drifting causing inaccurate fills.', probability: 0.04, affectedAttributes: ['weight_accuracy', 'target_weight'], severity: 'medium', qualityImpact: { checkType: 'BAG_WEIGHT', failureMessage: 'Fill weight deviation exceeding tolerance.', degradation: 15 }, possibleCauses: ['Vibration', 'Temperature change', 'Load cell drift'], mitigationActions: ['Recalibration', 'Vibration isolation', 'Temperature compensation'] }, + { id: 'bucket-stuck', name: 'Bucket Stuck', description: 'Weigher bucket gate not releasing product.', probability: 0.025, affectedAttributes: ['head_count'], severity: 'medium', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Reduced weigher capacity from stuck bucket.' }, possibleCauses: ['Product buildup', 'Actuator fault', 'Oily residue'], mitigationActions: ['Gate cleaning', 'Actuator check', 'Reduced oil content'] }, + { id: 'underfill-trend', name: 'Underfill Trend', description: 'Consistent underfilling detected.', probability: 0.03, affectedAttributes: ['target_weight'], severity: 'high', qualityImpact: { checkType: 'BAG_WEIGHT', failureMessage: 'Systematic underfill violating net weight requirements.' }, possibleCauses: ['Algorithm issue', 'Low product supply', 'Target mismatch'], mitigationActions: ['Target adjustment', 'Supply check', 'Algorithm review'] }, + ], +}; + +const filmHandlingSubcomponent: SubComponent = { + id: 'packer-film', + name: 'Film Handling System', + description: 'Rollstock film unwind, forming, and registration.', + attributes: [ + { id: 'film_type', name: 'Film Type', type: 'enum', range: { values: ['Laminate', 'Metallized', 'Clear'] }, defaultValue: 'Laminate', description: 'Packaging film material.' }, + { id: 'film_width', name: 'Film Width', type: 'float', unit: 'mm', range: { min: 200, max: 600 }, defaultValue: 320, description: 'Web width.' }, + { id: 'roll_diameter', name: 'Roll Diameter', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 75, description: 'Remaining film on roll.' }, + { id: 'tension', name: 'Film Tension', type: 'enum', range: { values: ['Optimal', 'Low', 'High'] }, defaultValue: 'Optimal', criticalForQuality: true, description: 'Web tension status.' }, + { id: 'registration_status', name: 'Print Registration', type: 'enum', range: { values: ['Locked', 'Adjusting', 'Lost'] }, defaultValue: 'Locked', criticalForQuality: true, description: 'Print eye alignment status.' }, + ], + chaosScenarios: [ + { id: 'film-break', name: 'Film Break', description: 'Web break stopping production.', probability: 0.02, affectedAttributes: ['tension'], severity: 'high', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Film break causing line stoppage.' 
}, possibleCauses: ['Tension spike', 'Film defect', 'Splice failure'], mitigationActions: ['Splice repair', 'Tension adjustment', 'Film inspection'] }, + { id: 'registration-lost', name: 'Registration Lost', description: 'Print alignment drifting.', probability: 0.035, affectedAttributes: ['registration_status'], severity: 'medium', qualityImpact: { checkType: 'PRINT_QUALITY', failureMessage: 'Print registration off-center affecting appearance.', degradation: 15 }, possibleCauses: ['Eye mark issue', 'Sensor dirty', 'Film stretch'], mitigationActions: ['Sensor cleaning', 'Eye mark check', 'Tension adjustment'] }, + { id: 'roll-low', name: 'Low Roll Warning', description: 'Film roll approaching end.', probability: 0.03, affectedAttributes: ['roll_diameter'], severity: 'low', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Film roll change required soon.' }, possibleCauses: ['Normal consumption'], mitigationActions: ['Roll change preparation', 'Splice preparation'] }, + ], +}; + +const sealingSystemSubcomponent: SubComponent = { + id: 'packer-sealing', + name: 'Sealing System', + description: 'Vertical and horizontal heat sealing bars.', + attributes: [ + { id: 'vertical_seal_temp', name: 'Vertical Seal Temp', type: 'float', unit: '°C', range: { min: 120, max: 200 }, defaultValue: 165, criticalForQuality: true, description: 'Fin seal temperature.' }, + { id: 'horizontal_seal_temp', name: 'Horizontal Seal Temp', type: 'float', unit: '°C', range: { min: 130, max: 210 }, defaultValue: 175, criticalForQuality: true, description: 'End seal temperature.' }, + { id: 'seal_pressure', name: 'Seal Pressure', type: 'float', unit: 'bar', range: { min: 2, max: 6 }, defaultValue: 4, criticalForQuality: true, description: 'Jaw closing pressure.' }, + { id: 'seal_time', name: 'Seal Dwell Time', type: 'float', unit: 'ms', range: { min: 100, max: 500 }, defaultValue: 250, description: 'Seal contact duration.' }, + ], + chaosScenarios: [ + { id: 'weak-seal', name: 'Weak Seal', description: 'Seal strength below specification.', probability: 0.04, affectedAttributes: ['vertical_seal_temp', 'horizontal_seal_temp', 'seal_pressure'], severity: 'high', qualityImpact: { checkType: 'SEAL_STRENGTH', failureMessage: 'Weak seals risking product freshness.' }, possibleCauses: ['Low temperature', 'Low pressure', 'Contamination'], mitigationActions: ['Temperature increase', 'Pressure adjustment', 'Jaw cleaning'] }, + { id: 'seal-burn', name: 'Seal Burn', description: 'Excessive heat damaging film.', probability: 0.02, affectedAttributes: ['vertical_seal_temp', 'horizontal_seal_temp'], severity: 'medium', qualityImpact: { checkType: 'VISUAL_INSPECTION', failureMessage: 'Burned seals affecting package appearance.' 
}, possibleCauses: ['Temperature overshoot', 'Extended dwell', 'Film variation'], mitigationActions: ['Temperature reduction', 'Dwell adjustment', 'Film check'] }, + { id: 'product-in-seal', name: 'Product in Seal', description: 'Chips caught in seal area.', probability: 0.03, affectedAttributes: ['seal_pressure'], severity: 'medium', qualityImpact: { checkType: 'SEAL_STRENGTH', failureMessage: 'Seal contamination causing leakers.', degradation: 20 }, possibleCauses: ['Overfill', 'Timing issue', 'Film flutter'], mitigationActions: ['Fill adjustment', 'Timing sync', 'Air purge'] }, + ], +}; + +const gasFlushSubcomponent: SubComponent = { + id: 'packer-gas', + name: 'Gas Flushing System', + description: 'Modified atmosphere packaging with nitrogen.', + attributes: [ + { id: 'gas_type', name: 'Gas Type', type: 'enum', range: { values: ['Nitrogen', 'CO2', 'Mix'] }, defaultValue: 'Nitrogen', description: 'MAP gas composition.' }, + { id: 'gas_flow', name: 'Gas Flow Rate', type: 'float', unit: 'L/min', range: { min: 10, max: 100 }, defaultValue: 40, description: 'Flush gas flow rate.' }, + { id: 'residual_oxygen', name: 'Residual O2', type: 'float', unit: '%', range: { min: 0, max: 5 }, defaultValue: 1, criticalForQuality: true, description: 'Oxygen level in sealed bag.' }, + { id: 'gas_purity', name: 'Gas Purity', type: 'float', unit: '%', range: { min: 95, max: 99.99 }, defaultValue: 99.5, description: 'Nitrogen purity level.' }, + ], + chaosScenarios: [ + { id: 'high-oxygen', name: 'High Residual Oxygen', description: 'Oxygen level above target affecting shelf life.', probability: 0.035, affectedAttributes: ['residual_oxygen', 'gas_flow'], severity: 'medium', qualityImpact: { checkType: 'NITROGEN_LEVEL', failureMessage: 'Oxygen level too high; shelf life at risk.', degradation: 20 }, possibleCauses: ['Low gas flow', 'Seal leak', 'Timing issue'], mitigationActions: ['Flow increase', 'Seal check', 'Timing adjustment'] }, + { id: 'gas-supply-low', name: 'Gas Supply Low', description: 'Nitrogen tank running low.', probability: 0.02, affectedAttributes: ['gas_purity'], severity: 'high', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Gas supply interruption stopping MAP.' }, possibleCauses: ['High usage', 'Delivery delay'], mitigationActions: ['Tank change', 'Emergency supply', 'Backup activation'] }, + ], +}; + +const outputConveyorSubcomponent: SubComponent = { + id: 'packer-output', + name: 'Output Conveyor', + description: 'Bag discharge and accumulation conveyor.', + attributes: [ + { id: 'conveyor_speed', name: 'Conveyor Speed', type: 'float', unit: 'm/min', range: { min: 10, max: 60 }, defaultValue: 30, description: 'Takeaway speed.' }, + { id: 'bag_count', name: 'Bag Counter', type: 'integer', range: { min: 0, max: 999999 }, defaultValue: 0, description: 'Accumulated bag count.' }, + { id: 'reject_count', name: 'Rejects', type: 'integer', range: { min: 0, max: 10000 }, defaultValue: 0, description: 'Rejected bag count.' }, + ], + chaosScenarios: [ + { id: 'jam', name: 'Bag Jam', description: 'Bags accumulating and jamming.', probability: 0.025, affectedAttributes: ['conveyor_speed'], severity: 'medium', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Bag jam causing packaging interruption.' 
}, possibleCauses: ['Downstream blockage', 'Speed mismatch', 'Bag shape'], mitigationActions: ['Clear jam', 'Speed adjustment', 'Downstream check'] }, + ], +}; + +export const pouchPackerComponent: ManufacturingComponent = { + id: 'pouch-packer', + machineId: 'MCH_PACKER_02', + name: 'Pouch Packer', + type: 'packaging', + productionLine: 'CHIPS', + description: 'Vertical Form-Fill-Seal packaging machine with multi-head weigher and nitrogen flushing.', + specifications: { manufacturer: 'Ishida', model: 'Atlas-Z-416', yearInstalled: 2022, maintenanceSchedule: 'Daily cleaning, Weekly calibration, Monthly overhaul' }, + subcomponents: [weigherSubcomponent, filmHandlingSubcomponent, sealingSystemSubcomponent, gasFlushSubcomponent, outputConveyorSubcomponent], + operationalParameters: [ + { id: 'target_weight', name: 'Target Weight', nominalValue: 150, tolerance: 3, unit: 'g' }, + { id: 'line_speed', name: 'Line Speed', nominalValue: 80, tolerance: 5, unit: 'bags/min' }, + { id: 'seal_temp', name: 'Seal Temperature', nominalValue: 170, tolerance: 10, unit: '°C' }, + { id: 'residual_o2', name: 'Residual Oxygen', nominalValue: 1, tolerance: 0.5, unit: '%' }, + ], + qualityCheckPoints: [ + { id: 'qc-weight', checkType: 'BAG_WEIGHT', description: 'Verify bag weight meets specification.', method: 'automatic', frequency: 'per_unit', acceptanceCriteria: { parameter: 'weight', operator: 'between', value: [147, 153], unit: 'g' }, linkedSubcomponents: ['packer-weigher'] }, + { id: 'qc-seal', checkType: 'SEAL_STRENGTH', description: 'Verify seal integrity.', method: 'sampling', frequency: 'hourly', acceptanceCriteria: { parameter: 'strength', operator: 'gt', value: 15, unit: 'N' }, linkedSubcomponents: ['packer-sealing'] }, + { id: 'qc-nitrogen', checkType: 'NITROGEN_LEVEL', description: 'Verify low oxygen atmosphere.', method: 'sampling', frequency: 'hourly', acceptanceCriteria: { parameter: 'oxygen', operator: 'lt', value: 2, unit: '%' }, linkedSubcomponents: ['packer-gas'] }, + ], + inputRequirements: [{ type: 'SEASONED_CHIPS', specifications: { temperature: { max: 40 } } }], + outputSpecification: { type: 'PACKED_CHIPS', attributes: { sealed: true, nitrogenFlushed: true, weightVerified: true } }, +}; + +export default pouchPackerComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/seasoning-drum.component.ts b/packages/controlmart/src/helpers/manufacturing/components/seasoning-drum.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..6b264fad324742e6cd7b08f1d6240b881aeaba3b --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/components/seasoning-drum.component.ts @@ -0,0 +1,104 @@ + +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const drumVesselSubcomponent: SubComponent = { + id: 'seasoner-drum', + name: 'Rotating Drum', + description: 'Inclined rotating cylinder for product tumbling and seasoning contact.', + attributes: [ + { id: 'diameter', name: 'Drum Diameter', type: 'float', unit: 'm', range: { min: 0.5, max: 2 }, defaultValue: 1.2, description: 'Internal drum diameter.' }, + { id: 'length', name: 'Drum Length', type: 'float', unit: 'm', range: { min: 2, max: 10 }, defaultValue: 5, description: 'Drum length.' }, + { id: 'rotation_speed', name: 'Rotation Speed', type: 'float', unit: 'RPM', range: { min: 5, max: 30 }, defaultValue: 15, criticalForQuality: true, description: 'Drum rotation speed.' 
}, + { id: 'inclination', name: 'Inclination Angle', type: 'float', unit: '°', range: { min: 2, max: 10 }, defaultValue: 5, description: 'Drum tilt angle.' }, + { id: 'internal_flights', name: 'Internal Flights', type: 'integer', range: { min: 4, max: 12 }, defaultValue: 8, description: 'Number of lifting flights.' }, + ], + chaosScenarios: [ + { id: 'speed-drift', name: 'Speed Drift', description: 'Drum rotation speed varying from setpoint.', probability: 0.035, affectedAttributes: ['rotation_speed'], severity: 'medium', qualityImpact: { checkType: 'SEASONING_COVERAGE', failureMessage: 'Uneven seasoning from speed variation.', degradation: 15 }, possibleCauses: ['Belt slip', 'Motor issue', 'VFD fault'], mitigationActions: ['Belt tension', 'Motor check', 'VFD calibration'] }, + { id: 'flight-wear', name: 'Flight Wear', description: 'Internal flights worn affecting tumbling.', probability: 0.025, affectedAttributes: ['internal_flights'], severity: 'medium', qualityImpact: { checkType: 'SEASONING_COVERAGE', failureMessage: 'Poor product turnover from worn flights.', degradation: 20 }, possibleCauses: ['Abrasion', 'Age', 'Salt corrosion'], mitigationActions: ['Flight replacement', 'Liner installation'] }, + ], +}; + +const seasoningDeliverySubcomponent: SubComponent = { + id: 'seasoner-delivery', + name: 'Seasoning Delivery System', + description: 'Precision seasoning metering and application.', + attributes: [ + { id: 'delivery_type', name: 'Delivery Type', type: 'enum', range: { values: ['Dry-Tumble', 'Slurry-Spray', 'Oil-Based'] }, defaultValue: 'Dry-Tumble', description: 'Seasoning application method.' }, + { id: 'seasoning_rate', name: 'Seasoning Rate', type: 'float', unit: '%', range: { min: 2, max: 12 }, defaultValue: 6, criticalForQuality: true, description: 'Seasoning as percentage of product weight.' }, + { id: 'feeder_speed', name: 'Feeder Speed', type: 'float', unit: 'kg/hr', range: { min: 10, max: 200 }, defaultValue: 60, criticalForQuality: true, description: 'Seasoning feed rate.' }, + { id: 'hopper_level', name: 'Hopper Level', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 70, description: 'Seasoning supply level.' }, + ], + chaosScenarios: [ + { id: 'rate-variation', name: 'Rate Variation', description: 'Seasoning delivery rate fluctuating.', probability: 0.045, affectedAttributes: ['feeder_speed', 'seasoning_rate'], severity: 'medium', qualityImpact: { checkType: 'FLAVOR_INTENSITY', failureMessage: 'Inconsistent seasoning level affecting taste.', degradation: 20 }, possibleCauses: ['Feeder clog', 'Humidity', 'Clumping'], mitigationActions: ['Feeder cleaning', 'Humidity control', 'Anti-caking agent'] }, + { id: 'hopper-empty', name: 'Seasoning Shortage', description: 'Seasoning supply exhausted.', probability: 0.025, affectedAttributes: ['hopper_level'], severity: 'high', qualityImpact: { checkType: 'SEASONING_COVERAGE', failureMessage: 'Production interrupted by seasoning shortage.' 
}, possibleCauses: ['Supply chain', 'Usage spike', 'Sensor error'], mitigationActions: ['Emergency refill', 'Recipe switch', 'Sensor check'] }, + { id: 'clumping', name: 'Seasoning Clumping', description: 'Moisture causing seasoning to clump.', probability: 0.04, affectedAttributes: ['feeder_speed'], severity: 'medium', qualityImpact: { checkType: 'SEASONING_COVERAGE', failureMessage: 'Clumped seasoning causing uneven application.', degradation: 25 }, possibleCauses: ['High humidity', 'Poor storage', 'Old seasoning'], mitigationActions: ['Humidity control', 'Fresh seasoning', 'Sieve installation'] }, + ], +}; + +const seasoningBlendSubcomponent: SubComponent = { + id: 'seasoner-blend', + name: 'Seasoning Blend', + description: 'Seasoning formulation and composition.', + attributes: [ + { id: 'blend_id', name: 'Blend ID', type: 'string', description: 'Active seasoning recipe identifier.' }, + { id: 'flavor_profile', name: 'Flavor Profile', type: 'enum', range: { values: ['CLASSIC_SALTED', 'BBQ_BLAZE', 'SOUR_CREAM', 'CHEESE', 'CUSTOM'] }, defaultValue: 'CLASSIC_SALTED', criticalForQuality: true, description: 'Seasoning flavor type.' }, + { id: 'salt_content', name: 'Salt Content', type: 'float', unit: '%', range: { min: 30, max: 90 }, defaultValue: 70, description: 'Salt percentage in blend.' }, + { id: 'allergen_present', name: 'Allergens', type: 'enum', range: { values: ['None', 'Dairy', 'Gluten', 'Multiple'] }, defaultValue: 'None', criticalForQuality: true, description: 'Allergen content.' }, + ], + chaosScenarios: [ + { id: 'wrong-blend', name: 'Wrong Blend Loaded', description: 'Incorrect seasoning flavor in hopper.', probability: 0.01, affectedAttributes: ['blend_id', 'flavor_profile'], severity: 'critical', qualityImpact: { checkType: 'FLAVOR_PROFILE', failureMessage: 'Wrong seasoning applied; batch must be segregated.' }, possibleCauses: ['Operator error', 'Label missing', 'Similar appearance'], mitigationActions: ['Batch segregation', 'Verification process', 'Batch coding'] }, + { id: 'allergen-cross', name: 'Allergen Cross-Contact', description: 'Allergen contamination from previous batch.', probability: 0.015, affectedAttributes: ['allergen_present'], severity: 'critical', qualityImpact: { checkType: 'ALLERGEN_CONTROL', failureMessage: 'Potential allergen cross-contamination.' }, possibleCauses: ['Incomplete cleaning', 'Sequence error', 'Shared equipment'], mitigationActions: ['Enhanced cleaning', 'Batch hold', 'Testing'] }, + ], +}; + +const exhaustCollectionSubcomponent: SubComponent = { + id: 'seasoner-exhaust', + name: 'Dust Collection', + description: 'Seasoning dust extraction and recovery system.', + attributes: [ + { id: 'extraction_rate', name: 'Extraction Rate', type: 'float', unit: 'm³/hr', range: { min: 500, max: 5000 }, defaultValue: 2000, description: 'Air extraction volume.' }, + { id: 'recovery_efficiency', name: 'Recovery Efficiency', type: 'float', unit: '%', range: { min: 80, max: 99 }, defaultValue: 95, description: 'Seasoning dust recovery rate.' }, + { id: 'filter_status', name: 'Filter Status', type: 'enum', range: { values: ['Clean', 'Normal', 'Replace'] }, defaultValue: 'Normal', description: 'Dust collector filter condition.' }, + ], + chaosScenarios: [ + { id: 'filter-blocked', name: 'Filter Blockage', description: 'Dust collector filter saturated.', probability: 0.03, affectedAttributes: ['filter_status', 'extraction_rate'], severity: 'medium', qualityImpact: { checkType: 'DUST_CONTROL', failureMessage: 'Seasoning dust accumulation in work area.' 
}, possibleCauses: ['High throughput', 'Oily seasoning', 'Maintenance due'], mitigationActions: ['Filter change', 'Pulse cleaning', 'Speed reduction'] }, + ], +}; + +const productSensorSubcomponent: SubComponent = { + id: 'seasoner-sensors', + name: 'Product Sensors', + description: 'Monitoring sensors for product flow and seasoning application.', + attributes: [ + { id: 'product_flow_rate', name: 'Product Flow', type: 'float', unit: 'kg/min', range: { min: 10, max: 100 }, defaultValue: 40, description: 'Chip throughput rate.' }, + { id: 'product_temp', name: 'Product Temperature', type: 'float', unit: '°C', range: { min: 20, max: 80 }, defaultValue: 45, description: 'Incoming chip temperature.' }, + { id: 'color_sensor', name: 'Color Sensor', type: 'enum', range: { values: ['Active', 'Calibrating', 'Fault'] }, defaultValue: 'Active', description: 'Seasoning color verification.' }, + ], + chaosScenarios: [ + { id: 'sensor-drift', name: 'Sensor Drift', description: 'Flow or color sensors providing inaccurate readings.', probability: 0.03, affectedAttributes: ['product_flow_rate'], severity: 'medium', qualityImpact: { checkType: 'PROCESS_CONTROL', failureMessage: 'Seasoning ratio uncertain due to sensor issue.', degradation: 10 }, possibleCauses: ['Dust accumulation', 'Calibration due', 'Damage'], mitigationActions: ['Sensor cleaning', 'Recalibration', 'Manual verification'] }, + ], +}; + +export const seasoningDrumComponent: ManufacturingComponent = { + id: 'seasoning-drum', + machineId: 'MCH_SEASON_01', + name: 'Seasoning Drum', + type: 'processing', + productionLine: 'CHIPS', + description: 'Rotating drum seasoning applicator for uniform flavor distribution on fried chips.', + specifications: { manufacturer: 'Heat and Control', model: 'FastBack-260', yearInstalled: 2022, maintenanceSchedule: 'Daily cleaning, Weekly deep clean, Monthly calibration' }, + subcomponents: [drumVesselSubcomponent, seasoningDeliverySubcomponent, seasoningBlendSubcomponent, exhaustCollectionSubcomponent, productSensorSubcomponent], + operationalParameters: [ + { id: 'drum_speed', name: 'Drum Speed', nominalValue: 15, tolerance: 2, unit: 'RPM' }, + { id: 'seasoning_rate', name: 'Seasoning Rate', nominalValue: 6, tolerance: 0.5, unit: '%' }, + { id: 'tumble_time', name: 'Tumble Time', nominalValue: 120, tolerance: 15, unit: 's' }, + ], + qualityCheckPoints: [ + { id: 'qc-coverage', checkType: 'SEASONING_COVERAGE', description: 'Verify even seasoning distribution.', method: 'sampling', frequency: 'hourly', acceptanceCriteria: { parameter: 'coverage', operator: 'gt', value: 95, unit: '%' }, linkedSubcomponents: ['seasoner-drum', 'seasoner-delivery'] }, + { id: 'qc-flavor', checkType: 'FLAVOR_INTENSITY', description: 'Verify flavor meets target profile.', method: 'sampling', frequency: 'batch', acceptanceCriteria: { parameter: 'flavor', operator: 'eq', value: 'TARGET' }, linkedSubcomponents: ['seasoner-delivery', 'seasoner-blend'] }, + ], + inputRequirements: [{ type: 'FRIED_CHIPS', specifications: { temperature: { max: 60 } } }], + outputSpecification: { type: 'SEASONED_CHIPS', attributes: { seasoningCoverage: 95, flavorProfile: 'TARGET' } }, +}; + +export default seasoningDrumComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/components/slicer.component.ts b/packages/controlmart/src/helpers/manufacturing/components/slicer.component.ts new file mode 100644 index 0000000000000000000000000000000000000000..a0c78533ad548cb9b81533b75f4bc09733515d39 --- /dev/null +++ 
b/packages/controlmart/src/helpers/manufacturing/components/slicer.component.ts @@ -0,0 +1,106 @@ + +import type { ManufacturingComponent, SubComponent } from '../types.manufacturing.helper'; + +const feedSystemSubcomponent: SubComponent = { + id: 'slicer-feed', + name: 'Feed System', + description: 'Automated potato feeding and orientation system.', + attributes: [ + { id: 'hopper_capacity', name: 'Hopper Capacity', type: 'float', unit: 'kg', range: { min: 100, max: 2000 }, defaultValue: 500, description: 'Raw material holding capacity.' }, + { id: 'feed_rate', name: 'Feed Rate', type: 'float', unit: 'kg/min', range: { min: 10, max: 200 }, defaultValue: 50, description: 'Material throughput rate.' }, + { id: 'hopper_level', name: 'Hopper Level', type: 'float', unit: '%', range: { min: 0, max: 100 }, defaultValue: 75, description: 'Current fill level.' }, + { id: 'vibrator_frequency', name: 'Vibrator Frequency', type: 'float', unit: 'Hz', range: { min: 20, max: 80 }, defaultValue: 50, description: 'Feed vibration frequency.' }, + ], + chaosScenarios: [ + { id: 'feed-jam', name: 'Feed Jam', description: 'Potatoes jamming in feed system.', probability: 0.04, affectedAttributes: ['feed_rate'], severity: 'medium', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Feed system jam causing production interruption.' }, possibleCauses: ['Oversized potato', 'Debris', 'Wet potatoes'], mitigationActions: ['Clear jam', 'Size screening', 'Reduce moisture'] }, + { id: 'hopper-empty', name: 'Hopper Empty', description: 'Raw material supply exhausted.', probability: 0.025, affectedAttributes: ['hopper_level'], severity: 'high', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Production stopped due to material shortage.' }, possibleCauses: ['Supply delay', 'High demand', 'Sensor failure'], mitigationActions: ['Emergency refill', 'Production scheduling'] }, + ], +}; + +const slicingHeadSubcomponent: SubComponent = { + id: 'slicer-head', + name: 'Slicing Head', + description: 'High-speed centrifugal slicing mechanism.', + attributes: [ + { id: 'blade_count', name: 'Blade Count', type: 'integer', range: { min: 4, max: 16 }, defaultValue: 8, description: 'Number of cutting blades.' }, + { id: 'blade_material', name: 'Blade Material', type: 'enum', range: { values: ['Stainless', 'Tungsten-Carbide', 'Ceramic'] }, defaultValue: 'Tungsten-Carbide', description: 'Blade construction material.' }, + { id: 'blade_rpm', name: 'Blade RPM', type: 'integer', unit: 'RPM', range: { min: 1000, max: 5000 }, defaultValue: 3000, criticalForQuality: true, description: 'Cutting head rotation speed.' }, + { id: 'slice_thickness', name: 'Slice Thickness', type: 'float', unit: 'mm', range: { min: 1.0, max: 3.0 }, defaultValue: 1.5, criticalForQuality: true, description: 'Target slice thickness.' }, + { id: 'blade_sharpness', name: 'Blade Sharpness', type: 'enum', range: { values: ['Sharp', 'Good', 'Dull', 'Replace'] }, defaultValue: 'Sharp', criticalForQuality: true, description: 'Current blade condition.' 
}, + ], + chaosScenarios: [ + { id: 'blade-dull', name: 'Blade Dulling', description: 'Blades becoming dull affecting slice quality.', probability: 0.05, affectedAttributes: ['blade_sharpness', 'slice_thickness'], severity: 'medium', qualityImpact: { checkType: 'THICKNESS_UNIFORMITY', failureMessage: 'Inconsistent slice thickness from dull blades.', degradation: 20 }, possibleCauses: ['Normal wear', 'Hard material', 'Extended run'], mitigationActions: ['Blade replacement', 'Sharpening', 'Reduce speed'] }, + { id: 'blade-chip', name: 'Blade Chipping', description: 'Blade damage from foreign object.', probability: 0.015, affectedAttributes: ['blade_material'], severity: 'critical', qualityImpact: { checkType: 'CONTAMINATION_TEST', failureMessage: 'Metal fragment contamination risk from blade damage.' }, possibleCauses: ['Stone in potatoes', 'Metal debris', 'Fatigue fracture'], mitigationActions: ['Immediate stop', 'Blade inspection', 'Batch hold'] }, + { id: 'thickness-drift', name: 'Thickness Drift', description: 'Slice thickness deviating from setpoint.', probability: 0.04, affectedAttributes: ['slice_thickness'], severity: 'medium', qualityImpact: { checkType: 'THICKNESS_UNIFORMITY', failureMessage: 'Slice thickness outside specification.', degradation: 15 }, possibleCauses: ['Calibration drift', 'Potato variability', 'Blade wear'], mitigationActions: ['Recalibrate', 'Adjust gap', 'Speed adjustment'] }, + ], +}; + +const washRinseSubcomponent: SubComponent = { + id: 'slicer-wash', + name: 'Wash/Rinse System', + description: 'Water spray system for starch removal and cleaning.', + attributes: [ + { id: 'water_temp', name: 'Water Temperature', type: 'float', unit: '°C', range: { min: 10, max: 30 }, defaultValue: 18, description: 'Rinse water temperature.' }, + { id: 'water_flow', name: 'Water Flow Rate', type: 'float', unit: 'L/min', range: { min: 50, max: 500 }, defaultValue: 200, description: 'Rinse water consumption.' }, + { id: 'spray_pressure', name: 'Spray Pressure', type: 'float', unit: 'bar', range: { min: 1, max: 5 }, defaultValue: 2.5, description: 'Spray nozzle pressure.' }, + { id: 'starch_removal', name: 'Starch Removal', type: 'float', unit: '%', range: { min: 80, max: 100 }, defaultValue: 95, criticalForQuality: true, description: 'Starch removal efficiency.' }, + ], + chaosScenarios: [ + { id: 'nozzle-clog', name: 'Nozzle Clogging', description: 'Spray nozzles blocked with starch or debris.', probability: 0.035, affectedAttributes: ['spray_pressure', 'starch_removal'], severity: 'medium', qualityImpact: { checkType: 'STARCH_LEVEL', failureMessage: 'Inadequate starch removal affecting frying quality.', degradation: 15 }, possibleCauses: ['Starch buildup', 'Hard water scale', 'Debris'], mitigationActions: ['Nozzle cleaning', 'Water treatment', 'Pressure check'] }, + { id: 'water-shortage', name: 'Water Supply Issue', description: 'Insufficient water supply for rinsing.', probability: 0.02, affectedAttributes: ['water_flow'], severity: 'high', qualityImpact: { checkType: 'STARCH_LEVEL', failureMessage: 'High residual starch from water shortage.' 
}, possibleCauses: ['Main supply issue', 'Pump failure', 'Valve stuck'], mitigationActions: ['Check supply', 'Pump inspection', 'Valve override'] }, + ], +}; + +const conveyorSubcomponent: SubComponent = { + id: 'slicer-conveyor', + name: 'Discharge Conveyor', + description: 'Belt conveyor for sliced potato transport.', + attributes: [ + { id: 'belt_speed', name: 'Belt Speed', type: 'float', unit: 'm/min', range: { min: 5, max: 50 }, defaultValue: 20, description: 'Conveyor movement speed.' }, + { id: 'belt_material', name: 'Belt Material', type: 'enum', range: { values: ['Food-Grade-PU', 'Modular-Plastic', 'Stainless-Mesh'] }, defaultValue: 'Food-Grade-PU', description: 'Belt construction.' }, + { id: 'belt_tension', name: 'Belt Tension', type: 'enum', range: { values: ['Normal', 'Loose', 'Tight'] }, defaultValue: 'Normal', description: 'Current belt tension state.' }, + ], + chaosScenarios: [ + { id: 'belt-slip', name: 'Belt Slippage', description: 'Conveyor belt slipping on drive drum.', probability: 0.03, affectedAttributes: ['belt_tension', 'belt_speed'], severity: 'medium', qualityImpact: { checkType: 'LINE_EFFICIENCY', failureMessage: 'Product backup from conveyor issue.' }, possibleCauses: ['Low tension', 'Wet belt', 'Worn drum'], mitigationActions: ['Tension adjustment', 'Clean belt', 'Drum inspection'] }, + { id: 'belt-damage', name: 'Belt Damage', description: 'Belt tear or damage causing contamination risk.', probability: 0.01, affectedAttributes: ['belt_material'], severity: 'high', qualityImpact: { checkType: 'CONTAMINATION_TEST', failureMessage: 'Belt fragment contamination risk.' }, possibleCauses: ['Sharp edge contact', 'Age', 'Misalignment'], mitigationActions: ['Belt replacement', 'Alignment check', 'Batch inspection'] }, + ], +}; + +const inspectionSubcomponent: SubComponent = { + id: 'slicer-inspection', + name: 'Optical Inspection', + description: 'Camera-based defect detection for sliced potatoes.', + attributes: [ + { id: 'camera_count', name: 'Camera Count', type: 'integer', range: { min: 1, max: 8 }, defaultValue: 4, description: 'Number of inspection cameras.' }, + { id: 'defect_threshold', name: 'Defect Threshold', type: 'float', unit: '%', range: { min: 1, max: 10 }, defaultValue: 5, description: 'Maximum acceptable defect rate.' }, + { id: 'reject_rate', name: 'Current Reject Rate', type: 'float', unit: '%', range: { min: 0, max: 20 }, defaultValue: 3, description: 'Current defect rejection rate.' }, + ], + chaosScenarios: [ + { id: 'camera-fault', name: 'Camera Fault', description: 'Inspection camera malfunction.', probability: 0.025, affectedAttributes: ['camera_count'], severity: 'medium', qualityImpact: { checkType: 'DEFECT_RATE', failureMessage: 'Reduced defect detection from camera issue.' 
}, possibleCauses: ['Lens fouling', 'Lighting issue', 'Processing error'], mitigationActions: ['Camera cleaning', 'Light adjustment', 'Manual inspection'] }, + { id: 'high-defects', name: 'High Defect Rate', description: 'Raw material quality causing high rejection.', probability: 0.04, affectedAttributes: ['reject_rate'], severity: 'medium', qualityImpact: { checkType: 'DEFECT_RATE', failureMessage: 'High rejection rate indicating quality issue.', degradation: 25 }, possibleCauses: ['Poor potato quality', 'Storage issue', 'Seasonal variation'], mitigationActions: ['Supplier contact', 'Sorting adjustment', 'Batch segregation'] }, + ], +}; + +export const slicerComponent: ManufacturingComponent = { + id: 'slicer', + machineId: 'MCH_SLICER_01', + name: 'Slicer', + type: 'processing', + productionLine: 'CHIPS', + description: 'High-speed centrifugal potato slicer with integrated washing and optical inspection.', + specifications: { manufacturer: 'Heat and Control', model: 'AccuSlice-3000', yearInstalled: 2022, maintenanceSchedule: 'Daily blade check, Weekly full inspection' }, + subcomponents: [feedSystemSubcomponent, slicingHeadSubcomponent, washRinseSubcomponent, conveyorSubcomponent, inspectionSubcomponent], + operationalParameters: [ + { id: 'slice_thickness', name: 'Slice Thickness', nominalValue: 1.5, tolerance: 0.1, unit: 'mm' }, + { id: 'throughput', name: 'Throughput', nominalValue: 3000, tolerance: 200, unit: 'kg/hr' }, + { id: 'blade_speed', name: 'Blade Speed', nominalValue: 3000, tolerance: 100, unit: 'RPM' }, + ], + qualityCheckPoints: [ + { id: 'qc-thickness', checkType: 'THICKNESS_UNIFORMITY', description: 'Verify slice thickness uniformity.', method: 'automatic', frequency: 'continuous', acceptanceCriteria: { parameter: 'thickness', operator: 'between', value: [1.4, 1.6], unit: 'mm' }, linkedSubcomponents: ['slicer-head'] }, + { id: 'qc-defect', checkType: 'DEFECT_RATE', description: 'Monitor defect rejection rate.', method: 'automatic', frequency: 'continuous', acceptanceCriteria: { parameter: 'defect_rate', operator: 'lt', value: 5, unit: '%' }, linkedSubcomponents: ['slicer-inspection'] }, + ], + inputRequirements: [{ type: 'RAW_POTATOES', specifications: { grade: 'PREMIUM', moisture: { max: 80 } } }], + outputSpecification: { type: 'SLICED_POTATOES', attributes: { thickness: 1.5, starchRemoved: true, defectsRemoved: true } }, +}; + +export default slicerComponent; diff --git a/packages/controlmart/src/helpers/manufacturing/index.ts b/packages/controlmart/src/helpers/manufacturing/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..d5641e7c389f20068ab7df730f14a2968d8c6848 --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/index.ts @@ -0,0 +1,83 @@ +/** + * Manufacturing Helpers + * + * This module provides detailed manufacturing component definitions and simulation + * capabilities for the production lines: + * + * ICE CREAM LINE: + * MCH_PASTEUR_01 → MCH_FERMENT_01 → MCH_MIXER_01 → MCH_PACKER_01 → MCH_CHILL_01 + * (Pasteurizer) (Fermenter) (Mixer) (Cup Packer) (Chill Room) + * + * CHIPS LINE: + * MCH_SLICER_01 → MCH_FRYER_01 → MCH_SEASON_01 → MCH_PACKER_02 → MCH_MD_01 + * (Slicer) (Fryer) (Seasoning Drum) (Pouch Packer) (Metal Detector) + * + * Each component includes: + * - Detailed subcomponent definitions with attributes + * - Chaos scenarios that can affect subcomponents + * - Quality check points linked to subcomponents + * - Operational parameters + * + * Usage in OD scripts: + * ```javascript + * const helpers = 
require('./helpers/manufacturing'); + * const result = helpers.simulateMachineStep('MCH_FRYER_01', inputProduct, context); + * ``` + */ + +// Export types +export type { + AttributeType, + AttributeDefinition, + SubComponent, + SubComponentChaosScenario, + ManufacturingComponent, + QualityCheckPoint, + MaterialRequirement, + OutputSpecification, + SimulationContext, + BOMItem, + MachineStepResult, + SubcomponentState, + QualityCheckResult, + ChaosEvent, +} from './types.manufacturing.helper'; + +// Export type utilities +export { + generateRandomInRange, + generateBatchId, + shouldTriggerChaos, + selectChaosScenario, +} from './types.manufacturing.helper'; + +// Export individual components +export { + // Ice Cream Line + pasteurizerComponent, + fermenterComponent, + mixerComponent, + cupPackerComponent, + chillRoomComponent, + // Chips Line + slicerComponent, + fryerComponent, + seasoningDrumComponent, + pouchPackerComponent, + metalDetectorComponent, + // Component lookup + componentsByMachineId, + productionLines, + getProductionLineComponents, + getComponentByMachineId, + getProductionLineFromSku, +} from './components'; + +// Export simulation functions +export { + simulateMachineStep, + simulateProductionLine, + getComponentInfo, + generateMachineChaosEvents, + defaultSimulationConfig, +} from './simulation.manufacturing.helper'; diff --git a/packages/controlmart/src/helpers/manufacturing/simulation.manufacturing.helper.ts b/packages/controlmart/src/helpers/manufacturing/simulation.manufacturing.helper.ts new file mode 100644 index 0000000000000000000000000000000000000000..f2c6c1ffcf8ee2657a061f4ab1745e56dac7ec96 --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/simulation.manufacturing.helper.ts @@ -0,0 +1,401 @@ + +import { + componentsByMachineId, + getProductionLineComponents, + getProductionLineFromSku, +} from './components'; +import { + type ManufacturingComponent, + type SubComponent, + type SubComponentChaosScenario, + type MachineStepResult, + type SubcomponentState, + type QualityCheckResult, + type ChaosEvent, + type SimulationContext, + type BOMItem, + generateBatchId, + generateRandomInRange, + shouldTriggerChaos, + selectChaosScenario, +} from './types.manufacturing.helper'; + + +interface SimulationConfig { + chaosEnabled: boolean; + chaosProbability: number; + verboseLogging: boolean; +} + +const DEFAULT_CONFIG: SimulationConfig = { + chaosEnabled: true, + chaosProbability: 0.3, // 30% chance of chaos when enabled + verboseLogging: false, +}; + +function simulateSubcomponentState( + subcomponent: SubComponent, + config: SimulationConfig +): { state: SubcomponentState; chaosEvent: ChaosEvent | null } { + const attributes: Record = {}; + let status: SubcomponentState['status'] = 'normal'; + let chaosEvent: ChaosEvent | null = null; + const anomalies: string[] = []; + + // Generate attribute values + for (const attr of subcomponent.attributes) { + if (attr.type === 'float' || attr.type === 'integer') { + const range = attr.range; + if (range?.min !== undefined && range?.max !== undefined) { + attributes[attr.id] = generateRandomInRange(range.min, range.max, attr.type === 'integer' ? 0 : 2); + } else { + attributes[attr.id] = attr.defaultValue; + } + } else if (attr.type === 'enum' && attr.range?.values) { + attributes[attr.id] = attr.defaultValue || attr.range.values[0]; + } else if (attr.type === 'boolean') { + attributes[attr.id] = attr.defaultValue ?? 
true; + } else { + attributes[attr.id] = attr.defaultValue || ''; + } + } + + // Apply chaos if enabled + if (config.chaosEnabled && subcomponent.chaosScenarios.length > 0) { + const scenario = selectChaosScenario(subcomponent.chaosScenarios, config.chaosProbability); + + if (scenario) { + // Apply chaos effects + status = scenario.severity === 'critical' ? 'failed' : + scenario.severity === 'high' ? 'warning' : 'degraded'; + + anomalies.push(scenario.description); + + // Modify affected attributes + for (const attrId of scenario.affectedAttributes) { + const attr = subcomponent.attributes.find(a => a.id === attrId); + if (attr && (attr.type === 'float' || attr.type === 'integer')) { + // Deviate the value outside normal range + const currentValue = attributes[attrId]; + const deviation = scenario.severity === 'critical' ? 0.3 : + scenario.severity === 'high' ? 0.2 : 0.1; + attributes[attrId] = currentValue * (1 + (Math.random() > 0.5 ? deviation : -deviation)); + } + } + + chaosEvent = { + id: `chaos-${Date.now()}-${Math.random().toString(36).substring(2, 8)}`, + timestamp: new Date().toISOString(), + subcomponentId: subcomponent.id, + scenarioId: scenario.id, + scenarioName: scenario.name, + severity: scenario.severity, + description: scenario.description, + affectedAttributes: scenario.affectedAttributes, + qualityImpact: scenario.qualityImpact.failureMessage, + mitigationApplied: scenario.mitigationActions[0], + }; + } + } + + return { + state: { + subcomponentId: subcomponent.id, + subcomponentName: subcomponent.name, + attributes, + status, + anomalies: anomalies.length > 0 ? anomalies : undefined, + }, + chaosEvent, + }; +} + +function simulateQualityChecks( + component: ManufacturingComponent, + subcomponentStates: SubcomponentState[], + chaosEvents: ChaosEvent[] +): QualityCheckResult[] { + const results: QualityCheckResult[] = []; + + for (const checkPoint of component.qualityCheckPoints) { + let status: 'PASS' | 'FAIL' | 'WARNING' = 'PASS'; + let value: any = 'WITHIN_SPEC'; + let failureReason: string | undefined; + let linkedChaosEvent: string | undefined; + + // Check if any chaos events affect this quality check + const affectingChaos = chaosEvents.find(event => { + const subcomp = component.subcomponents.find(s => s.id === event.subcomponentId); + if (!subcomp) return false; + + const scenario = subcomp.chaosScenarios.find(s => s.id === event.scenarioId); + return scenario?.qualityImpact.checkType === checkPoint.checkType; + }); + + if (affectingChaos) { + const subcomp = component.subcomponents.find(s => s.id === affectingChaos.subcomponentId); + const scenario = subcomp?.chaosScenarios.find(s => s.id === affectingChaos.scenarioId); + + if (scenario) { + if (scenario.severity === 'critical') { + status = 'FAIL'; + failureReason = scenario.qualityImpact.failureMessage; + } else if (scenario.severity === 'high') { + status = Math.random() > 0.5 ? 
'FAIL' : 'WARNING'; + failureReason = scenario.qualityImpact.failureMessage; + } else { + status = 'WARNING'; + } + + if (scenario.qualityImpact.degradation) { + value = `DEGRADED_${scenario.qualityImpact.degradation}%`; + } + + linkedChaosEvent = affectingChaos.id; + } + } + + // Generate realistic check values + if (status === 'PASS') { + const criteria = checkPoint.acceptanceCriteria; + if (criteria.operator === 'between' && Array.isArray(criteria.value)) { + value = generateRandomInRange(criteria.value[0], criteria.value[1]); + if (criteria.unit) value = `${value}${criteria.unit}`; + } else if (criteria.operator === 'eq') { + value = criteria.value; + } else if (criteria.operator === 'lt' || criteria.operator === 'lte') { + value = generateRandomInRange(0, criteria.value * 0.8); + if (criteria.unit) value = `${value}${criteria.unit}`; + } else if (criteria.operator === 'gt' || criteria.operator === 'gte') { + value = generateRandomInRange(criteria.value * 1.1, criteria.value * 1.5); + if (criteria.unit) value = `${value}${criteria.unit}`; + } + } + + results.push({ + checkType: checkPoint.checkType, + status, + value, + expectedRange: JSON.stringify(checkPoint.acceptanceCriteria), + failureReason, + linkedChaosEvent, + }); + } + + return results; +} + +export function simulateMachineStep( + machineId: string, + inputProduct: Record, + context: SimulationContext, + config: SimulationConfig = DEFAULT_CONFIG +): MachineStepResult { + const component = componentsByMachineId[machineId]; + if (!component) { + throw new Error(`Unknown machine ID: ${machineId}`); + } + + const startTime = new Date(); + const subcomponentStates: SubcomponentState[] = []; + const chaosEvents: ChaosEvent[] = []; + + // Simulate each subcomponent + for (const subcomp of component.subcomponents) { + const { state, chaosEvent } = simulateSubcomponentState(subcomp, config); + subcomponentStates.push(state); + if (chaosEvent) { + chaosEvents.push(chaosEvent); + } + } + + // Simulate quality checks + const qualityChecks = simulateQualityChecks(component, subcomponentStates, chaosEvents); + + // Calculate process time based on operational parameters + const baseTime = component.operationalParameters.find(p => + p.name.toLowerCase().includes('time') + )?.nominalValue || 120; + const processTimeSeconds = baseTime + Math.floor(Math.random() * 60); + + const endTime = new Date(startTime.getTime() + processTimeSeconds * 1000); + + // Generate output product + const outputProduct = generateOutputProduct(component, inputProduct, context, qualityChecks); + + // Generate machine parameters from operational parameters + const parameters: Record = {}; + for (const param of component.operationalParameters) { + const variance = param.tolerance * (Math.random() * 2 - 1); + parameters[param.id] = typeof param.nominalValue === 'number' + ? 
Number((param.nominalValue + variance).toFixed(2)) + : param.nominalValue; + } + + return { + machineId: component.machineId, + machineName: component.name, + operation: component.description.split(' ').slice(0, 3).join('_').toUpperCase(), + startTime: startTime.toISOString(), + endTime: endTime.toISOString(), + processTimeSeconds, + inputProduct, + outputProduct, + subcomponentStates, + parameters, + qualityChecks, + chaosEvents, + }; +} + +function generateOutputProduct( + component: ManufacturingComponent, + inputProduct: Record, + context: SimulationContext, + qualityChecks: QualityCheckResult[] +): MachineStepResult['outputProduct'] { + const output = component.outputSpecification; + const anyFailures = qualityChecks.some(qc => qc.status === 'FAIL'); + + // Calculate output quantity (may have some loss/gain depending on process) + let quantity = inputProduct.quantity || 100; + if (component.type === 'processing') { + // Processing may have yield loss + quantity = quantity * generateRandomInRange(0.85, 0.98); + } + + // Get temperature from component parameters or output spec + const tempParam = component.operationalParameters.find(p => + p.name.toLowerCase().includes('temp') + ); + const temperature = tempParam?.nominalValue || output.attributes?.temperature; + + return { + type: output.type, + quantity: Math.round(quantity), + unit: inputProduct.unit || 'EA', + temperature, + batchId: generateBatchId(component.machineId.replace('MCH_', '')), + ...output.attributes, + qualityStatus: anyFailures ? 'HOLD' : 'RELEASED', + }; +} + +export function simulateProductionLine( + productSku: string, + billOfMaterials: BOMItem[], + config: SimulationConfig = DEFAULT_CONFIG +): { + productionLineType: 'ICE_CREAM' | 'CHIPS'; + machineSteps: MachineStepResult[]; + totalProcessTime: number; + finalOutput: MachineStepResult['outputProduct']; + allQualityChecks: QualityCheckResult[]; + allChaosEvents: ChaosEvent[]; + overallStatus: 'PASS' | 'FAIL' | 'WARNING'; +} { + const lineType = getProductionLineFromSku(productSku); + if (!lineType) { + throw new Error(`Cannot determine production line for SKU: ${productSku}`); + } + + const components = getProductionLineComponents(lineType); + const context: SimulationContext = { + productionOrderId: `PO-${Date.now()}`, + productSku, + productName: productSku, + billOfMaterials, + chaosEnabled: config.chaosEnabled, + chaosProbability: config.chaosProbability, + }; + + const machineSteps: MachineStepResult[] = []; + const allQualityChecks: QualityCheckResult[] = []; + const allChaosEvents: ChaosEvent[] = []; + + // Initial input from BOM + let currentInput: Record = { + type: 'RAW_MATERIALS', + quantity: billOfMaterials.reduce((sum, item) => sum + item.requiredQty, 0), + unit: billOfMaterials[0]?.unit || 'KG', + bom: billOfMaterials, + }; + + // Execute each machine in sequence + for (const component of components) { + const stepResult = simulateMachineStep( + component.machineId, + currentInput, + context, + config + ); + + machineSteps.push(stepResult); + allQualityChecks.push(...stepResult.qualityChecks); + allChaosEvents.push(...stepResult.chaosEvents); + + // Output becomes input for next step + currentInput = stepResult.outputProduct; + } + + // Calculate totals + const totalProcessTime = machineSteps.reduce((sum, step) => sum + step.processTimeSeconds, 0); + const finalOutput = machineSteps[machineSteps.length - 1]!.outputProduct; + + // Determine overall status + const hasFail = allQualityChecks.some(qc => qc.status === 'FAIL'); + const hasWarning = 
allQualityChecks.some(qc => qc.status === 'WARNING'); + const overallStatus = hasFail ? 'FAIL' : hasWarning ? 'WARNING' : 'PASS'; + + return { + productionLineType: lineType, + machineSteps, + totalProcessTime, + finalOutput, + allQualityChecks, + allChaosEvents, + overallStatus, + }; +} + +export function getComponentInfo(machineId: string): ManufacturingComponent | undefined { + return componentsByMachineId[machineId]; +} + +export function generateMachineChaosEvents( + machineId: string, + chaosProbability: number = 0.3 +): { chaosEvents: ChaosEvent[]; affectedQualityChecks: string[] } { + const component = componentsByMachineId[machineId]; + if (!component) return { chaosEvents: [], affectedQualityChecks: [] }; + + const chaosEvents: ChaosEvent[] = []; + const affectedQualityChecks: string[] = []; + + for (const subcomp of component.subcomponents) { + const scenario = selectChaosScenario(subcomp.chaosScenarios, chaosProbability); + + if (scenario) { + chaosEvents.push({ + id: `chaos-${Date.now()}-${Math.random().toString(36).substring(2, 8)}`, + timestamp: new Date().toISOString(), + subcomponentId: subcomp.id, + scenarioId: scenario.id, + scenarioName: scenario.name, + severity: scenario.severity, + description: scenario.description, + affectedAttributes: scenario.affectedAttributes, + qualityImpact: scenario.qualityImpact.failureMessage, + mitigationApplied: scenario.mitigationActions[0], + }); + + if (!affectedQualityChecks.includes(scenario.qualityImpact.checkType)) { + affectedQualityChecks.push(scenario.qualityImpact.checkType); + } + } + } + + return { chaosEvents, affectedQualityChecks }; +} + +export { DEFAULT_CONFIG as defaultSimulationConfig }; diff --git a/packages/controlmart/src/helpers/manufacturing/types.manufacturing.helper.ts b/packages/controlmart/src/helpers/manufacturing/types.manufacturing.helper.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f2995f3c4eee1ff0f9977e7b2cef5c8f0e6415c --- /dev/null +++ b/packages/controlmart/src/helpers/manufacturing/types.manufacturing.helper.ts @@ -0,0 +1,198 @@ + +export type AttributeType = 'string' | 'float' | 'integer' | 'boolean' | 'enum' | 'datetime'; + +export interface AttributeDefinition { + id: string; + name: string; + type: AttributeType; + unit?: string; + description: string; + range?: { + min?: number; + max?: number; + values?: string[]; // For enum types + }; + defaultValue?: any; + criticalForQuality?: boolean; // If true, out-of-range values trigger quality failures +} + + +export interface SubComponent { + id: string; + name: string; + description: string; + attributes: AttributeDefinition[]; + chaosScenarios: SubComponentChaosScenario[]; +} + +export interface SubComponentChaosScenario { + id: string; + name: string; + description: string; + probability: number; // 0.0 to 1.0 + affectedAttributes: string[]; // Attribute IDs that can be affected + severity: 'low' | 'medium' | 'high' | 'critical'; + qualityImpact: { + checkType: string; + failureMessage: string; + degradation?: number; // Percentage degradation (0-100) + }; + possibleCauses: string[]; + mitigationActions: string[]; +} + + +export interface ManufacturingComponent { + id: string; + machineId: string; + name: string; + type: 'processing' | 'packaging' | 'quality_control' | 'storage'; + productionLine: 'ICE_CREAM' | 'CHIPS' | 'SHARED'; + description: string; + specifications: { + manufacturer?: string; + model?: string; + yearInstalled?: number; + maintenanceSchedule?: string; + }; + subcomponents: SubComponent[]; + 
operationalParameters: { + id: string; + name: string; + nominalValue: any; + tolerance: number; + unit: string; + }[]; + qualityCheckPoints: QualityCheckPoint[]; + inputRequirements: MaterialRequirement[]; + outputSpecification: OutputSpecification; +} + +export interface QualityCheckPoint { + id: string; + checkType: string; + description: string; + method: 'automatic' | 'manual' | 'sampling'; + frequency: 'continuous' | 'batch' | 'hourly' | 'per_unit'; + acceptanceCriteria: { + parameter: string; + operator: 'eq' | 'lt' | 'gt' | 'lte' | 'gte' | 'between' | 'in'; + value: any; + unit?: string; + }; + linkedSubcomponents: string[]; // Subcomponent IDs that affect this check +} + +export interface MaterialRequirement { + type: string; + specifications: Record; +} + +export interface OutputSpecification { + type: string; + attributes: Record; +} + + +export interface SimulationContext { + productionOrderId: string; + productSku: string; + productName: string; + billOfMaterials: BOMItem[]; + chaosEnabled: boolean; + chaosProbability: number; + randomSeed?: number; +} + +export interface BOMItem { + sku: string; + requiredQty: number; + unit: string; + lotNumber?: string; +} + +export interface MachineStepResult { + machineId: string; + machineName: string; + operation: string; + startTime: string; + endTime: string; + processTimeSeconds: number; + inputMaterials?: Record; + inputProduct?: Record; + outputProduct: { + type: string; + quantity: number; + unit: string; + temperature?: number; + batchId: string; + [key: string]: any; + }; + subcomponentStates: SubcomponentState[]; + parameters: Record; + qualityChecks: QualityCheckResult[]; + chaosEvents: ChaosEvent[]; + waste?: Record; + consumables?: Record; +} + +export interface SubcomponentState { + subcomponentId: string; + subcomponentName: string; + attributes: Record; + status: 'normal' | 'degraded' | 'warning' | 'failed'; + anomalies?: string[]; +} + +export interface QualityCheckResult { + checkType: string; + status: 'PASS' | 'FAIL' | 'WARNING'; + value: any; + expectedRange?: string; + failureReason?: string; + linkedChaosEvent?: string; +} + +export interface ChaosEvent { + id: string; + timestamp: string; + subcomponentId: string; + scenarioId: string; + scenarioName: string; + severity: 'low' | 'medium' | 'high' | 'critical'; + description: string; + affectedAttributes: string[]; + qualityImpact: string; + mitigationApplied?: string; +} + +export function generateRandomInRange(min: number, max: number, decimals = 2): number { + const value = Math.random() * (max - min) + min; + return Number(value.toFixed(decimals)); +} + +/** + * Generate a batch ID with component prefix + */ +export function generateBatchId(prefix: string): string { + return `${prefix}-${Date.now()}-${Math.random().toString(36).substring(2, 8).toUpperCase()}`; +} + +/** + * Check if chaos should trigger based on probability + */ +export function shouldTriggerChaos(probability: number, chaosProbability: number = 1.0): boolean { + return Math.random() < (probability * chaosProbability); +} + +/** + * Select a random chaos scenario from available scenarios + */ +export function selectChaosScenario( + scenarios: SubComponentChaosScenario[], + chaosProbability: number = 1.0 +): SubComponentChaosScenario | null { + const triggered = scenarios.filter(s => shouldTriggerChaos(s.probability, chaosProbability)); + if (triggered.length === 0) return null; + return triggered[Math.floor(Math.random() * triggered.length)]; +} diff --git 
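Before the job files below, a minimal usage sketch of the simulation helpers exported above, in the spirit of the OD-script example in the module header. The SKU `CHIPS-CLASSIC-150G` and the BOM lines are hypothetical placeholders (the SKU must be one that `getProductionLineFromSku` can resolve), and the import path assumes the `helpers/manufacturing` index shown earlier.

```typescript
import {
  simulateProductionLine,
  defaultSimulationConfig,
  type BOMItem,
} from './helpers/manufacturing';

// Hypothetical BOM lines; real values come from the production order.
const bom: BOMItem[] = [
  { sku: 'RAW-POTATO-PREMIUM', requiredQty: 500, unit: 'KG' },
  { sku: 'SEASONING-CLASSIC-SALTED', requiredQty: 30, unit: 'KG' },
];

// Hypothetical SKU; must map to the CHIPS or ICE_CREAM line.
const run = simulateProductionLine('CHIPS-CLASSIC-150G', bom, {
  ...defaultSimulationConfig,
  chaosProbability: 0.1, // lower the injection probability for a smoke test
});

console.log(run.overallStatus);                      // 'PASS' | 'WARNING' | 'FAIL'
console.log(run.machineSteps.map(s => s.machineId)); // MCH_SLICER_01 ... MCH_MD_01
console.log(`${run.allChaosEvents.length} chaos events injected`);
```

Spreading `defaultSimulationConfig` keeps chaos enabled while overriding only the probability, so a single run exercises the full machine sequence without being dominated by injected failures.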
a/packages/controlmart/src/jobs/delete-logqueue.job.ts b/packages/controlmart/src/jobs/delete-logqueue.job.ts new file mode 100644 index 0000000000000000000000000000000000000000..50965440dbf97e160ab71886fc653f13ce123a03 --- /dev/null +++ b/packages/controlmart/src/jobs/delete-logqueue.job.ts @@ -0,0 +1,40 @@ +import { Job } from "@hokify/agenda"; +import { deleteGlobalConsumedLogQueueEntries } from "../repository/log-queue.repository"; +import { createAppLogger } from "../utils/logger.util"; +import { defineJob, createRecurringJob } from "../services/scheduler.service"; + +const logger = createAppLogger({ service: "delete-logqueue-job" }); + +export const deleteConsumedLogQueueJob = async (job: Job, jobLogger: any) => { + logger.info("Starting delete consumed log queue job"); + + try { + // Use lastFinishedAt as the cutoff to ensure we keep logs for at least one cycle + // If first run, default to 10 minutes ago + const lastFinishedAt = job.attrs.lastFinishedAt; + const cutoffDate = lastFinishedAt ? new Date(lastFinishedAt) : new Date(Date.now() - 10 * 60 * 1000); + + logger.info({ cutoffDate }, "Deleting consumed logs older than cutoff"); + + const deletedCount = await deleteGlobalConsumedLogQueueEntries(cutoffDate); + + if (deletedCount > 0) { + logger.info({ deletedCount }, "Deleted old consumed log queue entries"); + } else { + logger.info("No old consumed log queue entries found to delete"); + } + + } catch (error) { + logger.error({ error: String(error) }, "Delete consumed log queue job failed"); + throw error; + } +}; + +export const registerDeleteLogQueueJob = async () => { + defineJob("delete-logqueue-job", deleteConsumedLogQueueJob); + await createRecurringJob( + "10 minutes", + "delete-logqueue-job", + { name: "delete-logqueue-job" } + ); +}; diff --git a/packages/controlmart/src/jobs/ticketing.job.ts b/packages/controlmart/src/jobs/ticketing.job.ts new file mode 100644 index 0000000000000000000000000000000000000000..81fb27a2fd581e7e97d5fa97ef3a931b723b36ce --- /dev/null +++ b/packages/controlmart/src/jobs/ticketing.job.ts @@ -0,0 +1,121 @@ +import { Job } from "@hokify/agenda"; +import { findGlobalPendingTicketCandidates, LogQueueRepository } from "../repository/log-queue.repository"; +import { analyzeLogQueueAndGenerateTicket } from "../services/ticketing-ai.service"; +import { TicketRepository } from "../repository/tickets.repository"; +import { createAppLogger } from "../utils/logger.util"; +import mongoose from "mongoose"; +import { defineJob, createRecurringJob } from "../services/scheduler.service"; +import { createTicketOnServiceNow } from "../services/service-now.tickets.service"; +import { loadEnv } from "../utils/env.util"; +import { World } from "../models/world.model"; + +const logger = createAppLogger({ service: "ticketing-job" }); + +import { type TLogQueueModel } from "../models/log-queue.model"; + +const processTicketCandidate = async (candidate: TLogQueueModel) => { + try { + const worldId = candidate.worldRef?.worldId; + if (!worldId) { + logger.error({ runId: candidate.runId }, "Skipping candidate: Missing worldId"); + return; + } + + // Check if ticket creation is enabled for this world + const world = await World.findById(worldId).lean(); + if (!world?.ticketCreationEnabled) { + logger.info({ runId: candidate.runId, worldId }, "Skipping candidate: Ticket creation is disabled for this world"); + // Mark as consumed so we don't reprocess it + await LogQueueRepository(worldId).updateLogQueueStatus(candidate.runId, "consumed"); + return; + } + + const 
logQueueRepo = LogQueueRepository(worldId); + + logger.info({ runId: candidate.runId }, "Generating ticket with AI"); + const ticketData = await analyzeLogQueueAndGenerateTicket(candidate); + + if (!ticketData) { + logger.warn({ runId: candidate.runId }, "AI failed to generate ticket, marking as failed/retry"); + await logQueueRepo.incrementLogQueueRetry(candidate.runId); + return; + } + + // First of all, don't panic. + // User removed WorldUser model, so we skip finding a real user and just generate a system ID placeholder. + // In a real scenario, this should link to a valid user or system actor. + const requesterId = new mongoose.Types.ObjectId(); + + const ticketRepo = TicketRepository(worldId); + const newTicket = await ticketRepo.createTicket({ + type: "incident", + description: "", + title: "", + priority: "low", + impact: "low", + urgency: "low", + category: "others", + ...ticketData, + metadata: ticketData.metadata || {}, + requester: requesterId, + status: "new", + attachments: [] as any, + resolutionNotes: undefined as any, + workNotes: [] as any, + assignedTo: undefined as any, + }); + + logger.info({ ticketId: (newTicket as any)._id, runId: candidate.runId }, "Ticket created successfully"); + + if (loadEnv().SEND_TICKETS_TO_SERVICENOW) { + try { + logger.info({ ticketId: (newTicket as any)._id }, "Sending ticket to ServiceNow"); + await createTicketOnServiceNow(newTicket); + logger.info({ ticketId: (newTicket as any)._id }, "Successfully sent ticket to ServiceNow"); + } catch (snError) { + logger.error({ error: String(snError), ticketId: (newTicket as any)._id }, "Failed to send ticket to ServiceNow"); + // We do NOT rethrow here because the local ticket was created successfully, + // and we don't want to rollback or retry the entire candidate just because SN sync failed.
+ } + } + + await logQueueRepo.updateLogQueueStatus(candidate.runId, "consumed"); + + } catch (error) { + logger.error({ error: String(error), runId: candidate.runId }, "Failed to process candidate"); + if (candidate.worldRef?.worldId) { + await LogQueueRepository(candidate.worldRef.worldId).incrementLogQueueRetry(candidate.runId); + } + } +}; + +export const ticketingJob = async (job: Job) => { + logger.info("Starting ticketing job"); + try { + const candidates = await findGlobalPendingTicketCandidates(0); + if (candidates.length === 0) { + logger.info("No pending ticket candidates found"); + return; + } + + logger.info({ count: candidates.length }, "Processing ticket candidates in batches"); + + // Batch processing to control concurrency + const BATCH_SIZE = 10; + for (let i = 0; i < candidates.length; i += BATCH_SIZE) { + const batch = candidates.slice(i, i + BATCH_SIZE); + // logger.info({ batchIndex: Math.floor(i / BATCH_SIZE) + 1, batchSize: batch.length }, "Processing batch"); + + await Promise.allSettled(batch.map(candidate => processTicketCandidate(candidate))); + } + + } catch (error) { + logger.error({ error: String(error) }, "Ticketing job failed globally"); + throw error; + } +}; + +export const registerTicketingJob = async () => { + defineJob("ticketing-job", ticketingJob); + await createRecurringJob("1 minute", "ticketing-job", {}, { name: "ticketing-job" }); +}; diff --git a/packages/controlmart/src/models/audit.model.ts b/packages/controlmart/src/models/audit.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..26042fe7475a43d297f99964ed3f9cd5ac38f7d2 --- /dev/null +++ b/packages/controlmart/src/models/audit.model.ts @@ -0,0 +1,40 @@ +import mongoose, { Schema, Model } from "mongoose"; + +const auditSchema = new Schema( + { + model: { type: String, required: true }, + documentId: { type: String, required: true, index: true }, + changedBy: { type: String }, + before: { type: Schema.Types.Mixed }, + after: { type: Schema.Types.Mixed }, + reason: { type: String }, + }, + { timestamps: true }, +); + +// Compound indexes for common audit query patterns (MORPH-515) +// Model + documentId for getting all changes to a specific document +auditSchema.index({ model: 1, documentId: 1 }); + +// Model + createdAt for getting recent changes by model type +auditSchema.index({ model: 1, createdAt: -1 }); + +// ChangedBy + createdAt for user activity tracking +auditSchema.index({ changedBy: 1, createdAt: -1 }); + +// Optional: TTL index for automatic cleanup of old audit logs (90 days) +// Uncomment if you want automatic data lifecycle management +// auditSchema.index({ createdAt: 1 }, { expireAfterSeconds: 7776000 }); + +export const AuditLog: Model = + mongoose.models.AuditLog || mongoose.model("AuditLog", auditSchema); + +export type TAuditLogModel = mongoose.InferSchemaType; +export type TAuditLogInput = Omit; +export type TQueryableAuditFields = { + worldId: string; + model?: string; + documentId?: string; + dateStart?: Date; + dateEnd?: Date; +}; diff --git a/packages/controlmart/src/models/audit.plugin.ts b/packages/controlmart/src/models/audit.plugin.ts new file mode 100644 index 0000000000000000000000000000000000000000..cd673ba514ecf5c38dee73bbc45533bf71c03310 --- /dev/null +++ b/packages/controlmart/src/models/audit.plugin.ts @@ -0,0 +1,185 @@ +import type { Schema, Document, Query, Model } from "mongoose"; + +import { AuditLog } from "./audit.model"; + +interface AuditDocument extends Document { + _original?: any; +} + +interface AuditQuery extends Query 
{ + _before?: any; +} + +function diff(before: any, after: any) { + const changes: Record = {}; + + for (const key of Object.keys(after)) { + const from = before?.[key]; + const to = after?.[key]; + + if (JSON.stringify(from) !== JSON.stringify(to)) { + changes[key] = { from, to }; + } + } + + return Object.keys(changes).length ? changes : null; +} + +function getDocumentId(doc: any): string { + if (!doc) return ""; + if (doc._id) return doc._id.toString(); + if (doc.id) return doc.id.toString(); + return ""; +} + +export function AuditPlugin(schema: Schema) { + schema.pre("save", function (this: AuditDocument) { + if (!this.isNew) { + this._original = this.toObject({ depopulate: true }); + } + }); + + schema.post("save", async function (this: AuditDocument, doc: AuditDocument) { + if (this.isNew) return; + + const before = this._original; + const after = doc.toObject({ depopulate: true }); + const changes = diff(before, after); + + if (!changes) return; + + const documentId = getDocumentId(doc); + if (!documentId) return; // Should not happen for saved docs + + await AuditLog.create({ + model: (doc.constructor as Model).modelName, + documentId, + action: "update", + before, + after, + changes, + }); + }); + + const updateOps = [ + "findOneAndUpdate", + "findOneAndReplace", + "updateOne", + "updateMany", + "replaceOne", + ] as const; + + for (const op of updateOps) { + schema.pre(op, async function (this: AuditQuery) { + const query = this.getQuery(); + + if (op === "updateMany") { + const docs = await this.model.find(query).lean(); + (this as any)._before = docs; + } else { + const doc = await this.model.findOne(query).lean(); + (this as any)._before = doc; + } + }); + + schema.post(op, async function (this: AuditQuery, res: any) { + const before = (this as any)._before; + if (!before) return; + + const query = this.getQuery(); + + if (op === "updateMany" && Array.isArray(before)) { + const afterDocs = await this.model.find(query).lean(); + + for (const beforeDoc of before) { + const afterDoc = afterDocs.find( + (d: any) => getDocumentId(d) === getDocumentId(beforeDoc), + ); + + if (!afterDoc) continue; + + const changes = diff(beforeDoc, afterDoc); + if (!changes) continue; + + await AuditLog.create({ + model: this.model.modelName, + documentId: getDocumentId(beforeDoc), + action: "update", + before: beforeDoc, + after: afterDoc, + changes, + }); + } + return; + } + let after: any; + + if (res && typeof res.toObject === "function") { + after = res.toObject({ depopulate: true }); + } else { + after = await this.model.findOne(query).lean(); + } + + if (!after) return; + + const changes = diff(before, after); + if (!changes) return; + + const documentId = getDocumentId(after) || getDocumentId(before); + + await AuditLog.create({ + model: this.model.modelName, + documentId, + action: "update", + before, + after, + changes, + }); + }); + } + + const deleteOps = ["deleteOne", "deleteMany", "findOneAndDelete"] as const; + + for (const op of deleteOps) { + schema.pre(op, async function (this: AuditQuery) { + const query = this.getQuery(); + + if (op === "deleteMany") { + const docs = await this.model.find(query).lean(); + (this as any)._before = docs; + } else { + const doc = await this.model.findOne(query).lean(); + (this as any)._before = doc; + } + }); + + schema.post(op, async function (this: AuditQuery, res: any) { + const before = (this as any)._before; + if (!before) return; + + if (op === "deleteMany" && Array.isArray(before)) { + for (const doc of before) { + await AuditLog.create({ + 
model: this.model.modelName, + documentId: doc._id, + action: "delete", + before: doc, + after: null, + changes: null, + }); + } + return; + } + const doc = Array.isArray(before) ? before[0] : before; + + await AuditLog.create({ + model: this.model.modelName, + documentId: doc._id, + action: "delete", + before: doc, + after: null, + changes: null, + }); + }); + } +} diff --git a/packages/controlmart/src/models/business-rule.model.ts b/packages/controlmart/src/models/business-rule.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..a347bd2dabb9a55da617d4cd20c24202f91dceab --- /dev/null +++ b/packages/controlmart/src/models/business-rule.model.ts @@ -0,0 +1,115 @@ +import mongoose, { Schema, Model } from "mongoose"; + +const businessRuleSchema = new Schema( + { + ruleId: { + type: String, + required: true, + unique: true, + description: "Unique identifier for the rule", + }, + name: { + type: String, + required: true, + description: "Human-readable name", + }, + description: { + type: String, + required: false, + description: "Detailed description of what the rule does", + }, + domain: { + type: String, + required: true, + enum: ["ERP", "WMS", "TMS", "EDI"], + description: "Domain this rule applies to", + }, + targetCollection: { + type: String, + required: true, + description: "Collection/model name (e.g., Inventory, Order)", + }, + trigger: { + type: [String], + required: true, + description: + "Array of trigger points (before_insert, after_update, api_endpoint:POST:/path, od_execution:start)", + }, + executionMode: { + type: String, + required: true, + enum: ["sync", "async"], + default: "sync", + description: "Sync actions block operation, async actions run in background", + }, + priority: { + type: Number, + required: true, + default: 100, + description: "Lower number = higher priority (0-1000)", + }, + condition: { + language: { + type: String, + enum: ["jmespath", "javascript"], + default: "jmespath", + description: "Language for condition evaluation", + }, + expression: { + type: String, + required: true, + description: "Condition expression that must evaluate to true", + }, + }, + actions: { + type: [ + { + type: { + type: String, + enum: ["validate", "transform", "createRecord", "triggerOD", "log"], + required: true, + }, + config: { + type: Schema.Types.Mixed, + required: true, + description: "Action-specific configuration", + }, + }, + ], + required: true, + description: "Array of actions to execute when condition is met", + }, + enabled: { + type: Boolean, + default: true, + description: "Whether this rule is active", + }, + worldId: { + type: String, + default: null, + description: "World ID for world-specific rules, null for global rules", + }, + version: { + type: Number, + default: 1, + description: "Rule version for change tracking", + }, + metadata: { + type: Schema.Types.Mixed, + description: "Additional metadata (author, tags, etc.)", + }, + }, + { timestamps: true }, +); + +// Indexes for performance +businessRuleSchema.index({ domain: 1, targetCollection: 1, enabled: 1 }); +businessRuleSchema.index({ worldId: 1 }); +businessRuleSchema.index({ trigger: 1 }); + +export const BusinessRule: Model = + mongoose.models.BusinessRule || + mongoose.model("BusinessRule", businessRuleSchema); + +export type TBusinessRuleModel = mongoose.InferSchemaType; +export type TBusinessRuleInput = Omit; diff --git a/packages/controlmart/src/models/capability.model.ts b/packages/controlmart/src/models/capability.model.ts new file mode 100644 index 
0000000000000000000000000000000000000000..d0349bf9edf4e1fc31ff92ec5568666c9ad6ffa2 --- /dev/null +++ b/packages/controlmart/src/models/capability.model.ts @@ -0,0 +1,54 @@ +import mongoose, { Schema, Model } from "mongoose"; +import { AuditPlugin } from "./audit.plugin"; +import type { TCapabilityModel } from "./capability.model.type"; + +/** + * Capability Schema + * Defines the MongoDB schema for Capability documents + * + * Note: Capabilities are GLOBAL resources (no worldRef field). + * They are reusable templates that worlds can select via capabilityIds. + */ +const CapabilitySchema = new Schema( + { + id: { type: String, required: true, unique: true, index: true }, + name: { type: String, required: true }, + description: { type: String, required: true }, + tags: { type: Schema.Types.Mixed, required: true }, + personas: { type: [String], required: true, default: [] }, + odId: { type: String, required: true }, + version: { type: String, required: true }, + metadata: { type: Schema.Types.Mixed, required: false }, + chaos: { type: Schema.Types.Mixed, required: false }, + }, + { timestamps: true } +); + +// Apply audit plugin for automatic change tracking +CapabilitySchema.plugin(AuditPlugin); + +// Add indexes for common query patterns +CapabilitySchema.index({ 'tags.domain': 1 }); +CapabilitySchema.index({ 'tags.complexity': 1 }); +CapabilitySchema.index({ 'tags.personas': 1 }); +CapabilitySchema.index({ personas: 1 }); +CapabilitySchema.index({ 'tags.domain': 1, 'tags.complexity': 1 }); // Compound index +CapabilitySchema.index({ name: "text", description: "text" }); // Text search + +// Additional indexes for service and pattern filtering (MORPH-515) +CapabilitySchema.index({ 'tags.services': 1 }); +CapabilitySchema.index({ 'tags.patterns': 1 }); +// Note: Removed compound index on tags.domain + tags.services - MongoDB cannot +// index parallel arrays. Use single-field indexes instead. 
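+// Purely illustrative expansion of the note above: a compound index such as
+// { 'tags.domain': 1, 'tags.services': 1 } would make writes fail with MongoDB's
+// "cannot index parallel arrays" error for any capability that stores arrays in both
+// fields (the normal shape here). Queries filtering on both fields can still use the
+// single-field indexes, e.g. the hypothetical lookup:
+//
+//   Capability.find({ 'tags.domain': 'fulfillment', 'tags.services': 'wms' });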
+CapabilitySchema.index({ personas: 1, 'tags.complexity': 1 }); + +/** + * Capability Model + * Singleton pattern to prevent model re-registration + * Collection name: "capabilities" + */ +export const Capability: Model = + mongoose.models.Capability || + mongoose.model("Capability", CapabilitySchema, "capabilities"); + +export type { TCapabilityModel, TCapabilityInput } from './capability.model.type'; diff --git a/packages/controlmart/src/models/capability.model.type.ts b/packages/controlmart/src/models/capability.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..6268e2cf2812b3801a60e400a135ae3a81fc58c4 --- /dev/null +++ b/packages/controlmart/src/models/capability.model.type.ts @@ -0,0 +1,89 @@ +import type { ChaosPolicy } from '../types/od.type'; + +/** + * Capability Tags for organization and filtering + */ +export interface CapabilityTags { + /** Business domains this capability belongs to (e.g., "fulfillment", "inventory") */ + domain: string[]; + + /** Complexity level */ + complexity: 'simple' | 'medium' | 'complex'; + + /** Services this capability uses (e.g., "erp", "wms", "tms") */ + services: string[]; + + /** Personas that can perform this capability (e.g., "store-manager", "warehouse-worker") */ + personas?: string[]; + + /** Workflow patterns (e.g., "sequential", "parallel", "compensating") */ + patterns?: string[]; +} + +/** + * Capability Metadata for additional information + */ +export interface CapabilityMetadata { + /** Author or team that created this capability */ + author?: string; + + /** When the capability was created */ + createdAt?: Date; + + /** Estimated execution duration in milliseconds */ + estimatedDuration?: number; + + /** Additional custom metadata */ + [key: string]: any; +} + +/** + * Capability Model - MongoDB document structure + * Represents semantic business functions or processes that personas can perform. + * + * Note: Capabilities are GLOBAL resources (no worldRef field). + * Worlds select which capabilities to enable via capabilityIds. 
+ */ +export interface TCapabilityModel { + /** MongoDB document ID */ + _id: string; + + /** Unique identifier for the capability */ + id: string; + + /** Human-readable name */ + name: string; + + /** Detailed description of what this capability does */ + description: string; + + /** Multi-dimensional tags for organization and filtering */ + tags: CapabilityTags; + + /** Persona IDs that can perform this capability */ + personas: string[]; + + /** The OD that implements this capability */ + odId: string; + + /** Semantic version */ + version: string; + + /** Optional metadata */ + metadata?: CapabilityMetadata; + + /** Optional capability-level chaos configuration */ + chaos?: ChaosPolicy; + + /** Timestamp when document was created */ + createdAt?: Date; + + /** Timestamp when document was last updated */ + updatedAt?: Date; +} + +/** + * Capability Input - Data structure for creating/updating capabilities + * Omits auto-generated fields (_id, createdAt, updatedAt) + */ +export type TCapabilityInput = Omit; diff --git a/packages/controlmart/src/models/edi/transactions.model.ts b/packages/controlmart/src/models/edi/transactions.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..980d63aebdba27a59ade99a217210b7f241268ac --- /dev/null +++ b/packages/controlmart/src/models/edi/transactions.model.ts @@ -0,0 +1,133 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const EdiTransactionSchema = new Schema( + { + worldRef: { type: WorldRefSchema, required: true }, + transactionId: { + type: String, + required: true, + unique: true, + index: true, + description: "System-generated unique transaction ID (UUID/nanoid). Internal only.", + default: () => generateIdByService("edi", "transaction"), + }, + partnerId: { type: String, ref: "Partner", required: true, index: true }, + customerId: { type: String, ref: "Customer", index: true }, + companyId: { type: String, ref: "PortalCompany", index: true }, + docType: { + type: String, + enum: ["850", "855", "856", "810", "820", "997", "999"], + required: true, + index: true, + }, + dollarValue: { type: Number }, + direction: { + type: String, + enum: ["INBOUND", "OUTBOUND"], + required: true, + index: true, + }, + timestamp: { type: Date, default: Date.now, index: true }, + interchangeControlNumber: { + type: String, + index: true, + description: "ISA13 - Interchange Control Number", + }, + groupControlNumber: { + type: String, + index: true, + description: "GS06 - Functional Group Control Number", + }, + transactionSetControlNumber: { + type: String, + index: true, + description: "ST02 - Transaction Set Control Number", + }, + businessDocumentNumber: { + type: String, + index: true, + description: + "Business-level document number (e.g., BEG03, BIG02, BSN02). 
Used for correlation.", + }, + status: { + type: String, + enum: ["RECEIVED", "QUEUED", "PROCESSING", "DELIVERED", "ERRORED", "ARCHIVED"], + required: true, + index: true, + }, + errorReason: { type: String }, + errorDetails: Schema.Types.Mixed, + fileName: { type: String }, + flowId: { + type: String, + index: true, + description: "Correlates logical flow", + }, + rawEdi: { + type: String, + description: "Raw EDI X12/EDIFACT text blob", + }, + payload: { + type: Schema.Types.Mixed, + description: + "Structured metadata or parsed EDI fields (PO#, totals, etc.). Optional, lightweight.", + }, + _idempotencyKey: { + type: String, + index: true, + unique: true, + sparse: true, + select: false, + description: + "System-generated implicit key for idempotency (hash of partnerId+docType+direction+controlNumber).", + }, + }, + { + timestamps: true, + minimize: true, + }, +); + +EdiTransactionSchema.index({ + partnerId: 1, + docType: 1, + direction: 1, + timestamp: -1, +}); + +EdiTransactionSchema.index({ + partnerId: 1, + docType: 1, + status: 1, + timestamp: -1, +}); + +EdiTransactionSchema.index( + { + partnerId: 1, + docType: 1, + direction: 1, + transactionSetControlNumber: 1, + }, + { unique: false, sparse: true }, +); + +EdiTransactionSchema.plugin(AuditPlugin); + +// Attach business rule hooks +// attachBusinessRuleHooks(EdiTransactionSchema, "EDI", "EdiTransaction"); + +export const EdiTransaction: Model = + mongoose.models.EdiTransaction || + mongoose.model("EdiTransaction", EdiTransactionSchema); + +export type TEdiTransactionModel = mongoose.InferSchemaType; +export type TEdiTransactionInput = Omit< + TEdiTransactionModel, + "worldRef" | "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/erp/company.erp.model.ts b/packages/controlmart/src/models/erp/company.erp.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..27e62555f0ace4754dc5e3d29a06a378b4872358 --- /dev/null +++ b/packages/controlmart/src/models/erp/company.erp.model.ts @@ -0,0 +1,87 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AddressSchema, WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const CompanySchema = new Schema( + { + worldRef: WorldRefSchema, + + isMpcCompany: { type: Boolean, default: false, index: true }, + + companyId: { + type: String, + required: false, + index: true, + default: () => generateIdByService("erp", "company"), + }, + externalReference: { type: String, index: true }, + + name: { type: String, required: true, index: true }, + legalName: String, + + duns: { type: String, index: true }, + taxId: String, + taxRegistrationNumbers: new Schema( + { + country: String, + number: String, + }, + { _id: false }, + ), + + currency: { type: String, default: "USD", index: true }, + paymentTerms: String, + creditLimit: Number, + creditHold: { type: Boolean, default: false }, + + billingAddress: AddressSchema, + shippingAddress: AddressSchema, + remitTo: AddressSchema, + + primaryContact: { + name: String, + email: String, + phone: String, + }, + + salesOrg: String, + priceList: String, + glAccount: String, + customerClass: String, + + status: { + type: String, + enum: ["ACTIVE", "INACTIVE", "PROSPECT", "BLOCKED"], + default: "ACTIVE", + index: true, + }, + + companyType: { + type: String, + enum: ["CUSTOMER", "SUPPLIER", "PARTNER", "INTERNAL"], + default: 
"CUSTOMER", + index: true, + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +CompanySchema.index({ "worldRef.worldId": 1, companyId: 1 }, { unique: true }); + +CompanySchema.index({ "worldRef.worldId": 1, name: 1 }); +CompanySchema.index({ duns: 1, taxId: 1 }); + +CompanySchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(CompanySchema, "ERP", "Company"); + +export const Company: Model = + mongoose.models.Company || mongoose.model("Company", CompanySchema); +export type TCompanyModel = mongoose.InferSchemaType; +export type TCompanyInput = Omit; +export type TCompanyGenerate = Omit; diff --git a/packages/controlmart/src/models/erp/invoice.erp.model.ts b/packages/controlmart/src/models/erp/invoice.erp.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..5216a182c88ed52b2be4c8d3e74ef4ea306df703 --- /dev/null +++ b/packages/controlmart/src/models/erp/invoice.erp.model.ts @@ -0,0 +1,86 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AddressSchema, TaxDetailSchema, WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const InvoiceLineSchema = new Schema( + { + lineNumber: Number, + sku: String, + description: String, + quantity: Number, + unitPrice: Number, + lineAmount: Number, + discount: { amount: Number, percent: Number }, + taxDetails: [TaxDetailSchema], + accounting: { costCenter: String, glAccount: String }, + customFields: Schema.Types.Mixed, + }, + { _id: false }, +); + +const InvoiceSchema = new Schema( + { + worldRef: WorldRefSchema, + invoiceId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("erp", "invoice"), + }, + invoiceType: { + type: String, + enum: ["STANDARD", "CREDIT", "DEBIT", "CORRECTION"], + default: "STANDARD", + }, + poNumber: { type: String, index: true }, + customerId: { type: String, ref: "Customer", index: true }, + orderId: { type: String, ref: "Order", index: true }, + partnerId: { type: String, ref: "Partner", index: true }, + billTo: AddressSchema, + remitTo: AddressSchema, + issueDate: Date, + dueDate: Date, + currency: { type: String, default: "USD" }, + subtotal: Number, + allowances: [{ description: String, amount: Number }], + charges: [{ description: String, amount: Number }], + taxes: [TaxDetailSchema], + totalAmount: Number, + balanceDue: Number, + status: { + type: String, + enum: ["DRAFT", "SENT", "VALIDATED", "REJECTED", "PAID", "PARTIALLY_PAID"], + default: "DRAFT", + index: true, + }, + lines: [InvoiceLineSchema], + references: [{ docType: String, docId: String }], + ediTransactionId: { type: Schema.Types.ObjectId, ref: "EdiTransaction" }, + paymentTerms: String, + taxSummary: Schema.Types.Mixed, + accounting: { arAccount: String, period: String, postingDate: Date }, + disputes: [{ ticketId: String, reason: String, createdAt: Date }], + correctionHistory: [{ previous: Schema.Types.Mixed, correctedAt: Date, correctedBy: String }], + attachments: [{ filename: String, url: String }], + flowId: { type: String, index: true }, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +InvoiceSchema.index({ customerId: 1, issueDate: -1 }); + +InvoiceSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(InvoiceSchema, "ERP", "Invoice"); + +export const Invoice: Model = + mongoose.models.Invoice || mongoose.model("Invoice", 
InvoiceSchema); +export type TInvoiceModel = mongoose.InferSchemaType; +export type TInvoiceLineModel = mongoose.InferSchemaType; + +export type TInvoiceInput = Omit; diff --git a/packages/controlmart/src/models/erp/orders.erp.model.ts b/packages/controlmart/src/models/erp/orders.erp.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..5bd934794e0b0e29074f24d8e04b6da0268e5608 --- /dev/null +++ b/packages/controlmart/src/models/erp/orders.erp.model.ts @@ -0,0 +1,91 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AddressSchema, TaxDetailSchema, WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const POLineSchema = new Schema( + { + lineNumber: { type: Number, required: true }, + poLineId: { type: String }, + sku: { type: String, index: true }, + description: String, + quantityOrdered: { type: Number, required: true }, + quantityBackordered: { type: Number, default: 0 }, + quantityCanceled: { type: Number, default: 0 }, + unitOfMeasure: String, + unitPrice: Number, + lineTotal: Number, + promisedDate: Date, + shipToAddress: AddressSchema, + taxDetails: [TaxDetailSchema], + scheduleLines: [{ qty: Number, date: Date }], + customFields: Schema.Types.Mixed, + }, + { _id: false }, +); + +const PurchaseOrderSchema = new Schema( + { + worldRef: WorldRefSchema, + orderId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("erp", "order"), + }, + poType: { + type: String, + enum: ["STANDARD", "BLANKET", "CONTRACT", "DROP_SHIP"], + default: "STANDARD", + }, + customerId: { type: String, ref: "Customer", index: true }, + partnerId: { type: String, ref: "Partner", index: true }, + orderDate: Date, + requestedDate: Date, + dueDate: Date, + direction: { type: String, enum: ["INBOUND", "OUTBOUND"], default: "INBOUND" }, + buyer: { id: String, name: String }, + currency: { type: String, default: "USD" }, + subtotal: Number, + discounts: [{ description: String, amount: Number }], + taxes: [TaxDetailSchema], + totalAmount: Number, + status: { + type: String, + enum: [ + "RECEIVED", + "ACKED", + "IN_PROGRESS", + "MATERIALS_PICKED", + "MANUFACTURING_COMPLETE", + "PARTIALLY_SHIPPED", + "COMPLETED", + "CANCELLED", + ], + default: "RECEIVED", + index: true, + }, + lines: [POLineSchema], + attachments: [{ filename: String, url: String }], + ediTransactionId: { type: Schema.Types.ObjectId, ref: "EdiTransaction" }, + flowId: { type: String, index: true }, + notes: String, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +PurchaseOrderSchema.index({ customerId: 1, orderDate: -1 }); + +PurchaseOrderSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(PurchaseOrderSchema, "ERP", "Order"); + +export const PurchaseOrder: Model = + mongoose.models.PurchaseOrder || mongoose.model("Order", PurchaseOrderSchema); +export type TOrderModel = mongoose.InferSchemaType; +export type TOrderLineModel = mongoose.InferSchemaType; +export type TOrderInputModel = Omit; \ No newline at end of file diff --git a/packages/controlmart/src/models/erp/payment.erp.model.ts b/packages/controlmart/src/models/erp/payment.erp.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..4929c471df9011725b8459e5deefb8757cd6973a --- /dev/null +++ b/packages/controlmart/src/models/erp/payment.erp.model.ts @@ -0,0 +1,76 @@ +import 
mongoose, { Schema, Model, get } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const PaymentAllocationSchema = new Schema( + { + invoiceNumber: String, + appliedAmount: Number, + discountTaken: Number, + unappliedAmount: Number, + allocationMethod: String, + }, + { _id: false }, +); + +const PaymentSchema = new Schema( + { + worldRef: WorldRefSchema, + paymentId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("erp", "payment"), + }, + remittanceId: String, + customerId: { type: String, ref: "Customer", index: true }, + partnerId: { type: String, ref: "Partner" }, + paymentDate: Date, + currency: { type: String, default: "USD" }, + totalAmount: Number, + method: { + type: String, + enum: ["ACH", "WIRE", "CHECK", "CREDIT_CARD", "OTHER"], + default: "ACH", + }, + bankDetails: { + bankName: String, + accountNumber: String, + routingNumber: String, + swift: String, + }, + allocations: { + type: [PaymentAllocationSchema], + default: [], + }, + status: { + type: String, + enum: ["RECEIVED", "APPLIED", "UNMATCHED", "REVERSAL"], + default: "RECEIVED", + index: true, + }, + ediTransactionId: { type: Schema.Types.ObjectId, ref: "EdiTransaction" }, // inbound 820 + referenceNumbers: [String], + notes: String, + flowId: { type: String, index: true }, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +PaymentSchema.index({ partnerId: 1, paymentDate: -1 }); + +PaymentSchema.plugin(AuditPlugin); + +// Attach business rule hooks +// attachBusinessRuleHooks(PaymentSchema, "ERP", "Payment"); + +export const Payment: Model = + mongoose.models.Payment || mongoose.model("Payment", PaymentSchema); +export type TPaymentModel = mongoose.InferSchemaType; +export type TPaymentAllocationModel = mongoose.InferSchemaType; +export type TPaymentInput = Omit; diff --git a/packages/controlmart/src/models/erp/product.erp.model.ts b/packages/controlmart/src/models/erp/product.erp.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..a49b796e7b114e9fafd4977d8599fb6a1db1597e --- /dev/null +++ b/packages/controlmart/src/models/erp/product.erp.model.ts @@ -0,0 +1,54 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const ProductSchema = new Schema( + { + worldRef: WorldRefSchema, + productId: { + type: String, + required: false, + unique: false, + index: true, + default: () => generateIdByService("erp", "product"), + }, + sku: { type: String, index: true }, + upc: String, + ean: String, + name: { type: String, required: true }, + description: String, + commodityCode: String, + taxClassification: String, + unitOfMeasure: { type: String, default: "EA" }, + weight: { value: Number, unit: String }, + dimensions: { length: Number, width: Number, height: Number, unit: String }, + inventoryTracking: { type: Boolean, default: true }, + price: { currency: String, amount: Number }, + cost: { currency: String, amount: Number }, + leadTimeDays: Number, + status: { + type: String, + enum: ["ACTIVE", "DISCONTINUED"], + default: "ACTIVE", + }, + customFields: Schema.Types.Mixed, + 
}, + { timestamps: true }, +); + +ProductSchema.plugin(AuditPlugin); + +// Attach business rule hooks +ProductSchema.index({ "worldRef.worldId": 1, productId: 1 }, { unique: true }); + +// attachBusinessRuleHooks(ProductSchema, "ERP", "Product"); + +export const Product: Model = + mongoose.models.Product || mongoose.model("Product", ProductSchema); +export type TProductModel = mongoose.InferSchemaType; +export type TProductInput = Omit; +export type TProductGenerate = Omit; + diff --git a/packages/controlmart/src/models/erp/shipment.erp.model.ts b/packages/controlmart/src/models/erp/shipment.erp.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..e4b79d7c2389b720b333452491ba0156b86e38c2 --- /dev/null +++ b/packages/controlmart/src/models/erp/shipment.erp.model.ts @@ -0,0 +1,76 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AddressSchema, WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const ShipmentLineSchema = new Schema( + { + lineNumber: Number, + sku: String, + quantityShipped: Number, + quantityOrdered: Number, + unitOfMeasure: String, + lotNumber: String, + serialNumbers: [String], + palletId: String, + packageCount: Number, + weight: Number, + customs: Schema.Types.Mixed, + customFields: Schema.Types.Mixed, + }, + { _id: false }, +); + +const ShipmentSchema = new Schema( + { + worldRef: WorldRefSchema, + shipmentId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("erp", "shipment"), + }, + poNumber: { type: String, index: true }, + carrier: { name: String, scac: String, mode: String }, + trackingNumber: String, + shipDate: Date, + estimatedArrival: Date, + actualArrival: Date, + status: { + type: String, + enum: ["CREATED", "IN_TRANSIT", "DELIVERED", "EXCEPTION"], + default: "CREATED", + index: true, + }, + fromAddress: AddressSchema, + toAddress: AddressSchema, + packaging: { + palletCount: Number, + totalPackages: Number, + packagingType: String, + }, + lines: [ShipmentLineSchema], + ediTransactionId: { type: Schema.Types.ObjectId, ref: "EdiTransaction" }, // outbound 856 + documents: [{ type: String }], // urls to bills of lading, labels + events: [{ ts: Date, location: String, status: String, note: String }], + flowId: { type: String, index: true }, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ShipmentSchema.plugin(AuditPlugin); + +// Attach business rule hooks +// attachBusinessRuleHooks(ShipmentSchema, "ERP", "ERPShipment"); + +export const Shipment: Model = + mongoose.models.Shipment || + mongoose.model("ERPShipment", ShipmentSchema, "erp-shipments"); +export type TShipmentModel = mongoose.InferSchemaType; +export type TShipmentLineModel = mongoose.InferSchemaType; + +export type TShipmentInput = Omit; diff --git a/packages/controlmart/src/models/finance/finance.model.ts b/packages/controlmart/src/models/finance/finance.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..00696ff302b25bf236c0372ca53da3e409d925fb --- /dev/null +++ b/packages/controlmart/src/models/finance/finance.model.ts @@ -0,0 +1,43 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { generateIdByService } from "../../utils/mongo.util"; + +const FinanceTransactionSchema = new Schema( + { + worldRef: 
WorldRefSchema, + partnerId: { type: String, index: true }, + transactionId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("finance", "transaction"), + }, + type: { + type: String, + enum: ["payment_in", "payment_out"], + required: true, + index: true, + }, + amount: { type: Number, required: true }, + sourceType: { + type: String, + enum: ["invoice", "bill", "manual", "interest", "payment"], + required: true, + }, + sourceId: { type: String, required: true, index: true }, + metadata: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +export const FinanceTransaction: Model = + mongoose.models.FinanceTransaction || + mongoose.model("FinanceTransaction", FinanceTransactionSchema); + +export type TFinanceTransactionModel = mongoose.InferSchemaType; +export type TFinanceTransactionInput = Omit< + TFinanceTransactionModel, + "worldRef" | "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/finance/ledger.model.ts b/packages/controlmart/src/models/finance/ledger.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..40b29d148c05c4738f535d4e655a7050cbe567e3 --- /dev/null +++ b/packages/controlmart/src/models/finance/ledger.model.ts @@ -0,0 +1,71 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; + +const CompanyLedgerSchema = new Schema( + { + worldRef: WorldRefSchema, + cash: { + type: Number, + default: 0, + }, + + totalReceivables: { + type: Number, + default: 0, + }, + + totalPayables: { + type: Number, + default: 0, + }, + + netPosition: { + type: Number, + default: 0, + }, + }, + { + timestamps: true, + }, +); + +CompanyLedgerSchema.pre("save", function (next) { + this.netPosition = this.cash + this.totalReceivables - this.totalPayables; + next(); +}); + +CompanyLedgerSchema.pre("findOneAndUpdate", function (next) { + const update = this.getUpdate() as any; + const $set = update.$set ?? {}; + const $inc = update.$inc ?? {}; + + const current = this.getQuery(); + + CompanyLedger.findOne(this.getQuery()) + .then((doc) => { + const newCash = $set.cash ?? (doc?.cash ?? 0) + ($inc.cash ?? 0); + const newReceivables = + $set.totalReceivables ?? (doc?.totalReceivables ?? 0) + ($inc.totalReceivables ?? 0); + const newPayables = + $set.totalPayables ?? (doc?.totalPayables ?? 0) + ($inc.totalPayables ?? 
0); + + update.$set = { + ...$set, + netPosition: newCash + newReceivables - newPayables, + }; + + next(); + }) + .catch(next); +}); + +export const CompanyLedger: Model = + mongoose.models.CompanyLedger || + mongoose.model("CompanyLedger", CompanyLedgerSchema); + +export type TCompanyLedgerModel = mongoose.InferSchemaType; +export type TCompanyLedgerInput = Omit< + TCompanyLedgerModel, + "worldRef" | "netPosition" | "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/knowledge-graph-edge.model.ts b/packages/controlmart/src/models/knowledge-graph-edge.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..1a24601c2fc6dd58f70ea1760dbf56637e9bd631 --- /dev/null +++ b/packages/controlmart/src/models/knowledge-graph-edge.model.ts @@ -0,0 +1,53 @@ +import mongoose, { Schema, Model } from "mongoose"; +import { AuditPlugin } from "./audit.plugin"; +import type { TKnowledgeGraphEdgeModel } from "./knowledge-graph-edge.model.type"; + +/** + * Knowledge Graph Edge Schema + * Defines the MongoDB schema for knowledge graph edges + */ +const KnowledgeGraphEdgeSchema = new Schema( + { + source: { type: String, required: true, index: true }, + target: { type: String, required: true, index: true }, + type: { + type: String, + required: true, + enum: [ + "can_perform", + "implemented_by", + "uses", + "exposed_by", + "produces", + "fetches", + "requires", + "modifies", + "prerequisite", + "references", + "depends_on" + ], + index: true + }, + metadata: { type: Schema.Types.Mixed, required: false } + }, + { timestamps: true } +); + +// Apply audit plugin for automatic change tracking +KnowledgeGraphEdgeSchema.plugin(AuditPlugin); + +// Add compound indexes for efficient edge queries +KnowledgeGraphEdgeSchema.index({ source: 1, target: 1 }); +KnowledgeGraphEdgeSchema.index({ source: 1, type: 1 }); +KnowledgeGraphEdgeSchema.index({ target: 1, type: 1 }); + +/** + * Knowledge Graph Edge Model + * Singleton pattern to prevent model re-registration + * Collection name: "knowledge-graph-edges" + */ +export const KnowledgeGraphEdge: Model = + mongoose.models.KnowledgeGraphEdge || + mongoose.model("KnowledgeGraphEdge", KnowledgeGraphEdgeSchema, "knowledge-graph-edges"); + +export type { TKnowledgeGraphEdgeModel, TKnowledgeGraphEdgeInput } from './knowledge-graph-edge.model.type'; diff --git a/packages/controlmart/src/models/knowledge-graph-edge.model.type.ts b/packages/controlmart/src/models/knowledge-graph-edge.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..218d82cf35c889a26ca63f0be725ca037cf1b31b --- /dev/null +++ b/packages/controlmart/src/models/knowledge-graph-edge.model.type.ts @@ -0,0 +1,55 @@ +/** + * Knowledge Graph Edge Model Type Definitions + * + * Represents edges/relationships in the knowledge graph. + */ + +/** + * Edge types representing relationships in the knowledge graph + * Extended to include all edge types from knowledge-graph.type.ts + */ +export type KnowledgeGraphEdgeType = + | 'can_perform' // Persona → Capability + | 'implemented_by' // Capability → OD + | 'uses' // OD → Tool + | 'exposed_by' // Tool → Service + | 'produces' // Tool → Entity (creates new) + | 'fetches' // Tool → Entity (reads existing) + | 'requires' // Tool → Entity + | 'modifies' // Tool → Entity + | 'prerequisite' // Tool → Tool + | 'references' // General reference + | 'depends_on'; // General dependency + +/** + * Knowledge Graph Edge Model - MongoDB document structure + * Represents a directed edge/relationship in the knowledge graph. 
+ */ +export interface TKnowledgeGraphEdgeModel { + /** MongoDB document ID */ + _id: string; + + /** Source node ID (from) */ + source: string; + + /** Target node ID (to) */ + target: string; + + /** Type of relationship */ + type: KnowledgeGraphEdgeType; + + /** Additional metadata specific to edge type */ + metadata?: Record; + + /** Timestamp when document was created */ + createdAt?: Date; + + /** Timestamp when document was last updated */ + updatedAt?: Date; +} + +/** + * Knowledge Graph Edge Input - Data structure for creating/updating edges + * Omits auto-generated fields (_id, createdAt, updatedAt) + */ +export type TKnowledgeGraphEdgeInput = Omit; diff --git a/packages/controlmart/src/models/knowledge-graph-node.model.ts b/packages/controlmart/src/models/knowledge-graph-node.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..44bb171e091363ca501a4488b6804d32437c8883 --- /dev/null +++ b/packages/controlmart/src/models/knowledge-graph-node.model.ts @@ -0,0 +1,40 @@ +import mongoose, { Schema, Model } from "mongoose"; +import { AuditPlugin } from "./audit.plugin"; +import type { TKnowledgeGraphNodeModel } from "./knowledge-graph-node.model.type"; + +/** + * Knowledge Graph Node Schema + * Defines the MongoDB schema for knowledge graph nodes + */ +const KnowledgeGraphNodeSchema = new Schema( + { + id: { type: String, required: true, unique: true, index: true }, + type: { + type: String, + required: true, + enum: ["persona", "capability", "od", "tool", "service", "entity"], + index: true + }, + name: { type: String, required: true }, + metadata: { type: Schema.Types.Mixed, required: false } + }, + { timestamps: true } +); + +// Apply audit plugin for automatic change tracking +KnowledgeGraphNodeSchema.plugin(AuditPlugin); + +// Add indexes for common query patterns +KnowledgeGraphNodeSchema.index({ 'metadata.tags': 1 }); +KnowledgeGraphNodeSchema.index({ id: 1, type: 1 }); // Compound index + +/** + * Knowledge Graph Node Model + * Singleton pattern to prevent model re-registration + * Collection name: "knowledge-graph-nodes" + */ +export const KnowledgeGraphNode: Model = + mongoose.models.KnowledgeGraphNode || + mongoose.model("KnowledgeGraphNode", KnowledgeGraphNodeSchema, "knowledge-graph-nodes"); + +export type { TKnowledgeGraphNodeModel, TKnowledgeGraphNodeInput } from './knowledge-graph-node.model.type'; diff --git a/packages/controlmart/src/models/knowledge-graph-node.model.type.ts b/packages/controlmart/src/models/knowledge-graph-node.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..ba65dc5397a7a8077bfc2d94b82b9ad63c30bded --- /dev/null +++ b/packages/controlmart/src/models/knowledge-graph-node.model.type.ts @@ -0,0 +1,50 @@ +/** + * Knowledge Graph Node Model Type Definitions + * + * Represents nodes in the knowledge graph (services, entities, ODs, capabilities, tools, personas). + */ + +/** + * Node types in the knowledge graph + * Extended to include all node types from knowledge-graph.type.ts + */ +export type KnowledgeGraphNodeType = + | 'persona' // Roles/actors + | 'capability' // Business functions + | 'od' // Operational Descriptors + | 'tool' // API operations + | 'service' // System boundaries + | 'entity'; // Data objects + +/** + * Knowledge Graph Node Model - MongoDB document structure + * Represents a node in the knowledge graph. 
+ */ +export interface TKnowledgeGraphNodeModel { + /** MongoDB document ID */ + _id: string; + + /** Unique node identifier */ + id: string; + + /** Type of node */ + type: KnowledgeGraphNodeType; + + /** Human-readable name/label */ + name: string; + + /** Additional metadata specific to node type */ + metadata?: Record; + + /** Timestamp when document was created */ + createdAt?: Date; + + /** Timestamp when document was last updated */ + updatedAt?: Date; +} + +/** + * Knowledge Graph Node Input - Data structure for creating/updating nodes + * Omits auto-generated fields (_id, createdAt, updatedAt) + */ +export type TKnowledgeGraphNodeInput = Omit; diff --git a/packages/controlmart/src/models/log-queue.model.ts b/packages/controlmart/src/models/log-queue.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..b1abf7eca3438da09483e571a49dcca8f896a207 --- /dev/null +++ b/packages/controlmart/src/models/log-queue.model.ts @@ -0,0 +1,47 @@ +import mongoose, { Schema, Model } from "mongoose"; +import { WorldRefSchema } from "./shared.model"; + +const LogQueueSchema = new Schema( + { + worldRef: { type: WorldRefSchema, required: true }, + runId: { type: String, required: true, index: true }, + odId: { type: String, required: true, index: false }, + odName: { type: String, required: true, index: false }, + persona: { type: String, index: false }, + logs: { type: [String], default: [] }, + isTicketCandidate: { type: Boolean, default: false, index: true }, + entries: [ + { + odId: String, + odName: String, + persona: String, + stepId: String, + service: String, + tool: String, + chaosType: { type: String, required: false }, + modifications: { type: [String], required: false }, + inputType: { type: String, required: false }, // Store input details (stringified or limited) + outputType: { type: String, required: false }, // Store output details + data: { type: Schema.Types.Mixed, required: false }, // Generic payload for input/output data + context: { type: Schema.Types.Mixed, required: false }, // Store step context + timestamp: String, + }, + ], + status: { + type: String, + enum: ["queued", "consumed", "failed"], + default: "queued", + index: true, + }, + retryCount: { type: Number, default: 0 }, + consumedAt: { type: Date }, + }, + { + timestamps: true, + } +); + +export const LogQueue: Model = mongoose.model("LogQueue", LogQueueSchema); +export type TLogQueueModel = mongoose.InferSchemaType; +export type TLogQueueInput = Omit; + diff --git a/packages/controlmart/src/models/logs.model.ts b/packages/controlmart/src/models/logs.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..b3c746c61859f2a46664860142418c2c75a0676b --- /dev/null +++ b/packages/controlmart/src/models/logs.model.ts @@ -0,0 +1,56 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "./shared.model"; +import { AuditPlugin } from "./audit.plugin"; +import { EServices } from "../utils/service-mesh.util"; + +const WorldLogSchema = new Schema( + { + worldRef: WorldRefSchema, + logId: { type: String, required: false }, + timestamp: { type: Date, required: true, index: true }, + serviceType: { + type: String, + enum: Object.values(EServices), + index: true, + }, + level: { + type: String, + enum: ["trace", "debug", "info", "warn", "error", "fatal"], + index: true, + default: "info", + }, + msg: { type: String, required: true, index: true }, + metadata: Schema.Types.Mixed, + }, + { + timestamps: false, + versionKey: false, + }, +); + 
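+// Rough usage sketch for the schema above (values are invented; logId is omitted on
+// purpose because the pre("validate") hook defined below generates it, and EServices.ERP
+// is assumed to be one of the enum's members):
+//
+//   await WorldLog.create({
+//     worldRef: { worldId: "world-001" },
+//     timestamp: new Date(),
+//     serviceType: EServices.ERP,
+//     level: "warn",
+//     msg: "Invoice total mismatch detected",
+//     metadata: { invoiceId: "INV-123" },
+//   });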
+WorldLogSchema.index( + { logId: 1 }, + { unique: true, partialFilterExpression: { logId: { $exists: true } } }, +); + +WorldLogSchema.pre("validate", function (next) { + const worldId = this.worldRef?.worldId ?? "unknown"; + const service = this.serviceType ?? "unknown"; + const ts = (this.timestamp ?? new Date()).toISOString().replace(/[:.]/g, "-"); + + const rand = Math.random().toString(36).slice(2, 6); + + this.logId = `${worldId}-${service}-${ts}-${rand}`; + + next(); +}); + +WorldLogSchema.index({ msg: "text" }); + +WorldLogSchema.plugin(AuditPlugin); + +export const WorldLog: Model = + mongoose.models.WorldLog || mongoose.model("WorldLog", WorldLogSchema); +export type TWorldLogModel = mongoose.InferSchemaType; +export type TWorldLogInput = Omit; diff --git a/packages/controlmart/src/models/manufacturing/index.ts b/packages/controlmart/src/models/manufacturing/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..d9eef277678fb4b7142cc303b0e1c4389f7551b4 --- /dev/null +++ b/packages/controlmart/src/models/manufacturing/index.ts @@ -0,0 +1,3 @@ +export * from "./production-run.model"; +export * from "./production-run.model.type"; + diff --git a/packages/controlmart/src/models/manufacturing/production-run.model.ts b/packages/controlmart/src/models/manufacturing/production-run.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..ddeeb003682dfcd77b9f3ed148ceee6ceb7a70dc --- /dev/null +++ b/packages/controlmart/src/models/manufacturing/production-run.model.ts @@ -0,0 +1,95 @@ +import mongoose, { Schema, Model } from "mongoose"; +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const RawMaterialConsumedSchema = new Schema( + { + materialId: { type: String, required: true }, + sku: { type: String, required: true }, + quantity: { type: Number, required: true }, + unit: { type: String, required: true }, + lotNumber: String, + binId: String, + }, + { _id: false }, +); + +const FinishedGoodsProducedSchema = new Schema( + { + productId: { type: String, required: true }, + sku: { type: String, required: true }, + quantity: { type: Number, required: true }, + unit: { type: String, required: true }, + lotNumber: String, + }, + { _id: false }, +); + +const QualityCheckSchema = new Schema( + { + checkType: { type: String, required: true }, + status: { + type: String, + enum: ["PASS", "FAIL", "PENDING"], + default: "PENDING", + }, + checkedAt: Date, + notes: String, + }, + { _id: false }, +); + +const ProductionRunSchema = new Schema( + { + worldRef: WorldRefSchema, + productionRunId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("manufacturing", "production-run"), + }, + productionOrderId: { + type: String, + required: true, + index: true, + }, + status: { + type: String, + enum: ["CREATED", "IN_PROGRESS", "COMPLETED", "FAILED", "CANCELLED"], + default: "CREATED", + index: true, + }, + startTime: Date, + endTime: Date, + rawMaterialsConsumed: [RawMaterialConsumedSchema], + finishedGoodsProduced: [FinishedGoodsProducedSchema], + qualityChecks: [QualityCheckSchema], + equipmentId: String, + workCenterId: String, + operatorId: String, + notes: String, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ProductionRunSchema.index({ productionOrderId: 1, status: 1 }); 
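+// Hypothetical example of the lookup the compound index above is meant to cover
+// (the order ID is invented for illustration; "IN_PROGRESS" is one of the schema's
+// status enum values):
+//
+//   ProductionRun.find({ productionOrderId: "PO-1001", status: "IN_PROGRESS" });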
+ProductionRunSchema.index({ status: 1, startTime: -1 }); + +ProductionRunSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(ProductionRunSchema, "MANUFACTURING", "ProductionRun"); + +export const ProductionRun: Model = + mongoose.models.ProductionRun || + mongoose.model("ProductionRun", ProductionRunSchema, "manufacturing_production_runs"); + +export type TProductionRunModel = mongoose.InferSchemaType; +export type TProductionRunInput = Omit< + TProductionRunModel, + "worldRef" | "createdAt" | "updatedAt" +>; + diff --git a/packages/controlmart/src/models/manufacturing/production-run.model.type.ts b/packages/controlmart/src/models/manufacturing/production-run.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..688fda1036b424b61d5728371a8112cf7986d0ec --- /dev/null +++ b/packages/controlmart/src/models/manufacturing/production-run.model.type.ts @@ -0,0 +1,48 @@ +import mongoose from "mongoose"; + +export type TProductionRunModel = { + _id: mongoose.Types.ObjectId; + worldRef: { + worldId: string; + traceId?: string; + }; + productionRunId: string; + productionOrderId: string; + status: "CREATED" | "IN_PROGRESS" | "COMPLETED" | "FAILED" | "CANCELLED"; + startTime?: Date; + endTime?: Date; + rawMaterialsConsumed: Array<{ + materialId: string; + sku: string; + quantity: number; + unit: string; + lotNumber?: string; + binId?: string; + }>; + finishedGoodsProduced: Array<{ + productId: string; + sku: string; + quantity: number; + unit: string; + lotNumber?: string; + }>; + qualityChecks?: Array<{ + checkType: string; + status: "PASS" | "FAIL" | "PENDING"; + checkedAt?: Date; + notes?: string; + }>; + equipmentId?: string; + workCenterId?: string; + operatorId?: string; + notes?: string; + customFields?: Record; + createdAt: Date; + updatedAt: Date; +}; + +export type TProductionRunInput = Omit< + TProductionRunModel, + "_id" | "worldRef" | "createdAt" | "updatedAt" +>; + diff --git a/packages/controlmart/src/models/od.model.ts b/packages/controlmart/src/models/od.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..d175e3c1345d1ed6f9403b9360e5a6d9b5435060 --- /dev/null +++ b/packages/controlmart/src/models/od.model.ts @@ -0,0 +1,42 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AuditPlugin } from "./audit.plugin"; +import { WorldRefSchema } from "./shared.model"; +import { generateIdByService } from "../utils/mongo.util"; + +const operationalDescriptorSchema = new Schema( + { + worldRef: WorldRefSchema, + odId: { + type: String, + default: () => generateIdByService("od", "od"), + }, + name: { type: String, required: true }, + description: { type: String }, + persona: { type: String }, + odType: { + type: String, + required: true, + enum: ["background_job", "standard", "workflow"], + default: "standard", + }, + data: { type: Schema.Types.Mixed, required: true }, + }, + { timestamps: true, minimize: false }, +); + +operationalDescriptorSchema.index({ odId: 1, "worldRef.worldId": 1 }, { unique: true }); + +operationalDescriptorSchema.plugin(AuditPlugin); + +export const OperationalDescriptor: Model = + mongoose.models.OperationalDescriptor || + mongoose.model("OperationalDescriptor", operationalDescriptorSchema); + +export type TOperationalDescriptorModel = mongoose.InferSchemaType< + typeof operationalDescriptorSchema +>; +export type TOperationalDescriptorInput = Omit< + TOperationalDescriptorModel, + "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/persona.model.ts 
b/packages/controlmart/src/models/persona.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..54febe4432a8f105047068a4c2df72cf845226f0 --- /dev/null +++ b/packages/controlmart/src/models/persona.model.ts @@ -0,0 +1,43 @@ +import mongoose, { Schema, Model } from "mongoose"; +import { AuditPlugin } from "./audit.plugin"; +import type { TPersonaModel } from "./persona.model.type"; + +/** + * Persona Schema + * Defines the MongoDB schema for Persona documents + * + * Note: Personas are GLOBAL resources (no worldRef field). + * They are reusable role templates that can be referenced by worlds. + */ +const PersonaSchema = new Schema( + { + id: { type: String, required: true, unique: true, index: true }, + name: { type: String, required: true }, + description: { type: String, required: true }, + role: { type: String, required: true, index: true }, + department: { type: String, required: false, index: true }, + capabilityIds: { type: [String], required: true, default: [] }, + metadata: { type: Schema.Types.Mixed, required: false }, + }, + { timestamps: true } +); + +// Apply audit plugin for automatic change tracking +PersonaSchema.plugin(AuditPlugin); + +// Add indexes for common query patterns +PersonaSchema.index({ 'metadata.accessLevel': 1 }); +PersonaSchema.index({ 'metadata.tags': 1 }); +PersonaSchema.index({ role: 1, department: 1 }); // Compound index +PersonaSchema.index({ name: "text", description: "text" }); // Text search + +/** + * Persona Model + * Singleton pattern to prevent model re-registration + * Collection name: "personas" + */ +export const Persona: Model = + mongoose.models.Persona || + mongoose.model("Persona", PersonaSchema, "personas"); + +export type { TPersonaModel, TPersonaInput } from './persona.model.type'; diff --git a/packages/controlmart/src/models/persona.model.type.ts b/packages/controlmart/src/models/persona.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..36cf3aba60fe27ee4d6d9f5515e27df250280386 --- /dev/null +++ b/packages/controlmart/src/models/persona.model.type.ts @@ -0,0 +1,93 @@ +/** + * Persona Model Type Definitions + * + * Personas represent supply chain roles (Store Manager, Warehouse Worker, etc.) + * and their associated capabilities. + */ + +/** + * Primary role categories for personas + */ +export type PersonaRole = 'operational' | 'management' | 'specialist' | 'system'; + +/** + * Department or functional area + */ +export type PersonaDepartment = + | 'warehouse' + | 'transportation' + | 'customer-service' + | 'inventory' + | 'edi' + | 'returns' + | 'store-operations'; + +/** + * Access level for capability restrictions + */ +export type PersonaAccessLevel = 'basic' | 'advanced' | 'admin'; + +/** + * Additional metadata for personas + */ +export interface PersonaMetadata { + /** Permission strings */ + permissions?: string[]; + + /** Access level for capability restrictions */ + accessLevel?: PersonaAccessLevel; + + /** Tags for filtering and grouping */ + tags?: string[]; + + /** Display order priority (lower = higher priority) */ + priority?: number; + + /** Additional custom metadata */ + [key: string]: any; +} + +/** + * Persona Model - MongoDB document structure + * Represents a supply chain role with associated capabilities. + * + * Note: Personas are GLOBAL resources (no worldRef field). + * They define reusable roles that can be referenced by worlds. 
+ */ +export interface TPersonaModel { + /** MongoDB document ID */ + _id: string; + + /** Unique identifier (kebab-case) */ + id: string; + + /** Display name */ + name: string; + + /** Description of the role and responsibilities */ + description: string; + + /** Primary role category */ + role: PersonaRole; + + /** Department or functional area */ + department?: PersonaDepartment; + + /** Capabilities this persona can perform */ + capabilityIds: string[]; + + /** Additional metadata */ + metadata?: PersonaMetadata; + + /** Timestamp when document was created */ + createdAt?: Date; + + /** Timestamp when document was last updated */ + updatedAt?: Date; +} + +/** + * Persona Input - Data structure for creating/updating personas + * Omits auto-generated fields (_id, createdAt, updatedAt) + */ +export type TPersonaInput = Omit; diff --git a/packages/controlmart/src/models/shared.model.ts b/packages/controlmart/src/models/shared.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..27e7f9ebb6c77c011fad78ff52c0aa1e014f1fe9 --- /dev/null +++ b/packages/controlmart/src/models/shared.model.ts @@ -0,0 +1,104 @@ +import mongoose, { Schema, Model } from "mongoose"; + +export const AddressSchema = new Schema( + { + type: { + type: String, + enum: ["BILL_TO", "SHIP_TO", "REM_TO", "PO_BOX", "OTHER"], + default: "OTHER", + }, + name: { type: String }, + attention: { type: String }, + street1: { type: String }, + street2: { type: String }, + city: { type: String }, + state: { type: String }, + postalCode: { type: String }, + country: { type: String }, + latitude: { type: Number }, + longitude: { type: Number }, + contactName: { type: String }, + contactPhone: { type: String }, + contactEmail: { type: String }, + }, + { _id: false }, +); + +export const MoneySchema = new Schema( + { + currency: { type: String, required: false, index: true }, + amount: { type: Number, required: false }, + }, + { _id: false }, +); + +export const TaxDetailSchema = new Schema( + { + taxType: { type: String }, + jurisdiction: { type: String }, + taxRate: { type: Number }, + taxAmount: { type: Number }, + taxableBase: { type: Number }, + taxId: { type: String }, + }, + { _id: false }, +); + +export const PartyRefSchema = new Schema( + { + partyType: { + type: String, + enum: ["CUSTOMER", "VENDOR", "SHIPPER", "CARRIER", "BILL_TO"], + default: "CUSTOMER", + }, + partyId: { type: String }, + externalId: { type: String }, + name: { type: String }, + duns: { type: String }, + }, + { _id: false }, +); + +export const LineReferenceSchema = new Schema( + { + lineNumber: { type: Number }, + poLineNumber: { type: String }, + refId: { type: String }, + }, + { _id: false }, +); + +export const WorldRefSchema = new Schema( + { + worldId: { type: String, required: true, index: true }, + traceId: { type: String }, + }, + { _id: false }, +); + +export const DimensionsSchema = new Schema( + { + length: Number, + width: Number, + height: Number, + unit: { type: String, default: "inches" }, + }, + { _id: false }, +); + +export const WeightSchema = new Schema( + { + value: Number, + unit: { type: String, default: "lbs" }, + }, + { _id: false }, +); + +export type TAddressModel = mongoose.InferSchemaType; +export type TMoneyModel = mongoose.InferSchemaType; +export type TTaxDetailModel = mongoose.InferSchemaType; +export type TPartyRefModel = mongoose.InferSchemaType; +export type TLineReferenceModel = mongoose.InferSchemaType; +export type TWorldRefModel = mongoose.InferSchemaType; +export type TDimensionsModel = 
mongoose.InferSchemaType; +export type TWeightModel = mongoose.InferSchemaType; diff --git a/packages/controlmart/src/models/tickets.model.ts b/packages/controlmart/src/models/tickets.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..b5a36ba39fba7c49e379aba473d470f96a8d3655 --- /dev/null +++ b/packages/controlmart/src/models/tickets.model.ts @@ -0,0 +1,126 @@ +import mongoose, { Model, Schema } from "mongoose"; + +import { AuditPlugin } from "./audit.plugin"; +import { WorldRefSchema } from "./shared.model"; + +const WorkNoteSchema = new Schema( + { + author: { + type: String, + required: true, + }, + note: { + type: String, + required: true, + }, + isPublic: { + type: Boolean, + default: false, + }, + }, + { timestamps: true }, +); + +const WorldItsmTicketingSchema = new Schema( + { + worldRef: WorldRefSchema, + title: { + type: String, + required: true, + trim: true, + }, + + description: { + type: String, + required: true, + }, + + requester: { + type: Schema.Types.ObjectId, + ref: "WorldUser", + required: true, + }, + + assignedTo: { + type: Schema.Types.ObjectId, + ref: "WorldUser", + default: null, + }, + + status: { + type: String, + enum: ["new", "open", "in_progress", "on_hold", "resolved", "closed"], + default: "new", + }, + + priority: { + type: String, + enum: ["low", "medium", "high", "critical"], + default: "medium", + }, + + impact: { + type: String, + enum: ["low", "medium", "high"], + default: "low", + }, + + urgency: { + type: String, + enum: ["low", "medium", "high"], + default: "low", + }, + + category: { + type: String, + default: null, + }, + + metadata: { + type: Schema.Types.Mixed, + default: {}, + }, + + type: { + type: String, + enum: ["incident", "service_request", "problem", "change"], + default: "incident", + }, + + attachments: [ + { + type: { url: String, filename: String, uploadedAt: Date }, + required: false, + default: [], + }, + ], + + resolutionNotes: { + type: String, + default: null, + }, + + workNotes: [ + { + type: WorkNoteSchema, + required: false, + default: [], + }, + ], + }, + { + timestamps: true, + }, +); + +WorldItsmTicketingSchema.plugin(AuditPlugin); + +export const WorldItsmTicket: Model = mongoose.model( + "WorldItsmTicket", + WorldItsmTicketingSchema, +); + +export type TWorldItsmTicketModel = mongoose.InferSchemaType; +export type TWorldItsmTicketInput = Omit; +export type TWorkNoteModel = mongoose.InferSchemaType; +export type TWorkNoteInput = Omit; diff --git a/packages/controlmart/src/models/tms/carrier.tms.model.ts b/packages/controlmart/src/models/tms/carrier.tms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..5fcd05a741f6ee3e5f96bcbbc0690b907df1d0fc --- /dev/null +++ b/packages/controlmart/src/models/tms/carrier.tms.model.ts @@ -0,0 +1,82 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const CarrierSchema = new Schema( + { + worldRef: WorldRefSchema, + carrierId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("tms", "carrier"), + }, + carrierCode: { type: String, required: true, index: true }, + carrierName: { type: String, required: true }, + + carrierType: { + type: String, + enum: ["LTL", "FTL", "PARCEL", "INTERMODAL", "COURIER", "RAIL", "AIR", "OCEAN"], + index: 
true, + }, + + status: { + type: String, + enum: ["ACTIVE", "INACTIVE", "SUSPENDED", "PENDING_APPROVAL"], + default: "ACTIVE", + index: true, + }, + + contact: { + primaryContactName: String, + email: String, + phone: String, + address: { + street: String, + city: String, + state: String, + zipCode: String, + country: String, + }, + }, + + compliance: { + dotNumber: String, + mcNumber: String, + scacCode: String, + smartWayCertified: { type: Boolean, default: false, index: true }, + insuranceExpiry: Date, + safetyRating: { + type: String, + enum: ["SATISFACTORY", "CONDITIONAL", "UNSATISFACTORY", "NOT_RATED"], + }, + }, + performance: { + onTimeDeliveryRate: { type: Number, default: 0 }, + damageClaimRate: { type: Number, default: 0 }, + averageTransitTime: Number, + totalShipmentsCompleted: { type: Number, default: 0 }, + lastPerformanceUpdate: Date, + }, + serviceRegions: [String], + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +CarrierSchema.index({ "worldRef.dcId": 1, status: 1 }); +CarrierSchema.index({ carrierCode: 1, carrierName: 1 }); + +CarrierSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(CarrierSchema, "TMS", "tms-carriers"); + +export const Carrier: Model = + mongoose.models.Carrier || + mongoose.model("Carrier", CarrierSchema, "tms-carriers"); +export type TCarrierModel = mongoose.InferSchemaType; +export type TCarrierInput = Omit; diff --git a/packages/controlmart/src/models/tms/inbound_trailer.tms.model.ts b/packages/controlmart/src/models/tms/inbound_trailer.tms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..16cff72d30bd616c30fc0e9150ea037af26a7d17 --- /dev/null +++ b/packages/controlmart/src/models/tms/inbound_trailer.tms.model.ts @@ -0,0 +1,112 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const InboundTrailerSchema = new Schema( + { + worldRef: WorldRefSchema, + trailerId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("tms", "inbound_trailer"), + }, + trailerNumber: { type: String, required: true, index: true }, + + carrierInfo: { + carrierId: { type: String, index: true }, + carrierName: String, + driverName: String, + driverPhone: String, + }, + + appointmentInfo: { + appointmentId: String, + scheduledArrival: { type: Date, required: true, index: true }, + scheduledDeparture: Date, + estimatedArrival: { type: Date, index: true }, + actualArrival: Date, + actualDeparture: Date, + dockDoor: String, + }, + + status: { + type: String, + enum: [ + "SCHEDULED", + "EN_ROUTE", + "CHECKED_IN", + "AT_DOCK", + "UNLOADING", + "UNLOADED", + "DEPARTED", + "CANCELLED", + "DELAYED", + ], + default: "SCHEDULED", + index: true, + }, + + facilityInfo: { + dcId: { type: String, index: true }, + facilityName: String, + address: { + street: String, + city: String, + state: String, + zipCode: String, + }, + }, + + shipmentIds: [String], + + cargo: { + purchaseOrders: [String], + expectedPallets: Number, + actualPallets: Number, + trailerType: { + type: String, + enum: ["DRY_VAN", "REEFER", "FLATBED", "TANKER", "INTERMODAL"], + }, + sealNumber: String, + }, + + delays: [ + { + delayType: { + type: String, + enum: ["TRAFFIC", "WEATHER", "CARRIER", "DOCK_AVAILABILITY", "OTHER"], + }, + reason: String, + reportedAt: Date, 
+ estimatedDelay: Number, + }, + ], + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +InboundTrailerSchema.index({ "worldRef.dcId": 1, status: 1 }); + +InboundTrailerSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(InboundTrailerSchema, "TMS", "tms-inbound-trailers"); + +export const InboundTrailer: Model = + mongoose.models.InboundTrailer || + mongoose.model( + "InboundTrailer", + InboundTrailerSchema, + "tms-inbound-trailers", + ); +export type TInboundTrailerModel = mongoose.InferSchemaType; +export type TInboundTrailerInput = Omit< + TInboundTrailerModel, + "worldRef" | "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/tms/index.ts b/packages/controlmart/src/models/tms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..175855d64a783fb7a43d137bc763862140087580 --- /dev/null +++ b/packages/controlmart/src/models/tms/index.ts @@ -0,0 +1,5 @@ +export * from "./carrier.tms.model"; +export * from "./lane.tms.model"; +export * from "./shipment.tms.model"; +export * from "./inbound_trailer.tms.model"; +export * from "./shipment_status_event.tms.model"; diff --git a/packages/controlmart/src/models/tms/lane.tms.model.ts b/packages/controlmart/src/models/tms/lane.tms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..91ad3467341a38f59e3fc77609f74bfb0e300f1f --- /dev/null +++ b/packages/controlmart/src/models/tms/lane.tms.model.ts @@ -0,0 +1,98 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const LaneSchema = new Schema( + { + worldRef: WorldRefSchema, + laneId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("tms", "lane"), + }, + origin: { + locationId: String, + locationName: String, + city: String, + state: String, + zipCode: String, + country: String, + coordinates: { + latitude: Number, + longitude: Number, + }, + }, + + destination: { + locationId: String, + locationName: String, + city: String, + state: String, + zipCode: String, + country: String, + coordinates: { + latitude: Number, + longitude: Number, + }, + }, + + laneType: { + type: String, + enum: ["INBOUND", "OUTBOUND", "INTER_FACILITY", "CUSTOMER_DELIVERY"], + index: true, + }, + + status: { + type: String, + enum: ["ACTIVE", "INACTIVE", "SEASONAL"], + default: "ACTIVE", + index: true, + }, + + distance: { + miles: Number, + kilometers: Number, + }, + + estimatedTransitTime: { + hours: Number, + businessDays: Number, + }, + + preferredCarriers: [ + { + carrierId: String, + carrierName: String, + priority: Number, // 1 = highest + }, + ], + + historicalData: { + averageCostPerMile: Number, + averageCostPerShipment: Number, + totalShipments: { type: Number, default: 0 }, + lastShipmentDate: Date, + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +LaneSchema.index({ "worldRef.dcId": 1, status: 1 }); +LaneSchema.index({ "origin.zipCode": 1, "destination.zipCode": 1 }); +LaneSchema.index({ laneType: 1, status: 1 }); + +LaneSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(LaneSchema, "TMS", "tms-lanes"); + +export const Lane: Model = + mongoose.models.Lane || mongoose.model("Lane", LaneSchema, "tms-lanes"); +export type TLaneModel = mongoose.InferSchemaType; +export type TLaneInput = 
Omit; diff --git a/packages/controlmart/src/models/tms/shipment.tms.model.ts b/packages/controlmart/src/models/tms/shipment.tms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..25f98b565abdee39b29c681da5dfa4e07114fcc1 --- /dev/null +++ b/packages/controlmart/src/models/tms/shipment.tms.model.ts @@ -0,0 +1,202 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const ShipmentSchema = new Schema( + { + worldRef: WorldRefSchema, + shipmentId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("tms", "shipment"), + }, + shipmentNumber: { type: String, required: true, index: true }, + + carrierInfo: { + carrierId: { type: String, index: true }, + carrierName: String, + carrierCode: String, + scacCode: String, + proNumber: String, + trackingNumber: String, + }, + + origin: { + locationId: String, + locationName: String, + address: { + street: String, + city: String, + state: String, + zipCode: String, + country: String, + }, + coordinates: { + latitude: Number, + longitude: Number, + }, + contactName: String, + contactPhone: String, + }, + + destination: { + locationId: String, + locationName: String, + address: { + street: String, + city: String, + state: String, + zipCode: String, + country: String, + }, + coordinates: { + latitude: Number, + longitude: Number, + }, + contactName: String, + contactPhone: String, + }, + + laneId: { type: String, index: true }, + + status: { + type: String, + enum: [ + "PLANNED", + "TENDERED", + "ACCEPTED", + "PICKED_UP", + "IN_TRANSIT", + "OUT_FOR_DELIVERY", + "DELIVERED", + "CANCELLED", + "DELAYED", + "EXCEPTION", + ], + default: "PLANNED", + index: true, + }, + + shipmentType: { + type: String, + enum: ["INBOUND", "OUTBOUND", "TRANSFER", "RETURN"], + index: true, + }, + + serviceLevel: { + type: String, + enum: ["STANDARD", "EXPEDITED", "NEXT_DAY", "TWO_DAY", "ECONOMY"], + index: true, + }, + + dates: { + plannedPickupDate: { type: Date, index: true }, + actualPickupDate: Date, + plannedDeliveryDate: { type: Date, index: true }, + estimatedDeliveryDate: { type: Date, index: true }, + actualDeliveryDate: Date, + }, + + cargo: { + totalWeight: Number, + totalWeightUOM: { type: String, default: "LBS" }, + totalVolume: Number, + totalVolumeUOM: { type: String, default: "CUFT" }, + palletCount: Number, + packageCount: Number, + commodityType: String, + hazmat: { type: Boolean, default: false }, + temperatureControlled: { type: Boolean, default: false }, + temperatureRange: { + min: Number, + max: Number, + uom: { type: String, enum: ["F", "C"] }, + }, + }, + + costs: { + baseCost: Number, + fuelSurcharge: Number, + accessorialCharges: Number, + totalCost: Number, + currency: { type: String, default: "USD" }, + costPerMile: Number, + }, + + routeInfo: { + plannedRoute: [ + { + stopNumber: Number, + locationName: String, + arrivalTime: Date, + departureTime: Date, + }, + ], + estimatedDistance: Number, + estimatedDuration: Number, + }, + + currentLocation: { + lastKnownPosition: { + latitude: Number, + longitude: Number, + }, + lastUpdateTime: Date, + currentCity: String, + currentState: String, + }, + + delays: [ + { + delayType: { + type: String, + enum: ["WEATHER", "TRAFFIC", "MECHANICAL", "CARRIER", "CUSTOMS", "OTHER"], + }, + reason: 
String, + startTime: Date, + endTime: Date, + estimatedDelay: Number, + }, + ], + + externalEvents: [ + { + eventType: String, + eventDescription: String, + eventTime: Date, + source: String, + }, + ], + + references: { + orderId: String, + purchaseOrderNumber: String, + invoiceNumber: String, + customerReference: String, + loadNumber: String, + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ShipmentSchema.index({ "worldRef.dcId": 1, status: 1 }); +ShipmentSchema.index({ "carrierInfo.carrierId": 1, status: 1 }); +ShipmentSchema.index({ shipmentType: 1, status: 1 }); +ShipmentSchema.index({ "references.orderId": 1 }); + +ShipmentSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(ShipmentSchema, "TMS", "tms-shipments"); + +export const Shipment: Model = + mongoose.models.TMSShipment || + mongoose.model("TMSShipment", ShipmentSchema, "tms-shipments"); +export type TShipmentModel = mongoose.InferSchemaType; +export type TShipmentInput = Omit; diff --git a/packages/controlmart/src/models/tms/shipment_status_event.tms.model.ts b/packages/controlmart/src/models/tms/shipment_status_event.tms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..a869a63723d62670cdb25cc871f38e6edf8c5fbe --- /dev/null +++ b/packages/controlmart/src/models/tms/shipment_status_event.tms.model.ts @@ -0,0 +1,91 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const ShipmentStatusEventSchema = new Schema( + { + worldRef: WorldRefSchema, + eventId: { + type: String, + required: false, + unique: true, + index: true, + default: () => generateIdByService("tms", "shipment_status_event"), + }, + shipmentId: { type: String, required: true, index: true }, + + eventType: { + type: String, + enum: ["STATUS_CHANGE", "LOCATION_UPDATE", "ETA_UPDATE", "DELAY", "EXCEPTION", "MILESTONE"], + required: true, + index: true, + }, + + eventTime: { type: Date, required: true, index: true }, + + statusInfo: { + previousStatus: String, + newStatus: String, + }, + + locationInfo: { + latitude: Number, + longitude: Number, + city: String, + state: String, + zipCode: String, + facility: String, + }, + + etaInfo: { + previousETA: Date, + newETA: Date, + delayMinutes: Number, + }, + + exceptionInfo: { + exceptionType: String, + severity: { + type: String, + enum: ["LOW", "MEDIUM", "HIGH", "CRITICAL"], + }, + description: String, + resolution: String, + }, + + source: { + type: String, + enum: ["API", "EDI", "MANUAL", "GPS", "CARRIER_PORTAL"], + default: "API", + }, + + rawData: Schema.Types.Mixed, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ShipmentStatusEventSchema.index({ shipmentId: 1, eventTime: -1 }); +ShipmentStatusEventSchema.index({ "worldRef.dcId": 1, eventType: 1 }); +ShipmentStatusEventSchema.index({ eventTime: -1 }); + +ShipmentStatusEventSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(ShipmentStatusEventSchema, "TMS", "tms-shipment-events"); + +export const ShipmentStatusEvent: Model = + mongoose.models.ShipmentStatusEvent || + mongoose.model( + "ShipmentStatusEvent", + ShipmentStatusEventSchema, + "tms-shipment-events", + ); +export type TShipmentStatusEventModel = mongoose.InferSchemaType; +export type TShipmentStatusEventInput = Omit< + TShipmentStatusEventModel, + "worldRef" | 
"createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/wms/bin.wms.model.ts b/packages/controlmart/src/models/wms/bin.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..e122954acfbb11f2539fadefc46077ad53a6624d --- /dev/null +++ b/packages/controlmart/src/models/wms/bin.wms.model.ts @@ -0,0 +1,68 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const BinSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + binId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "bin"), + }, + binCode: { type: String, required: true, index: true }, + zoneId: { type: String, required: true, index: true }, + aisleId: String, + location: { + aisle: String, + bay: String, + level: Number, + position: String, + }, + binType: { + type: String, + enum: ["PALLET", "SHELF", "FLOOR", "CASE_FLOW", "RESERVE", "PICK_FACE"], + }, + locationType: { + type: String, + enum: ["STORAGE", "STAGING", "DOCK", "QC", "RETURN"], + }, + capacity: { + maxWeightLbs: Number, + maxCubicFeet: Number, + maxPallets: { type: Number, default: 1 }, + }, + status: { + type: String, + enum: ["AVAILABLE", "OCCUPIED", "RESERVED", "DAMAGED", "BLOCKED"], + default: "AVAILABLE", + index: true, + }, + abcClassification: { + type: String, + enum: ["A", "B", "C"], + index: true, + }, + pickable: { type: Boolean, default: true }, + lastInventoryCheck: Date, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +BinSchema.index({ abcClassification: 1, pickable: 1 }); +BinSchema.index({ worldRef: 1, warehouseId: 1, binCode: 1 }, { unique: true }); + +BinSchema.plugin(AuditPlugin); +// attachBusinessRuleHooks(BinSchema, "WMS", "Bin"); + +export const Bin: Model = + mongoose.models.Bin || mongoose.model("Bin", BinSchema, "wms_bins"); +export type TBinModel = mongoose.InferSchemaType; +export type TBinInput = Omit; diff --git a/packages/controlmart/src/models/wms/cycle_count.wms.model.ts b/packages/controlmart/src/models/wms/cycle_count.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..bb2f8a17994784197725562c09261fee2548d9af --- /dev/null +++ b/packages/controlmart/src/models/wms/cycle_count.wms.model.ts @@ -0,0 +1,104 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const CycleCountSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true, unique: false }, + cycleCountId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "cycle-count"), + }, + + countType: { + type: String, + enum: ["DAILY", "WEEKLY", "MONTHLY", "ABC", "FULL", "SPOT", "BLIND"], + index: true, + }, + + countStatus: { + type: String, + enum: ["SCHEDULED", "IN_PROGRESS", "COMPLETED", "APPROVED", "REJECTED", "CANCELLED"], + default: "SCHEDULED", + index: true, + }, + + schedule: { + scheduledDate: { type: Date, index: true }, + startDate: Date, + completedDate: 
Date, + }, + + scope: { + zoneId: String, + binIds: [String], + productIds: [String], + abcClassification: String, + }, + + assignments: [ + { + userId: String, + userName: String, + assignedBins: [String], + status: String, + }, + ], + + counts: [ + { + binId: String, + productId: String, + sku: String, + lotNumber: String, + systemQuantity: Number, + countedQuantity: Number, + variance: Number, + variancePercent: Number, + countedBy: String, + countedAt: Date, + reconciledBy: String, + reconciledAt: Date, + notes: String, + }, + ], + + summary: { + totalBins: Number, + totalProducts: Number, + itemsMatched: Number, + itemsVariance: Number, + accuracyPercent: Number, + totalVarianceValue: Number, + }, + + approvedBy: String, + approvedAt: Date, + + notes: String, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +CycleCountSchema.index( + { worldRef: 1, warehouseId: 1, "schedule.scheduledDate": 1 }, + { sparse: true }, +); + +CycleCountSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(CycleCountSchema, "WMS", "CycleCount"); + +export const CycleCount: Model = + mongoose.models.CycleCount || + mongoose.model("CycleCount", CycleCountSchema, "wms_cycle-counts"); +export type TCycleCountModel = mongoose.InferSchemaType; +export type TCycleCountInput = Omit; diff --git a/packages/controlmart/src/models/wms/daily_metrics.wms.model.ts b/packages/controlmart/src/models/wms/daily_metrics.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..3a1a887702f2f7dd4d4aeb4669e2cca167e016fd --- /dev/null +++ b/packages/controlmart/src/models/wms/daily_metrics.wms.model.ts @@ -0,0 +1,97 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const DailyMetricsSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + metricId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "daily-metrics"), + }, + + date: { type: Date, required: true, index: true }, + shift: String, + zoneId: String, + + inbound: { + poReceived: Number, + linesReceived: Number, + unitsReceived: Number, + palletsReceived: Number, + receivingHours: Number, + unitsPerHour: Number, + }, + + putaway: { + putawayTasks: Number, + palletsPutaway: Number, + putawayHours: Number, + palletsPerHour: Number, + }, + + picking: { + ordersShipped: Number, + linesPicked: Number, + unitsPicked: Number, + pickingHours: Number, + linesPerHour: Number, + unitsPerHour: Number, + pickAccuracy: Number, + }, + + packing: { + ordersPacked: Number, + packagesPacked: Number, + packingHours: Number, + ordersPerHour: Number, + }, + + shipping: { + shipmentsCreated: Number, + carriersDispatched: Number, + packagesShipped: Number, + }, + + labor: { + totalWorkers: Number, + totalHours: Number, + productiveHours: Number, + indirectHours: Number, + utilizationPercent: Number, + }, + + inventory: { + onHandUnits: Number, + inventoryValue: Number, + turnoverRate: Number, + }, + + quality: { + pickErrors: Number, + packErrors: Number, + damageReports: Number, + returnsProcessed: Number, + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +DailyMetricsSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(DailyMetricsSchema, "WMS", 
"wms_daily-metrics"); + +export const DailyMetrics: Model = + mongoose.models.DailyMetrics || + mongoose.model("DailyMetrics", DailyMetricsSchema, "wms_daily-metrics"); +export type TDailyMetricsModel = mongoose.InferSchemaType; +export type TDailyMetricsInput = Omit; diff --git a/packages/controlmart/src/models/wms/dc.wms.model.ts b/packages/controlmart/src/models/wms/dc.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..2be6b3bdc898aadc1123337aaa259e0799b099c3 --- /dev/null +++ b/packages/controlmart/src/models/wms/dc.wms.model.ts @@ -0,0 +1,69 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AddressSchema, WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const DistributionCenterSchema = new Schema( + { + worldRef: WorldRefSchema, + dcId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "distribution-center"), + }, + warehouseId: { type: String, required: true, index: true }, + dcName: { type: String, required: true }, + dcType: { + type: String, + enum: ["FULFILLMENT", "CROSS_DOCK", "COLD_STORAGE", "3PL"], + }, + address: AddressSchema, + timezone: String, + totalSqFootage: Number, + operationalStatus: { + type: String, + enum: ["ACTIVE", "INACTIVE", "MAINTENANCE"], + default: "ACTIVE", + index: true, + }, + operatingHours: { + monday: { open: String, close: String }, + tuesday: { open: String, close: String }, + wednesday: { open: String, close: String }, + thursday: { open: String, close: String }, + friday: { open: String, close: String }, + saturday: { open: String, close: String }, + sunday: { open: String, close: String }, + }, + contactInfo: { + phone: String, + email: String, + manager: String, + }, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +DistributionCenterSchema.plugin(AuditPlugin); + +DistributionCenterSchema.index({ worldRef: 1, dcName: 1 }, { unique: true }); + +// attachBusinessRuleHooks(DistributionCenterSchema, "WMS", "wms_distribution-center"); + +export const DistributionCenter: Model = + mongoose.models.DistributionCenter || + mongoose.model( + "DistributionCenter", + DistributionCenterSchema, + "wms_distribution-centers", + ); +export type TDistributionCenterModel = mongoose.InferSchemaType; +export type TDistributionCenterInput = Omit< + TDistributionCenterModel, + "worldRef" | "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/wms/dock_door.wms.model.ts b/packages/controlmart/src/models/wms/dock_door.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..4663167462822e95b12c196279ec1e9e7ef0e317 --- /dev/null +++ b/packages/controlmart/src/models/wms/dock_door.wms.model.ts @@ -0,0 +1,103 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const DockDoorSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + dockDoorId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "dock-door"), + }, + doorNumber: { type: String, required: 
true }, + doorType: { + type: String, + enum: ["INBOUND", "OUTBOUND", "CROSS_DOCK"], + index: true, + }, + status: { + type: String, + enum: ["AVAILABLE", "OCCUPIED", "MAINTENANCE", "CLOSED"], + default: "AVAILABLE", + index: true, + }, + + zoneId: String, + + capabilities: { + maxTrailerLength: Number, + maxTrailerHeight: Number, + levelingDock: { type: Boolean, default: false }, + hydraulicLeveler: { type: Boolean, default: false }, + restraintSystem: { type: Boolean, default: false }, + weatherSeal: { type: Boolean, default: false }, + }, + + equipment: { + forkliftAccess: { type: Boolean, default: true }, + conveyorSystem: { type: Boolean, default: false }, + scales: { type: Boolean, default: false }, + lightSystem: { type: Boolean, default: true }, + }, + + currentAppointment: { + appointmentId: String, + carrier: String, + trailerNumber: String, + startTime: Date, + expectedEndTime: Date, + }, + + currentTrailer: { + trailerId: String, + trailerNumber: String, + sealNumbers: [String], + arrivalTime: Date, + }, + + operatingHours: { + monday: { open: String, close: String }, + tuesday: { open: String, close: String }, + wednesday: { open: String, close: String }, + thursday: { open: String, close: String }, + friday: { open: String, close: String }, + saturday: { open: String, close: String }, + sunday: { open: String, close: String }, + }, + + safety: { + emergencyStop: { type: Boolean, default: true }, + safetyLights: { type: Boolean, default: true }, + lastSafetyInspection: Date, + nextSafetyInspection: Date, + }, + + maintenance: { + lastMaintenance: Date, + nextMaintenance: Date, + maintenanceNotes: String, + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +DockDoorSchema.plugin(AuditPlugin); + +DockDoorSchema.index({ worldRef: 1, warehouseId: 1, doorNumber: 1 }, { unique: true }); + +// attachBusinessRuleHooks(DockDoorSchema, "WMS", "DockDoor"); + +export const DockDoor: Model = + mongoose.models.DockDoor || + mongoose.model("DockDoor", DockDoorSchema, "wms_dock-doors"); +export type TDockDoorModel = mongoose.InferSchemaType; +export type TDockDoorInput = Omit; diff --git a/packages/controlmart/src/models/wms/inbound_order.wms.model.ts b/packages/controlmart/src/models/wms/inbound_order.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..79a03bf48a9b6f31c4d47e91a6fe065e89071db9 --- /dev/null +++ b/packages/controlmart/src/models/wms/inbound_order.wms.model.ts @@ -0,0 +1,96 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const InboundOrderLineSchema = new Schema( + { + lineNumber: Number, + productId: { type: String, required: true }, + sku: String, + productName: String, + expectedQuantity: { type: Number, required: true, default: 1 }, + receivedQuantity: { type: Number, default: 0 }, + uom: String, + lotNumber: String, + expirationDate: { + type: Date, + required: false, + }, + lineStatus: { + type: String, + enum: ["EXPECTED", "RECEIVING", "RECEIVED", "CLOSED"], + default: "EXPECTED", + }, + }, + { _id: false }, +); + +const InboundOrderSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + inboundOrderId: { + type: String, + required: true, + unique: true, + index: true, + default: () => 
generateIdByService("wms", "inbound-order"), + }, + poNumber: { type: String, index: true }, + asnNumber: String, + + vendor: { + vendorId: String, + vendorName: String, + contactEmail: String, + contactPhone: String, + }, + + orderType: { + type: String, + enum: ["PO", "RETURN", "TRANSFER", "SAMPLE"], + }, + + orderStatus: { + type: String, + enum: ["EXPECTED", "IN_TRANSIT", "RECEIVING", "RECEIVED", "CLOSED", "CANCELLED"], + default: "EXPECTED", + index: true, + }, + + dates: { + expectedArrival: { type: Date, index: true }, + actualArrival: Date, + receivingStarted: Date, + receivingCompleted: Date, + }, + + appointmentId: String, + + totals: { + pallets: Number, + cases: Number, + units: Number, + expectedLines: Number, + receivedLines: Number, + }, + lines: [InboundOrderLineSchema], + receivingNotes: String, + damageReport: String, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +InboundOrderSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(InboundOrderSchema, "WMS", "InboundOrder"); + +export const InboundOrder: Model = + mongoose.models.InboundOrder || + mongoose.model("InboundOrder", InboundOrderSchema, "wms_inbound-orders"); +export type TInboundOrderModel = mongoose.InferSchemaType; +export type TInboundOrderInput = Omit; diff --git a/packages/controlmart/src/models/wms/inbound_receiving_transaction.wms.model.ts b/packages/controlmart/src/models/wms/inbound_receiving_transaction.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..13b62320653d96f6f79e04a7299e96f5926bb36b --- /dev/null +++ b/packages/controlmart/src/models/wms/inbound_receiving_transaction.wms.model.ts @@ -0,0 +1,96 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const ReceivingTransactionSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + receivingId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "receiving-transaction"), + }, + inboundOrderId: { type: String, required: true, index: true }, + inboundLineId: String, + + productId: { type: String, required: true }, + sku: String, + productName: { + type: String, + required: false, + }, + licensePlateNumber: { + type: String, + required: false, + }, + lotNumber: { + type: String, + required: false, + }, + receivedQuantity: { type: Number, required: true }, + uom: String, + + dockDoorId: String, + + receivingStatus: { + type: String, + enum: ["RECEIVED", "QC_HOLD", "PUTAWAY_PENDING", "COMPLETED", "REJECTED"], + default: "RECEIVED", + index: true, + }, + + quality: { + status: { type: String, enum: ["PASS", "FAIL", "PENDING"] }, + inspectedBy: { + type: String, + required: false, + }, + inspectedAt: { + type: Date, + required: false, + }, + notes: { + type: String, + required: false, + }, + }, + + receivedBy: String, + receivedAt: { type: Date, default: Date.now, index: true }, + + damage: { + damaged: Boolean, + damageType: String, + damagedQuantity: Number, + notes: String, + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ReceivingTransactionSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(ReceivingTransactionSchema, "WMS", "ReceivingTransaction"); + +export const ReceivingTransaction: Model 
= + mongoose.models.ReceivingTransaction || + mongoose.model( + "ReceivingTransaction", + ReceivingTransactionSchema, + "wms_receiving-transactions", + ); +export type TReceivingTransactionModel = mongoose.InferSchemaType< + typeof ReceivingTransactionSchema +>; +export type TReceivingTransactionInput = Omit< + TReceivingTransactionModel, + "worldRef" | "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/wms/index.ts b/packages/controlmart/src/models/wms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..485406b3b7dd7881e9eeb249d16847cf258ac3c7 --- /dev/null +++ b/packages/controlmart/src/models/wms/index.ts @@ -0,0 +1,24 @@ +// Core WMS Infrastructure Models +export * from "./dc.wms.model"; +export * from "./warehouses.model"; +export * from "./zone.wms.model"; +export * from "./bin.wms.model"; +export * from "./dock_door.wms.model"; + +// Inventory & Transaction Models +export * from "./inventory.wms.model"; +export * from "./inventory_transaction.wms.model"; +export * from "./cycle_count.wms.model"; + +// Order & Shipment Models +export * from "./inbound_order.wms.model"; +export * from "./inbound_receiving_transaction.wms.model"; +export * from "./outbound_order.wms.model"; +export * from "./outbound_shipment.wms.model"; + +// Operations Models +export * from "./task.wms.model"; +export * from "./replenishment.wms.model"; + +// Metrics & Reporting Models +export * from "./daily_metrics.wms.model"; diff --git a/packages/controlmart/src/models/wms/inventory.wms.model.ts b/packages/controlmart/src/models/wms/inventory.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..e1801480ef689d8cd1282fe3180ef65e748ca2a0 --- /dev/null +++ b/packages/controlmart/src/models/wms/inventory.wms.model.ts @@ -0,0 +1,125 @@ +import mongoose, { Schema, Model, Query } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; +import type { UpdateQuery } from "mongoose"; + +const InventorySchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + inventoryId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "inventory"), + }, + binId: { type: String, required: true, index: true }, + productId: { type: String, required: true, index: true }, + + sku: { type: String, required: true, index: true }, + productName: String, + + lotNumber: { type: String, index: true }, + serialNumber: String, + licensePlateNumber: String, + + receivedDate: { type: Date, index: true }, + expirationDate: { type: Date, index: true }, + manufactureDate: Date, + + quantityOnHand: { type: Number, required: true, default: 0, min: 0 }, + quantityAllocated: { type: Number, default: 0, min: 0 }, + quantityAvailable: { type: Number, min: 0 }, + quantityQuarantine: { type: Number, default: 0 }, + quantityDamaged: { type: Number, default: 0 }, + uom: { type: String, default: "EACH" }, + + inventoryStatus: { + type: String, + enum: ["AVAILABLE", "ALLOCATED", "QUARANTINE", "DAMAGED", "EXPIRED", "HOLD"], + default: "AVAILABLE", + index: true, + }, + holdReason: String, + + lastCountedAt: Date, + lastMovementAt: Date, + + allocations: [ + { + orderId: String, + orderLineId: String, + allocatedQuantity: Number, + allocatedAt: Date, + taskId: String, + 
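+        // priority below is a free-form label for the allocating order/task; the schema does not constrain its values (assumption: callers pass labels such as "RUSH" / "NORMAL").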
priority: String, + }, + ], + + container: { + licensePlateNumber: String, + palletId: String, + containerType: String, + nested: Boolean, + parentContainer: String, + }, + + fifoFefo: { + fifoDate: Date, + fefoDate: Date, + rotationRule: { + type: String, + enum: ["FIFO", "FEFO", "LIFO", "MANUAL"], + default: "FIFO", + }, + }, + + cost: { + unitCost: Number, + totalValue: Number, + currency: String, + costMethod: String, + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +InventorySchema.pre(["save", "findOneAndUpdate"], function (next) { + if (this instanceof mongoose.Document) { + const doc = this as any; + doc.quantityAvailable = (doc.quantityOnHand ?? 0) - (doc.quantityAllocated ?? 0); + return next(); + } + const query = this as Query & { getUpdate?: () => UpdateQuery }; + + const update = query.getUpdate?.() as UpdateQuery | undefined; + if (!update) return next(); + if (!update.$set) update.$set = {}; + const qty = update.quantityOnHand ?? update.$set.quantityOnHand ?? undefined; + const alloc = update.quantityAllocated ?? update.$set.quantityAllocated ?? undefined; + if (qty !== undefined || alloc !== undefined) { + const newQty = qty ?? 0; + const newAlloc = alloc ?? 0; + + update.$set.quantityAvailable = newQty - newAlloc; + query.setUpdate(update); + } + + next(); +}); + +InventorySchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(InventorySchema, "WMS", "Inventory"); + +export const Inventory: Model = + mongoose.models.Inventory || + mongoose.model("Inventory", InventorySchema, "wms_inventory"); +export type TInventoryModel = mongoose.InferSchemaType; +export type TInventoryInput = Omit; diff --git a/packages/controlmart/src/models/wms/inventory_transaction.wms.model.ts b/packages/controlmart/src/models/wms/inventory_transaction.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..3973d4c1d0022d23aad1d205c2270e8dfae33fb1 --- /dev/null +++ b/packages/controlmart/src/models/wms/inventory_transaction.wms.model.ts @@ -0,0 +1,190 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; +import { Inventory } from "./inventory.wms.model"; +import { Product } from "../erp/product.erp.model"; + +const InventoryTransactionSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + transactionId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "inventory-transaction"), + }, + transactionType: { + type: String, + required: true, + enum: [ + "RECEIVE", + "PUTAWAY", + "PICK", + "MOVE", + "ADJUST", + "CYCLE_COUNT", + "RETURN", + "DAMAGE", + "SHIP", + ], + index: true, + }, + productId: { type: String, required: true, index: true }, + sku: String, + fromBinId: String, + toBinId: String, + + lotNumber: String, + licensePlateNumber: String, + quantity: { type: Number, required: true }, + uom: String, + + referenceType: { + type: String, + enum: ["PO", "ORDER", "TASK", "CYCLE_COUNT", "INITIAL_LOAD"], + }, + referenceId: String, + transactionDate: { type: Date, default: Date.now, index: true }, + userId: String, + userName: String, + reasonCode: String, + notes: String, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + 
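+
+/*
+ * Illustrative usage sketch (commented out, not part of the runtime code): saving an
+ * InventoryTransaction is the write path for stock levels. The pre("save") hook below
+ * looks up the matching Product in the same world, derives a signed quantity delta from
+ * transactionType (RECEIVE / PUTAWAY / RETURN add, PICK / SHIP / DAMAGE subtract, MOVE
+ * decrements the source bin first), and upserts the matching wms_inventory record.
+ * All identifiers here ("WH-01", "BIN-A-01-1", "PO-1001", ...) are hypothetical placeholders.
+ *
+ *   await InventoryTransaction.create({
+ *     worldRef: { worldId: "world-123" },
+ *     warehouseId: "WH-01",
+ *     transactionType: "RECEIVE",
+ *     productId: "PROD-001",
+ *     sku: "SKU-001",
+ *     toBinId: "BIN-A-01-1",
+ *     lotNumber: "LOT-2024-01",
+ *     quantity: 50,
+ *     uom: "EACH",
+ *     referenceType: "PO",
+ *     referenceId: "PO-1001",
+ *   });
+ *   // => upserts the wms_inventory record keyed by world, warehouse, bin, product, sku and lot,
+ *   //    incrementing quantityOnHand / quantityAvailable by +50. A matching Product document
+ *   //    must already exist in the same world, otherwise the hook rejects the save.
+ */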
+InventoryTransactionSchema.plugin(AuditPlugin); + +// Transaction types that add to inventory +const ADD_TYPES = ["RECEIVE", "PUTAWAY", "RETURN"]; +// Transaction types that subtract from inventory +const SUBTRACT_TYPES = ["PICK", "SHIP", "DAMAGE"]; + +InventoryTransactionSchema.pre("save", async function (next) { + const doc = this; + + const worldId = doc.worldRef?.worldId; + if (!worldId) { + return next(new Error("World reference is required")); + } + + // Robust product lookup - try multiple matching strategies + const orConditions: any[] = [ + { productId: doc.productId }, + { name: doc.productId }, + ]; + + if (doc.sku) { + orConditions.push({ name: doc.sku }); + orConditions.push({ description: { $regex: doc.sku, $options: "i" } }); + } + + const product = await Product.findOne({ + $or: orConditions, + "worldRef.worldId": worldId, + }); + + if (!product) { + return next(new Error(`Product not found: ${doc.productId}`)); + } + + // Calculate expiration date based on lead time + const leadTimeDays = product.leadTimeDays ?? 0; + const receivedDate = new Date(doc.transactionDate); + const expirationDate = new Date(receivedDate.getTime() + leadTimeDays * 24 * 60 * 60 * 1000); + + // Determine quantity change based on transaction type + let quantityDelta = doc.quantity; + if (SUBTRACT_TYPES.includes(doc.transactionType)) { + quantityDelta = -doc.quantity; // Negative for outbound transactions + } + + try { + // Handle MOVE transactions: decrement from source bin first + if (doc.transactionType === "MOVE" && doc.fromBinId) { + await Inventory.findOneAndUpdate( + { + "worldRef.worldId": worldId, + warehouseId: doc.warehouseId, + binId: doc.fromBinId, + productId: doc.productId, + sku: doc.sku, + lotNumber: doc.lotNumber, + }, + { + $inc: { quantityOnHand: -doc.quantity }, + $set: { lastMovementAt: doc.transactionDate }, + }, + ); + // For destination, we add (positive delta) + quantityDelta = doc.quantity; + } + + // Determine which bin to use for the inventory record + const targetBinId = doc.toBinId || doc.fromBinId; + + // Inventory lookup key - uniquely identifies an inventory record + // lotNumber is the primary batch differentiator - OD must provide it + const inventoryKey = { + "worldRef.worldId": worldId, + warehouseId: doc.warehouseId, + binId: targetBinId, + productId: doc.productId, + sku: doc.sku ?? 
doc.productId, + lotNumber: doc.lotNumber, + }; + + // Use findOneAndUpdate with upsert to create or update inventory + await Inventory.findOneAndUpdate( + inventoryKey, + { + $inc: { quantityOnHand: quantityDelta, quantityAvailable: quantityDelta }, + $set: { + lastMovementAt: doc.transactionDate, + productName: product.description || product.name, + licensePlateNumber: doc.licensePlateNumber, + uom: doc.uom, + }, + $setOnInsert: { + worldRef: doc.worldRef, + warehouseId: doc.warehouseId, + binId: targetBinId, + productId: doc.productId, + sku: doc.sku, + lotNumber: doc.lotNumber, + receivedDate: doc.transactionDate, + expirationDate: expirationDate, + inventoryStatus: "AVAILABLE", + }, + }, + { upsert: true, new: true }, + ); + } catch (error) { + console.error("Inventory update error:", error); + } + next(); +}); + +// attachBusinessRuleHooks(InventoryTransactionSchema, "WMS", "InventoryTransaction"); + +export const InventoryTransaction: Model = + mongoose.models.InventoryTransaction || + mongoose.model( + "InventoryTransaction", + InventoryTransactionSchema, + "wms_inventory-transactions", + ); +export type TInventoryTransactionModel = mongoose.InferSchemaType< + typeof InventoryTransactionSchema +>; +export type TInventoryTransactionInput = Omit< + TInventoryTransactionModel, + "worldRef" | "createdAt" | "updatedAt" +>; diff --git a/packages/controlmart/src/models/wms/outbound_order.wms.model.ts b/packages/controlmart/src/models/wms/outbound_order.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..1315a8e55d5d0636f8cdb45b3403f7ef9cc05180 --- /dev/null +++ b/packages/controlmart/src/models/wms/outbound_order.wms.model.ts @@ -0,0 +1,149 @@ + +import mongoose, { Schema, Model } from "mongoose"; +import { AddressSchema, WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const OutboundOrderLineSchema = new Schema( + { + lineNumber: Number, + orderLineId: String, + productId: { type: String, required: true }, + productName: String, + orderedQuantity: { type: Number, required: true }, + allocatedQuantity: { type: Number, default: 0 }, + pickedQuantity: { type: Number, default: 0 }, + shippedQuantity: { type: Number, default: 0 }, + uom: String, + lineStatus: { + type: String, + enum: [ + "CREATED", + "ALLOCATED", + "PICKING", + "PICKED", + "PACKED", + "SHIPPED", + "SHORT", + "CANCELLED", + ], + default: "CREATED", + }, + lotNumber: String, + serialNumbers: [String], + }, + { _id: false }, +); + +const OutboundOrderSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + orderId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "outbound-order"), + }, + orderNumber: { type: String, required: true, index: true }, + + orderType: { + type: String, + enum: ["SALES", "TRANSFER", "REPLENISHMENT", "RETURN", "SAMPLE"], + }, + + customer: { + customerId: String, + customerName: String, + accountNumber: String, + }, + + destinationType: { + type: String, + enum: ["CUSTOMER", "STORE", "DC"], + }, + + shipToAddress: AddressSchema, + + orderPriority: { + type: String, + enum: ["RUSH", "URGENT", "NORMAL", "STANDARD"], + default: "NORMAL", + index: true, + }, + + shipping: { + carrierCode: String, + carrierName: String, + serviceLevel: { + type: String, + enum: ["GROUND", 
"2_DAY", "NEXT_DAY", "STANDARD", "EXPRESS"], + default: "GROUND", + }, + shippingAccount: String, + }, + + orderStatus: { + type: String, + enum: [ + "CREATED", + "RELEASED", + "ALLOCATED", + "PICKING", + "PICKED", + "PACKING", + "PACKED", + "SHIPPED", + "CANCELLED", + "CLOSED", + ], + default: "CREATED", + index: true, + }, + + dates: { + orderDate: Date, + requiredShipDate: { type: Date, index: true }, + scheduledShipDate: Date, + actualShipDate: Date, + promisedDeliveryDate: Date, + }, + + totals: { + lines: Number, + units: Number, + weight: Number, + cube: Number, + value: Number, + }, + + lines: [OutboundOrderLineSchema], + + waveId: String, + shipmentId: String, + + tracking: { + trackingNumbers: [String], + proNumber: String, + }, + + notes: String, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +OutboundOrderSchema.plugin(AuditPlugin); + +// Prevent duplicate order numbers within the same world +OutboundOrderSchema.index({ "worldRef.worldId": 1, orderNumber: 1 }, { unique: true }); + +// attachBusinessRuleHooks(OutboundOrderSchema, "WMS", "OutboundOrder"); + +export const OutboundOrder: Model = + mongoose.models.OutboundOrder || + mongoose.model("OutboundOrder", OutboundOrderSchema, "wms_outbound-orders"); +export type TOutboundOrderModel = mongoose.InferSchemaType; +export type TOutboundOrderInput = Omit; diff --git a/packages/controlmart/src/models/wms/outbound_shipment.wms.model.ts b/packages/controlmart/src/models/wms/outbound_shipment.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..180347c943df3cce6eb50cf7f309f90409c31c89 --- /dev/null +++ b/packages/controlmart/src/models/wms/outbound_shipment.wms.model.ts @@ -0,0 +1,146 @@ + +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AddressSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const ShipmentLineSchema = new Schema( + { + lineNumber: Number, + orderId: String, + orderLineId: String, + sku: String, + productName: String, + quantityShipped: Number, + quantityOrdered: Number, + unitOfMeasure: String, + lotNumber: String, + serialNumbers: [String], + palletId: String, + packageCount: Number, + weight: Number, + customFields: Schema.Types.Mixed, + }, + { _id: false }, +); + +const ShipmentSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + shipmentId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "outbound-shipment"), + }, + + carrier: { + name: String, + scac: String, + mode: { + type: String, + enum: ["PARCEL", "LTL", "TL"], + }, + }, + + serviceLevel: String, + trackingNumber: String, + trailerNumber: String, + + dockDoorId: String, + + shipmentStatus: { + type: String, + enum: [ + "CREATED", + "HU_CREATED", + "MANIFESTED", + "LOADING", + "LOADED", + "SHIPPED", + "IN_TRANSIT", + "DELIVERED", + "EXCEPTION", + ], + default: "CREATED", + index: true, + }, + + orders: { + type: [ + new Schema( + { + orderId: String, + }, + { _id: false }, + ), + ], + default: [], + }, + + totals: new Schema( + { + packages: Number, + pallets: Number, + weight: Number, + cube: Number, + value: Number, + }, + { _id: false }, + ), + + dates: new Schema( + { + shipDate: Date, + manifestDate: Date, + loadStartTime: Date, + 
loadEndTime: Date, + actualShipTime: Date, + estimatedDeliveryDate: Date, + actualDeliveryDate: Date, + }, + { _id: false }, + ), + + lines: [ShipmentLineSchema], + + fromAddress: AddressSchema, + toAddress: AddressSchema, + + documents: [{ type: String, url: String, documentType: String, _id: false }], + + events: { + type: [ + new Schema({ + timestamp: Date, + location: String, + status: String, + note: String, + source: String, + }), + ], + default: [], + }, + + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ShipmentSchema.plugin(AuditPlugin); + +// Prevent duplicate shipments within the same world (redundant with unique ID but good practice) +ShipmentSchema.index({ "worldRef.worldId": 1, shipmentId: 1 }, { unique: true }); + +// attachBusinessRuleHooks(ShipmentSchema, "WMS", "Shipment"); + +export const Shipment: Model = + mongoose.models.Shipment || + mongoose.model("WMSShipment", ShipmentSchema, "wms_outbound-shipments"); +export type TShipmentModel = mongoose.InferSchemaType; +export type TShipmentInput = Omit; diff --git a/packages/controlmart/src/models/wms/replenishment.wms.model.ts b/packages/controlmart/src/models/wms/replenishment.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..c82af0c7cc582fbaf282f522a092a79f610e6dc8 --- /dev/null +++ b/packages/controlmart/src/models/wms/replenishment.wms.model.ts @@ -0,0 +1,82 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const ReplenishmentSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { type: String, required: true, index: true }, + replenishmentId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "replenishment"), + }, + + replenishmentType: { + type: String, + enum: ["MIN_MAX", "WAVE_BASED", "DEMAND_BASED", "TIME_BASED"], + index: true, + }, + + priority: { type: Number, default: 50 }, + + status: { + type: String, + enum: ["SUGGESTED", "APPROVED", "TASK_CREATED", "IN_PROGRESS", "COMPLETED", "CANCELLED"], + default: "SUGGESTED", + index: true, + }, + + productId: { type: String, required: true, index: true }, + sku: String, + + fromBin: { + binId: String, + binCode: String, + availableQuantity: Number, + }, + + toBin: { + binId: String, + binCode: String, + currentQuantity: Number, + minQuantity: Number, + maxQuantity: Number, + }, + + quantity: { + suggested: Number, + approved: Number, + actual: Number, + uom: String, + }, + + taskId: String, + + suggestedAt: { type: Date, default: Date.now }, + approvedAt: Date, + completedAt: Date, + + approvedBy: String, + completedBy: String, + + notes: String, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ReplenishmentSchema.plugin(AuditPlugin); + +// attachBusinessRuleHooks(ReplenishmentSchema, "WMS", "Replenishment"); + +export const Replenishment: Model = + mongoose.models.Replenishment || + mongoose.model("Replenishment", ReplenishmentSchema, "wms_replenishments"); +export type TReplenishmentModel = mongoose.InferSchemaType; +export type TReplenishmentInput = Omit; diff --git a/packages/controlmart/src/models/wms/task.wms.model.ts b/packages/controlmart/src/models/wms/task.wms.model.ts new file mode 100644 index 
0000000000000000000000000000000000000000..788541b97e464ffa583fa8c9ed011b2555128850 --- /dev/null +++ b/packages/controlmart/src/models/wms/task.wms.model.ts @@ -0,0 +1,177 @@ + +import mongoose, { Schema, Model } from "mongoose"; +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; + +const TaskDetailSchema = new Schema( + { + detailId: String, + productId: String, + sku: String, + binId: String, + lotNumber: String, + quantity: Number, + pickedQuantity: Number, + uom: String, + sequenceNumber: Number, + detailStatus: { + type: String, + enum: ["PENDING", "IN_PROGRESS", "COMPLETED", "SHORT", "SKIPPED"], + default: "PENDING", + }, + }, + { _id: false }, +); + +const TaskScanSchema = new Schema( + { + scanType: { + type: String, + enum: ["BIN", "PRODUCT", "LPN", "DESTINATION"], + required: true, + }, + scannedValue: String, + expectedValue: String, + scanResult: { + type: String, + enum: ["MATCH", "MISMATCH", "OVERRIDE"], + }, + scannedAt: { type: Date, default: Date.now }, + }, + { _id: false }, +); + +const TaskSchema = new Schema( + { + worldRef: WorldRefSchema, + taskId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "task"), + }, + warehouseId: { type: String, required: true, index: true }, + + taskType: { + type: String, + required: true, + enum: [ + "PICK", + "PUTAWAY", + "REPLENISHMENT", + "CYCLE_COUNT", + "MOVE", + "LOAD", + "UNLOAD", + "PACK", + "SORT", + ], + index: true, + }, + + taskSubtype: { + type: String, + enum: ["DISCRETE", "BATCH", "CLUSTER", "ZONE"], + index: true, + }, + + priority: { type: Number, default: 50, index: true }, + + taskStatus: { + type: String, + enum: [ + "CREATED", + "RELEASED", + "ASSIGNED", + "IN_PROGRESS", + "COMPLETED", + "CANCELLED", + "SUSPENDED", + ], + default: "CREATED", + index: true, + }, + + reference: { + type: { + type: String, + enum: ["PO", "ORDER", "WAVE", "INBOUND", "REPLENISHMENT"], + }, + id: String, + }, + + product: { + productId: String, + sku: String, + productName: String, + }, + + from: { + binId: String, + binCode: String, + zoneId: String, + }, + + to: { + binId: String, + binCode: String, + zoneId: String, + }, + + lotNumber: String, + licensePlateNumber: String, + + quantity: { + requested: Number, + actual: Number, + uom: String, + }, + + assignment: { + userId: String, + userName: String, + equipmentId: String, + assignedAt: Date, + }, + + zoneId: String, + + timing: { + createdAt: Date, + releasedAt: Date, + assignedAt: Date, + startedAt: Date, + completedAt: Date, + estimatedDuration: Number, + actualDuration: Number, + }, + + completedBy: String, + details: [TaskDetailSchema], + scans: [TaskScanSchema], + + performance: { + unitsPerHour: Number, + accuracy: Number, + }, + + notes: String, + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +TaskDetailSchema.plugin(AuditPlugin); + +// Prevent duplicate tasks within the same world +TaskSchema.index({ "worldRef.worldId": 1, taskId: 1 }, { unique: true }); + +// attachBusinessRuleHooks(TaskSchema, "WMS", "Task"); + +export const Task: Model = + mongoose.models.Task || mongoose.model("Task", TaskSchema, "wms_tasks"); +export type TTaskModel = mongoose.InferSchemaType; +export type TTaskInput = Omit; diff --git a/packages/controlmart/src/models/wms/warehouses.model.ts 
b/packages/controlmart/src/models/wms/warehouses.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..4fa0700cd5731222e848b4897a758957b810b1c8 --- /dev/null +++ b/packages/controlmart/src/models/wms/warehouses.model.ts @@ -0,0 +1,62 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AddressSchema, WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; +import slugify from "slugify"; + +const WarehouseSchema = new Schema( + { + worldRef: WorldRefSchema, + warehouseId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "warehouse"), + }, + warehouseCode: { type: String, required: true, index: true }, + warehouseName: { type: String, required: true }, + address: { + type: AddressSchema, + required: true, + }, + timezone: { type: String, required: true }, + warehouseType: { + type: String, + enum: ["FULFILLMENT", "STAGING", "RETURNS", "3PL", "VIRTUAL"], + index: true, + }, + status: { + type: String, + enum: ["ACTIVE", "DISABLED", "ARCHIVED"], + default: "ACTIVE", + index: true, + }, + }, + { timestamps: true }, +); + +WarehouseSchema.plugin(AuditPlugin); + +WarehouseSchema.pre("save", function (next) { + if (!this.warehouseCode) { + this.warehouseCode = slugify(this.warehouseName, { + lower: true, + strict: true, + }); + } + next(); +}); + +// attachBusinessRuleHooks(WarehouseSchema, "WMS", "Warehouse"); + +export const Warehouse: Model = + mongoose.models.Warehouse || + mongoose.model("Warehouse", WarehouseSchema, "wms_warehouses"); +export type TWarehouseModel = mongoose.InferSchemaType; +export type TWarehouseInput = Omit< + TWarehouseModel, + "worldRef" | "createdAt" | "updatedAt" | "warehouseId" | "warehouseCode" +>; diff --git a/packages/controlmart/src/models/wms/zone.wms.model.ts b/packages/controlmart/src/models/wms/zone.wms.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..81ea2489eb9dc9018c4f3ca9859f0106ab91657c --- /dev/null +++ b/packages/controlmart/src/models/wms/zone.wms.model.ts @@ -0,0 +1,60 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { WorldRefSchema } from "../shared.model"; +import { AuditPlugin } from "../audit.plugin"; +import { attachBusinessRuleHooks } from "../../business-rules/mongoose-hooks"; +import { generateIdByService } from "../../utils/mongo.util"; +import slugify from "slugify"; + +const ZoneSchema = new Schema( + { + worldRef: WorldRefSchema, + zoneId: { + type: String, + required: true, + unique: true, + index: true, + default: () => generateIdByService("wms", "zone"), + }, + warehouseId: { type: String, required: true, index: true }, + zoneCode: { type: String, required: true, index: true }, + zoneName: { + type: String, + required: true, + }, + zoneType: { + type: String, + enum: ["RECEIVING", "STORAGE", "PICKING", "PACKING", "SHIPPING", "STAGING", "QC", "RETURNS"], + index: true, + }, + temperatureControlled: { type: Boolean, default: false }, + temperatureRange: { min: Number, max: Number, unit: String }, + capacityCubicFeet: Number, + aisles: [{ aisleId: String, aisleCode: String, aisleType: String }], + customFields: Schema.Types.Mixed, + }, + { timestamps: true }, +); + +ZoneSchema.index({ worldRef: 1, warehouseId: 1, zoneCode: 1 }, { unique: true }); +ZoneSchema.plugin(AuditPlugin); + +ZoneSchema.pre("save", function (next) { 
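+  // Auto-generate zoneCode as a slug of zoneName when none is provided (same pattern as the WarehouseSchema pre("save") hook above).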
+ if (!this.zoneCode) { + this.zoneCode = slugify(this.zoneName, { + lower: true, + strict: true, + }); + } + next(); +}); + +// attachBusinessRuleHooks(ZoneSchema, "WMS", "Zone"); + +export const Zone: Model = + mongoose.models.Zone || mongoose.model("Zone", ZoneSchema, "wms_zones"); +export type TZoneModel = mongoose.InferSchemaType; +export type TZoneInput = Omit< + TZoneModel, + "worldRef" | "createdAt" | "updatedAt" | "zoneId" | "zoneCode" +>; diff --git a/packages/controlmart/src/models/world.model.ts b/packages/controlmart/src/models/world.model.ts new file mode 100644 index 0000000000000000000000000000000000000000..68c601cfe1f925e1039519f3d039c31acdfe11e5 --- /dev/null +++ b/packages/controlmart/src/models/world.model.ts @@ -0,0 +1,56 @@ +import mongoose, { Schema, Model } from "mongoose"; + +import { AuditPlugin } from "./audit.plugin"; + +const world = new Schema( + { + name: { type: String, required: true }, + url: { type: String }, + apiKey: { type: String }, + apiSecret: { type: String }, + is_default: { type: Boolean, default: false, required: false }, + description: { type: String }, + layout: { type: String, required: false }, + mpcCompany: { type: String }, + realHoursPerSimDay: { type: Number, default: 2, required: false }, + samplingStrategy: { type: Schema.Types.Mixed, required: false }, + capabilityIds: { type: [String], required: false, default: [] }, + chaos: { + processChaosEnabled: { type: Boolean, default: false }, + infraChaosEnabled: { type: Boolean, default: false }, + }, + personas: { type: Schema.Types.Mixed, required: false }, + ticketCreationEnabled: { type: Boolean, default: true }, + }, + { timestamps: true }, +); + +world.plugin(AuditPlugin); + +// CRITICAL INDEXES - World queries currently do collection scans without these +// Unique business key for world lookups by name +world.index({ name: 1 }, { unique: true }); + +// Boolean flag queries (frequently used for default world retrieval) +world.index({ is_default: 1 }); + +// Authentication lookup - compound index for credential verification +// Sparse: only create index entries when both fields are present +world.index({ apiKey: 1, apiSecret: 1 }, { unique: true, sparse: true }); + +// Company filtering (used in world filtering queries) +world.index({ mpcCompany: 1 }); + +// URL-based routing (sparse: only when url is provided) +world.index({ url: 1 }, { unique: true, sparse: true }); + +// Common filter combination for company-specific default worlds +world.index({ is_default: 1, mpcCompany: 1 }); + +export const World: Model = + (mongoose.models.World as Model) || mongoose.model("World", world); + +export type TWorldModel = mongoose.InferSchemaType; +export type TWorldInput = Omit; +// Also re-export from type file for compatibility +export type { TWorld } from './world.model.type'; diff --git a/packages/controlmart/src/models/world.model.type.ts b/packages/controlmart/src/models/world.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..19efc1f703f2d6e15a9f30f2b9628047fbf7ac4e --- /dev/null +++ b/packages/controlmart/src/models/world.model.type.ts @@ -0,0 +1,60 @@ +import type { ChaosPolicy } from '../types/od.type'; + +export interface CapabilityFilter { + domain?: string[]; + complexity?: 'simple' | 'medium' | 'complex'; + services?: string[]; + personas?: string[]; + patterns?: string[]; +} + +export type SamplingStrategy = + | { type: 'all' } + | { type: 'filter'; filter: CapabilityFilter } + | { type: 'random'; count: number; seed?: number } + | { type: 'seeded'; 
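+    // Reading of the type shape (not documented elsewhere): 'seeded' mirrors 'random' but makes the
+    // seed mandatory, presumably so capability sampling can be reproduced across runs.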
count: number; seed: number }; + +/** + * World persona configuration + * Controls which personas can access the world and custom capability mappings + */ +export interface PersonaConfig { + /** + * List of allowed persona IDs that can access this world + * If not specified, all personas have access + */ + allowedPersonas?: string[]; + + /** + * Custom persona-to-capability mappings for this world + * Overrides the default capability assignments for specific personas + */ + personaOverrides?: Record; +} + + +export interface TWorldChaosConfig { + processChaosEnabled: boolean; + infraChaosEnabled: boolean; +} + +export interface TWorld { + _id: string; + name: string; + url: string; + apiKey: string; + apiSecret: string; + is_default?: boolean; + description?: string; + mpcCompany?: string; + samplingStrategy?: SamplingStrategy; + capabilityIds?: string[]; + chaos?: TWorldChaosConfig; + personas?: PersonaConfig; + + ticketCreationEnabled?: boolean; + createdAt?: Date; + updatedAt?: Date; +} + +export type TWorldInput = Omit; diff --git a/packages/controlmart/src/ods/index.ts b/packages/controlmart/src/ods/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..c1ee19b98006c0e59d34de6e7f72f5bbc2e6e1ce --- /dev/null +++ b/packages/controlmart/src/ods/index.ts @@ -0,0 +1,25 @@ +/** + * OD Registry Initialization + * + * This file registers all ODs with the ODRegistry singleton. + * ODs are loaded from JSON files in the worlds folder. + * Import this file at application startup to make all ODs available. + */ + +import { ODRegistry } from "../services/od-registry.service"; +import { ODs as perishablesODs } from "../worlds/perishables-food-manufacturer/ods"; +import { ODs as manufacturingODs } from "../worlds/manufacturing-unit/ods"; +import { processOutboundODs } from "../worlds/process-outbound/index"; + +/** + * Initialize the OD Registry with all available ODs + * Registers ODs from both perishables-food-manufacturer and manufacturing-unit worlds + */ +export function initializeODRegistry(): void { + + ODRegistry.registerODs(perishablesODs); + + ODRegistry.registerODs(manufacturingODs); + + ODRegistry.registerODs([processOutboundODs.outbound_order]); +} diff --git a/packages/controlmart/src/operational-descriptor/chaos-engine.od.ts b/packages/controlmart/src/operational-descriptor/chaos-engine.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..a790b895933d639a067b4a89aa48b60fe04cc4d2 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/chaos-engine.od.ts @@ -0,0 +1,596 @@ +import type { Logger } from "pino"; + +import type { + ChaosPolicy, + ChaosScenario, + ChaosConfig, + ChaosInjectionResult, +} from "../types/od.type"; + +export type RandomFunction = () => number; +export type ChaosContext = { + stepId: string; + service?: string; + tool?: string; +}; + +export type ChaosResult = { + output: T; + chaosResult: ChaosInjectionResult | null; +}; + +export const createSeededRandom = (seed: string): RandomFunction => { + let hash = 0; + for (let i = 0; i < seed.length; i++) { + hash = (hash << 5) - hash + seed.charCodeAt(i); + hash = hash & hash; + } + + return () => { + hash = (hash * 9301 + 49297) % 233280; + return hash / 233280; + }; +}; + +export const defaultRandom: RandomFunction = Math.random; + +export const shouldInjectChaos = ( + policy: ChaosPolicy, + random: RandomFunction = defaultRandom, +): boolean => { + if (!policy.enabled) return false; + return random() < policy.probability; +}; + +export const selectScenario = ( + 
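+  // Weighted random pick: each scenario is selected in proportion to its `weight`
+  // (e.g. weights of 35/30/20/15, as used by the generic builder's defaults, translate to roughly those percentages over many runs).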
scenarios: ChaosScenario[], + random: RandomFunction = defaultRandom, +): ChaosScenario => { + if (scenarios.length === 0) { + throw new Error("No chaos scenarios provided"); + } + + const totalWeight = scenarios.reduce((sum, s) => sum + s.weight, 0); + let randomValue = random() * totalWeight; + + for (const scenario of scenarios) { + randomValue -= scenario.weight; + if (randomValue <= 0) { + return scenario; + } + } + + return scenarios[scenarios.length - 1]!; +}; + +export const hasField = (obj: any, path: string): boolean => { + return ( + path.split(".").reduce((current, key) => current && current.hasOwnProperty(key), obj) !== + undefined + ); +}; + +export const getField = (obj: any, path: string): any => { + return path.split(".").reduce((current, key) => current?.[key], obj); +}; + +export const setField = (obj: any, path: string, value: any): any => { + const keys = path.split("."); + const lastKey = keys.pop()!; + const result = { ...obj }; + + const target = keys.reduce((current, key) => { + if (!current[key]) current[key] = {}; + return current[key]; + }, result); + + target[lastKey] = value; + return result; +}; + +export const deleteField = (obj: any, path: string): any => { + const keys = path.split("."); + const result = { ...obj }; + + if (keys.length === 1) { + const key = keys[0] as string; + const { [key]: _, ...rest } = result; + return rest; + } else { + const lastKey = keys.pop()!; + const target = keys.reduce((current, key) => current?.[key], result); + if (target && typeof target === "object") { + delete target[lastKey]; + } + return result; + } +}; + +export const injectDataCorruption = ( + data: any, + config: ChaosConfig, + random: RandomFunction = defaultRandom, +): { output: any; modifications: string[] } => { + if (!data || typeof data !== "object") { + return { output: data, modifications: [] }; + } + + const modifications: string[] = []; + let corrupted = JSON.parse(JSON.stringify(data)); + const fieldsToCorrupt = config.corruptFields || []; + console.log("fieldsToCorrupt", fieldsToCorrupt, corrupted); + for (const field of fieldsToCorrupt) { + if (hasField(corrupted, field)) { + const originalValue = getField(corrupted, field); + let corruptedValue; + + switch (config.corruptionType) { + case "null": + corruptedValue = null; + break; + case "wrong_type": + corruptedValue = typeof originalValue === "string" ? 12345 : "corrupted_string"; + break; + case "invalid_format": + corruptedValue = + typeof originalValue === "string" + ? originalValue.split("").reverse().join("") + : "INVALID_FORMAT"; + break; + case "random_value": + corruptedValue = `CHAOS_${random().toString(36).substr(2, 9)}`; + break; + default: + corruptedValue = null; + } + + corrupted = setField(corrupted, field, corruptedValue); + modifications.push(`Corrupted field '${field}': ${originalValue} → ${corruptedValue}`); + } + } + + return { output: corrupted, modifications }; +}; + +export const injectMissingData = ( + data: any, + config: ChaosConfig, +): { output: any; modifications: string[] } => { + const modifications: string[] = []; + + if (config.missingRecords) { + modifications.push("Returned empty result set"); + + if (config.throwError !== false) { + throw new Error("User not found - may have been deleted"); + } + + return { + output: Array.isArray(data) ? 
[] : null, + modifications, + }; + } + + if (!data || typeof data !== "object") { + return { output: data, modifications }; + } + + let modified = JSON.parse(JSON.stringify(data)); + const fieldsToRemove = config.missingFields || []; + + for (const field of fieldsToRemove) { + if (hasField(modified, field)) { + modified = deleteField(modified, field); + modifications.push(`Removed field '${field}'`); + } + } + + return { output: modified, modifications }; +}; + +export const injectStaleData = ( + data: any, + config: ChaosConfig, +): { output: any; modifications: string[] } => { + if (!data || typeof data !== "object") { + return { output: data, modifications: [] }; + } + + const modifications: string[] = []; + const staleAgeMs = (config.staleDataAge || 60) * 60 * 1000; + const staleDate = new Date(Date.now() - staleAgeMs); + + let modified = JSON.parse(JSON.stringify(data)); + + const timestampFields = [ + "createdAt", + "updatedAt", + "lastModified", + "timestamp", + "created_at", + "updated_at", + ]; + + for (const field of timestampFields) { + if (hasField(modified, field)) { + modified = setField(modified, field, staleDate.toISOString()); + modifications.push(`Made field '${field}' stale (${config.staleDataAge} minutes old)`); + } + } + + return { output: modified, modifications }; +}; + +export const injectFormatChange = ( + data: any, + config: ChaosConfig, +): { output: any; modifications: string[] } => { + if (!data || typeof data !== "object") { + return { output: data, modifications: [] }; + } + + const modifications: string[] = []; + let modified = JSON.parse(JSON.stringify(data)); + const changes = config.schemaChanges || []; + + for (const change of changes) { + switch (change.change) { + case "rename": + if (hasField(modified, change.field)) { + const value = getField(modified, change.field); + modified = deleteField(modified, change.field); + modified = setField(modified, change.newName!, value); + modifications.push(`Renamed field '${change.field}' to '${change.newName}'`); + } + break; + + case "remove": + if (hasField(modified, change.field)) { + modified = deleteField(modified, change.field); + modifications.push(`Removed field '${change.field}'`); + } + break; + + case "change_type": + if (hasField(modified, change.field)) { + const originalValue = getField(modified, change.field); + let newValue; + + switch (change.newType) { + case "string": + newValue = String(originalValue); + break; + case "number": + newValue = typeof originalValue === "string" ? 
parseInt(originalValue) || 0 : 0; + break; + case "boolean": + newValue = Boolean(originalValue); + break; + default: + newValue = originalValue; + } + + modified = setField(modified, change.field, newValue); + modifications.push(`Changed type of '${change.field}' to ${change.newType}`); + } + break; + } + } + + return { output: modified, modifications }; +}; + +export const injectPartialData = ( + data: any, + config: ChaosConfig, + random: RandomFunction = defaultRandom, +): { output: any; modifications: string[] } => { + if (!Array.isArray(data)) { + return { output: data, modifications: [] }; + } + + const modifications: string[] = []; + const partial = config.partialResults!; + const targetCount = Math.floor(data.length * (partial.percentage / 100)); + + let result; + if (partial.randomize) { + const shuffled = [...data].sort(() => random() - 0.5); + result = shuffled.slice(0, targetCount); + } else { + result = data.slice(0, targetCount); + } + + modifications.push( + `Returned partial data: ${result.length}/${data.length} items (${partial.percentage}%)`, + ); + + return { output: result, modifications }; +}; + +export const injectDuplicateData = ( + data: any, + random: RandomFunction = defaultRandom, +): { output: any; modifications: string[] } => { + if (!Array.isArray(data) || data.length === 0) { + return { output: data, modifications: [] }; + } + + const modifications: string[] = []; + const duplicateCount = Math.min(3, Math.floor(data.length / 2)); + const itemsToDuplicate = []; + + for (let i = 0; i < duplicateCount; i++) { + const randomIndex = Math.floor(random() * data.length); + itemsToDuplicate.push(JSON.parse(JSON.stringify(data[randomIndex]))); + } + + const result = [...data, ...itemsToDuplicate]; + modifications.push(`Added ${duplicateCount} duplicate records`); + + return { output: result, modifications }; +}; + +export const injectInvalidState = ( + data: any, + config: ChaosConfig, + random: RandomFunction = defaultRandom, +): { output: any; modifications: string[] } => { + if (!data || typeof data !== "object") { + return { output: data, modifications: [] }; + } + + const modifications: string[] = []; + let modified = JSON.parse(JSON.stringify(data)); + const invalidStates = config.invalidStates || [ + "deleted", + "suspended", + "inactive", + "pending_deletion", + ]; + + if (Array.isArray(modified)) { + const corruptCount = Math.min(2, modified.length); + for (let i = 0; i < corruptCount; i++) { + const randomIndex = Math.floor(random() * modified.length); + const invalidState = invalidStates[Math.floor(random() * invalidStates.length)]; + + if (modified[randomIndex] && typeof modified[randomIndex] === "object") { + modified[randomIndex] = { + ...modified[randomIndex], + status: invalidState, + active: false, + }; + modifications.push(`Set item ${randomIndex} to invalid state: ${invalidState}`); + } + } + } else { + const invalidState = invalidStates[Math.floor(random() * invalidStates.length)]; + modified = { + ...modified, + status: invalidState, + active: false, + }; + modifications.push(`Set record to invalid state: ${invalidState}`); + } + + return { output: modified, modifications }; +}; + +export const injectTimingIssue = ( + data: any, + config: ChaosConfig, +): { output: any; modifications: string[] } => { + if (!data || typeof data !== "object") { + return { output: data, modifications: [] }; + } + + const modifications: string[] = []; + let modified = JSON.parse(JSON.stringify(data)); + + const futureDate = new Date(Date.now() + 86400000); + + const 
timestampFields = ["createdAt", "updatedAt", "scheduledAt", "timestamp"]; + + for (const field of timestampFields) { + if (hasField(modified, field)) { + modified = setField(modified, field, futureDate.toISOString()); + modifications.push(`Set '${field}' to future timestamp`); + } + } + + return { output: modified, modifications }; +}; + +export const injectRateLimit = async (config: ChaosConfig): Promise => { + const delay = config.rateLimitDelay || 1000; + await new Promise((resolve) => setTimeout(resolve, delay)); +}; + +export const injectChaos = async ( + originalOutput: T, + policy: ChaosPolicy, + context: ChaosContext, + logger: Logger, + random: RandomFunction = defaultRandom, +): Promise> => { + if (!shouldInjectChaos(policy, random)) { + return { output: originalOutput, chaosResult: null }; + } + + if (policy.preprocessInput) { + originalOutput = policy.preprocessInput(originalOutput); + } + + const scenario = selectScenario(policy.scenarios, random); + const persistCorrupted = policy.persistCorruptedData || false; + + logger.warn( + { + stepId: context.stepId, + chaosType: scenario.type, + scenario: scenario.description, + service: context.service, + tool: context.tool, + persistCorruptedData: persistCorrupted, + }, + " Chaos injection triggered", + ); + + try { + let modifications: string[] = []; + let modifiedOutput: T; + + switch (scenario.type) { + case "data_corruption": { + const result = injectDataCorruption(originalOutput, scenario.config, random); + modifiedOutput = result.output; + modifications = result.modifications; + break; + } + + case "missing_data": { + const result = injectMissingData(originalOutput, scenario.config); + modifiedOutput = result.output; + modifications = result.modifications; + break; + } + + case "stale_data": { + const result = injectStaleData(originalOutput, scenario.config); + modifiedOutput = result.output; + modifications = result.modifications; + break; + } + + case "format_change": { + const result = injectFormatChange(originalOutput, scenario.config); + modifiedOutput = result.output; + modifications = result.modifications; + break; + } + + case "permission_denied": + if (persistCorrupted) { + // Instead of failing, return corrupted data indicating permission issues + modifiedOutput = { + ...originalOutput, + _chaosError: "PERMISSION_DENIED", + _errorMessage: scenario.config.permissionError || "Permission denied - chaos injection", + _corruptedAt: new Date().toISOString(), + } as T; + modifications.push("Permission denied error converted to corrupted data"); + } else { + throw new Error(scenario.config.permissionError || "Permission denied - chaos injection"); + } + break; + + case "rate_limit": + if (persistCorrupted) { + await injectRateLimit(scenario.config); + modifiedOutput = { + ...originalOutput, + _chaosError: "RATE_LIMITED", + _errorMessage: + scenario.config.rateLimitMessage || "Rate limit exceeded - chaos injection", + _corruptedAt: new Date().toISOString(), + } as T; + modifications.push("Rate limit error converted to corrupted data"); + } else { + await injectRateLimit(scenario.config); + throw new Error( + scenario.config.rateLimitMessage || "Rate limit exceeded - chaos injection", + ); + } + break; + + case "partial_data": { + const result = injectPartialData(originalOutput, scenario.config, random); + modifiedOutput = result.output; + modifications = result.modifications; + break; + } + + case "duplicate_data": { + const result = injectDuplicateData(originalOutput, random); + modifiedOutput = result.output; + 
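+        // injectDuplicateData appends up to three deep-copied records chosen at random, so the
+        // step's stored output contains duplicate rows that otherwise look valid.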
modifications = result.modifications; + break; + } + + case "invalid_state": { + const result = injectInvalidState(originalOutput, scenario.config, random); + modifiedOutput = result.output; + modifications = result.modifications; + break; + } + + case "dependency_failure": + if (persistCorrupted) { + modifiedOutput = { + ...originalOutput, + _chaosError: "DEPENDENCY_FAILURE", + _errorMessage: `Dependency service ${scenario.config.dependencyService} unavailable - chaos injection`, + _failedService: scenario.config.dependencyService, + _corruptedAt: new Date().toISOString(), + } as T; + modifications.push( + `Dependency failure for ${scenario.config.dependencyService} converted to corrupted data`, + ); + } else { + throw new Error( + `Dependency service ${scenario.config.dependencyService} unavailable - chaos injection`, + ); + } + break; + + case "timing_issue": { + const result = injectTimingIssue(originalOutput, scenario.config); + modifiedOutput = result.output; + modifications = result.modifications; + break; + } + + default: + modifiedOutput = originalOutput; + modifications = []; + } + + const chaosResult: ChaosInjectionResult = { + triggered: true, + scenario, + injectedAt: new Date().toISOString(), + originalOutput, + modifications, + }; + + if (policy.postprocessOutput) { + modifiedOutput = policy.postprocessOutput(modifiedOutput, scenario.type); + } + + return { output: modifiedOutput, chaosResult }; + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + const chaosResult: ChaosInjectionResult = { + triggered: true, + scenario, + injectedAt: new Date().toISOString(), + originalOutput, + modifications: [`Error thrown: ${errorMessage}`], + }; + + throw error; + } +}; + +export const createChaosInjector = (logger: Logger, seed?: string) => { + const random = seed ? createSeededRandom(seed) : defaultRandom; + + return ( + originalOutput: T, + policy: ChaosPolicy, + context: ChaosContext, + ): Promise> => { + return injectChaos(originalOutput, policy, context, logger, random); + }; +}; diff --git a/packages/controlmart/src/operational-descriptor/executor.od.ts b/packages/controlmart/src/operational-descriptor/executor.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..152cd1e250a396c66a139a862579c0473846ae35 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/executor.od.ts @@ -0,0 +1,282 @@ +import Ajv from "ajv"; + +import type { + OperationalDescriptor, + StepExecutionResult, + RunResult, + Context, + ExecutionOptions, +} from "../types/od.type"; +import { OD_SCHEMA } from "./schema.od"; +import { runStep } from "./run-step.od"; +import { evaluateAssertion } from "./run-helper.od"; +import { getIdFromMongoObject } from "../utils/mongo.util"; +import { v4 as uuidv4 } from "uuid"; +import { createMongoLogQueueStorage } from "./log-queue.storage"; +import type { Logger } from "pino"; + +const defaultValidator = new Ajv({ allErrors: true, useDefaults: true, strict: false }); +defaultValidator.addKeyword("function", { + keyword: "isFunction", + validate: (schema: any, data: any) => { + return typeof data === "function"; + }, + errors: true, +}); + +export async function executeOperationalDescriptor( + od: OperationalDescriptor, + options: ExecutionOptions, +): Promise { + const baseLogger = options.logger; + const validator = options.validator ?? 
defaultValidator; + const { world } = options; + + if (options.validator && !validator.getKeyword("function")) { + validator.addKeyword("function", { + keyword: "isFunction", + validate: (schema: any, data: any) => { + return typeof data === "function"; + }, + errors: true, + }); + } + + + const validate = validator.compile(OD_SCHEMA); + const isValid = validate(od); + + if (!isValid) { + const validationErrors = validate.errors + ?.map((e) => `${e.instancePath} ${e.message}`) + .join(", "); + + baseLogger.error( + { + descriptorId: od.id, + errors: validate.errors, + }, + `OD schema validation failed: ${validationErrors}`, + ); + + throw new Error(`Invalid Operational Descriptor: ${validationErrors}`); + } + + // Validate Runtime Inputs if inputSchema is present + if (od.inputSchema) { + // Ensure input object exists for validation/defaults + options.input = options.input || {}; + + // We need a fresh compile for the input schema as it varies per OD + const validateInput = validator.compile(od.inputSchema); + const isInputValid = validateInput(options.input); + + if (!isInputValid) { + const inputErrors = validateInput.errors + ?.map((e) => `${e.instancePath || 'root'} ${e.message}`) + .join(", "); + + baseLogger.error( + { + descriptorId: od.id, + input: options.input, + errors: validateInput.errors + }, + `OD Input validation failed: ${inputErrors}` + ); + throw new Error(`Invalid OD Inputs: ${inputErrors}`); + } + } + + const runId = uuidv4(); + + + const worldId = getIdFromMongoObject(world); + if (!worldId) { + throw new Error("World ID not found in execution context"); + } + + const logQueueStorage = options.logQueueStorage ?? createMongoLogQueueStorage(runId, worldId, { + id: od.id, + name: od.name, + persona: od.persona, + }); + options.logQueueStorage = logQueueStorage; + + options.odMetadata = { + id: od.id, + name: od.name, + persona: od.persona, + }; + + const auditLogger = options.auditLogger?.child({ + worldId: getIdFromMongoObject(world), + runId, + odId: od.id, + }); + + const createLogProxy = (targetLogger: Logger, auditLogger?: Logger): Logger => { + const handler: ProxyHandler = { + get(target, prop, receiver) { + if (prop === "child") { + return (...args: any[]) => { + const childLogger = (target as any).child(...args); + const childAuditLogger = auditLogger ? 
(auditLogger as any).child(...args) : undefined; + // Recursively wrap the child logger, passing the new child audit logger + // This ensures subsequent .child() calls keep chaining the audit logger + return createLogProxy(childLogger, childAuditLogger); + }; + } + + if (typeof target[prop as keyof Logger] === "function" && ["info", "warn", "error", "debug"].includes(String(prop))) { + return (...args: any[]) => { + const level = String(prop); + + try { + let msg = ""; + if (typeof args[0] === "string") { + msg = args[0]; + } else if (typeof args[0] === "object" && args[1] && typeof args[1] === "string") { + msg = args[1]; + } else if (typeof args[0] === "object") { + msg = args[0].msg || args[0].message || JSON.stringify(args[0]); + } else { + msg = String(args[0]); + } + logQueueStorage.addLog(`[${level.toUpperCase()}] ${msg}`); + } catch (e) { } + + if (auditLogger) { + try { + (auditLogger as any)[level](...args); + } catch (e) { } + } + return (target[prop as keyof Logger] as Function).apply(target, args); + }; + } + + return Reflect.get(target, prop, receiver); + } + }; + + return new Proxy(targetLogger, handler); + }; + // Pass the initial auditLogger to the first proxy creation + const logger = createLogProxy(baseLogger, auditLogger); + + const ctx: Context = { + ...options.input, + __world: world, + __startTime: new Date().toISOString(), + }; + + const runStartTime = Date.now(); + const stepResults: StepExecutionResult[] = []; + + logger.info( + { + descriptorId: od.id, + descriptorVersion: od.version, + descriptorName: od.name, + }, + `Starting OD execution: ${od.name} v${od.version}`, + ); + + let runStatus: "success" | "failed" | "partial" = "success"; + + try { + for (const step of od.steps) { + const stepResult = await runStep(step, ctx, options, logger, od.chaos); + stepResults.push(stepResult); + + // Handle exit_early step - graceful early termination + if (stepResult.status === "exited") { + logger.info( + { + stepId: step.id, + output: stepResult.output, + }, + `Exiting OD early due to exit condition met at step: ${step.id}`, + ); + break; + } + + if (stepResult.status === "failed") { + runStatus = od.runPolicy?.failureMode === "continue" ? "partial" : "failed"; + + if (od.runPolicy?.failureMode === "fail_fast") { + logger.error( + { + stepId: step.id, + failureMode: "fail_fast", + }, + `Failing fast due to step failure: ${step.id}`, + ); + break; + } + } + } + + if (od.assertions) { + for (const assertion of od.assertions) { + const assertionResult = await evaluateAssertion(assertion, ctx, logger); + + if (!assertionResult.passed && !assertion.continueOnFailure) { + runStatus = "failed"; + logger.error( + { + assertionId: assertion.id, + assertionResult, + }, + `Global assertion failed: ${assertion.id}`, + ); + break; + } + } + } + } catch (err) { + runStatus = "failed"; + logger.error( + { + error: err instanceof Error ? err.message : String(err), + stack: err instanceof Error ? 
err.stack : undefined, + }, + `Unhandled error during OD execution`, + ); + } finally { + // Persist chaos logs + await options.logQueueStorage?.persist(); + } + + const runEndTime = Date.now(); + const runResult: RunResult = { + runId: runId, + worldId: getIdFromMongoObject(world), + descriptorId: od.id, + descriptorVersion: od.version, + status: runStatus, + startTime: new Date(runStartTime).toISOString(), + endTime: new Date(runEndTime).toISOString(), + durationMs: runEndTime - runStartTime, + stepResults, + totalSteps: stepResults.length, + successfulSteps: stepResults.filter((s) => s.status === "success" || s.status === "exited").length, + failedSteps: stepResults.filter((s) => s.status === "failed").length, + skippedSteps: stepResults.filter((s) => s.status === "skipped").length, + chaosMetadata: options.chaosTelemetry?.getTelemetry(), // Enhanced chaos telemetry (MORPH-413) + }; + + logger.info( + { + status: runStatus, + totalSteps: runResult.totalSteps, + successfulSteps: runResult.successfulSteps, + failedSteps: runResult.failedSteps, + skippedSteps: runResult.skippedSteps, + durationMs: runResult.durationMs, + }, + `OD execution completed with status: ${runStatus}`, + ); + + return runResult; +} diff --git a/packages/controlmart/src/operational-descriptor/generic-builder.od.ts b/packages/controlmart/src/operational-descriptor/generic-builder.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..26097b3b91373b8290b347e1fa233eea08e57d6f --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/generic-builder.od.ts @@ -0,0 +1,600 @@ +import type { Logger } from "pino"; + +import type { + OperationalDescriptor, + Step, + McpStep, + NoopStep, + MapStep, + ScriptStep, + Binding, + OutputBinding, + RetryPolicy, + ChaosPolicy, + ChaosScenario, + Assertion, + RunPolicy, + Condition, +} from "../types/od.type"; + +export interface GenericODBuilderConfig { + id?: string; + name?: string; + version?: string; + description?: string; + namespace?: string; + logger?: Logger; + chaosProbability?: number; + defaultRetryPolicy?: RetryPolicy; + runPolicy?: RunPolicy; +} + +export interface StepBuilderConfig { + id: string; + name: string; + description?: string; + condition?: Condition; + timeoutMs?: number; + continueOnError?: boolean; + compensationStepId?: string | null; + assertions?: Assertion[]; + chaos?: ChaosPolicy; +} + +export interface McpStepConfig extends StepBuilderConfig { + service: string; + tool: string; + input: any; + inputType?: "literal" | "template" | "jmesPath"; + outputStoreAs: string; + outputExtract?: string; + retry?: RetryPolicy; +} + +export interface NoopStepConfig extends StepBuilderConfig { + input: any; + inputType?: "literal" | "template" | "jmesPath"; + outputStoreAs?: string; + outputExtract?: string; +} + +export interface MapStepConfig extends StepBuilderConfig { + iterable: any; + iterableType?: "literal" | "template" | "jmesPath"; + itemName: string; + concurrency?: number; + children: Step[]; +} + +export interface ScriptStepConfig extends StepBuilderConfig { + script: string; + language?: "javascript" | "typescript"; + input?: any; + inputType?: "literal" | "template" | "jmesPath"; + outputStoreAs?: string; + outputExtract?: string; +} + +export class GenericODBuilder { + private od: OperationalDescriptor; + private config: GenericODBuilderConfig; + + constructor(config: GenericODBuilderConfig = {}) { + this.config = { + version: "1.0.0", + chaosProbability: 0.0, + defaultRetryPolicy: { + maxRetries: 3, + backoff: 
"exponential", + baseMs: 200, + maxBackoffMs: 30000, + jitter: true, + }, + ...config, + }; + + this.od = { + id: this.config.id || this.generateId("od"), + name: this.config.name || "Generic Operational Descriptor", + version: this.config.version!, + description: this.config.description, + namespace: this.config.namespace, + steps: [], + chaos: this.createBaseChaosConfig(), + runPolicy: this.config.runPolicy, + }; + } + + // ID Generation + generateId(prefix: string, suffix?: string): string { + const timestamp = new Date().toISOString().replace(/[:.]/g, "-"); + const random = Math.random().toString(36).substr(2, 6); + return suffix + ? `${prefix}-${random}-${suffix}-${timestamp}` + : `${prefix}-${random}-${timestamp}`; + } + + // Basic OD Configuration + setId(id: string): this { + this.od.id = id; + return this; + } + + setName(name: string): this { + this.od.name = name; + return this; + } + + setVersion(version: string): this { + this.od.version = version; + return this; + } + + setDescription(description: string): this { + this.od.description = description; + return this; + } + + setNamespace(namespace: string): this { + this.od.namespace = namespace; + return this; + } + + setRunPolicy(policy: RunPolicy): this { + this.od.runPolicy = policy; + return this; + } + + // Chaos Configuration + createBaseChaosConfig(): ChaosPolicy { + return { + enabled: true, + probability: this.config.chaosProbability || 0.0, + scenarios: this.getDefaultChaosScenarios(), + }; + } + + getDefaultChaosScenarios(): ChaosScenario[] { + return [ + { + type: "data_corruption", + weight: 35, + description: "Corrupt data fields", + config: { + corruptFields: ["name", "id"], + corruptionType: "wrong_type", + }, + }, + { + type: "missing_data", + weight: 30, + description: "Missing required fields", + config: { + missingFields: ["name"], + }, + }, + { + type: "format_change", + weight: 20, + description: "Invalid format structure", + config: { + schemaChanges: [ + { + field: "id", + change: "change_type", + newType: "string", + }, + ], + }, + }, + { + type: "rate_limit", + weight: 15, + description: "Rate limiting simulation", + config: { + rateLimitDelay: 1000, + rateLimitMessage: "Rate limit exceeded", + }, + }, + ]; + } + + setChaosPolicy(chaos: ChaosPolicy): this { + this.od.chaos = chaos; + return this; + } + + addChaosScenario(scenario: ChaosScenario): this { + if (!this.od.chaos) { + this.od.chaos = this.createBaseChaosConfig(); + } + this.od.chaos.scenarios.push(scenario); + return this; + } + + // Binding Helpers + createBinding(value: any, type: "literal" | "template" | "jmesPath" = "literal"): Binding { + return { type, value }; + } + + createOutputBinding(storeAs: string, extract?: string): OutputBinding { + return { storeAs, extract }; + } + + // Step Creation Methods + addMcpStep(config: McpStepConfig): this { + const step: McpStep = { + id: config.id, + name: config.name, + type: "mcp", + service: config.service, + tool: config.tool, + description: config.description, + condition: config.condition, + input: this.createBinding(config.input, config.inputType), + output: this.createOutputBinding(config.outputStoreAs, config.outputExtract), + retry: config.retry || this.config.defaultRetryPolicy, + timeoutMs: config.timeoutMs, + continueOnError: config.continueOnError, + compensationStepId: config.compensationStepId, + assertions: config.assertions, + chaos: config.chaos, + }; + + this.od.steps.push(step); + return this; + } + + addNoopStep(config: NoopStepConfig): this { + const step: NoopStep = { + 
id: config.id, + name: config.name, + type: "noop", + description: config.description, + condition: config.condition, + input: this.createBinding(config.input, config.inputType), + output: config.outputStoreAs + ? this.createOutputBinding(config.outputStoreAs, config.outputExtract) + : undefined, + timeoutMs: config.timeoutMs, + continueOnError: config.continueOnError, + compensationStepId: config.compensationStepId, + assertions: config.assertions, + chaos: config.chaos, + }; + + this.od.steps.push(step); + return this; + } + + addMapStep(config: MapStepConfig): this { + const step: MapStep = { + id: config.id, + name: config.name, + type: "map", + description: config.description, + condition: config.condition, + mapSpec: { + iterable: this.createBinding(config.iterable, config.iterableType), + itemName: config.itemName, + concurrency: config.concurrency, + }, + children: config.children, + timeoutMs: config.timeoutMs, + continueOnError: config.continueOnError, + compensationStepId: config.compensationStepId, + assertions: config.assertions, + chaos: config.chaos, + }; + + this.od.steps.push(step); + return this; + } + + addScriptStep(config: ScriptStepConfig): this { + const step: ScriptStep = { + id: config.id, + name: config.name, + type: "script", + script: config.script, + language: config.language || "javascript", + description: config.description, + condition: config.condition, + input: config.input ? this.createBinding(config.input, config.inputType) : undefined, + output: config.outputStoreAs + ? this.createOutputBinding(config.outputStoreAs, config.outputExtract) + : undefined, + timeoutMs: config.timeoutMs, + continueOnError: config.continueOnError, + compensationStepId: config.compensationStepId, + assertions: config.assertions, + chaos: config.chaos, + }; + + this.od.steps.push(step); + return this; + } + + // Convenience Methods for Common Patterns + addEdiStep( + stepId: string, + stepName: string, + tool: string, + inputValue: any, + outputStoreAs: string, + chaosConfig?: ChaosPolicy, + ): this { + return this.addMcpStep({ + id: stepId, + name: stepName, + service: "edi", + tool, + input: inputValue, + inputType: "template", + outputStoreAs, + chaos: chaosConfig, + }); + } + + addErpStep( + stepId: string, + stepName: string, + tool: string, + inputValue: any, + outputStoreAs: string, + retryConfig?: RetryPolicy, + ): this { + return this.addMcpStep({ + id: stepId, + name: stepName, + service: "erp", + tool, + input: inputValue, + inputType: "template", + outputStoreAs, + retry: retryConfig, + }); + } + + addCrmStep( + stepId: string, + stepName: string, + tool: string, + inputValue: any, + outputStoreAs: string, + retryConfig?: RetryPolicy, + ): this { + return this.addMcpStep({ + id: stepId, + name: stepName, + service: "crm", + tool, + input: inputValue, + inputType: "template", + outputStoreAs, + retry: retryConfig, + }); + } + + addWmsStep( + stepId: string, + stepName: string, + tool: string, + inputValue: any, + outputStoreAs: string, + retryConfig?: RetryPolicy, + ): this { + return this.addMcpStep({ + id: stepId, + name: stepName, + service: "wms", + tool, + input: inputValue, + inputType: "template", + outputStoreAs, + retry: retryConfig, + }); + } + + // Assertion Management + addAssertion(assertion: Assertion): this { + if (!this.od.assertions) { + this.od.assertions = []; + } + this.od.assertions.push(assertion); + return this; + } + + addAssertions(assertions: Assertion[]): this { + if (!this.od.assertions) { + this.od.assertions = []; + } + 
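+    // OD-level assertions added here are evaluated by the executor after all steps finish,
+    // against the accumulated run context (see executor.od.ts).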
this.od.assertions.push(...assertions); + return this; + } + + // Step Management + addSteps(steps: Step[]): this { + this.od.steps.push(...steps); + return this; + } + + insertStep(index: number, step: Step): this { + this.od.steps.splice(index, 0, step); + return this; + } + + removeStep(stepId: string): this { + this.od.steps = this.od.steps.filter((step) => step.id !== stepId); + return this; + } + + // Retry Policy Helpers + createRetryPolicy(config: Partial = {}): RetryPolicy { + return { + maxRetries: 3, + backoff: "exponential", + baseMs: 200, + maxBackoffMs: 30000, + jitter: true, + ...config, + }; + } + + // Condition Helpers + createCondition( + expression: string, + language: "jmespath" | "jsonata" | "cel" | "javascript" = "jmespath", + ): Condition { + return { expression, language }; + } + + // Chaos Scenario Helpers + createDataCorruptionScenario( + weight: number, + description: string, + corruptFields: string[], + corruptionType: "null" | "wrong_type" | "invalid_format" | "random_value" = "wrong_type", + ): ChaosScenario { + return { + type: "data_corruption", + weight, + description, + config: { corruptFields, corruptionType }, + }; + } + + createMissingDataScenario( + weight: number, + description: string, + missingFields: string[], + throwError: boolean = true, + ): ChaosScenario { + return { + type: "missing_data", + weight, + description, + config: { missingFields, throwError }, + }; + } + + createFormatChangeScenario( + weight: number, + description: string, + schemaChanges: Array<{ + field: string; + change: "rename" | "remove" | "change_type" | "add_nested" | "flatten"; + newName?: string; + newType?: "string" | "number" | "boolean" | "object" | "array"; + newStructure?: any; + }>, + ): ChaosScenario { + return { + type: "format_change", + weight, + description, + config: { schemaChanges }, + }; + } + + createRateLimitScenario( + weight: number, + description: string, + delay: number = 1000, + message: string = "Rate limit exceeded", + ): ChaosScenario { + return { + type: "rate_limit", + weight, + description, + config: { rateLimitDelay: delay, rateLimitMessage: message }, + }; + } + + // Build and Validation + build(): OperationalDescriptor { + // Validate that we have at least one step + if (this.od.steps.length === 0) { + throw new Error("Operational Descriptor must have at least one step"); + } + + // Validate step IDs are unique + const stepIds = this.od.steps.map((step) => step.id); + const uniqueStepIds = new Set(stepIds); + if (stepIds.length !== uniqueStepIds.size) { + throw new Error("Step IDs must be unique"); + } + + return { ...this.od }; + } + + // Utility Methods + clone(): GenericODBuilder { + const cloned = new GenericODBuilder(this.config); + cloned.od = JSON.parse(JSON.stringify(this.od)); + return cloned; + } + + getStepCount(): number { + return this.od.steps.length; + } + + getStepIds(): string[] { + return this.od.steps.map((step) => step.id); + } + + hasStep(stepId: string): boolean { + return this.od.steps.some((step) => step.id === stepId); + } +} + +// Factory for creating common builder configurations +export class GenericODBuilderFactory { + static createEdiBuilder(config: GenericODBuilderConfig = {}): GenericODBuilder { + return new GenericODBuilder({ + name: "EDI Processing Workflow", + description: "Generic EDI document processing workflow", + chaosProbability: 0.1, + ...config, + }); + } + + static createErpBuilder(config: GenericODBuilderConfig = {}): GenericODBuilder { + return new GenericODBuilder({ + name: "ERP Integration 
Workflow", + description: "Generic ERP system integration workflow", + chaosProbability: 0.05, + ...config, + }); + } + + static createCrmBuilder(config: GenericODBuilderConfig = {}): GenericODBuilder { + return new GenericODBuilder({ + name: "CRM Integration Workflow", + description: "Generic CRM system integration workflow", + chaosProbability: 0.05, + ...config, + }); + } + + static createWmsBuilder(config: GenericODBuilderConfig = {}): GenericODBuilder { + return new GenericODBuilder({ + name: "WMS Integration Workflow", + description: "Generic WMS system integration workflow", + chaosProbability: 0.05, + runPolicy: { + failureMode: "fail_fast", + }, + ...config, + }); + } + + static createMultiServiceBuilder(config: GenericODBuilderConfig = {}): GenericODBuilder { + return new GenericODBuilder({ + name: "Multi-Service Integration Workflow", + description: "Workflow integrating multiple services", + chaosProbability: 0.1, + ...config, + }); + } +} diff --git a/packages/controlmart/src/operational-descriptor/init.od.ts b/packages/controlmart/src/operational-descriptor/init.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f328b16fa5bf21b0416423769a0b767b44c5927 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/init.od.ts @@ -0,0 +1,87 @@ +import type { TCompanyGenerate } from "../models/erp/company.erp.model"; +import { getIdFromMongoObject } from "../utils/mongo.util"; +import { + EdiTransactionRepository, + type TEdiTransactionModelRepository, +} from "../repository/edi/edi.repository"; +import { CompanyRepository, type TCompanyRepository } from "../repository/erp/company.repository"; +import { WorldRepository } from "../repository/world.repository"; +import { WorldLogRepository, type TWorldLogRepository } from "../repository/logs.repository"; +import type { TWorldModel } from "../models/world.model"; +import type { EService } from "../utils/service-mesh.util"; + +export interface IOperationalDescriptorContext { + world: TWorldModel; + worldId: string; + worldLogRepo: TWorldLogRepository; + ediTransactionRepo: TEdiTransactionModelRepository; + companyRepo: TCompanyRepository; + mpcCompany?: TCompanyGenerate; // Optional for testing/scenarios that don't require it + serviceType: EService; +} + +export interface InitODOptions { + /** If true, treat the first parameter as a world name (semantic lookup), otherwise as world ID (default) */ + lookupByName?: boolean; + /** If true, don't throw when mpcCompany is not found */ + allowMissingMpcCompany?: boolean; +} + +/** + * Initialize an Operational Descriptor context + * + * Supports two patterns: + * 1. Direct ID lookup (default): initOperationalDescriptor("world-id-123", EService.WMS) + * 2. Name-based lookup: initOperationalDescriptor("demoWorld", EService.WMS, { lookupByName: true }) + */ +export async function initOperationalDescriptor( + worldIdOrName: string, + serviceType: EService, + options?: InitODOptions +): Promise { + const { lookupByName = false, allowMissingMpcCompany = false } = options || {}; + + if (!worldIdOrName) { + // Default to "demoWorld" for backward compatibility with od-arch pattern + worldIdOrName = lookupByName ? "demoWorld" : worldIdOrName; + if (!worldIdOrName) { + throw new Error("worldId or worldName is required to initialize operational descriptor"); + } + } + + try { + // Look up world by name or ID based on options + const world = lookupByName + ? 
await WorldRepository.findWorldByName(worldIdOrName) + : await WorldRepository.findWorldById(worldIdOrName); + + if (!world) { + throw new Error(`World not found${lookupByName ? ` with name: ${worldIdOrName}` : ` with ID: ${worldIdOrName}`}`); + } + + const worldId = getIdFromMongoObject(world); + const worldLogRepo: TWorldLogRepository = WorldLogRepository(worldId); + const ediTransactionRepo: TEdiTransactionModelRepository = EdiTransactionRepository(worldId); + const companyRepo: TCompanyRepository = CompanyRepository(worldId); + + const mpcCompany = (await companyRepo.getMpcCompany()) as TCompanyGenerate | null; + + // Check if mpcCompany is required + if (!mpcCompany && !allowMissingMpcCompany) { + throw new Error("MPC company not found"); + } + + return { + world, + worldId, + worldLogRepo, + ediTransactionRepo, + companyRepo, + mpcCompany: mpcCompany || undefined, + serviceType, + }; + } catch (error) { + console.error("Error initializing operational descriptor", error); + throw error; + } +} diff --git a/packages/controlmart/src/operational-descriptor/log-queue.storage.ts b/packages/controlmart/src/operational-descriptor/log-queue.storage.ts new file mode 100644 index 0000000000000000000000000000000000000000..2646bd0a646a660683642da1fa46d90d3c748750 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/log-queue.storage.ts @@ -0,0 +1,83 @@ +import { LogQueueRepository } from "../repository/log-queue.repository"; + +export interface LogQueueEntry { + odId: string; + odName: string; + persona?: string; + stepId: string; + service: string; + tool: string; + chaosType?: string; + modifications?: string[]; + inputType?: string; + outputType?: string; + data?: any; + context?: any; + timestamp: string; +} + +export interface LogQueueStorage { + add(entry: LogQueueEntry): void; + addLog(message: string): void; + persist(): Promise; + getAll(): LogQueueEntry[]; + clear(): void; +} + +export const createMongoLogQueueStorage = ( + runId: string, + worldId: string, + initialMetadata?: { id: string; name: string; persona?: string } +): LogQueueStorage => { + let entries: LogQueueEntry[] = []; + let executionLogs: string[] = []; + let odMetadata: { id: string; name: string; persona?: string } | undefined = initialMetadata; + + return { + add: (entry: LogQueueEntry) => { + entries.push(entry); + if (!odMetadata) { + odMetadata = { + id: entry.odId, + name: entry.odName, + persona: entry.persona, + }; + } + }, + addLog: (message: string) => { + executionLogs.push(message); + }, + persist: async () => { + if (entries.length === 0 && executionLogs.length === 0) return; + + if (!odMetadata) { + console.warn("Cannot persist log queue: OD metadata missing"); + return; + } + + try { + const logRepo = LogQueueRepository(worldId); + await logRepo.createLogQueue({ + runId, + worldRef: { worldId }, + odId: odMetadata.id, + odName: odMetadata.name, + persona: odMetadata.persona, + entries: entries as any, + logs: executionLogs, + isTicketCandidate: entries.some(e => !!e.chaosType), + status: "queued", + retryCount: 0, + }); + } catch (error) { + console.error("Failed to persist log queue to MongoDB:", error); + throw error; + } + }, + getAll: () => [...entries], + clear: () => { + entries = []; + executionLogs = []; + }, + }; +}; diff --git a/packages/controlmart/src/operational-descriptor/od-set-executor.ts b/packages/controlmart/src/operational-descriptor/od-set-executor.ts new file mode 100644 index 0000000000000000000000000000000000000000..e4d3e03c65a0380cdfc52f832c378131b6587311 --- 
/dev/null +++ b/packages/controlmart/src/operational-descriptor/od-set-executor.ts @@ -0,0 +1,170 @@ +#!/usr/bin/env bun + +/** + * Operational Descriptor Set Executor + * + * A simple yet powerful executor for running multiple ODs in sequence or parallel + * with proper logging, error handling, and execution context management. + */ + +import { pino, type Logger } from "pino"; + +import { executeOperationalDescriptor } from "./executor.od"; +import type { OperationalDescriptor, ExecutionOptions } from "../types/od.type"; + +export interface ODExecutionContext extends Omit { + tools: Record Promise>>; + globalState?: Record; +} + +export interface ODExecutionResult { + odId: string; + success: boolean; + result?: any; + error?: Error; + executionTime: number; + chaosEvents?: any[]; +} + +export interface ODSetExecutionOptions { + parallel?: boolean; + stopOnError?: boolean; + maxConcurrency?: number; + retryAttempts?: number; + retryDelay?: number; +} + +/** + * Simple OD Set Executor + */ +export class ODSetExecutor { + private logger: Logger; + private executionHistory: ODExecutionResult[] = []; + + constructor(logger?: Logger) { + this.logger = + logger || + pino({ + level: "info", + formatters: { + level: (label) => ({ level: label }), + }, + timestamp: pino.stdTimeFunctions.isoTime, + }); + } + + /** + * Execute a single OD + */ + async executeOD( + od: OperationalDescriptor, + context: ODExecutionContext, + ): Promise { + const startTime = Date.now(); + const odId = od.id; + + this.logger.info({ odId, odName: od.name }, "Starting OD execution"); + + try { + // Convert our context to match the expected ExecutionOptions interface + const executionOptions: ExecutionOptions = { + world: context.world, + tools: context.tools, + logger: context.logger, + validator: context.validator, + }; + + const result = await executeOperationalDescriptor(od, executionOptions); + const executionTime = Date.now() - startTime; + + const executionResult: ODExecutionResult = { + odId, + success: true, + result, + executionTime, + }; + + this.executionHistory.push(executionResult); + this.logger.info( + { odId, executionTime, success: true }, + "OD execution completed successfully", + ); + + return executionResult; + } catch (error) { + const executionTime = Date.now() - startTime; + const executionResult: ODExecutionResult = { + odId, + success: false, + error: error as Error, + executionTime, + }; + + this.executionHistory.push(executionResult); + this.logger.error( + { + odId, + executionTime, + error: error instanceof Error ? error.message : String(error), + }, + "OD execution failed", + ); + + return executionResult; + } + } + + /** + * Get execution statistics + */ + getExecutionStats() { + const total = this.executionHistory.length; + const successful = this.executionHistory.filter((r) => r.success).length; + const failed = total - successful; + const avgExecutionTime = + total > 0 ? this.executionHistory.reduce((sum, r) => sum + r.executionTime, 0) / total : 0; + + return { + total, + successful, + failed, + successRate: total > 0 ? 
(successful / total) * 100 : 0, + avgExecutionTime: Math.round(avgExecutionTime), + history: this.executionHistory, + }; + } + + /** + * Clear execution history + */ + clearHistory() { + this.executionHistory = []; + } + + /** + * Print execution summary + */ + printExecutionSummary() { + const stats = this.getExecutionStats(); + + console.log("\n" + "=".repeat(60)); + console.log(" OD Set Execution Summary"); + console.log("=".repeat(60)); + console.log(`Total ODs Executed: ${stats.total}`); + console.log(`Successful: ${stats.successful} (${stats.successRate.toFixed(1)}%)`); + console.log(`Failed: ${stats.failed}`); + console.log(`Average Execution Time: ${stats.avgExecutionTime}ms`); + + if (stats.failed > 0) { + console.log("\n Failed ODs:"); + const failures = this.executionHistory.filter((r) => !r.success); + failures.forEach((failure) => { + console.log(` - ${failure.odId}: ${failure.error?.message || "Unknown error"}`); + }); + } + + console.log("=".repeat(60)); + } +} + +export default ODSetExecutor; diff --git a/packages/controlmart/src/operational-descriptor/run-helper.od.ts b/packages/controlmart/src/operational-descriptor/run-helper.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..61d626209969d5372e2afd5faf319efa954a776c --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/run-helper.od.ts @@ -0,0 +1,116 @@ +import type { Logger } from "pino"; +import jmespath from "jmespath"; + +import type { Context, Assertion, AssertionResult } from "../types/od.type"; + +export const evalCondition = ( + condition: { expression: string; language?: string }, + ctx: Context, +): boolean => { + if (condition.language && condition.language !== "jmespath") { + throw new Error(`Unsupported condition language: ${condition.language}`); + } + return Boolean(jmespath.search(ctx, condition.expression)); +}; + +export const calcBackoff = ( + type: "fixed" | "exponential" | "linear", + baseMs: number, + attempt: number, + maxMs?: number, + jitter: boolean = true, +): number => { + let delay: number; + switch (type) { + case "fixed": + delay = baseMs; + break; + case "linear": + delay = baseMs * attempt; + break; + case "exponential": + default: + delay = baseMs * Math.pow(2, attempt - 1); + break; + } + + if (maxMs) delay = Math.min(delay, maxMs); + + if (jitter) { + const jitterAmount = delay * 0.25; + delay = delay + (Math.random() * 2 - 1) * jitterAmount; + } + + return Math.floor(delay); +}; + +export const executeWithTimeout = ( + fn: () => Promise, + timeoutMs: number, + stepId: string, +): Promise => { + return Promise.race([ + fn(), + new Promise((_, reject) => + setTimeout( + () => reject(new Error(`Step ${stepId} timed out after ${timeoutMs}ms`)), + timeoutMs, + ), + ), + ]); +}; + +export const evaluateAssertion = async ( + assertion: Assertion, + ctx: Context, + logger: Logger, +): Promise => { + const startTime = new Date().toISOString(); + + try { + const language = assertion.language ?? "jmespath"; + + if (language !== "jmespath") { + throw new Error(`Unsupported assertion language: ${language}`); + } + + const actualValue = jmespath.search(ctx, assertion.expression); + const passed = Boolean(actualValue); + + const level = passed ? "debug" : "warn"; + logger[level]( + { + assertionId: assertion.id, + actualValue, + passed, + expression: assertion.expression, + }, + `Assertion ${assertion.id}: ${passed ? 
"PASSED" : "FAILED"}`, + ); + + return { + assertionId: assertion.id, + passed, + actualValue, + evaluatedAt: startTime, + }; + } catch (err) { + const error = err instanceof Error ? err.message : String(err); + + logger.error( + { + assertionId: assertion.id, + error, + expression: assertion.expression, + }, + `Assertion evaluation error`, + ); + + return { + assertionId: assertion.id, + passed: false, + error, + evaluatedAt: startTime, + }; + } +}; diff --git a/packages/controlmart/src/operational-descriptor/run-step.od.ts b/packages/controlmart/src/operational-descriptor/run-step.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..05f882e8f5747345966cd9d6933689bd0d421c1c --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/run-step.od.ts @@ -0,0 +1,585 @@ +import type { Logger } from "pino"; +import jmespath from "jmespath"; + +import type { + Step, + Context, + StepExecutionResult, + ExecutionOptions, + AssertionResult, + Binding, + McpStep, + MapStep, + BranchStep, + ExitEarlyStep, + ChaosPolicy, +} from "../types/od.type"; +import { calcBackoff, evalCondition, evaluateAssertion, executeWithTimeout } from "./run-helper.od"; +import { injectChaos, createSeededRandom, defaultRandom } from "./chaos-engine.od"; +import { getTool, executeTool, getToolDefaultChaosPolicy } from "./tools/registry.tool"; +import { ChaosConfigRegistry } from "../services/chaos-config.registry"; +import { getIdFromMongoObject } from "../utils/mongo.util"; + +export const runStep = async ( + step: Step, + ctx: Context, + options: ExecutionOptions, + parentLogger: Logger, + globalChaosPolicy?: ChaosPolicy, +): Promise => { + const world = options.world; + const stepStartTime = Date.now(); + + // Determine service type for logging + let serviceType = "od"; + if (step.type === "mcp") { + serviceType = (step as McpStep).service; + } else if (step.type === "script") { + serviceType = "od"; // Scripts run in the OD service context + } + + // Create step-scoped logger with service context + const stepLogger = parentLogger.child({ + stepId: step.id, + service: serviceType, + serviceType: serviceType, // Explicitly bind for Mongo transport + }); + + // Resolve chaos using the Chaos Config Registry with priority cascade + const worldId = getIdFromMongoObject(world); + const odId = options.odMetadata?.id || "unknown"; + const chaosContext = { + worldId, + capabilityId: options.capabilityId, + odId, + stepId: step.id, + stepChaos: step.chaos, // Pass step-level chaos through context + service: step.type === 'mcp' ? (step as McpStep).service : undefined, + tool: step.type === 'mcp' ? 
(step as McpStep).tool : undefined, + }; + + // Resolve chaos policy with source tracking + const { policy: chaosPolicy, source: chaosSource } = ChaosConfigRegistry.resolveChaosPolicy(chaosContext); + + stepLogger.info( + { + stepType: step.type, + stepName: step.name, + serviceType, + chaosEnabled: chaosPolicy.enabled, + chaosProbability: chaosPolicy.probability, + chaosSource, + }, + `Executing step: ${step.id} (${step.type})`, + ); + + if (step.condition) { + const conditionResult = evalCondition(step.condition, ctx); + if (!conditionResult) { + stepLogger.info({ condition: step.condition.expression }, `Skipping step due to condition`); + + const stepEndTime = Date.now(); + return { + stepId: step.id, + status: "skipped", + startTime: new Date(stepStartTime).toISOString(), + endTime: new Date(stepEndTime).toISOString(), + durationMs: stepEndTime - stepStartTime, + }; + } + } + + const retries = step.retry?.maxRetries ?? 0; + let attempt = 0; + let lastError: Error | undefined; + + while (true) { + try { + const result = step.timeoutMs + ? await executeWithTimeout( + () => runStepCore(step, ctx, options, chaosPolicy, stepLogger, chaosSource, globalChaosPolicy), + step.timeoutMs, + step.id, + ) + : await runStepCore(step, ctx, options, chaosPolicy, stepLogger, chaosSource, globalChaosPolicy); + + const { output, chaosResult } = result; + + if (step.output?.storeAs) { + try { + ctx[step.output.storeAs] = step.output.extract + ? jmespath.search(output, step.output.extract) + : output; + } catch (extractErr) { + stepLogger.warn( + { + error: extractErr instanceof Error ? extractErr.message : String(extractErr), + outputKey: step.output.storeAs, + extractPath: step.output.extract, + }, + `Failed to extract output`, + ); + } + } + + const assertionResults: AssertionResult[] = []; + if (step.assertions) { + for (const assertion of step.assertions) { + const assertionResult = await evaluateAssertion(assertion, ctx, stepLogger); + assertionResults.push(assertionResult); + + if (!assertionResult.passed && !assertion.continueOnFailure) { + throw new Error(`Assertion failed: ${assertion.id} - ${assertionResult.error}`); + } + } + } + + const stepEndTime = Date.now(); + const durationMs = stepEndTime - stepStartTime; + + // Check for exit_early step with shouldExit flag + const shouldExit = step.type === "exit_early" && (result as any).shouldExit; + + stepLogger.info( + { + durationMs, + output: step.type === "script" || step.type === "mcp" ? output : undefined, + exitEarly: shouldExit, + }, + shouldExit ? `Step completed - exiting OD early` : `Step completed successfully` + ); + + return { + stepId: step.id, + status: shouldExit ? "exited" : "success", + startTime: new Date(stepStartTime).toISOString(), + endTime: new Date(stepEndTime).toISOString(), + durationMs, + output, + assertionResults: assertionResults.length > 0 ? assertionResults : undefined, + chaosInjected: chaosResult, + }; + } catch (err) { + attempt++; + lastError = err as Error; + + stepLogger.warn( + { + attempt, + maxRetries: retries, + error: lastError.message, + stack: lastError.stack, + }, + `Error in step, attempt ${attempt}`, + ); + + // Log error to queue + if (options.logQueueStorage) { + options.logQueueStorage.add({ + odId: options.odMetadata?.id || "unknown", + odName: options.odMetadata?.name || "unknown", + persona: options.odMetadata?.persona, + stepId: step.id, + service: (step as McpStep).service || "od", + tool: (step as McpStep).tool || "script", + chaosType: lastError.message.includes("[CHAOS]") ? 
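Putting the fields read by this retry loop together, a step that opts into retries could look like the following (shape follows the step and `retryPolicy` definitions in schema.od.ts; IDs and values are illustrative):

```ts
const retryingStep = {
  id: "fetch_invoice",
  name: "Fetch invoice",
  type: "mcp",
  service: "erp",
  tool: "invoice.get_by_id",            // resolved as registry id "erp.invoice.get_by_id"
  input: { type: "template", value: { invoiceId: "{{ order.invoiceId }}" } },
  timeoutMs: 5_000,                     // each attempt is raced via executeWithTimeout
  continueOnError: false,               // after the final attempt the error is re-thrown
  retry: {
    maxRetries: 3,                      // 1 initial attempt + up to 3 retries
    backoff: "exponential",             // 200ms, 400ms, 800ms (+/- 25% jitter)
    baseMs: 200,
    maxBackoffMs: 30_000,
    jitter: true,
  },
};
```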
"ERROR_INJECTION" : "STEP_FAILURE", + modifications: [lastError.message], + context: (() => { + const { __world, ...safeCtx } = ctx; + return safeCtx; + })(), + timestamp: new Date().toISOString(), + }); + } + + if (attempt > retries) { + if (step.continueOnError) { + stepLogger.info({ continueOnError: true }, `Continuing after error`); + + const stepEndTime = Date.now(); + return { + stepId: step.id, + status: "failed", + startTime: new Date(stepStartTime).toISOString(), + endTime: new Date(stepEndTime).toISOString(), + durationMs: stepEndTime - stepStartTime, + error: lastError.message, + }; + } + throw lastError; + } + + const backoff = step.retry?.backoff ?? "exponential"; + const baseMs = step.retry?.baseMs ?? 200; + const jitter = step.retry?.jitter ?? true; + const delay = calcBackoff(backoff, baseMs, attempt, step.retry?.maxBackoffMs, jitter); + + stepLogger.debug( + { + delayMs: delay, + backoffType: backoff, + nextAttempt: attempt + 1, + }, + `Retrying step after delay`, + ); + + await new Promise((res) => setTimeout(res, delay)); + } + } +}; + +const runStepCore = async ( + step: Step, + ctx: Context, + options: ExecutionOptions, + chaosPolicy?: ChaosPolicy, + logger?: Logger, + chaosSource?: string, + globalChaosPolicy?: ChaosPolicy, +): Promise<{ output: any; chaosResult?: any }> => { + switch (step.type) { + case "noop": + return { output: null }; + + case "mcp": + return runMcpStep(step, ctx, options, chaosPolicy, logger, chaosSource); + + case "map": + return runMapStep(step, ctx, options, globalChaosPolicy, logger); + + case "branch": + return runBranchStep(step, ctx, options, globalChaosPolicy, logger); + + case "script": + return runScriptStep(step, ctx, options); + + case "exit_early": + return runExitEarlyStep(step, ctx, logger); + } +}; + +const runMcpStep = async ( + step: McpStep, + ctx: Context, + options: ExecutionOptions, + chaosPolicy?: ChaosPolicy, + logger?: Logger, + chaosSource?: string, +): Promise<{ output: any; chaosResult?: any }> => { + const input = resolveBinding(step.input, ctx); + + if (logger) { + logger.info({ input }, "MCP Step Input"); + } else { + // Fallback if logger not passed (should not happen in updated flow) + console.log(`MCP Step Input for ${step.id}:`, JSON.stringify(input, null, 2)); + } + + // Log input to LogQueue + if (options.logQueueStorage) { + options.logQueueStorage.add({ + odId: options.odMetadata?.id || "unknown", + odName: options.odMetadata?.name || "unknown", + persona: options.odMetadata?.persona, + stepId: step.id, + service: step.service, + tool: step.tool, + // No chaos type for normal input logging + inputType: "mcp_input", + data: input, + context: (() => { + const { __world, ...safeCtx } = ctx; + return safeCtx; + })(), + timestamp: new Date().toISOString(), + }); + } + + // Try to find the tool in the registry first (new way) + const toolId = `${step.service}.${step.tool}`; + const registeredTool = getTool(toolId); + + let result; + if (registeredTool) { + const worldId = getIdFromMongoObject(ctx.__world); + if (!worldId) { + throw new Error("World ID not found in context for repository tool execution"); + } + result = await executeTool(toolId, worldId, input); + } else { + // Fallback to legacy options.tools (old way) + const service = options.tools?.[step.service]; + if (!service) throw new Error(`Unknown service: ${step.service}`); + + const toolFn = service[step.tool]; + if (!toolFn) throw new Error(`Unknown tool: ${step.tool} in ${step.service}`); + + result = await toolFn(input, ctx); + } + + const 
toolDefaultChaos = registeredTool ? getToolDefaultChaosPolicy(toolId) : undefined; + const effectiveChaosPolicy = step.chaos || chaosPolicy || toolDefaultChaos; + + return maybeInjectChaos(result, step, ctx, effectiveChaosPolicy, logger, options, chaosSource).then(finalResult => { + // Log output to LogQueue (successful case) + if (options.logQueueStorage) { + options.logQueueStorage.add({ + odId: options.odMetadata?.id || "unknown", + odName: options.odMetadata?.name || "unknown", + persona: options.odMetadata?.persona, + stepId: step.id, + service: step.service, + tool: step.tool, + outputType: "mcp_output", + data: finalResult.output, + context: (() => { + const { __world, ...safeCtx } = ctx; + return safeCtx; + })(), + timestamp: new Date().toISOString(), + }); + } + return finalResult; + }); +}; + +const runMapStep = async ( + step: MapStep, + ctx: Context, + options: ExecutionOptions, + globalChaosPolicy?: ChaosPolicy, + logger?: Logger, +): Promise<{ output: any[] }> => { + const iterable = resolveBinding(step.mapSpec.iterable, ctx); + if (!Array.isArray(iterable)) throw new Error(`mapSpec.iterable is not an array`); + + const concurrency = step.mapSpec.concurrency ?? 1; + const results: any[] = []; + + const queue = [...iterable]; + const workers = Array.from({ length: concurrency }, async () => { + while (queue.length > 0) { + const item = queue.shift(); + const localCtx = { ...ctx, [step.mapSpec.itemName]: item }; + + for (const child of step.children) { + await runStep(child, localCtx, options, logger || options.logger, globalChaosPolicy); + } + + results.push(localCtx); + } + }); + + await Promise.all(workers); + return { output: results }; +}; + +const runBranchStep = async ( + step: BranchStep, + ctx: Context, + options: ExecutionOptions, + globalChaosPolicy?: ChaosPolicy, + logger?: Logger, +): Promise<{ output: any }> => { + const conditionResult = evalCondition(step.branchSpec.condition, ctx); + + if (logger) { + logger.info( + { + condition: step.branchSpec.condition.expression, + conditionResult, + branch: conditionResult ? "then" : "else", + }, + `Branch step ${step.id}: condition evaluated to ${conditionResult}`, + ); + } + + const stepsToExecute = conditionResult ? step.branchSpec.then : (step.branchSpec.else || []); + + // Execute the selected branch steps sequentially + const results: any[] = []; + for (const childStep of stepsToExecute) { + const childResult = await runStep(childStep, ctx, options, logger || options.logger, globalChaosPolicy); + // Store the last step's output as the branch output + if (childResult.output !== undefined) { + results.push(childResult.output); + } + } + + // Return the last step's output, or all results if multiple steps executed + return { output: results.length === 1 ? results[0] : results }; +}; + +const runScriptStep = async ( + step: any, + ctx: Context, + _options: ExecutionOptions, +): Promise<{ output: any }> => { + // Create a safe execution context + const safeCtx = { ...ctx }; + + // Remove sensitive world data from script access + const { __world, ...publicCtx } = safeCtx; + + try { + const AsyncFunction = Object.getPrototypeOf(async function () { }).constructor; + const scriptFunction = new AsyncFunction("ctx", "options", step.script); + const result = await scriptFunction(publicCtx, { + jmespath: (query: string) => jmespath.search(publicCtx, query), + template: (template: string) => + template.replace(/\{\{(.*?)\}\}/g, (_, expr) => { + const trimmed = expr.trim(); + return jmespath.search(publicCtx, trimmed) ?? 
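For the map handler above, a complete map step pairs an iterable binding with child steps that run once per item (sketch; the IDs and the `openOrders` context key are illustrative):

```ts
const mapStepExample = {
  id: "ack_each_po",
  name: "Acknowledge each purchase order",
  type: "map",
  mapSpec: {
    iterable: { type: "jmesPath", value: "openOrders" }, // must resolve to an array
    itemName: "po",                                      // each item is exposed to children as ctx.po
    concurrency: 3,                                      // number of parallel workers
  },
  children: [
    {
      id: "generate_855",
      name: "Generate 855 acknowledgment",
      type: "mcp",
      service: "edi",
      tool: "generate.855",
      input: { type: "template", value: { order: "{{ po }}" } },
    },
  ],
};
```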
""; + }), + }); + + return { output: result }; + } catch (error) { + throw new Error( + `Script execution failed: ${error instanceof Error ? error.message : String(error)}`, + ); + } +}; + +const runExitEarlyStep = async ( + step: ExitEarlyStep, + ctx: Context, + logger?: Logger, +): Promise<{ output: any; shouldExit: boolean }> => { + const conditionResult = evalCondition(step.exitCondition, ctx); + + if (logger) { + logger.info( + { + condition: step.exitCondition.expression, + conditionResult, + willExit: conditionResult, + message: step.message, + }, + `Exit early step ${step.id}: condition evaluated to ${conditionResult}`, + ); + } + + return { + output: { exitConditionMet: conditionResult, message: step.message }, + shouldExit: conditionResult, + }; +}; + +const resolveBinding = (binding: Binding, ctx: Context): any => { + switch (binding.type) { + case "literal": + return binding.value; + + case "template": { + const templateVal = binding.template !== undefined ? binding.template : binding.value; + const resolveValue = (val: any): any => { + if (typeof val === "string") { + // Case 1: Whole string is a single {{expr}} + const fullMatch = val.match(/^\s*\{\{(.*?)\}\}\s*$/); + if (fullMatch) { + const expr = fullMatch[1]!.trim(); + return jmespath.search(ctx, expr); + } + + // Case 2: Inline replacements in a larger string + return val.replace(/\{\{(.*?)\}\}/g, (_, expr) => { + const trimmed = expr.trim(); + const result = jmespath.search(ctx, trimmed); + if (typeof result === "object") { + return JSON.stringify(result); + } + return result ?? ""; + }); + } + + // Case 3: Object or Array → recurse + if (Array.isArray(val)) return val.map((v) => resolveValue(v)); + if (val && typeof val === "object") { + const obj: Record = {}; + for (const [k, v] of Object.entries(val)) { + obj[k] = resolveValue(v); + } + return obj; + } + + // Case 4: primitive fallback + return val; + }; + + return resolveValue(templateVal); + } + + case "jmesPath": + return jmespath.search(ctx, binding.value); + + default: + return binding.value; + } +}; + +const maybeInjectChaos = async ( + result: any, + step: McpStep, + ctx: Context, + chaosPolicy?: ChaosPolicy, + logger?: Logger, + options?: ExecutionOptions, + chaosSource?: string, +): Promise<{ output: any; chaosResult?: any }> => { + if (!chaosPolicy || !logger) { + return { output: result }; + } + + const random = chaosPolicy.seed ? 
createSeededRandom(chaosPolicy.seed) : defaultRandom; + const chaosResult = await injectChaos( + result, + chaosPolicy, + { stepId: step.id, service: step.service, tool: step.tool }, + logger, + random, + ); + + if (chaosResult.chaosResult && chaosResult.chaosResult.triggered) { + logger.warn( + { + stepId: step.id, + chaosType: chaosResult.chaosResult.scenario.type, + modifications: chaosResult.chaosResult.modifications, + chaosSource, + }, + "Chaos injected into step output", + ); + + // Log to LogQueue storage (research branch feature with context) + if (options?.logQueueStorage) { + options.logQueueStorage.add({ + odId: options.odMetadata?.id || "unknown", + odName: options.odMetadata?.name || "unknown", + persona: options.odMetadata?.persona, + stepId: step.id, + service: step.service, + tool: step.tool, + chaosType: chaosResult.chaosResult.scenario.type, + modifications: chaosResult.chaosResult.modifications, + context: (() => { + const { __world, ...safeCtx } = ctx; + return safeCtx; + })(), + timestamp: chaosResult.chaosResult.injectedAt, + }); + } + + // Record injection in telemetry collector (od-arch MORPH-413 feature) + if (options?.chaosTelemetry && chaosSource) { + options.chaosTelemetry.recordInjection( + step.id, + step.name || step.id, + chaosResult.chaosResult, + chaosSource + ); + } + } + + return { output: chaosResult.output, chaosResult: chaosResult.chaosResult }; +}; diff --git a/packages/controlmart/src/operational-descriptor/schedule.od.ts b/packages/controlmart/src/operational-descriptor/schedule.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..647f3d3bf514662694da0174faec67fb30a16ed5 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/schedule.od.ts @@ -0,0 +1,524 @@ +import type { Job } from "@hokify/agenda"; +import { executeOperationalDescriptor } from "./executor.od"; +import { auditLogger } from "../services/audit-logger.service"; +import type { ExecutionOptions, OperationalDescriptor } from "../types/od.type"; +import { createAppLogger } from "../utils/logger.util"; +import type { TOperationalDescriptorModel } from "../models/od.model"; +import type { TWorldModel } from "../models/world.model"; +import { getIdFromMongoObject } from "../utils/mongo.util"; +import { ODRepository } from "../repository/od.repository"; +import { WorldRepository } from "../repository/world.repository"; +import { + defineJob, + scheduleJob, + createRecurringJob, + cancelJob, + cancelJobsByQuery, + listScheduledJobs, + rescheduleJob, + pauseJob, + resumeJob, + pauseJobsByQuery, + resumeJobsByQuery, + schedulerLogger, +} from "../services/scheduler.service"; + +export interface ScheduledODJobData { + odId: string; + odName: string; + odDescriptor: OperationalDescriptor; + worldId: string; + world: TWorldModel; + metadata?: Record; +} + +export const executeODJob = async (job: Job): Promise => { + const { odName, odDescriptor, world, metadata } = job.attrs.data as ScheduledODJobData; + + const jobLogger = createAppLogger({ + service: "od-job-executor", + }); + + jobLogger.info( + { + scheduledAt: job.attrs.nextRunAt, + metadata, + }, + `Starting scheduled OD execution: ${odName}`, + ); + + try { + const executionOptions: ExecutionOptions = { + world, + tools: {}, + logger: jobLogger, + auditLogger, + input: metadata, + }; + + const result = await executeOperationalDescriptor(odDescriptor, executionOptions); + + jobLogger.info( + { + status: result.status, + durationMs: result.durationMs, + successfulSteps: result.successfulSteps, + 
failedSteps: result.failedSteps, + }, + `Scheduled OD execution completed: ${odName}`, + ); + + (job.attrs as any).lastRunResult = { + status: result.status, + completedAt: new Date(), + durationMs: result.durationMs, + }; + + await job.save(); + + if (result.status === "failed") { + throw new Error(`OD execution failed: ${odName}`); + } + } catch (error) { + jobLogger.error( + { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + }, + `Scheduled OD execution failed: ${odName}`, + ); + throw error; + } +}; + +export const initializeODScheduling = (): void => { + defineJob("execute-od", executeODJob); + schedulerLogger.info("OD scheduling initialized - 'execute-od' job type registered"); +}; + +export const scheduleOD = async ( + time: string | Date, + od: TOperationalDescriptorModel, + world: TWorldModel, + metadata?: Record, +): Promise> => { + const jobData: ScheduledODJobData = { + odId: od.odId, + odName: od.name, + odDescriptor: od.data as OperationalDescriptor, + worldId: getIdFromMongoObject(world), + world, + metadata, + }; + + const job = await scheduleJob(time, "execute-od", jobData); + + schedulerLogger.info( + { + odId: od.odId, + odName: od.name, + scheduledFor: job.attrs.nextRunAt, + jobId: job.attrs._id, + }, + `Scheduled OD for one-time execution`, + ); + + return job; +}; + +export const scheduleRecurringOD = async ( + interval: string, + od: TOperationalDescriptorModel, + world: TWorldModel, + metadata?: Record, + options?: { nextRunAt?: Date | string }, +): Promise> => { + const jobData: ScheduledODJobData = { + odId: od.odId, + odName: od.name, + odDescriptor: od.data as OperationalDescriptor, + worldId: getIdFromMongoObject(world), + world, + metadata, + }; + + const job = await createRecurringJob( + interval, + "execute-od", + jobData, + { + "data.odId": od.odId, + "data.worldId": getIdFromMongoObject(world), + name: "execute-od", // Ensure we match the job name as well in the unique query + ...(metadata ? 
{ "data.metadata": metadata } : {}), + }, + options, + ); + + schedulerLogger.info( + { + odId: od.odId, + odName: od.name, + interval, + nextRunAt: job.attrs.nextRunAt, + jobId: job.attrs._id, + }, + `Scheduled OD for recurring execution`, + ); + + return job; +}; + +export const cancelScheduledOD = async (jobId: string): Promise => { + const numRemoved = await cancelJob(jobId); + + schedulerLogger.info( + { + jobId, + numRemoved, + }, + `Cancelled scheduled OD`, + ); + + return numRemoved; +}; + +export const cancelScheduledODByOdId = async (odId: string): Promise => { + const numRemoved = await cancelJobsByQuery({ "data.odId": odId }); + + schedulerLogger.info( + { + odId, + numRemoved, + }, + `Cancelled all scheduled jobs for OD`, + ); + + return numRemoved; +}; + +export const listScheduledJobsForOD = async (odId: string, worldId?: string): Promise => { + const query: any = { "data.odId": odId }; + if (worldId) { + query["data.worldId"] = worldId; + } + const jobs = await listScheduledJobs(query); + return jobs; +}; + +export const listScheduledJobsForWorld = async (worldId: string): Promise => { + const jobs = await listScheduledJobs({ "data.worldId": worldId } as any); + return jobs; +}; + +export const rescheduleOD = async (jobId: string, newTime: string | Date): Promise => { + const job = await rescheduleJob(jobId, newTime); + + if (job) { + schedulerLogger.info( + { + jobId, + odId: (job.attrs.data as ScheduledODJobData)?.odId, + newScheduledTime: job.attrs.nextRunAt, + }, + `Rescheduled OD`, + ); + } + + return job; +}; + +export const pauseODSchedule = async (jobId: string): Promise => { + const job = await pauseJob(jobId); + + if (job) { + schedulerLogger.info( + { + jobId, + odId: (job.attrs.data as ScheduledODJobData)?.odId, + }, + `Paused OD schedule`, + ); + } + + return job; +}; + +export const resumeODSchedule = async (jobId: string): Promise => { + const job = await resumeJob(jobId); + + if (job) { + schedulerLogger.info( + { + jobId, + odId: (job.attrs.data as ScheduledODJobData)?.odId, + }, + `Resumed OD schedule`, + ); + } + + return job; +}; + +export const pauseAllODSchedulesForWorld = async (worldId: string): Promise => { + const numPaused = await pauseJobsByQuery({ + "data.worldId": worldId, + name: "execute-od", + }); + + schedulerLogger.info( + { + worldId, + numPaused, + }, + `Paused all OD schedules for world`, + ); + + return numPaused; +}; + +export const resumeAllODSchedulesForWorld = async (worldId: string): Promise => { + const numResumed = await resumeJobsByQuery({ + "data.worldId": worldId, + name: "execute-od", + }); + + schedulerLogger.info( + { + worldId, + numResumed, + }, + `Resumed all OD schedules for world`, + ); + + return numResumed; +}; + +export const getScheduleStatusForWorld = async (worldId: string): Promise<'operational' | 'paused' | 'partial'> => { + const jobs = await listScheduledJobsForWorld(worldId); + + if (jobs.length === 0) { + return 'operational'; // Default state if no schedules exist + } + + const allPaused = jobs.every((job) => (job.attrs as any).disabled); + const allRunning = jobs.every((job) => !(job.attrs as any).disabled); + + if (allPaused) return 'paused'; + if (allRunning) return 'operational'; + return 'partial'; +}; + +export interface ODWithScheduleInfo extends TOperationalDescriptorModel { + schedules?: Array<{ + jobId: string; + nextRunAt?: Date | null; + isRecurring: boolean; + interval?: string; + disabled: boolean; + }>; +} + +export const getODWithSchedules = async ( + odId: string, + worldId: string, +): 
Promise => { + const od = await ODRepository.getODById(odId, worldId); + if (!od) return null; + + try { + const jobs = await listScheduledJobsForOD(odId, worldId); + const schedules = jobs.map((job) => ({ + jobId: job.attrs._id?.toString() || "", + nextRunAt: job.attrs.nextRunAt, + isRecurring: !!job.attrs.repeatInterval, + interval: job.attrs.repeatInterval as string, + disabled: (job.attrs as any).disabled || false, + })); + + return { + ...od, + schedules, + }; + } catch (error) { + schedulerLogger.warn({ error, odId }, "Failed to fetch schedule info for OD"); + return od; // Return OD without schedule info if there's an error + } +}; + +export const deleteODSafely = async ( + odId: string, + worldId: string, +): Promise<{ + deleted: boolean; + cancelledSchedules: number; +}> => { + try { + // Cancel all schedules first + const cancelledCount = await cancelScheduledODByOdId(odId); + + // Then delete the OD + await ODRepository.deleteODById(odId, worldId); + + schedulerLogger.info( + { odId, worldId, cancelledSchedules: cancelledCount }, + "Successfully deleted OD and cancelled schedules", + ); + + return { + deleted: true, + cancelledSchedules: cancelledCount, + }; + } catch (error) { + schedulerLogger.error({ error, odId, worldId }, "Failed to safely delete OD"); + throw error; + } +}; + +export const bulkScheduleODs = async ( + schedules: Array<{ + odId: string; + type: "once" | "recurring"; + time?: string | Date; + interval?: string; + metadata?: Record; + }>, + worldId: string, +) => { + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World ${worldId} not found`); + } + + const results = await Promise.allSettled( + schedules.map(async (schedule) => { + const od = await ODRepository.getODById(schedule.odId, worldId); + if (!od) { + throw new Error(`OD ${schedule.odId} not found`); + } + + if (schedule.type === "once" && schedule.time) { + return await scheduleOD(schedule.time, od, world, schedule.metadata); + } else if (schedule.type === "recurring" && schedule.interval) { + return await scheduleRecurringOD(schedule.interval, od, world, schedule.metadata); + } else { + throw new Error(`Invalid schedule configuration for OD ${schedule.odId}`); + } + }), + ); + + const successful = results.filter((r) => r.status === "fulfilled").length; + const failed = results.filter((r) => r.status === "rejected"); + + schedulerLogger.info( + { + worldId, + total: schedules.length, + successful, + failed: failed.length, + }, + "Bulk schedule operation completed", + ); + + return { + total: schedules.length, + successful, + failed: failed.map((f, index) => ({ + odId: schedules[index]?.odId, + error: f.reason.message, + })), + results: results.map((r, index) => ({ + odId: schedules[index]?.odId, + success: r.status === "fulfilled", + job: r.status === "fulfilled" ? r.value : null, + error: r.status === "rejected" ? 
r.reason.message : null, + })), + }; +}; + +export const validateODSchema = ( + od: OperationalDescriptor, +): { + isValid: boolean; + errors: string[]; + warnings: string[]; +} => { + const errors: string[] = []; + const warnings: string[] = []; + + // Basic structure validation + if (!od.id || od.id.trim().length === 0) { + errors.push("OD ID is required"); + } + + if (!od.name || od.name.trim().length === 0) { + errors.push("OD name is required"); + } + + if (!od.version || !/^\d+\.\d+\.\d+$/.test(od.version)) { + errors.push("Version must follow semver format (e.g., 1.0.0)"); + } + + if (!od.steps || od.steps.length === 0) { + errors.push("At least one step is required"); + } + + // Step validation + od.steps?.forEach((step, index) => { + if (!step.id || step.id.trim().length === 0) { + errors.push(`Step ${index + 1}: ID is required`); + } + + if (!step.name || step.name.trim().length === 0) { + errors.push(`Step ${index + 1}: Name is required`); + } + + if (!step.type) { + errors.push(`Step ${index + 1}: Type is required`); + } + + // Type-specific validation + if (step.type === "script" && !("script" in step)) { + errors.push(`Step ${index + 1}: Script is required for script steps`); + } + + if (step.type === "mcp" && (!("service" in step) || !("tool" in step))) { + errors.push(`Step ${index + 1}: Service and tool are required for MCP steps`); + } + + if (step.type === "map" && !("mapSpec" in step)) { + errors.push(`Step ${index + 1}: mapSpec is required for map steps`); + } + + // Warnings + if (!step.description) { + warnings.push(`Step ${index + 1}: Consider adding a description for better documentation`); + } + + if (!step.timeoutMs) { + warnings.push(`Step ${index + 1}: Consider setting a timeout for better reliability`); + } + }); + + // Policy validation + if (od.runPolicy?.deduplicationWindowMs && od.runPolicy.deduplicationWindowMs < 0) { + errors.push("Deduplication window must be positive"); + } + + // Chaos policy validation + if (od.chaos) { + if (od.chaos.probability < 0 || od.chaos.probability > 1) { + errors.push("Chaos probability must be between 0 and 1"); + } + + if (!od.chaos.scenarios || od.chaos.scenarios.length === 0) { + errors.push("At least one chaos scenario is required when chaos is enabled"); + } + } + + return { + isValid: errors.length === 0, + errors, + warnings, + }; +}; diff --git a/packages/controlmart/src/operational-descriptor/schema.od.ts b/packages/controlmart/src/operational-descriptor/schema.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..91db3c7203e74c0ad585e9fe9bfc69f95ddc1b65 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/schema.od.ts @@ -0,0 +1,305 @@ +export const OD_SCHEMA = { + $schema: "http://json-schema.org/draft-07/schema#", + title: "OperationalDescriptor", + type: "object", + required: ["id", "name", "version", "steps"], + additionalProperties: false, + properties: { + id: { type: "string" }, + name: { type: "string" }, + type: { + type: "string", + enum: ["standard", "background_job", "workflow"], + default: "standard", + }, + version: { + type: "string", + pattern: "^[0-9]+\\.[0-9]+\\.[0-9]+$", + }, + description: { type: "string" }, + namespace: { type: "string" }, + persona: { type: "string" }, + inputSchema: { type: "object" }, + runPolicy: { $ref: "#/definitions/runPolicy" }, + steps: { + type: "array", + minItems: 1, + items: { $ref: "#/definitions/step" }, + }, + assertions: { + type: "array", + items: { $ref: "#/definitions/assertion" }, + }, + chaos: { $ref: 
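`bulkScheduleODs` just above accepts a mixed list of one-off and recurring entries and reports per-entry outcomes; a sketch with placeholder IDs:

```ts
const outcome = await bulkScheduleODs(
  [
    { odId: "od_invoice_check", type: "once", time: new Date(Date.now() + 10 * 60_000) },
    { odId: "od_edi_sweep", type: "recurring", interval: "1 hour", metadata: { source: "bulk" } },
  ],
  worldId,
);
// outcome.total === 2; outcome.failed lists per-entry errors,
// outcome.results carries the created job (or the error) for each entry.
```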
"#/definitions/chaosPolicy" }, + }, + definitions: { + assertion: { + type: "object", + required: ["id", "expression"], + properties: { + id: { type: "string" }, + description: { type: "string" }, + expression: { type: "string" }, + language: { + type: "string", + enum: ["jmespath", "jsonata", "cel", "javascript"], + default: "jmespath", + }, + continueOnFailure: { type: "boolean", default: false }, + }, + }, + binding: { + type: "object", + required: ["type"], + properties: { + type: { type: "string", enum: ["literal", "template", "jmesPath"] }, + value: {}, + template: {}, + }, + oneOf: [{ required: ["value"] }, { required: ["template"] }], + }, + outputBinding: { + type: "object", + required: ["storeAs"], + additionalProperties: false, + properties: { + storeAs: { type: "string" }, + extract: { type: "string" }, + }, + }, + retryPolicy: { + type: "object", + additionalProperties: false, + properties: { + maxRetries: { type: "integer", minimum: 0, default: 3 }, + backoff: { + type: "string", + enum: ["fixed", "exponential", "linear"], + default: "exponential", + }, + baseMs: { type: "integer", minimum: 0, default: 200 }, + maxBackoffMs: { type: "integer", minimum: 0, default: 30000 }, + jitter: { type: "boolean", default: true }, + }, + }, + condition: { + type: "object", + required: ["expression"], + additionalProperties: false, + properties: { + language: { + type: "string", + enum: ["jmespath", "jsonata", "cel", "javascript"], + default: "jmespath", + }, + expression: { type: "string" }, + }, + }, + step: { + type: "object", + required: ["id", "name", "type"], + additionalProperties: false, + properties: { + id: { type: "string", pattern: "^[a-zA-Z0-9_\\-]{1,64}$" }, + name: { type: "string" }, + type: { type: "string", enum: ["mcp", "noop", "map", "script", "branch", "exit_early"] }, + description: { type: "string" }, + condition: { $ref: "#/definitions/condition" }, + input: { $ref: "#/definitions/binding" }, + output: { $ref: "#/definitions/outputBinding" }, + service: { type: "string" }, + tool: { type: "string" }, + script: { type: "string" }, + language: { + type: "string", + enum: ["javascript", "typescript"], + default: "javascript", + }, + retry: { $ref: "#/definitions/retryPolicy" }, + timeoutMs: { type: "integer", minimum: 1 }, + continueOnError: { type: "boolean" }, + compensationStepId: { type: ["string", "null"] }, + mapSpec: { + type: "object", + required: ["iterable", "itemName"], + additionalProperties: false, + properties: { + iterable: { $ref: "#/definitions/binding" }, + itemName: { type: "string" }, + concurrency: { type: "integer", minimum: 1 }, + }, + }, + children: { + type: "array", + items: { $ref: "#/definitions/step" }, + }, + branchSpec: { + type: "object", + required: ["condition", "then"], + additionalProperties: false, + properties: { + condition: { $ref: "#/definitions/condition" }, + then: { + type: "array", + items: { $ref: "#/definitions/step" }, + minItems: 1, + }, + else: { + type: "array", + items: { $ref: "#/definitions/step" }, + }, + }, + }, + exitCondition: { $ref: "#/definitions/condition" }, + message: { type: "string" }, + assertions: { + type: "array", + items: { $ref: "#/definitions/assertion" }, + }, + chaos: { $ref: "#/definitions/chaosPolicy" }, + }, + allOf: [ + { + if: { properties: { type: { const: "mcp" } } }, + then: { required: ["service", "tool", "input"] }, + }, + { + if: { properties: { type: { const: "map" } } }, + then: { required: ["mapSpec", "children"] }, + }, + { + if: { properties: { type: { const: "script" } } }, + 
then: { required: ["script"] }, + }, + { + if: { properties: { type: { const: "branch" } } }, + then: { required: ["branchSpec"] }, + }, + { + if: { properties: { type: { const: "exit_early" } } }, + then: { required: ["exitCondition"] }, + }, + ], + }, + runPolicy: { + type: "object", + additionalProperties: false, + properties: { + idempotencyKeyExpr: { type: "string" }, + deduplicationWindowMs: { type: "integer", minimum: 0 }, + storeRuns: { type: "boolean", default: true }, + failureMode: { + type: "string", + enum: ["fail_fast", "continue", "compensate"], + default: "fail_fast", + }, + }, + }, + chaosPolicy: { + type: "object", + required: ["enabled", "probability", "scenarios"], + additionalProperties: false, + properties: { + enabled: { type: "boolean" }, + probability: { type: "number", minimum: 0, maximum: 1 }, + scenarios: { + type: "array", + items: { $ref: "#/definitions/chaosScenario" }, + minItems: 1, + }, + persistCorruptedData: { + type: "boolean", + description: "When true, persist corrupted data instead of throwing errors", + }, + preprocessInput: { function: true }, + postprocessOutput: { function: true }, + seed: { type: "string" }, + }, + }, + chaosScenario: { + type: "object", + required: ["type", "weight", "description", "config"], + additionalProperties: false, + properties: { + type: { + type: "string", + enum: [ + "data_corruption", + "missing_data", + "stale_data", + "format_change", + "permission_denied", + "rate_limit", + "partial_data", + "duplicate_data", + "invalid_state", + "dependency_failure", + "timing_issue", + ], + }, + weight: { type: "number", minimum: 0 }, + description: { type: "string" }, + config: { $ref: "#/definitions/chaosConfig" }, + }, + }, + chaosConfig: { + type: "object", + additionalProperties: false, + properties: { + corruptFields: { + type: "array", + items: { type: "string" }, + }, + corruptionType: { + type: "string", + enum: ["null", "wrong_type", "invalid_format", "random_value"], + }, + missingFields: { + type: "array", + items: { type: "string" }, + }, + missingRecords: { type: "boolean" }, + staleDataAge: { type: "number", minimum: 0 }, + schemaChanges: { + type: "array", + items: { $ref: "#/definitions/schemaChange" }, + }, + permissionError: { type: "string" }, + rateLimitDelay: { type: "number", minimum: 0 }, + rateLimitMessage: { type: "string" }, + partialResults: { + type: "object", + required: ["percentage"], + properties: { + percentage: { type: "number", minimum: 0, maximum: 100 }, + randomize: { type: "boolean" }, + }, + }, + invalidStates: { + type: "array", + items: { type: "string" }, + }, + dependencyService: { type: "string" }, + cascadeFailure: { type: "boolean" }, + }, + }, + schemaChange: { + type: "object", + required: ["field", "change"], + additionalProperties: false, + properties: { + field: { type: "string" }, + change: { + type: "string", + enum: ["rename", "remove", "change_type", "add_nested", "flatten"], + }, + newName: { type: "string" }, + newType: { + type: "string", + enum: ["string", "number", "boolean", "object", "array"], + }, + newStructure: {}, + }, + }, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/edi.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/edi.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..7b4e9d028db901905cd9b244931b37bfb65c4198 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/edi.tool.od.ts @@ -0,0 +1,486 @@ +import { EdiTransactionRepository } from 
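Read together, `OD_SCHEMA` and `validateODSchema` imply a minimal well-formed descriptor along these lines (illustrative values; the shape respects the required fields, the semver pattern, and the extra `service`/`tool`/`input` requirements for `mcp` steps):

```ts
const minimalOd = {
  id: "od_invoice_check",
  name: "Invoice spot check",
  version: "1.0.0",                              // must match ^\d+\.\d+\.\d+$
  steps: [
    {
      id: "load_invoice",
      name: "Load invoice",
      type: "mcp",
      service: "erp",
      tool: "invoice.get_by_id",
      input: { type: "literal", value: { invoiceId: "INV-1001" } },
      output: { storeAs: "invoice" },
      timeoutMs: 5_000,
    },
  ],
  assertions: [{ id: "invoice_found", expression: "invoice" }],
};

const { isValid, errors, warnings } = validateODSchema(minimalOd as any);
// isValid === true, errors is empty; warnings only suggest adding a step description.
```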
"../../repository/edi/edi.repository"; +import { EdiGenerators } from "../../helpers/edi/generators.edi.helper"; +import { validateEdiTransaction } from "../../utils/edi/validation.edi.util"; +import { createAppLogger } from "../../utils/logger.util"; +import type { ChaosConfig } from "./registry.tool"; +import type { ChaosPolicy } from "../../types/od.type"; + +export interface EdiToolDefinition { + repository?: typeof EdiTransactionRepository; + method?: string; + helper?: typeof EdiGenerators | any; + helperMethod?: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +const logger = createAppLogger({ service: "edi-validation-tool" }); + +const ediValidationHelper = { + validate: async (args: { rawEdi: string; docType: string; worldId: string; context?: any }) => { + const result = validateEdiTransaction(args.rawEdi, args.docType, args.context); + + if (!result.isValid) { + // Log errors so the log processor can pick them up and create tickets + // We use a structured log format that the ingestion pipeline recognizes as a "ticket candidate" + // or we just log an error and rely on query rules. + // Based on LogQueueSchema, simply logging might not be enough unless we write to LogQueue directly? + // "The goal is to generate as real tickets as possible" using logs. + // Assuming standard error logging is sufficient effectively. + logger.error({ + msg: `EDI ${args.docType} Validation Failed`, + worldId: args.worldId, + docType: args.docType, + errors: result.errors, + isTicketCandidate: true, // Hint for log processor + tags: ["edi", "validation", "failure"] + }, `EDI Validation Failed: ${result.errors.map(e => e.message).join(", ")}`); + } + return result; + } +}; + +export const ediToolChaosConfigs: Record = { + "edi.transaction.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: { + partnerId: (value: string) => value?.includes("CHAOS"), + }, + }, + "edi.transaction.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "edi.transaction.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "edi.transaction.get_by_page": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: {}, + }, + "edi.transaction.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "edi.transaction.update_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: { + status: (value: string) => value === "CHAOS", + }, + }, + "edi.transaction.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "edi.transaction.requeue": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: {}, + }, + "edi.statistics.invoice": { + enabled: false, + failureRate: 0.02, + errorTypes: 
["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 300 }, + conditionalFailures: {}, + }, + "edi.statistics.errors_by_doctype": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 250 }, + conditionalFailures: {}, + }, + "edi.statistics.errors_by_partner": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 250 }, + conditionalFailures: {}, + }, + "edi.generate.850": { + enabled: false, + failureRate: 0.02, + errorTypes: ["VALIDATION_ERROR", "FORMAT_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "edi.generate.855": { + enabled: false, + failureRate: 0.02, + errorTypes: ["VALIDATION_ERROR", "FORMAT_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "edi.generate.856": { + enabled: false, + failureRate: 0.02, + errorTypes: ["VALIDATION_ERROR", "FORMAT_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "edi.generate.810": { + enabled: false, + failureRate: 0.02, + errorTypes: ["VALIDATION_ERROR", "FORMAT_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "edi.generate.997": { + enabled: false, + failureRate: 0.02, + errorTypes: ["VALIDATION_ERROR", "FORMAT_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "edi.generate.977": { + enabled: false, + failureRate: 0.02, + errorTypes: ["VALIDATION_ERROR", "FORMAT_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "edi.generate.sscc": { + enabled: false, + failureRate: 0.01, + errorTypes: ["VALIDATION_ERROR", "FORMAT_ERROR"], + delayMs: { min: 0, max: 30 }, + conditionalFailures: {}, + }, + "edi.validation.check": { + enabled: false, + failureRate: 0, + errorTypes: [], + delayMs: { min: 0, max: 0 }, + } +}; + +/** + * Sophisticated ChaosPolicy defaults for EDI tools + * These define realistic failure scenarios using the chaos-engine + * Disabled by default - enable via global chaos configuration + */ +export const ediToolDefaultChaos: Record = { + "edi.transaction.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 2, + description: "Corrupt partner ID or transaction number", + config: { + corruptFields: ["partnerId", "transactionNumber"], + corruptionType: "invalid_format", + }, + }, + { + type: "duplicate_data", + weight: 1, + description: "Simulate duplicate transaction submission", + config: {}, + }, + { + type: "missing_data", + weight: 1, + description: "Missing required EDI fields", + config: { + missingFields: ["documentType", "partnerId"], + }, + }, + ], + }, + "edi.transaction.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Transaction not found or deleted", + config: { + missingRecords: true, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated transaction data", + config: { + staleDataAge: 120, + }, + }, + ], + }, + "edi.transaction.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete result set", + config: { + partialResults: { + percentage: 60, + randomize: true, + }, + }, + }, + { + type: "duplicate_data", + weight: 1, + description: "Include duplicate transactions", + config: {}, + }, + ], + }, + "edi.transaction.update": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: 
"data_corruption", + weight: 1, + description: "Corrupt status or metadata fields", + config: { + corruptFields: ["status", "metadata"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Transaction in invalid state for update", + config: { + invalidStates: ["deleted", "archived", "locked"], + }, + }, + ], + }, + "edi.generate.850": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt item data in EDI generation", + config: { + corruptFields: ["sku", "quantity", "unitPrice"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "edi.generate.856": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt shipment data in EDI generation", + config: { + corruptFields: ["sscc", "quantity"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "edi.generate.810": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt invoice data in EDI generation", + config: { + corruptFields: ["invoiceNumber", "totalAmount"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "edi.generate.997": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt acknowledgment data in EDI generation", + config: { + corruptFields: ["controlNumber"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "edi.generate.977": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt receipt data in EDI generation", + config: { + corruptFields: ["receiptNumber", "quantity"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "edi.generate.sscc": { + enabled: false, + probability: 0.01, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt SSCC generation", + config: { + corruptFields: ["companyPrefix", "serial"], + corruptionType: "invalid_format", + }, + }, + ], + }, +}; + +export const ediTools: Record = { + "edi.transaction.create": { + repository: EdiTransactionRepository, + method: "createEdiTransaction", + chaos: ediToolChaosConfigs["edi.transaction.create"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.transaction.create"], + }, + "edi.transaction.get_by_id": { + repository: EdiTransactionRepository, + method: "getEdiTransactionById", + chaos: ediToolChaosConfigs["edi.transaction.get_by_id"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.transaction.get_by_id"], + }, + "edi.transaction.get_all": { + repository: EdiTransactionRepository, + method: "getAllEdiTransactions", + chaos: ediToolChaosConfigs["edi.transaction.get_all"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.transaction.get_all"], + }, + "edi.transaction.update": { + repository: EdiTransactionRepository, + method: "updateEdiTransaction", + chaos: ediToolChaosConfigs["edi.transaction.update"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.transaction.update"], + }, + "edi.transaction.update_status": { + repository: EdiTransactionRepository, + method: "updateEdiStatus", + chaos: ediToolChaosConfigs["edi.transaction.update_status"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.transaction.update_status"], + }, + "edi.transaction.delete": { + repository: EdiTransactionRepository, + method: "deleteEdiTransaction", + chaos: ediToolChaosConfigs["edi.transaction.delete"]!, + }, + "edi.transaction.requeue": { + 
repository: EdiTransactionRepository, + method: "requeueEdiTransaction", + chaos: ediToolChaosConfigs["edi.transaction.requeue"]!, + }, + "edi.statistics.invoice": { + repository: EdiTransactionRepository, + method: "ediInvoiceStatistics", + chaos: ediToolChaosConfigs["edi.statistics.invoice"]!, + }, + "edi.statistics.errors_by_doctype": { + repository: EdiTransactionRepository, + method: "getTopEdiErrorsStatsByDocType", + chaos: ediToolChaosConfigs["edi.statistics.errors_by_doctype"]!, + }, + "edi.statistics.errors_by_partner": { + repository: EdiTransactionRepository, + method: "getTopEdiErrorStatsByPartners", + chaos: ediToolChaosConfigs["edi.statistics.errors_by_partner"]!, + }, + "edi.transaction.get_by_page": { + repository: EdiTransactionRepository, + method: "getEdiTransactionsByPageNumber", + chaos: ediToolChaosConfigs["edi.transaction.get_by_page"]!, + }, + "edi.generate.850": { + helper: EdiGenerators, + helperMethod: "generateEDI850", + chaos: ediToolChaosConfigs["edi.generate.850"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.generate.850"], + }, + "edi.generate.855": { + helper: EdiGenerators, + helperMethod: "generate855OrderAcknowledgment", + chaos: ediToolChaosConfigs["edi.generate.855"]!, + }, + "edi.generate.856": { + helper: EdiGenerators, + helperMethod: "generateAdvancedShipNotice", + chaos: ediToolChaosConfigs["edi.generate.856"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.generate.856"], + }, + "edi.generate.810": { + helper: EdiGenerators, + helperMethod: "generateInvoice", + chaos: ediToolChaosConfigs["edi.generate.810"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.generate.810"], + }, + "edi.generate.997": { + helper: EdiGenerators, + helperMethod: "generate997Acknowledgment", + chaos: ediToolChaosConfigs["edi.generate.997"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.generate.997"], + }, + "edi.generate.977": { + helper: EdiGenerators, + helperMethod: "generateReceiptAcknowledgment", + chaos: ediToolChaosConfigs["edi.generate.977"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.generate.977"], + }, + "edi.generate.sscc": { + helper: EdiGenerators, + helperMethod: "generateSSCC", + chaos: ediToolChaosConfigs["edi.generate.sscc"]!, + defaultChaosPolicy: ediToolDefaultChaos["edi.generate.sscc"], + }, + "edi.validation.check": { + helper: ediValidationHelper, + helperMethod: "validate", + chaos: ediToolChaosConfigs["edi.validation.check"]! 
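The defaults above ship with `enabled: false`; to exercise one of them inside a specific OD, an equivalent policy can be attached at the step level, which `runStep` forwards through `chaosContext.stepChaos` and `runMcpStep` prefers over the registry-resolved policy and tool defaults. A sketch reusing the `data_corruption` scenario shape defined for `edi.transaction.create` (the `pendingTransaction` context key is illustrative):

```ts
const chaoticCreateStep = {
  id: "create_edi_txn",
  name: "Create EDI transaction (chaotic)",
  type: "mcp",
  service: "edi",
  tool: "transaction.create",
  input: { type: "jmesPath", value: "pendingTransaction" },
  chaos: {
    enabled: true,
    probability: 0.25,                 // inject on roughly a quarter of executions
    seed: "demo-seed",                 // seeded RNG keeps runs reproducible
    scenarios: [
      {
        type: "data_corruption",
        weight: 1,
        description: "Corrupt partner ID or transaction number",
        config: {
          corruptFields: ["partnerId", "transactionNumber"],
          corruptionType: "invalid_format",
        },
      },
    ],
  },
};
```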
+ } +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/erp/company.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/erp/company.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..dff3987126130a7c3a01770c603373d7332e1829 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/erp/company.tool.od.ts @@ -0,0 +1,383 @@ +import { CompanyRepository } from "../../../repository/erp/company.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface ErpCompanyToolDefinition { + repository: typeof CompanyRepository; + method: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const erpCompanyToolChaosConfigs: Record = { + "erp.company.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + companyId: (value: string) => value?.includes("CHAOS"), + }, + }, + "erp.company.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.company.get_by_duns": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.company.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "erp.company.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "erp.company.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "erp.company.get_mpc": { + enabled: false, + failureRate: 0.01, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.company.bulk_upsert": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 100, max: 500 }, + conditionalFailures: { + count: (value: number) => value > 500, + }, + }, + "erp.company.get_random": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.company.get_random_customer": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.company.get_customers": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, +}; + +/** + * Sophisticated ChaosPolicy defaults for ERP Company tools + */ +export const erpCompanyToolDefaultChaos: Record = { + "erp.company.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Simulate duplicate company creation", + config: {}, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt company name or ID", + config: { + corruptFields: ["name", "companyId"], + corruptionType: "invalid_format", + }, + }, + ], + 
}, + "erp.company.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Company not found", + config: { + missingRecords: true, + }, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt company contact information", + config: { + corruptFields: ["primaryContact", "billingAddress"], + corruptionType: "null", + }, + }, + ], + }, + "erp.company.get_by_duns": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "DUNS number not found", + config: { + missingRecords: true, + }, + }, + ], + }, + "erp.company.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete company list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated company data", + config: { + staleDataAge: 240, + }, + }, + ], + }, + "erp.company.update": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt update data", + config: { + corruptFields: ["status", "currency"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Company in invalid state for update", + config: { + invalidStates: ["DELETED", "ARCHIVED"], + }, + }, + ], + }, + "erp.company.delete": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Company already deleted or not found", + config: { + missingRecords: true, + }, + }, + ], + }, + "erp.company.get_mpc": { + enabled: false, + probability: 0.01, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "MPC Company not found", + config: { + missingRecords: true, + }, + }, + ], + }, + "erp.company.bulk_upsert": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Some companies failed to upsert", + config: { + partialResults: { + percentage: 80, + randomize: true, + }, + }, + }, + { + type: "rate_limit", + weight: 1, + description: "Bulk operation timed out", + config: { + rateLimitDelay: 5000, + }, + }, + ], + }, + "erp.company.get_random": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "No companies found", + config: { + missingRecords: true, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Company in invalid state", + config: { + invalidStates: ["INACTIVE", "BLOCKED", "DELETED"], + }, + }, + ], + }, + "erp.company.get_random_customer": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "No customers found", + config: { + missingRecords: true, + }, + }, + ], + }, + "erp.company.get_customers": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "partial_data", + weight: 1, + description: "Return incomplete customer list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + ], + }, +}; + +export const erpCompanyTools: Record = { + "erp.company.create": { + repository: CompanyRepository, + method: "createCompany", + chaos: erpCompanyToolChaosConfigs["erp.company.create"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.create"], + }, + "erp.company.get_by_id": { + repository: CompanyRepository, + method: "getCompanyById", + 
chaos: erpCompanyToolChaosConfigs["erp.company.get_by_id"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.get_by_id"], + }, + "erp.company.get_by_duns": { + repository: CompanyRepository, + method: "getCompanyByDunsNumber", + chaos: erpCompanyToolChaosConfigs["erp.company.get_by_duns"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.get_by_duns"], + }, + "erp.company.get_all": { + repository: CompanyRepository, + method: "getAllCompanies", + chaos: erpCompanyToolChaosConfigs["erp.company.get_all"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.get_all"], + }, + "erp.company.update": { + repository: CompanyRepository, + method: "updateCompany", + chaos: erpCompanyToolChaosConfigs["erp.company.update"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.update"], + }, + "erp.company.delete": { + repository: CompanyRepository, + method: "deleteCompany", + chaos: erpCompanyToolChaosConfigs["erp.company.delete"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.delete"], + }, + "erp.company.get_mpc": { + repository: CompanyRepository, + method: "getMpcCompany", + chaos: erpCompanyToolChaosConfigs["erp.company.get_mpc"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.get_mpc"], + }, + "erp.company.bulk_upsert": { + repository: CompanyRepository, + method: "bulkUpsertCompanies", + chaos: erpCompanyToolChaosConfigs["erp.company.bulk_upsert"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.bulk_upsert"], + }, + "erp.company.get_random": { + repository: CompanyRepository, + method: "getRandomCompany", + chaos: erpCompanyToolChaosConfigs["erp.company.get_random"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.get_random"], + }, + "erp.company.get_random_customer": { + repository: CompanyRepository, + method: "getRandomCustomer", + chaos: erpCompanyToolChaosConfigs["erp.company.get_random_customer"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.get_random_customer"], + }, + "erp.company.get_customers": { + repository: CompanyRepository, + method: "getCustomerCompanies", + chaos: erpCompanyToolChaosConfigs["erp.company.get_customers"]!, + defaultChaosPolicy: erpCompanyToolDefaultChaos["erp.company.get_customers"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/erp/index.ts b/packages/controlmart/src/operational-descriptor/tools/erp/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..d02edba1bdcc97f65293b999ca021c50b8a45767 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/erp/index.ts @@ -0,0 +1,6 @@ +export * from "./company.tool.od"; +export * from "./order.tool.od"; +export * from "./invoice.tool.od"; +export * from "./payment.tool.od"; +export * from "./product.tool.od"; +export * from "./shipment.tool.od"; diff --git a/packages/controlmart/src/operational-descriptor/tools/erp/invoice.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/erp/invoice.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..358db89288a7366a959c3142d4ba4690c61e5619 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/erp/invoice.tool.od.ts @@ -0,0 +1,233 @@ +import { InvoiceRepository } from "../../../repository/erp/invoice.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface ErpInvoiceToolDefinition { + repository: typeof InvoiceRepository; + method: 
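As with the EDI tools, these per-tool defaults are the last fallback: `runMcpStep` picks the effective policy as step-level chaos first, then the registry-resolved policy, then the tool default. A two-line sketch mirroring the resolution in run-step.od.ts:

```ts
const toolDefaultChaos = getToolDefaultChaosPolicy("erp.company.get_all");
const effectiveChaosPolicy = step.chaos || chaosPolicy || toolDefaultChaos;
```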
string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const erpInvoiceToolChaosConfigs: Record = { + "erp.invoice.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + invoiceId: (value: string) => value?.includes("CHAOS"), + }, + }, + "erp.invoice.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.invoice.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "erp.invoice.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "erp.invoice.update_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: { + status: (value: string) => value === "CHAOS", + }, + }, + "erp.invoice.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, +}; + +/** + * Sophisticated ChaosPolicy defaults for ERP Invoice tools + */ +export const erpInvoiceToolDefaultChaos: Record = { + "erp.invoice.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Simulate duplicate invoice creation", + config: {}, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt invoice ID or amount", + config: { + corruptFields: ["invoiceId", "totalAmount"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.invoice.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Invoice not found", + config: { + missingRecords: true, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated invoice data", + config: { + staleDataAge: 180, + }, + }, + ], + }, + "erp.invoice.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete invoice list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Include invoices in invalid states", + config: { + invalidStates: ["VOIDED", "DELETED"], + }, + }, + ], + }, + "erp.invoice.update": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt update data", + config: { + corruptFields: ["status", "dueDate"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Invoice in invalid state for update", + config: { + invalidStates: ["PAID", "VOIDED"], + }, + }, + ], + }, + "erp.invoice.update_status": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Invalid status transition", + config: { + invalidStates: ["PAID", "VOIDED"], + }, + }, + ], + }, + "erp.invoice.delete": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Invoice already deleted or 
not found", + config: { + missingRecords: true, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Cannot delete invoice with payments", + config: { + invalidStates: ["HAS_PAYMENTS"], + }, + }, + ], + }, +}; + +export const erpInvoiceTools: Record = { + "erp.invoice.create": { + repository: InvoiceRepository, + method: "createInvoice", + chaos: erpInvoiceToolChaosConfigs["erp.invoice.create"]!, + defaultChaosPolicy: erpInvoiceToolDefaultChaos["erp.invoice.create"], + }, + "erp.invoice.get_by_id": { + repository: InvoiceRepository, + method: "getInvoiceById", + chaos: erpInvoiceToolChaosConfigs["erp.invoice.get_by_id"]!, + defaultChaosPolicy: erpInvoiceToolDefaultChaos["erp.invoice.get_by_id"], + }, + "erp.invoice.get_all": { + repository: InvoiceRepository, + method: "getAllInvoices", + chaos: erpInvoiceToolChaosConfigs["erp.invoice.get_all"]!, + defaultChaosPolicy: erpInvoiceToolDefaultChaos["erp.invoice.get_all"], + }, + "erp.invoice.update": { + repository: InvoiceRepository, + method: "updateInvoice", + chaos: erpInvoiceToolChaosConfigs["erp.invoice.update"]!, + defaultChaosPolicy: erpInvoiceToolDefaultChaos["erp.invoice.update"], + }, + "erp.invoice.update_status": { + repository: InvoiceRepository, + method: "updateInvoiceStatus", + chaos: erpInvoiceToolChaosConfigs["erp.invoice.update_status"]!, + defaultChaosPolicy: erpInvoiceToolDefaultChaos["erp.invoice.update_status"], + }, + "erp.invoice.delete": { + repository: InvoiceRepository, + method: "deleteInvoice", + chaos: erpInvoiceToolChaosConfigs["erp.invoice.delete"]!, + defaultChaosPolicy: erpInvoiceToolDefaultChaos["erp.invoice.delete"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/erp/order.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/erp/order.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..73737265cffe4b369d3c0482fb1363b477a6d5ae --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/erp/order.tool.od.ts @@ -0,0 +1,249 @@ +import { OrderRepository } from "../../../repository/erp/order.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface ErpOrderToolDefinition { + repository: typeof OrderRepository; + method: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const erpOrderToolChaosConfigs: Record = { + "erp.order.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + orderId: (value: string) => value?.includes("CHAOS"), + }, + }, + "erp.order.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.order.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "erp.order.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "erp.order.update_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: { + status: (value: string) => value === 
"CHAOS", + }, + }, + "erp.order.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, +}; + +/** + * Sophisticated ChaosPolicy defaults for ERP Order tools + * These define realistic failure scenarios using the chaos-engine + * Disabled by default - enable via global chaos configuration + */ +export const erpOrderToolDefaultChaos: Record = { + "erp.order.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 2, + description: "Corrupt order ID or customer information", + config: { + corruptFields: ["orderId", "customerId", "totalAmount"], + corruptionType: "invalid_format", + }, + }, + { + type: "duplicate_data", + weight: 1, + description: "Simulate duplicate order submission", + config: {}, + }, + { + type: "missing_data", + weight: 1, + description: "Missing required order fields", + config: { + missingFields: ["lineItems", "shippingAddress"], + }, + }, + ], + }, + "erp.order.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Order not found or deleted", + config: { + missingRecords: true, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated order data", + config: { + staleDataAge: 180, + }, + }, + ], + }, + "erp.order.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete order list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Include orders in invalid states", + config: { + invalidStates: ["cancelled", "deleted", "suspended"], + }, + }, + ], + }, + "erp.order.update": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt order status or amounts", + config: { + corruptFields: ["status", "totalAmount", "paidAmount"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Order in invalid state for update", + config: { + invalidStates: ["locked", "archived", "finalized"], + }, + }, + ], + }, + "erp.order.update_status": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Invalid status transition", + config: { + invalidStates: ["cancelled", "completed", "locked"], + }, + }, + { + type: "timing_issue", + weight: 1, + description: "Future timestamp on status update", + config: {}, + }, + ], + }, + "erp.order.delete": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Order already deleted or not found", + config: { + missingRecords: true, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Cannot delete order with related records", + config: { + invalidStates: ["HAS_RELATED_RECORDS"], + }, + }, + ], + }, +}; + +export const erpOrderTools: Record = { + "erp.order.create": { + repository: OrderRepository, + method: "createOrder", + chaos: erpOrderToolChaosConfigs["erp.order.create"]!, + defaultChaosPolicy: erpOrderToolDefaultChaos["erp.order.create"], + }, + "erp.order.get_by_id": { + repository: OrderRepository, + method: "getOrderById", + chaos: erpOrderToolChaosConfigs["erp.order.get_by_id"]!, + defaultChaosPolicy: erpOrderToolDefaultChaos["erp.order.get_by_id"], + }, + "erp.order.get_all": { + 
repository: OrderRepository, + method: "getAllOrders", + chaos: erpOrderToolChaosConfigs["erp.order.get_all"]!, + defaultChaosPolicy: erpOrderToolDefaultChaos["erp.order.get_all"], + }, + "erp.order.update": { + repository: OrderRepository, + method: "updateOrder", + chaos: erpOrderToolChaosConfigs["erp.order.update"]!, + defaultChaosPolicy: erpOrderToolDefaultChaos["erp.order.update"], + }, + "erp.order.update_status": { + repository: OrderRepository, + method: "updateOrderStatus", + chaos: erpOrderToolChaosConfigs["erp.order.update_status"]!, + defaultChaosPolicy: erpOrderToolDefaultChaos["erp.order.update_status"], + }, + "erp.order.delete": { + repository: OrderRepository, + method: "deleteOrder", + chaos: erpOrderToolChaosConfigs["erp.order.delete"]!, + defaultChaosPolicy: erpOrderToolDefaultChaos["erp.order.delete"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/erp/payment.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/erp/payment.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..9f28b184963ac0e51f862637d1b04562c8fb7888 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/erp/payment.tool.od.ts @@ -0,0 +1,270 @@ +import { PaymentRepository } from "../../../repository/erp/payment.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface ErpPaymentToolDefinition { + repository: typeof PaymentRepository; + method: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const erpPaymentToolChaosConfigs: Record = { + "erp.payment.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + paymentId: (value: string) => value?.includes("CHAOS"), + }, + }, + "erp.payment.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.payment.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "erp.payment.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "erp.payment.update_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: { + status: (value: string) => value === "CHAOS", + }, + }, + "erp.payment.apply_allocations": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + amount: (value: number) => value > 10000, + }, + }, + "erp.payment.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, +}; + +/** + * Sophisticated ChaosPolicy defaults for ERP Payment tools + */ +export const erpPaymentToolDefaultChaos: Record = { + "erp.payment.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Simulate duplicate payment creation", + config: {}, + }, + { + type: 
"data_corruption", + weight: 1, + description: "Corrupt payment ID or amount", + config: { + corruptFields: ["paymentId", "amount"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.payment.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Payment not found", + config: { + missingRecords: true, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated payment data", + config: { + staleDataAge: 180, + }, + }, + ], + }, + "erp.payment.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete payment list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Include payments in invalid states", + config: { + invalidStates: ["VOIDED", "FAILED"], + }, + }, + ], + }, + "erp.payment.update": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt update data", + config: { + corruptFields: ["status", "method"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Payment in invalid state for update", + config: { + invalidStates: ["PROCESSED", "VOIDED"], + }, + }, + ], + }, + "erp.payment.update_status": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Invalid status transition", + config: { + invalidStates: ["PROCESSED", "VOIDED"], + }, + }, + ], + }, + "erp.payment.apply_allocations": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 2, + description: "Allocation amount exceeds payment total", + config: { + corruptionType: "invalid_format", + }, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt allocation data", + config: { + corruptFields: ["invoiceId", "amount"], + }, + }, + ], + }, + "erp.payment.delete": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Payment already deleted or not found", + config: { + missingRecords: true, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Cannot delete processed payment", + config: { + invalidStates: ["PROCESSED"], + }, + }, + ], + }, +}; + +export const erpPaymentTools: Record = { + "erp.payment.create": { + repository: PaymentRepository, + method: "createPayment", + chaos: erpPaymentToolChaosConfigs["erp.payment.create"]!, + defaultChaosPolicy: erpPaymentToolDefaultChaos["erp.payment.create"], + }, + "erp.payment.get_by_id": { + repository: PaymentRepository, + method: "getPaymentById", + chaos: erpPaymentToolChaosConfigs["erp.payment.get_by_id"]!, + defaultChaosPolicy: erpPaymentToolDefaultChaos["erp.payment.get_by_id"], + }, + "erp.payment.get_all": { + repository: PaymentRepository, + method: "getAllPayments", + chaos: erpPaymentToolChaosConfigs["erp.payment.get_all"]!, + defaultChaosPolicy: erpPaymentToolDefaultChaos["erp.payment.get_all"], + }, + "erp.payment.update": { + repository: PaymentRepository, + method: "updatePayment", + chaos: erpPaymentToolChaosConfigs["erp.payment.update"]!, + defaultChaosPolicy: erpPaymentToolDefaultChaos["erp.payment.update"], + }, + "erp.payment.update_status": { + repository: PaymentRepository, + method: "updatePaymentStatus", + chaos: erpPaymentToolChaosConfigs["erp.payment.update_status"]!, + 
defaultChaosPolicy: erpPaymentToolDefaultChaos["erp.payment.update_status"], + }, + "erp.payment.apply_allocations": { + repository: PaymentRepository, + method: "applyAllocations", + chaos: erpPaymentToolChaosConfigs["erp.payment.apply_allocations"]!, + defaultChaosPolicy: erpPaymentToolDefaultChaos["erp.payment.apply_allocations"], + }, + "erp.payment.delete": { + repository: PaymentRepository, + method: "deletePayment", + chaos: erpPaymentToolChaosConfigs["erp.payment.delete"]!, + defaultChaosPolicy: erpPaymentToolDefaultChaos["erp.payment.delete"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/erp/product.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/erp/product.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..5dd01cc9e1c659f5cde04b22c9ae2559b630421b --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/erp/product.tool.od.ts @@ -0,0 +1,352 @@ +import { ProductRepository } from "../../../repository/erp/product.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface ErpProductToolDefinition { + repository: typeof ProductRepository; + method: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const erpProductToolChaosConfigs: Record = { + "erp.product.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + productId: (value: string) => value?.includes("CHAOS"), + }, + }, + "erp.product.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "erp.product.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "erp.product.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "erp.product.update_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: { + status: (value: string) => value === "CHAOS", + }, + }, + "erp.product.update_pricing": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "erp.product.toggle_inventory": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: {}, + }, + "erp.product.bulk_upsert": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 100, max: 500 }, + conditionalFailures: { + count: (value: number) => value > 500, + }, + }, + "erp.product.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "erp.product.get_random": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, +}; + +/** + * Sophisticated ChaosPolicy defaults for 
ERP Product tools + */ +export const erpProductToolDefaultChaos: Record = { + "erp.product.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Simulate duplicate product creation", + config: {}, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt product ID or name", + config: { + corruptFields: ["productId", "name"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.product.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Product not found", + config: { + missingRecords: true, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated product data", + config: { + staleDataAge: 180, + }, + }, + ], + }, + "erp.product.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete product list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Include products in invalid states", + config: { + invalidStates: ["DISCONTINUED", "DELETED"], + }, + }, + ], + }, + "erp.product.update": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt update data", + config: { + corruptFields: ["status", "category"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Product in invalid state for update", + config: { + invalidStates: ["DISCONTINUED", "LOCKED"], + }, + }, + ], + }, + "erp.product.update_status": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Invalid status transition", + config: { + invalidStates: ["DISCONTINUED"], + }, + }, + ], + }, + "erp.product.update_pricing": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "data_corruption", + weight: 2, + description: "Negative price or invalid currency", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.product.toggle_inventory": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "invalid_state", + weight: 1, + description: "Cannot toggle inventory for non-physical product", + config: {}, + }, + ], + }, + "erp.product.bulk_upsert": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Some products failed to upsert", + config: { + partialResults: { + percentage: 80, + randomize: true, + }, + }, + }, + { + type: "rate_limit", + weight: 1, + description: "Bulk operation timed out", + config: { + rateLimitDelay: 5000, + }, + }, + ], + }, + "erp.product.delete": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Product already deleted or not found", + config: { + missingRecords: true, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Cannot delete product with active orders", + config: { + invalidStates: ["HAS_RELATED_RECORDS"], + }, + }, + ], + }, + "erp.product.get_random": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "No products found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const erpProductTools: Record = { + "erp.product.create": { + repository: ProductRepository, + method: "createProduct", + chaos: 
erpProductToolChaosConfigs["erp.product.create"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.create"], + }, + "erp.product.get_by_id": { + repository: ProductRepository, + method: "getProductById", + chaos: erpProductToolChaosConfigs["erp.product.get_by_id"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.get_by_id"], + }, + "erp.product.get_all": { + repository: ProductRepository, + method: "getAllProducts", + chaos: erpProductToolChaosConfigs["erp.product.get_all"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.get_all"], + }, + "erp.product.update": { + repository: ProductRepository, + method: "updateProduct", + chaos: erpProductToolChaosConfigs["erp.product.update"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.update"], + }, + "erp.product.update_status": { + repository: ProductRepository, + method: "updateProductStatus", + chaos: erpProductToolChaosConfigs["erp.product.update_status"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.update_status"], + }, + "erp.product.update_pricing": { + repository: ProductRepository, + method: "updateProductPricing", + chaos: erpProductToolChaosConfigs["erp.product.update_pricing"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.update_pricing"], + }, + "erp.product.toggle_inventory": { + repository: ProductRepository, + method: "toggleInventoryTracking", + chaos: erpProductToolChaosConfigs["erp.product.toggle_inventory"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.toggle_inventory"], + }, + "erp.product.bulk_upsert": { + repository: ProductRepository, + method: "bulkUpsertProducts", + chaos: erpProductToolChaosConfigs["erp.product.bulk_upsert"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.bulk_upsert"], + }, + "erp.product.delete": { + repository: ProductRepository, + method: "deleteProduct", + chaos: erpProductToolChaosConfigs["erp.product.delete"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.delete"], + }, + "erp.product.get_random": { + repository: ProductRepository, + method: "getRandomProduct", + chaos: erpProductToolChaosConfigs["erp.product.get_random"]!, + defaultChaosPolicy: erpProductToolDefaultChaos["erp.product.get_random"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/erp/shipment.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/erp/shipment.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..8beb6051ac2d6a3a45f0c5790881716a311c9f12 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/erp/shipment.tool.od.ts @@ -0,0 +1,379 @@ +import { ERPShipmentRepository } from "../../../repository/erp/shipment.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface ErpShipmentToolDefinition { + repository: typeof ERPShipmentRepository; + method: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const erpShipmentToolChaosConfigs: Record = { + "erp.shipment.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + shipmentId: (value: string) => value?.includes("CHAOS"), + }, + }, + "erp.shipment.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: 
{}, + }, + "erp.shipment.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "erp.shipment.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "erp.shipment.update_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: { + status: (value: string) => value === "CHAOS", + }, + }, + "erp.shipment.update_tracking": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "erp.shipment.add_event": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 80 }, + conditionalFailures: {}, + }, + "erp.shipment.add_document": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "erp.shipment.update_lines": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "erp.shipment.bulk_upsert": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 100, max: 500 }, + conditionalFailures: { + count: (value: number) => value > 500, + }, + }, + "erp.shipment.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, +}; + +/** + * Sophisticated ChaosPolicy defaults for ERP Shipment tools + */ +export const erpShipmentToolDefaultChaos: Record = { + "erp.shipment.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Simulate duplicate shipment creation", + config: {}, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt shipment ID or carrier", + config: { + corruptFields: ["shipmentId", "carrier"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.shipment.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Shipment not found", + config: { + missingRecords: true, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated shipment data", + config: { + staleDataAge: 180, + }, + }, + ], + }, + "erp.shipment.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete shipment list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Include shipments in invalid states", + config: { + invalidStates: ["LOST", "DAMAGED"], + }, + }, + ], + }, + "erp.shipment.update": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt update data", + config: { + corruptFields: ["status", "carrier"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Shipment in invalid state for update", + config: { + 
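+ /*
+  * Note (illustrative, based on injectChaosDelay in registry.tool.ts): when chaos fires for a
+  * shipment tool, the executor first sleeps for a uniformly random delay drawn from that tool's
+  * delayMs window before throwing the simulated error, e.g. for { min: 0, max: 150 }:
+  *
+  *   const delay = Math.floor(Math.random() * (150 - 0 + 1)) + 0; // 0-150 ms
+  *   await new Promise((resolve) => setTimeout(resolve, delay));
+  *
+  * so agents see realistic latency as well as the failure itself.
+  */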
invalidStates: ["DELIVERED", "CANCELLED"], + }, + }, + ], + }, + "erp.shipment.update_status": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Invalid status transition", + config: { + invalidStates: ["DELIVERED", "CANCELLED"], + }, + }, + ], + }, + "erp.shipment.update_tracking": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Invalid tracking number format", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.shipment.add_event": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "timing_issue", + weight: 1, + description: "Event timestamp out of order", + config: {}, + }, + ], + }, + "erp.shipment.add_document": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Invalid document URL or type", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.shipment.update_lines": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Line items mismatch with order", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "erp.shipment.bulk_upsert": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Some shipments failed to upsert", + config: { + partialResults: { + percentage: 80, + randomize: true, + }, + }, + }, + { + type: "rate_limit", + weight: 1, + description: "Bulk operation timed out", + config: { + rateLimitDelay: 5000, + }, + }, + ], + }, + "erp.shipment.delete": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Shipment already deleted or not found", + config: { + missingRecords: true, + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Cannot delete delivered shipment", + config: { + invalidStates: ["DELIVERED"], + }, + }, + ], + }, +}; + +export const erpShipmentTools: Record = { + "erp.shipment.create": { + repository: ERPShipmentRepository, + method: "createShipment", + chaos: erpShipmentToolChaosConfigs["erp.shipment.create"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.create"], + }, + "erp.shipment.get_by_id": { + repository: ERPShipmentRepository, + method: "getShipmentById", + chaos: erpShipmentToolChaosConfigs["erp.shipment.get_by_id"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.get_by_id"], + }, + "erp.shipment.get_all": { + repository: ERPShipmentRepository, + method: "getAllShipments", + chaos: erpShipmentToolChaosConfigs["erp.shipment.get_all"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.get_all"], + }, + "erp.shipment.update": { + repository: ERPShipmentRepository, + method: "updateShipment", + chaos: erpShipmentToolChaosConfigs["erp.shipment.update"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.update"], + }, + "erp.shipment.update_status": { + repository: ERPShipmentRepository, + method: "updateShipmentStatus", + chaos: erpShipmentToolChaosConfigs["erp.shipment.update_status"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.update_status"], + }, + "erp.shipment.update_tracking": { + repository: ERPShipmentRepository, + method: "updateTrackingDetails", + chaos: erpShipmentToolChaosConfigs["erp.shipment.update_tracking"]!, + defaultChaosPolicy: 
erpShipmentToolDefaultChaos["erp.shipment.update_tracking"], + }, + "erp.shipment.add_event": { + repository: ERPShipmentRepository, + method: "addShipmentEvent", + chaos: erpShipmentToolChaosConfigs["erp.shipment.add_event"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.add_event"], + }, + "erp.shipment.add_document": { + repository: ERPShipmentRepository, + method: "addShipmentDocument", + chaos: erpShipmentToolChaosConfigs["erp.shipment.add_document"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.add_document"], + }, + "erp.shipment.update_lines": { + repository: ERPShipmentRepository, + method: "updateShipmentLines", + chaos: erpShipmentToolChaosConfigs["erp.shipment.update_lines"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.update_lines"], + }, + "erp.shipment.bulk_upsert": { + repository: ERPShipmentRepository, + method: "bulkUpsertShipments", + chaos: erpShipmentToolChaosConfigs["erp.shipment.bulk_upsert"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.bulk_upsert"], + }, + "erp.shipment.delete": { + repository: ERPShipmentRepository, + method: "deleteShipment", + chaos: erpShipmentToolChaosConfigs["erp.shipment.delete"]!, + defaultChaosPolicy: erpShipmentToolDefaultChaos["erp.shipment.delete"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/finance/finance.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/finance/finance.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..e094334b4482e089f82a8a1de5aa6db3c4f4f513 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/finance/finance.tool.od.ts @@ -0,0 +1,252 @@ +import { FinanceRepository } from "../../../repository/finance/finance.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface FinanceToolDefinition { + repository: typeof FinanceRepository; + method: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const financeToolChaosConfigs: Record = { + "finance.transaction.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + transactionId: (value: string) => value?.includes("CHAOS"), + }, + }, + "finance.transaction.bulk_insert": { + enabled: false, + failureRate: 0.08, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 300 }, + conditionalFailures: {}, + }, + "finance.transaction.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "finance.transaction.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "finance.transaction.get_by_source": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "finance.transaction.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "finance.transaction.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", 
"NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "finance.aggregate.by_type": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 250 }, + conditionalFailures: {}, + }, + "finance.aggregate.by_partner": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 250 }, + conditionalFailures: {}, + }, + "finance.summary.get": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: {}, + }, +}; + +export const financeToolDefaultChaos: Record = { + "finance.transaction.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Simulate duplicate transaction creation", + config: {}, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt transaction amount or type", + config: { + corruptFields: ["amount", "type"], + corruptionType: "invalid_format", + }, + }, + ], + }, + "finance.transaction.bulk_insert": { + enabled: false, + probability: 0.08, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Only insert some transactions", + config: { + partialResults: { percentage: 60, randomize: true }, + }, + }, + { + type: "data_corruption", + weight: 1, + description: "Corrupt batch data", + config: { + corruptFields: ["amount"], + corruptionType: "wrong_type", + }, + }, + ], + }, + "finance.transaction.get_all": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete transaction list", + config: { + partialResults: { percentage: 50, randomize: true }, + }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated transaction data", + config: { staleDataAge: 300 }, + }, + ], + }, + "finance.transaction.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Transaction not found", + config: { missingRecords: true }, + }, + ], + }, + "finance.aggregate.by_type": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Return incorrect aggregation", + config: { + corruptFields: ["totalAmount"], + corruptionType: "wrong_type", + }, + }, + ], + }, + "finance.summary.get": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "stale_data", + weight: 1, + description: "Return stale financial summary", + config: { staleDataAge: 600 }, + }, + ], + }, +}; + +export const financeTools: Record = { + "finance.transaction.create": { + repository: FinanceRepository, + method: "createTransaction", + chaos: financeToolChaosConfigs["finance.transaction.create"]!, + defaultChaosPolicy: financeToolDefaultChaos["finance.transaction.create"], + }, + "finance.transaction.bulk_insert": { + repository: FinanceRepository, + method: "bulkInsertTransactions", + chaos: financeToolChaosConfigs["finance.transaction.bulk_insert"]!, + defaultChaosPolicy: financeToolDefaultChaos["finance.transaction.bulk_insert"], + }, + "finance.transaction.get_all": { + repository: FinanceRepository, + method: "getTransactions", + chaos: financeToolChaosConfigs["finance.transaction.get_all"]!, + defaultChaosPolicy: financeToolDefaultChaos["finance.transaction.get_all"], + }, + "finance.transaction.get_by_id": { + repository: FinanceRepository, + method: "getTransactionById", + chaos: 
financeToolChaosConfigs["finance.transaction.get_by_id"]!, + defaultChaosPolicy: financeToolDefaultChaos["finance.transaction.get_by_id"], + }, + "finance.transaction.get_by_source": { + repository: FinanceRepository, + method: "getTransactionsBySource", + chaos: financeToolChaosConfigs["finance.transaction.get_by_source"]!, + }, + "finance.transaction.update": { + repository: FinanceRepository, + method: "updateTransaction", + chaos: financeToolChaosConfigs["finance.transaction.update"]!, + }, + "finance.transaction.delete": { + repository: FinanceRepository, + method: "deleteTransaction", + chaos: financeToolChaosConfigs["finance.transaction.delete"]!, + }, + "finance.aggregate.by_type": { + repository: FinanceRepository, + method: "aggregateByType", + chaos: financeToolChaosConfigs["finance.aggregate.by_type"]!, + defaultChaosPolicy: financeToolDefaultChaos["finance.aggregate.by_type"], + }, + "finance.aggregate.by_partner": { + repository: FinanceRepository, + method: "aggregateByPartner", + chaos: financeToolChaosConfigs["finance.aggregate.by_partner"]!, + }, + "finance.summary.get": { + repository: FinanceRepository, + method: "getFinancialSummary", + chaos: financeToolChaosConfigs["finance.summary.get"]!, + defaultChaosPolicy: financeToolDefaultChaos["finance.summary.get"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/finance/index.ts b/packages/controlmart/src/operational-descriptor/tools/finance/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..292bd1b1ae4d5cadac2d931c46b7e0d8d7200d39 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/finance/index.ts @@ -0,0 +1,2 @@ +export { financeTools, financeToolChaosConfigs, financeToolDefaultChaos } from "./finance.tool.od"; +export { ledgerTools, ledgerToolChaosConfigs, ledgerToolDefaultChaos } from "./ledger.tool.od"; diff --git a/packages/controlmart/src/operational-descriptor/tools/finance/ledger.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/finance/ledger.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..155cb540e64c6949604549b2425d67781eb94704 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/finance/ledger.tool.od.ts @@ -0,0 +1,165 @@ +import { CompanyLedgerRepository } from "../../../repository/finance/ledger.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface LedgerToolDefinition { + repository: typeof CompanyLedgerRepository; + method: string; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const ledgerToolChaosConfigs: Record = { + "finance.ledger.ensure": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: {}, + }, + "finance.ledger.get": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "finance.ledger.update": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "finance.ledger.increment": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: { + cashDelta: (value: number) => Math.abs(value) > 100000, + }, + }, + 
"finance.ledger.delete": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "finance.ledger.summary": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: {}, + }, +}; + +export const ledgerToolDefaultChaos: Record = { + "finance.ledger.ensure": { + enabled: false, + probability: 0.04, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Corrupt ledger balance data", + config: { + corruptFields: ["cash", "totalReceivables", "totalPayables"], + corruptionType: "wrong_type", + }, + }, + ], + }, + "finance.ledger.get": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Ledger not found", + config: { missingRecords: true }, + }, + { + type: "stale_data", + weight: 1, + description: "Return outdated ledger data", + config: { staleDataAge: 120 }, + }, + ], + }, + "finance.ledger.increment": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 2, + description: "Incorrect increment calculation", + config: { + corruptFields: ["cashDelta", "receivablesDelta"], + corruptionType: "wrong_type", + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Resulting balance would be negative", + config: { + invalidStates: ["NEGATIVE_BALANCE"], + }, + }, + ], + }, + "finance.ledger.summary": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "stale_data", + weight: 1, + description: "Return stale summary", + config: { staleDataAge: 300 }, + }, + ], + }, +}; + +export const ledgerTools: Record = { + "finance.ledger.ensure": { + repository: CompanyLedgerRepository, + method: "ensure", + chaos: ledgerToolChaosConfigs["finance.ledger.ensure"]!, + defaultChaosPolicy: ledgerToolDefaultChaos["finance.ledger.ensure"], + }, + "finance.ledger.get": { + repository: CompanyLedgerRepository, + method: "get", + chaos: ledgerToolChaosConfigs["finance.ledger.get"]!, + defaultChaosPolicy: ledgerToolDefaultChaos["finance.ledger.get"], + }, + "finance.ledger.update": { + repository: CompanyLedgerRepository, + method: "update", + chaos: ledgerToolChaosConfigs["finance.ledger.update"]!, + }, + "finance.ledger.increment": { + repository: CompanyLedgerRepository, + method: "increment", + chaos: ledgerToolChaosConfigs["finance.ledger.increment"]!, + defaultChaosPolicy: ledgerToolDefaultChaos["finance.ledger.increment"], + }, + "finance.ledger.delete": { + repository: CompanyLedgerRepository, + method: "delete", + chaos: ledgerToolChaosConfigs["finance.ledger.delete"]!, + }, + "finance.ledger.summary": { + repository: CompanyLedgerRepository, + method: "summary", + chaos: ledgerToolChaosConfigs["finance.ledger.summary"]!, + defaultChaosPolicy: ledgerToolDefaultChaos["finance.ledger.summary"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/index.ts b/packages/controlmart/src/operational-descriptor/tools/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..52b610d720ee818b2061e752e7ce3a40dd5228ed --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/index.ts @@ -0,0 +1,5 @@ +export * from "./edi.tool.od"; +export * from "./registry.tool"; +export * from "./erp"; +export * from "./wms"; +export * from "./manufacturing"; diff --git a/packages/controlmart/src/operational-descriptor/tools/manufacturing/index.ts 
b/packages/controlmart/src/operational-descriptor/tools/manufacturing/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..a3a0eed7675e98bcbd4653a7da9fa23ddc795269 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/manufacturing/index.ts @@ -0,0 +1,2 @@ +export * from "./production-run.tool.od"; + diff --git a/packages/controlmart/src/operational-descriptor/tools/manufacturing/production-run.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/manufacturing/production-run.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..7581fe0a44f40659fa883075b69af15c66ba3a72 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/manufacturing/production-run.tool.od.ts @@ -0,0 +1,127 @@ +import { ManufacturingProductionRunRepository } from "../../../repository/manufacturing/production-run.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface ManufacturingProductionRunToolDefinition { + repository: typeof ManufacturingProductionRunRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const manufacturingProductionRunToolChaosConfigs: Record = { + "manufacturing.production.run.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "manufacturing.production.run.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "manufacturing.production.run.get_by_order": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "manufacturing.production.run.get_active": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "manufacturing.production.run.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "manufacturing.production.run.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, +}; + +export const manufacturingProductionRunToolDefaultChaos: Record = { + "manufacturing.production.run.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Production run already exists", + config: { + corruptionType: "invalid_format", + }, + }, + { + type: "missing_data", + weight: 1, + description: "Missing required production run fields", + config: { + missingFields: ["productionOrderId", "rawMaterialsConsumed"], + }, + }, + ], + }, + "manufacturing.production.run.update_status": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Invalid status transition", + config: { + invalidStates: ["COMPLETED", "CANCELLED"], + }, + }, + ], + }, +}; + +export const manufacturingProductionRunTools: Record< + string, + ManufacturingProductionRunToolDefinition +> = { + "manufacturing.production.run.create": { + repository: ManufacturingProductionRunRepository, + method: "createProductionRun", + chaos: manufacturingProductionRunToolChaosConfigs["manufacturing.production.run.create"]!, + 
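+ /*
+  * Usage sketch: these ids are meant to be invoked through executeTool (registry.tool.ts),
+  * which looks the id up, instantiates the repository for the given world, and calls the
+  * mapped method with the args object as a single parameter. Field names below are
+  * illustrative only.
+  *
+  *   const runs = await executeTool("manufacturing.production.run.get_by_order", worldId, {
+  *     productionOrderId: "PO-1001",
+  *   });
+  *   // internally: ManufacturingProductionRunRepository(worldId).getProductionRunsByOrderId(args)
+  */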
defaultChaosPolicy: manufacturingProductionRunToolDefaultChaos["manufacturing.production.run.create"], + }, + "manufacturing.production.run.get_by_id": { + repository: ManufacturingProductionRunRepository, + method: "getProductionRunById", + chaos: manufacturingProductionRunToolChaosConfigs["manufacturing.production.run.get_by_id"]!, + }, + "manufacturing.production.run.get_by_order": { + repository: ManufacturingProductionRunRepository, + method: "getProductionRunsByOrderId", + chaos: manufacturingProductionRunToolChaosConfigs["manufacturing.production.run.get_by_order"]!, + }, + "manufacturing.production.run.get_active": { + repository: ManufacturingProductionRunRepository, + method: "getActiveProductionRuns", + chaos: manufacturingProductionRunToolChaosConfigs["manufacturing.production.run.get_active"]!, + }, + "manufacturing.production.run.update_status": { + repository: ManufacturingProductionRunRepository, + method: "updateProductionRunStatus", + chaos: manufacturingProductionRunToolChaosConfigs["manufacturing.production.run.update_status"]!, + defaultChaosPolicy: manufacturingProductionRunToolDefaultChaos["manufacturing.production.run.update_status"], + }, + "manufacturing.production.run.get_all": { + repository: ManufacturingProductionRunRepository, + method: "getAllProductionRuns", + chaos: manufacturingProductionRunToolChaosConfigs["manufacturing.production.run.get_all"]!, + }, +}; + diff --git a/packages/controlmart/src/operational-descriptor/tools/registry.tool.ts b/packages/controlmart/src/operational-descriptor/tools/registry.tool.ts new file mode 100644 index 0000000000000000000000000000000000000000..f572e99ddfe7484090302a326b17b3c2d92ea7c7 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/registry.tool.ts @@ -0,0 +1,720 @@ +import { ediTools } from "./edi.tool.od"; +import { + erpOrderTools, + erpCompanyTools, + erpInvoiceTools, + erpPaymentTools, + erpProductTools, + erpShipmentTools, +} from "./erp"; + +import { + wmsInboundOrderTools, + wmsInventoryTransactionTools, + wmsBinTools, + wmsCycleCountTools, + wmsDailyMetricsTools, + wmsDistributionCenterTools, + wmsDockDoorTools, + wmsInboundReceivingTransactionTools, + wmsOutboundOrderTools, + wmsOutboundShipmentTools, + wmsReplenishmentTools, + wmsTaskTools, + wmsWarehouseTools, + wmsZoneTools, + wmsInventoryCheckTools, +} from "./wms"; +import { tmsCarrierTools, tmsInboundTrailerTools, tmsShipmentTools } from "./tms"; +import { financeTools, ledgerTools } from "./finance"; +import { manufacturingProductionRunTools } from "./manufacturing"; +import { RepositoryError } from "../../utils/error.util"; +import type { ChaosPolicy } from "../../types/od.type"; +import { loadEnv } from "../../utils/env.util"; + +type RepositoryFactory = (worldId: string) => any; + +export interface ChaosEnvironmentConfig { + development: boolean; + staging: boolean; + production: boolean; +} + +export interface ChaosConfig { + enabled: boolean; + failureRate: number; + errorTypes: string[]; + delayMs: { min: number; max: number }; + conditionalFailures?: Record boolean>; + environments?: ChaosEnvironmentConfig; +} + +let globalChaosEnabled = loadEnv().ENABLE_CHAOS || false; +let currentEnvironment: "development" | "staging" | "production" = loadEnv().NODE_ENV; + +export const configureGlobalChaos = ( + enabled: boolean, + environment: "development" | "staging" | "production" = loadEnv().NODE_ENV, +) => { + globalChaosEnabled = enabled; + currentEnvironment = environment; + console.log(`Global chaos ${enabled ? 
"ENABLED" : "DISABLED"} for environment: ${environment}`); +}; + +export const enableChaosForTools = (toolIds: string[]) => { + let enabledCount = 0; + for (const toolId of toolIds) { + const tool = registry.get(toolId); + if (tool && tool.chaos) { + tool.chaos.enabled = true; + enabledCount++; + } + } + console.log(`Enabled chaos for ${enabledCount} tools:`, toolIds); +}; + +export const disableChaosForTools = (toolIds: string[]) => { + let disabledCount = 0; + for (const toolId of toolIds) { + const tool = registry.get(toolId); + if (tool && tool.chaos) { + tool.chaos.enabled = false; + disabledCount++; + } + } + console.log(`Disabled chaos for ${disabledCount} tools:`, toolIds); +}; + +const isChaosEnabledForTool = (chaos: ChaosConfig): boolean => { + if (!globalChaosEnabled) { + return chaos.enabled; + } + + if (chaos.environments) { + return chaos.environments[currentEnvironment] ?? chaos.enabled; + } + + return chaos.enabled; +}; + +interface ToolDefinition { + repository?: RepositoryFactory; + method?: string; + helper?: any; + helperMethod?: string; + chaos?: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +const registry = new Map(); + +export const registerTool = ( + toolId: string, + repository: RepositoryFactory, + method: string, + chaos: ChaosConfig, + defaultChaosPolicy?: ChaosPolicy, +) => { + registry.set(toolId, { repository, method, chaos, defaultChaosPolicy }); +}; + +export const registerHelperTool = ( + toolId: string, + helper: any, + helperMethod: string, + chaos: ChaosConfig, + defaultChaosPolicy?: ChaosPolicy, +) => { + registry.set(toolId, { helper, helperMethod, chaos, defaultChaosPolicy }); +}; + +export const getTool = (toolId: string): ToolDefinition | undefined => { + return registry.get(toolId); +}; + +export const getChaosConfig = (toolId: string): ChaosConfig | undefined => { + return registry.get(toolId)?.chaos; +}; + +export const getToolDefaultChaosPolicy = (toolId: string): ChaosPolicy | undefined => { + return registry.get(toolId)?.defaultChaosPolicy; +}; + +export const getAllToolIds = (): string[] => { + return Array.from(registry.keys()); +}; + +export const getToolsByCategory = (category: string): string[] => { + return Array.from(registry.keys()).filter((toolId) => toolId.startsWith(category)); +}; + +export const isToolRegistered = (toolId: string): boolean => { + return registry.has(toolId); +}; + +export const getToolCount = (): number => { + return registry.size; +}; + +export const validateToolRegistry = (): { valid: boolean; errors: string[] } => { + const errors: string[] = []; + + for (const [toolId, toolDef] of registry.entries()) { + const hasRepositoryConfig = toolDef.repository && toolDef.method; + const hasHelperConfig = toolDef.helper && toolDef.helperMethod; + + if (!hasRepositoryConfig && !hasHelperConfig) { + errors.push( + `Tool '${toolId}' missing either (repository + method) or (helper + helperMethod)`, + ); + } + + if (!toolDef.chaos) { + errors.push(`Tool '${toolId}' missing chaos configuration`); + } else { + if (typeof toolDef.chaos.enabled !== "boolean") { + errors.push(`Tool '${toolId}' chaos configuration missing 'enabled' boolean`); + } + if ( + typeof toolDef.chaos.failureRate !== "number" || + toolDef.chaos.failureRate < 0 || + toolDef.chaos.failureRate > 1 + ) { + errors.push(`Tool '${toolId}' chaos configuration invalid 'failureRate' (must be 0-1)`); + } + if (!Array.isArray(toolDef.chaos.errorTypes)) { + errors.push(`Tool '${toolId}' chaos configuration invalid 'errorTypes' (must be array)`); + } + if ( + 
!toolDef.chaos.delayMs || + typeof toolDef.chaos.delayMs.min !== "number" || + typeof toolDef.chaos.delayMs.max !== "number" + ) { + errors.push( + `Tool '${toolId}' chaos configuration invalid 'delayMs' (must have min/max numbers)`, + ); + } + } + } + + return { + valid: errors.length === 0, + errors, + }; +}; + +import { ChaosConfigRegistry } from "../../services/chaos-config.registry"; + +const shouldInjectChaos = (chaos: ChaosConfig, args: any, worldId: string): boolean => { + // 1. Check World-level Infrastructure Chaos setting + const worldConfig = ChaosConfigRegistry.getWorldChaosConfiguration(worldId); + if (worldConfig && !worldConfig.infraChaosEnabled) { + return false; // Force disabled if world infra chaos is off + } + + // 2. Check Global/Environment Chaos setting (Legacy/System-wide) + if (!isChaosEnabledForTool(chaos)) return false; + + if (chaos.conditionalFailures) { + for (const [key, condition] of Object.entries(chaos.conditionalFailures)) { + if (args[key] !== undefined && condition(args[key])) { + return true; + } + } + } + + return Math.random() < chaos.failureRate; +}; + +const injectChaosDelay = async (delayMs: { min: number; max: number }): Promise => { + const delay = Math.floor(Math.random() * (delayMs.max - delayMs.min + 1)) + delayMs.min; + if (delay > 0) { + await new Promise((resolve) => setTimeout(resolve, delay)); + } +}; + +const injectChaosError = (errorTypes: string[]): never => { + const errorType = errorTypes[Math.floor(Math.random() * errorTypes.length)]; + throw new RepositoryError(`[CHAOS] Simulated ${errorType} failure`, errorType as any); +}; + +export const executeToolWithChaos = async ( + toolId: string, + worldId: string, + args: any, +): Promise => { + const tool = registry.get(toolId); + if (!tool) { + throw new Error( + `Tool '${toolId}' not found in registry. Available tools: ${getAllToolIds().join(", ")}`, + ); + } + + if (tool.chaos && shouldInjectChaos(tool.chaos, args, worldId)) { + await injectChaosDelay(tool.chaos.delayMs); + + if (tool.chaos.errorTypes.length > 0) { + injectChaosError(tool.chaos.errorTypes); + } + } + + try { + if (tool.repository && tool.method) { + // Repository tool execution + const repoInstance = tool.repository(worldId); + const fn = repoInstance[tool.method]; + + if (typeof fn !== "function") { + throw new Error( + `Method '${tool.method}' not found in repository for tool '${toolId}'. Available methods: ${Object.getOwnPropertyNames( + repoInstance, + ) + .filter((name) => typeof repoInstance[name] === "function") + .join(", ")}`, + ); + } + + // For repository methods, pass the args object directly instead of spreading + // Most repository methods expect a single object parameter + return fn(args); + } else if (tool.helper && tool.helperMethod) { + // Helper tool execution + const fn = tool.helper[tool.helperMethod]; + + if (typeof fn !== "function") { + throw new Error( + `Method '${tool.helperMethod}' not found in helper for tool '${toolId}'. Available methods: ${Object.getOwnPropertyNames( + tool.helper, + ) + .filter((name) => typeof tool.helper[name] === "function") + .join(", ")}`, + ); + } + + // For helper methods, pass the args object directly, injecting worldId if not present + const argsWithContext = typeof args === 'object' && args !== null ? 
{ worldId, ...args } : args; + return fn(argsWithContext); + } else { + throw new Error(`Tool '${toolId}' has neither repository nor helper configuration`); + } + } catch (error) { + if (error instanceof RepositoryError) { + throw error; + } + throw new RepositoryError( + `Failed to execute tool '${toolId}': ${error instanceof Error ? error.message : String(error)}`, + "TOOL_EXECUTION_ERROR" as any, + ); + } +}; + +export const executeTool = async (toolId: string, worldId: string, args: any): Promise => { + return executeToolWithChaos(toolId, worldId, args); +}; + +const initialize = () => { + Object.entries(ediTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } else if (toolDef.helper && toolDef.helperMethod) { + registerHelperTool( + toolId, + toolDef.helper, + toolDef.helperMethod, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } else { + // Fallback for tools that just have helpers but no default policy explicitly or special handling + if (toolDef.helper && toolDef.helperMethod) { + registerHelperTool( + toolId, + toolDef.helper, + toolDef.helperMethod, + toolDef.chaos + ); + } + } + }); + + Object.entries(erpOrderTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(erpCompanyTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(erpInvoiceTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(erpPaymentTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(erpProductTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(erpShipmentTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(tmsCarrierTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(tmsInboundTrailerTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + 
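// The remaining domain tool maps below (TMS shipments, the WMS modules, finance, ledger, manufacturing) follow the same worldId-scoped repository-factory registration; wmsInventoryCheckTools is the one exception and goes through registerHelperTool. +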
Object.entries(tmsShipmentTools).forEach(([toolId, toolDef]) => { + if (toolDef.repository && toolDef.method) { + registerTool( + toolId, + (worldId: string) => toolDef.repository!(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + } + }); + + Object.entries(wmsInboundOrderTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + ); + }); + + Object.entries(wmsOutboundOrderTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + ); + }); + + Object.entries(wmsInventoryTransactionTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + ); + }); + + Object.entries(wmsBinTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsCycleCountTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsDailyMetricsTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsDistributionCenterTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsDockDoorTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsInboundReceivingTransactionTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsOutboundShipmentTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsReplenishmentTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsTaskTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsWarehouseTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsOutboundShipmentTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsZoneTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + 
toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(wmsInventoryCheckTools).forEach(([toolId, toolDef]) => { + registerHelperTool( + toolId, + toolDef.helper, + toolDef.helperMethod, + toolDef.chaos, + ); + }); + + Object.entries(financeTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(ledgerTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); + + Object.entries(manufacturingProductionRunTools).forEach(([toolId, toolDef]) => { + registerTool( + toolId, + (worldId: string) => toolDef.repository(worldId), + toolDef.method, + toolDef.chaos, + toolDef.defaultChaosPolicy, + ); + }); +}; + +const initResult = (() => { + try { + initialize(); + const validation = validateToolRegistry(); + + if (!validation.valid) { + console.error("Tool registry validation failed:", validation.errors); + return { success: false, errors: validation.errors }; + } + + if (globalChaosEnabled) { + enableChaosForTools(getAllToolIds()); + console.log(`Automatically enabled chaos for all ${getToolCount()} tools due to ENABLE_CHAOS=true`); + } + + console.log( + `Global chaos ${globalChaosEnabled ? "ENABLED" : "DISABLED"} for environment: ${currentEnvironment}`, + ); + + console.log(`Tool registry initialized successfully with ${getToolCount()} tools`); + return { success: true, toolCount: getToolCount() }; + } catch (error) { + console.error("Failed to initialize tool registry:", error); + return { success: false, error: error instanceof Error ? error.message : String(error) }; + } +})(); + +export const getRegistryStatus = () => initResult; + +export const getChaosStatus = () => ({ + globalEnabled: globalChaosEnabled, + environment: currentEnvironment, + totalTools: getToolCount(), + enabledTools: Array.from(registry.entries()) + .filter(([_, tool]) => tool.chaos && isChaosEnabledForTool(tool.chaos)) + .map(([toolId]) => toolId), +}); + +export const setupChaosForTesting = () => { + configureGlobalChaos(true, "development"); + + const testTools = [ + "wms.inbound.order.create", + "wms.outbound.order.create", + "edi.transaction.create", + ]; + + enableChaosForTools(testTools); + console.log("Chaos engineering configured for testing"); +}; + +export const setupChaosForStaging = () => { + configureGlobalChaos(true, "staging"); + console.log("Chaos engineering configured for staging environment"); +}; + +export const disableAllChaos = () => { + configureGlobalChaos(false, currentEnvironment); + disableChaosForTools(getAllToolIds()); + console.log("All chaos engineering disabled"); +}; + +/** + * Example usage: + * + * + * setupChaosForTesting(); + * + * + * setupChaosForStaging(); + * + * + * enableChaosForTools(['wms.inbound.order.create', 'edi.transaction.create']); + * + * + * console.log(getChaosStatus()); + * + * + * disableAllChaos(); + */ diff --git a/packages/controlmart/src/operational-descriptor/tools/service-now.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/service-now.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..181bd2d51c8b4b80709b40b69520bc9780929e4e --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/service-now.tool.od.ts @@ -0,0 +1,18 @@ + +import { createTicketOnServiceNow } from 
"../../services/service-now.tickets.service"; +import type { ChaosConfig } from "./registry.tool"; + +export const serviceNowTools = { + "servicenow.ticket.create": { + helper: { + create: createTicketOnServiceNow + }, + helperMethod: "create", + chaos: { + enabled: false, + failureRate: 0.1, + errorTypes: ["SERVICE_UNAVAILABLE", "TIMEOUT"], + delayMs: { min: 100, max: 500 } + } as ChaosConfig + } +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/tms/carrier.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/tms/carrier.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..e04279ed75c3c1a4775b1501ea8d5389c4c7daa5 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/tms/carrier.tool.od.ts @@ -0,0 +1,210 @@ +import { TmsCarrierRepository } from "../../../repository/tms/carrier.tms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface TmsCarrierToolDefinition { + repository: typeof TmsCarrierRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const tmsCarrierToolChaosConfigs: Record = { + "tms.carrier.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + carrierCode: (value: string) => value?.includes("CHAOS"), + }, + }, + "tms.carrier.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "tms.carrier.get_by_code": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "tms.carrier.get_active": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.carrier.update_performance": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "OPTIMISTIC_LOCKING_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.carrier.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.carrier.get_by_performance": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.carrier.get_metrics": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "tms.carrier.search": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, +}; + +export const tmsCarrierToolDefaultChaos: Record = { + "tms.carrier.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Carrier already exists", + config: { + corruptionType: "invalid_format", + }, + }, + { + type: "data_corruption", + weight: 1, + description: "Invalid carrier data format", + config: { + corruptionType: "invalid_format", + corruptFields: ["carrierCode", "scacCode"], + }, + }, + ], + }, + "tms.carrier.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Carrier not found", + config: { + missingRecords: true, + }, + }, + { + type: "stale_data", + weight: 1, + 
description: "Return stale carrier data", + config: { + staleDataAge: 60, + }, + }, + ], + }, + "tms.carrier.get_active": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete carrier list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + { + type: "rate_limit", + weight: 1, + description: "Query timeout", + config: { + rateLimitDelay: 5000, + }, + }, + ], + }, + "tms.carrier.update_performance": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 2, + description: "Invalid performance metrics", + config: { + corruptionType: "invalid_format", + corruptFields: ["onTimeDeliveryRate", "damageClaimRate"], + }, + }, + ], + }, +}; + +export const tmsCarrierTools: Record = { + "tms.carrier.create": { + repository: TmsCarrierRepository, + method: "createCarrier", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.create"]!, + defaultChaosPolicy: tmsCarrierToolDefaultChaos["tms.carrier.create"], + }, + "tms.carrier.get_by_id": { + repository: TmsCarrierRepository, + method: "getCarrierById", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.get_by_id"]!, + defaultChaosPolicy: tmsCarrierToolDefaultChaos["tms.carrier.get_by_id"], + }, + "tms.carrier.get_by_code": { + repository: TmsCarrierRepository, + method: "getCarrierByCode", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.get_by_code"]!, + }, + "tms.carrier.get_active": { + repository: TmsCarrierRepository, + method: "getActiveCarriers", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.get_active"]!, + defaultChaosPolicy: tmsCarrierToolDefaultChaos["tms.carrier.get_active"], + }, + "tms.carrier.update_performance": { + repository: TmsCarrierRepository, + method: "updateCarrierPerformance", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.update_performance"]!, + defaultChaosPolicy: tmsCarrierToolDefaultChaos["tms.carrier.update_performance"], + }, + "tms.carrier.update_status": { + repository: TmsCarrierRepository, + method: "updateCarrierStatus", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.update_status"]!, + }, + "tms.carrier.get_by_performance": { + repository: TmsCarrierRepository, + method: "getCarriersByPerformance", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.get_by_performance"]!, + }, + "tms.carrier.get_metrics": { + repository: TmsCarrierRepository, + method: "getCarrierMetrics", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.get_metrics"]!, + }, + "tms.carrier.search": { + repository: TmsCarrierRepository, + method: "searchCarriers", + chaos: tmsCarrierToolChaosConfigs["tms.carrier.search"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/tms/inbound_trailer.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/tms/inbound_trailer.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..cf94cd84b10feea69ccac69cc892c887512ae8bc --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/tms/inbound_trailer.tool.od.ts @@ -0,0 +1,240 @@ +import { TmsInboundTrailerRepository } from "../../../repository/tms/inbound_trailer.tms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface TmsInboundTrailerToolDefinition { + repository: typeof TmsInboundTrailerRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const tmsInboundTrailerToolChaosConfigs: 
Record = { + "tms.trailer.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + trailerId: (value: string) => value?.includes("CHAOS"), + }, + }, + "tms.trailer.schedule": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.trailer.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.trailer.check_in": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.trailer.assign_dock": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.trailer.start_unloading": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.trailer.complete_unloading": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.trailer.add_delay": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.trailer.get_by_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.trailer.get_by_appointment": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.trailer.get_available_docks": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.trailer.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, +}; + +export const tmsInboundTrailerToolDefaultChaos: Record = { + "tms.trailer.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Trailer already exists", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "tms.trailer.schedule": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Trailer not in valid state for scheduling", + config: { + invalidStates: ["CHECKED_IN", "AT_DOCK"], + }, + }, + ], + }, + "tms.trailer.check_in": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Trailer not scheduled or en route", + config: { + invalidStates: ["CREATED", "UNLOADED"], + }, + }, + ], + }, + "tms.trailer.assign_dock": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Trailer not checked in", + config: { + invalidStates: ["SCHEDULED", "EN_ROUTE"], + }, + }, + { + type: "invalid_state", + weight: 1, + description: "Dock door occupied", + config: { + invalidStates: ["DOCK_OCCUPIED"], + }, + }, + ], + }, + "tms.trailer.get_by_status": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete trailer list", + config: { + partialResults: { + percentage: 
50, + randomize: true, + }, + }, + }, + ], + }, +}; + +export const tmsInboundTrailerTools: Record = { + "tms.trailer.create": { + repository: TmsInboundTrailerRepository, + method: "createInboundTrailer", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.create"]!, + defaultChaosPolicy: tmsInboundTrailerToolDefaultChaos["tms.trailer.create"], + }, + "tms.trailer.schedule": { + repository: TmsInboundTrailerRepository, + method: "scheduleTrailerAppointment", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.schedule"]!, + defaultChaosPolicy: tmsInboundTrailerToolDefaultChaos["tms.trailer.schedule"], + }, + "tms.trailer.update_status": { + repository: TmsInboundTrailerRepository, + method: "updateTrailerStatus", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.update_status"]!, + }, + "tms.trailer.check_in": { + repository: TmsInboundTrailerRepository, + method: "checkInTrailer", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.check_in"]!, + defaultChaosPolicy: tmsInboundTrailerToolDefaultChaos["tms.trailer.check_in"], + }, + "tms.trailer.assign_dock": { + repository: TmsInboundTrailerRepository, + method: "assignToDock", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.assign_dock"]!, + defaultChaosPolicy: tmsInboundTrailerToolDefaultChaos["tms.trailer.assign_dock"], + }, + "tms.trailer.start_unloading": { + repository: TmsInboundTrailerRepository, + method: "startUnloading", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.start_unloading"]!, + }, + "tms.trailer.complete_unloading": { + repository: TmsInboundTrailerRepository, + method: "completeUnloading", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.complete_unloading"]!, + }, + "tms.trailer.add_delay": { + repository: TmsInboundTrailerRepository, + method: "addDelay", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.add_delay"]!, + }, + "tms.trailer.get_by_status": { + repository: TmsInboundTrailerRepository, + method: "getTrailersByStatus", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.get_by_status"]!, + defaultChaosPolicy: tmsInboundTrailerToolDefaultChaos["tms.trailer.get_by_status"], + }, + "tms.trailer.get_by_appointment": { + repository: TmsInboundTrailerRepository, + method: "getTrailersByAppointmentDate", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.get_by_appointment"]!, + }, + "tms.trailer.get_available_docks": { + repository: TmsInboundTrailerRepository, + method: "getAvailableDockDoors", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.get_available_docks"]!, + }, + "tms.trailer.get_by_id": { + repository: TmsInboundTrailerRepository, + method: "getTrailerById", + chaos: tmsInboundTrailerToolChaosConfigs["tms.trailer.get_by_id"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/tms/index.ts b/packages/controlmart/src/operational-descriptor/tools/tms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..ebe5e278ad390ef76d15d14a157fc04353394f75 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/tms/index.ts @@ -0,0 +1,3 @@ +export * from "./carrier.tool.od"; +export * from "./inbound_trailer.tool.od"; +export * from "./shipment.tool.od"; diff --git a/packages/controlmart/src/operational-descriptor/tools/tms/shipment.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/tms/shipment.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..f39560a98ee3002b5acef272e65941bfc0a52aef --- /dev/null +++ 
b/packages/controlmart/src/operational-descriptor/tools/tms/shipment.tool.od.ts @@ -0,0 +1,232 @@ +import { TmsShipmentRepository } from "../../../repository/tms/shipment.tms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface TmsShipmentToolDefinition { + repository: typeof TmsShipmentRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const tmsShipmentToolChaosConfigs: Record = { + "tms.shipment.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + shipmentId: (value: string) => value?.includes("CHAOS"), + }, + }, + "tms.shipment.tender": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.shipment.accept": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.shipment.update_location": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "tms.shipment.process_edi_214": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "INVALID_STATE_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.shipment.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.shipment.create_event": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "tms.shipment.get_with_events": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "tms.shipment.get_by_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.shipment.get_in_transit": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.shipment.get_by_carrier": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "tms.shipment.add_delay": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, +}; + +export const tmsShipmentToolDefaultChaos: Record = { + "tms.shipment.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Shipment already exists", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "tms.shipment.tender": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Shipment not in PLANNED state", + config: { + invalidStates: ["CREATED", "TENDERED", "ACCEPTED"], + }, + }, + ], + }, + "tms.shipment.accept": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Shipment not in TENDERED state", + config: { + invalidStates: ["PLANNED", "ACCEPTED"], + }, + }, + ], + }, + "tms.shipment.process_edi_214": { + enabled: false, + probability: 
0.05, + scenarios: [ + { + type: "data_corruption", + weight: 2, + description: "Invalid EDI data format", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "tms.shipment.get_by_status": { + enabled: false, + probability: 0.03, + scenarios: [ + { + type: "partial_data", + weight: 2, + description: "Return incomplete shipment list", + config: { + partialResults: { + percentage: 50, + randomize: true, + }, + }, + }, + ], + }, +}; + +export const tmsShipmentTools: Record = { + "tms.shipment.create": { + repository: TmsShipmentRepository, + method: "createShipment", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.create"]!, + defaultChaosPolicy: tmsShipmentToolDefaultChaos["tms.shipment.create"], + }, + "tms.shipment.tender": { + repository: TmsShipmentRepository, + method: "tenderShipment", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.tender"]!, + defaultChaosPolicy: tmsShipmentToolDefaultChaos["tms.shipment.tender"], + }, + "tms.shipment.accept": { + repository: TmsShipmentRepository, + method: "acceptShipment", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.accept"]!, + defaultChaosPolicy: tmsShipmentToolDefaultChaos["tms.shipment.accept"], + }, + "tms.shipment.update_location": { + repository: TmsShipmentRepository, + method: "updateShipmentLocation", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.update_location"]!, + }, + "tms.shipment.process_edi_214": { + repository: TmsShipmentRepository, + method: "processEdi214Update", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.process_edi_214"]!, + defaultChaosPolicy: tmsShipmentToolDefaultChaos["tms.shipment.process_edi_214"], + }, + "tms.shipment.update_status": { + repository: TmsShipmentRepository, + method: "updateShipmentStatus", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.update_status"]!, + }, + "tms.shipment.create_event": { + repository: TmsShipmentRepository, + method: "createStatusEvent", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.create_event"]!, + }, + "tms.shipment.get_with_events": { + repository: TmsShipmentRepository, + method: "getShipmentWithEvents", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.get_with_events"]!, + }, + "tms.shipment.get_by_status": { + repository: TmsShipmentRepository, + method: "getShipmentsByStatus", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.get_by_status"]!, + defaultChaosPolicy: tmsShipmentToolDefaultChaos["tms.shipment.get_by_status"], + }, + "tms.shipment.get_in_transit": { + repository: TmsShipmentRepository, + method: "getInTransitShipments", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.get_in_transit"]!, + }, + "tms.shipment.get_by_carrier": { + repository: TmsShipmentRepository, + method: "getShipmentsByCarrier", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.get_by_carrier"]!, + }, + "tms.shipment.add_delay": { + repository: TmsShipmentRepository, + method: "addDelay", + chaos: tmsShipmentToolChaosConfigs["tms.shipment.add_delay"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/bin.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/bin.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..f3be248fad19d64953775620a5ba07f41fd230fa --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/bin.tool.od.ts @@ -0,0 +1,149 @@ +import { WMSBinRepository } from "../../../repository/wms/bin.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + 
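+// Layout mirrors the other WMS tool files: a tool-definition interface, per-tool ChaosConfig entries, optional default ChaosPolicy scenarios, then the exported tool map.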
+export interface WMSBinToolDefinition { + repository: typeof WMSBinRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsBinToolChaosConfigs: Record = { + "wms.bin.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + binCode: (value: string) => value?.includes("CHAOS"), + }, + }, + "wms.bin.get_by_zone": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.bin.get_by_warehouse": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.bin.get_by_code": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.bin.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.bin.get_available": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.bin.get_utilization": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.bin.update_capacity": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, +}; + +export const wmsBinToolDefaultChaos: Record = { + "wms.bin.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Bin already exists", + config: { + corruptionType: "invalid_format", + }, + }, + { + type: "data_corruption", + weight: 1, + description: "Invalid bin data format", + config: { + corruptionType: "invalid_format", + corruptFields: ["binCode", "zoneId"], + }, + }, + ], + }, + "wms.bin.get_by_code": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Bin not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsBinTools: Record = { + "wms.bin.create": { + repository: WMSBinRepository, + method: "createBin", + chaos: wmsBinToolChaosConfigs["wms.bin.create"]!, + defaultChaosPolicy: wmsBinToolDefaultChaos["wms.bin.create"], + }, + "wms.bin.get_by_zone": { + repository: WMSBinRepository, + method: "getBinsByZone", + chaos: wmsBinToolChaosConfigs["wms.bin.get_by_zone"]!, + }, + "wms.bin.get_by_warehouse": { + repository: WMSBinRepository, + method: "getBinsByWarehouse", + chaos: wmsBinToolChaosConfigs["wms.bin.get_by_warehouse"]!, + }, + "wms.bin.get_by_code": { + repository: WMSBinRepository, + method: "getBinByCode", + chaos: wmsBinToolChaosConfigs["wms.bin.get_by_code"]!, + defaultChaosPolicy: wmsBinToolDefaultChaos["wms.bin.get_by_code"], + }, + "wms.bin.update_status": { + repository: WMSBinRepository, + method: "updateBinStatus", + chaos: wmsBinToolChaosConfigs["wms.bin.update_status"]!, + }, + "wms.bin.get_available": { + repository: WMSBinRepository, + method: "getAvailableBins", + chaos: wmsBinToolChaosConfigs["wms.bin.get_available"]!, + }, + "wms.bin.get_utilization": { + repository: WMSBinRepository, + method: "getBinUtilization", + chaos: wmsBinToolChaosConfigs["wms.bin.get_utilization"]!, + }, + "wms.bin.update_capacity": { + repository: 
WMSBinRepository, + method: "updateBinCapacity", + chaos: wmsBinToolChaosConfigs["wms.bin.update_capacity"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/cycle-count.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/cycle-count.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..912546cbb13d4a34fbb670b781f29919393e4360 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/cycle-count.tool.od.ts @@ -0,0 +1,149 @@ +import { WMSCycleCountRepository } from "../../../repository/wms/cycle_count.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSCycleCountToolDefinition { + repository: typeof WMSCycleCountRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsCycleCountToolChaosConfigs: Record = { + "wms.cycle_count.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.cycle_count.get_by_status": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.cycle_count.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.cycle_count.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.cycle_count.assign_user": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.cycle_count.add_result": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.cycle_count.get_by_warehouse": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.cycle_count.get_variance_report": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.cycle_count.get_scheduled": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, +}; + +export const wmsCycleCountToolDefaultChaos: Record = { + "wms.cycle_count.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Invalid cycle count data", + config: { + corruptionType: "invalid_format", + corruptFields: ["warehouseId", "countType"], + }, + }, + ], + }, + "wms.cycle_count.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Cycle count not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsCycleCountTools: Record = { + "wms.cycle_count.create": { + repository: WMSCycleCountRepository, + method: "createCycleCount", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.create"]!, + defaultChaosPolicy: wmsCycleCountToolDefaultChaos["wms.cycle_count.create"], + }, + "wms.cycle_count.get_by_status": { + repository: WMSCycleCountRepository, + method: "getCycleCountsByStatus", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.get_by_status"]!, + }, + 
"wms.cycle_count.get_by_id": { + repository: WMSCycleCountRepository, + method: "getCycleCountById", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.get_by_id"]!, + defaultChaosPolicy: wmsCycleCountToolDefaultChaos["wms.cycle_count.get_by_id"], + }, + "wms.cycle_count.update_status": { + repository: WMSCycleCountRepository, + method: "updateCycleCountStatus", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.update_status"]!, + }, + "wms.cycle_count.assign_user": { + repository: WMSCycleCountRepository, + method: "assignUserToCycleCount", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.assign_user"]!, + }, + "wms.cycle_count.add_result": { + repository: WMSCycleCountRepository, + method: "addCountToResult", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.add_result"]!, + }, + "wms.cycle_count.get_by_warehouse": { + repository: WMSCycleCountRepository, + method: "getCycleCountsByWarehouse", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.get_by_warehouse"]!, + }, + "wms.cycle_count.get_variance_report": { + repository: WMSCycleCountRepository, + method: "getCycleCountVarianceReport", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.get_variance_report"]!, + }, + "wms.cycle_count.get_scheduled": { + repository: WMSCycleCountRepository, + method: "getScheduledCycleCounts", + chaos: wmsCycleCountToolChaosConfigs["wms.cycle_count.get_scheduled"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/daily-metrics.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/daily-metrics.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..4e83caf7deecbe2bdbd6cadfeed3716bdc2d826e --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/daily-metrics.tool.od.ts @@ -0,0 +1,126 @@ +import { WMSDailyMetricsRepository } from "../../../repository/wms/daily_metrics.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSDailyMetricsToolDefinition { + repository: typeof WMSDailyMetricsRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsDailyMetricsToolChaosConfigs: Record = { + "wms.daily_metrics.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.daily_metrics.get_by_date_range": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.daily_metrics.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.daily_metrics.update": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.daily_metrics.get_summary": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.daily_metrics.get_trends": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.daily_metrics.get_zone_comparison": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, +}; + +export const wmsDailyMetricsToolDefaultChaos: Record = { + 
"wms.daily_metrics.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Metrics already exist for date", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "wms.daily_metrics.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Metric not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsDailyMetricsTools: Record = { + "wms.daily_metrics.create": { + repository: WMSDailyMetricsRepository, + method: "createDailyMetrics", + chaos: wmsDailyMetricsToolChaosConfigs["wms.daily_metrics.create"]!, + defaultChaosPolicy: wmsDailyMetricsToolDefaultChaos["wms.daily_metrics.create"], + }, + "wms.daily_metrics.get_by_date_range": { + repository: WMSDailyMetricsRepository, + method: "getDailyMetricsByDateRange", + chaos: wmsDailyMetricsToolChaosConfigs["wms.daily_metrics.get_by_date_range"]!, + }, + "wms.daily_metrics.get_by_id": { + repository: WMSDailyMetricsRepository, + method: "getDailyMetricsById", + chaos: wmsDailyMetricsToolChaosConfigs["wms.daily_metrics.get_by_id"]!, + defaultChaosPolicy: wmsDailyMetricsToolDefaultChaos["wms.daily_metrics.get_by_id"], + }, + "wms.daily_metrics.update": { + repository: WMSDailyMetricsRepository, + method: "updateDailyMetrics", + chaos: wmsDailyMetricsToolChaosConfigs["wms.daily_metrics.update"]!, + }, + "wms.daily_metrics.get_summary": { + repository: WMSDailyMetricsRepository, + method: "getMetricsSummary", + chaos: wmsDailyMetricsToolChaosConfigs["wms.daily_metrics.get_summary"]!, + }, + "wms.daily_metrics.get_trends": { + repository: WMSDailyMetricsRepository, + method: "getPerformanceTrends", + chaos: wmsDailyMetricsToolChaosConfigs["wms.daily_metrics.get_trends"]!, + }, + "wms.daily_metrics.get_zone_comparison": { + repository: WMSDailyMetricsRepository, + method: "getZonePerformanceComparison", + chaos: wmsDailyMetricsToolChaosConfigs["wms.daily_metrics.get_zone_comparison"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/dc.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/dc.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..fa474b06d5b65020a1daad299785d8f24baf51ac --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/dc.tool.od.ts @@ -0,0 +1,148 @@ +import { WMSDistributionCenterRepository } from "../../../repository/wms/dc.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSDistributionCenterToolDefinition { + repository: typeof WMSDistributionCenterRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsDistributionCenterToolChaosConfigs: Record = { + "wms.dc.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.dc.get_by_status": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.dc.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.dc.get_by_warehouse": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + 
"wms.dc.update": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.dc.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.dc.get_capacity": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.dc.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.dc.is_operational": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 50 }, + }, +}; + +export const wmsDistributionCenterToolDefaultChaos: Record = { + "wms.dc.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "DC already exists", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "wms.dc.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "DC not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsDistributionCenterTools: Record = { + "wms.dc.create": { + repository: WMSDistributionCenterRepository, + method: "createDistributionCenter", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.create"]!, + defaultChaosPolicy: wmsDistributionCenterToolDefaultChaos["wms.dc.create"], + }, + "wms.dc.get_by_status": { + repository: WMSDistributionCenterRepository, + method: "getDistributionCentersByStatus", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.get_by_status"]!, + }, + "wms.dc.get_by_id": { + repository: WMSDistributionCenterRepository, + method: "getDistributionCenterById", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.get_by_id"]!, + defaultChaosPolicy: wmsDistributionCenterToolDefaultChaos["wms.dc.get_by_id"], + }, + "wms.dc.get_by_warehouse": { + repository: WMSDistributionCenterRepository, + method: "getDistributionCentersByWarehouse", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.get_by_warehouse"]!, + }, + "wms.dc.update": { + repository: WMSDistributionCenterRepository, + method: "updateDistributionCenter", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.update"]!, + }, + "wms.dc.update_status": { + repository: WMSDistributionCenterRepository, + method: "updateOperationalStatus", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.update_status"]!, + }, + "wms.dc.get_capacity": { + repository: WMSDistributionCenterRepository, + method: "getDistributionCenterCapacity", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.get_capacity"]!, + }, + "wms.dc.get_all": { + repository: WMSDistributionCenterRepository, + method: "getAllDistributionCenters", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.get_all"]!, + }, + "wms.dc.is_operational": { + repository: WMSDistributionCenterRepository, + method: "isOperationalAtTime", + chaos: wmsDistributionCenterToolChaosConfigs["wms.dc.is_operational"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/dock-door.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/dock-door.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..dc5d5993274e024fa0e0ffd8a852e9e6c7d3ca6a --- /dev/null +++ 
b/packages/controlmart/src/operational-descriptor/tools/wms/dock-door.tool.od.ts @@ -0,0 +1,163 @@ +import { WMSDockDoorRepository } from "../../../repository/wms/dock_door.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSDockDoorToolDefinition { + repository: typeof WMSDockDoorRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsDockDoorToolChaosConfigs: Record = { + "wms.dock_door.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.dock_door.get_by_warehouse": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.dock_door.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.dock_door.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.dock_door.assign_appointment": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR", "OPTIMISTIC_LOCKING_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.dock_door.clear_appointment": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.dock_door.get_available": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.dock_door.get_utilization": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.dock_door.get_schedule": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, +}; + +export const wmsDockDoorToolDefaultChaos: Record = { + "wms.dock_door.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Dock door already exists", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "wms.dock_door.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Dock door not found", + config: { + missingRecords: true, + }, + }, + ], + }, + "wms.dock_door.assign_appointment": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Dock door not available", + config: { + invalidStates: ["OCCUPIED"], + }, + }, + ], + }, +}; + +export const wmsDockDoorTools: Record = { + "wms.dock_door.create": { + repository: WMSDockDoorRepository, + method: "createDockDoor", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.create"]!, + defaultChaosPolicy: wmsDockDoorToolDefaultChaos["wms.dock_door.create"], + }, + "wms.dock_door.get_by_warehouse": { + repository: WMSDockDoorRepository, + method: "getDockDoorsByWarehouse", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.get_by_warehouse"]!, + }, + "wms.dock_door.get_by_id": { + repository: WMSDockDoorRepository, + method: "getDockDoorById", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.get_by_id"]!, + defaultChaosPolicy: wmsDockDoorToolDefaultChaos["wms.dock_door.get_by_id"], + }, + 
"wms.dock_door.update_status": { + repository: WMSDockDoorRepository, + method: "updateDockDoorStatus", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.update_status"]!, + }, + "wms.dock_door.assign_appointment": { + repository: WMSDockDoorRepository, + method: "assignAppointmentToDoor", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.assign_appointment"]!, + defaultChaosPolicy: wmsDockDoorToolDefaultChaos["wms.dock_door.assign_appointment"], + }, + "wms.dock_door.clear_appointment": { + repository: WMSDockDoorRepository, + method: "clearAppointmentFromDoor", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.clear_appointment"]!, + }, + "wms.dock_door.get_available": { + repository: WMSDockDoorRepository, + method: "getAvailableDockDoors", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.get_available"]!, + }, + "wms.dock_door.get_utilization": { + repository: WMSDockDoorRepository, + method: "getDockDoorUtilization", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.get_utilization"]!, + }, + "wms.dock_door.get_schedule": { + repository: WMSDockDoorRepository, + method: "getDockDoorSchedule", + chaos: wmsDockDoorToolChaosConfigs["wms.dock_door.get_schedule"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/inbound-order.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/inbound-order.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..70e4c49210a9e8ac8dac702c0d3582d58dbfd51f --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/inbound-order.tool.od.ts @@ -0,0 +1,128 @@ +import { WMSInboundOrderRepository } from "../../../repository/wms/inbound_order.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; + +export interface WmsInboundOrderToolDefinition { + repository: typeof WMSInboundOrderRepository; + method: string; + chaos: ChaosConfig; +} + +export const wmsInboundOrderToolChaosConfigs: Record = { + "wms.inbound.order.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + poNumber: (value: string) => value?.includes("CHAOS"), + }, + }, + "wms.inbound.order.get_by_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "wms.inbound.order.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "wms.inbound.order.get_by_po_number": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "wms.inbound.order.update_status": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: { + status: (value: string) => value === "CHAOS", + }, + }, + "wms.inbound.order.update_receiving_progress": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "wms.inbound.order.get_orders_expected_today": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + 
conditionalFailures: {}, + }, + "wms.inbound.order.get_receiving_metrics": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 300 }, + conditionalFailures: {}, + }, + "wms.inbound.order.get_orders_by_vendor": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: {}, + }, +}; + +export const wmsInboundOrderTools: Record = { + "wms.inbound.order.create": { + repository: WMSInboundOrderRepository, + method: "createInboundOrder", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.create"]!, + }, + "wms.inbound.order.get_by_status": { + repository: WMSInboundOrderRepository, + method: "getInboundOrdersByStatus", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.get_by_status"]!, + }, + "wms.inbound.order.get_by_id": { + repository: WMSInboundOrderRepository, + method: "getInboundOrderById", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.get_by_id"]!, + }, + "wms.inbound.order.get_by_po_number": { + repository: WMSInboundOrderRepository, + method: "getInboundOrderByPoNumber", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.get_by_po_number"]!, + }, + "wms.inbound.order.update_status": { + repository: WMSInboundOrderRepository, + method: "updateOrderStatus", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.update_status"]!, + }, + "wms.inbound.order.update_receiving_progress": { + repository: WMSInboundOrderRepository, + method: "updateReceivingProgress", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.update_receiving_progress"]!, + }, + "wms.inbound.order.get_orders_expected_today": { + repository: WMSInboundOrderRepository, + method: "getOrdersExpectedToday", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.get_orders_expected_today"]!, + }, + "wms.inbound.order.get_receiving_metrics": { + repository: WMSInboundOrderRepository, + method: "getReceivingMetrics", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.get_receiving_metrics"]!, + }, + "wms.inbound.order.get_orders_by_vendor": { + repository: WMSInboundOrderRepository, + method: "getOrdersByVendor", + chaos: wmsInboundOrderToolChaosConfigs["wms.inbound.order.get_orders_by_vendor"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/inbound-receiving-transaction.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/inbound-receiving-transaction.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..1a6b1343015e0fb30f280a0de79c9dac678e8041 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/inbound-receiving-transaction.tool.od.ts @@ -0,0 +1,122 @@ +import { WMSInboundReceivingTransactionRepository } from "../../../repository/wms/inbound_receiving_transaction.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSInboundReceivingTransactionToolDefinition { + repository: typeof WMSInboundReceivingTransactionRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsInboundReceivingTransactionToolChaosConfigs: Record = { + "wms.receiving_transaction.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.receiving_transaction.get_by_id": { + enabled: 
false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.receiving_transaction.get_all": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.receiving_transaction.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.receiving_transaction.add_item": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.receiving_transaction.get_metrics": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, +}; + +export const wmsInboundReceivingTransactionToolDefaultChaos: Record = { + "wms.receiving_transaction.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Invalid transaction data", + config: { + corruptionType: "invalid_format", + corruptFields: ["warehouseId", "inboundOrderId"], + }, + }, + ], + }, + "wms.receiving_transaction.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Transaction not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsInboundReceivingTransactionTools: Record< + string, + WMSInboundReceivingTransactionToolDefinition +> = { + "wms.receiving_transaction.create": { + repository: WMSInboundReceivingTransactionRepository, + method: "createReceivingTransaction", + chaos: wmsInboundReceivingTransactionToolChaosConfigs["wms.receiving_transaction.create"]!, + defaultChaosPolicy: + wmsInboundReceivingTransactionToolDefaultChaos["wms.receiving_transaction.create"], + }, + "wms.receiving_transaction.get_by_id": { + repository: WMSInboundReceivingTransactionRepository, + method: "getTransactionById", + chaos: wmsInboundReceivingTransactionToolChaosConfigs["wms.receiving_transaction.get_by_id"]!, + defaultChaosPolicy: + wmsInboundReceivingTransactionToolDefaultChaos["wms.receiving_transaction.get_by_id"], + }, + "wms.receiving_transaction.get_all": { + repository: WMSInboundReceivingTransactionRepository, + method: "getAllTransactions", + chaos: wmsInboundReceivingTransactionToolChaosConfigs["wms.receiving_transaction.get_all"]!, + }, + "wms.receiving_transaction.update_status": { + repository: WMSInboundReceivingTransactionRepository, + method: "updateTransactionStatus", + chaos: + wmsInboundReceivingTransactionToolChaosConfigs["wms.receiving_transaction.update_status"]!, + }, + "wms.receiving_transaction.add_item": { + repository: WMSInboundReceivingTransactionRepository, + method: "addItemToTransaction", + chaos: wmsInboundReceivingTransactionToolChaosConfigs["wms.receiving_transaction.add_item"]!, + }, + "wms.receiving_transaction.get_metrics": { + repository: WMSInboundReceivingTransactionRepository, + method: "getReceivingMetrics", + chaos: wmsInboundReceivingTransactionToolChaosConfigs["wms.receiving_transaction.get_metrics"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/index.ts b/packages/controlmart/src/operational-descriptor/tools/wms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..4ca1b4bc4f88053e3dfdfb5afd6f6d7746b2e432 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/index.ts @@ -0,0 +1,15 @@ 
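The barrel file that follows re-exports every per-entity WMS tool registry. A consumer can therefore fold them into a single lookup table keyed by tool id; the `allWmsTools` map below is an illustration of that pattern under the assumption that it imports from this directory's barrel — the merged constant itself is not something this package is shown to export:

```typescript
import {
  wmsDockDoorTools,
  wmsInboundOrderTools,
  wmsInboundReceivingTransactionTools,
  wmsOutboundOrderTools,
} from "./index";

// One flat registry keyed by tool id, e.g. "wms.dock_door.get_by_id".
// The remaining registries re-exported by the barrel file can be spread in the same way.
const allWmsTools = {
  ...wmsDockDoorTools,
  ...wmsInboundOrderTools,
  ...wmsInboundReceivingTransactionTools,
  ...wmsOutboundOrderTools,
};

const def = allWmsTools["wms.dock_door.get_by_id"]; // repository + method + chaos config
```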
+export * from "./inbound-order.tool.od"; +export * from "./inventory-transaction.tool.od"; +export * from "./outbound-order.tool.od"; +export * from "./bin.tool.od"; +export * from "./cycle-count.tool.od"; +export * from "./daily-metrics.tool.od"; +export * from "./dc.tool.od"; +export * from "./dock-door.tool.od"; +export * from "./inbound-receiving-transaction.tool.od"; +export * from "./outbound-shipment.tool.od"; +export * from "./replenishment.tool.od"; +export * from "./task.tool.od"; +export * from "./warehouse.tool.od"; +export * from "./zone.tool.od"; +export * from "./inventory_check.tool.od"; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/inventory-transaction.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/inventory-transaction.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..29b9728f41cf743043e9122effc5444844987816 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/inventory-transaction.tool.od.ts @@ -0,0 +1,105 @@ +import { WMSInventoryTransactionRepository } from "../../../repository/wms/inventory_transaction.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; + +export interface WmsInventoryTransactionToolDefinition { + repository: typeof WMSInventoryTransactionRepository; + method: string; + chaos: ChaosConfig; +} + +export const wmsInventoryTransactionToolChaosConfigs: Record = { + "wms.inventory.transaction.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + productId: (value: string) => value?.includes("CHAOS"), + }, + }, + "wms.inventory.transaction.get_by_product": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "wms.inventory.transaction.get_by_bin": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: {}, + }, + "wms.inventory.transaction.get_by_reference": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: {}, + }, + "wms.inventory.transaction.get_movement_report": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 300 }, + conditionalFailures: {}, + }, + "wms.inventory.transaction.get_history": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 250 }, + conditionalFailures: { + limit: (value: number) => value > 500, + }, + }, + "wms.inventory.transaction.get_adjustments": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: {}, + }, +}; + +export const wmsInventoryTransactionTools: Record = { + "wms.inventory.transaction.create": { + repository: WMSInventoryTransactionRepository, + method: "createInventoryTransaction", + chaos: wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.create"]!, + }, + "wms.inventory.transaction.get_by_product": { + repository: WMSInventoryTransactionRepository, + method: "getTransactionsByProduct", + chaos: wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.get_by_product"]!, + }, + 
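Every chaos config in these registries is declared with `enabled: false`, so failure injection is opt-in. For a quick local resilience experiment one could flip a single tool's config before running a workflow; in the deployed system this is presumably driven by the chaos-management layer rather than by mutating the exported objects directly, so treat the snippet below purely as a local-testing sketch:

```typescript
import { wmsInventoryTransactionToolChaosConfigs } from "./inventory-transaction.tool.od";

// Opt one read-path tool into chaos for a local experiment: roughly 3% of calls to
// "wms.inventory.transaction.get_history" would then fail, after 0-250 ms of added latency.
const historyChaos =
  wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.get_history"];
if (historyChaos) {
  historyChaos.enabled = true;
}
```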
"wms.inventory.transaction.get_by_bin": { + repository: WMSInventoryTransactionRepository, + method: "getTransactionsByBin", + chaos: wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.get_by_bin"]!, + }, + "wms.inventory.transaction.get_by_reference": { + repository: WMSInventoryTransactionRepository, + method: "getTransactionsByReference", + chaos: wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.get_by_reference"]!, + }, + "wms.inventory.transaction.get_movement_report": { + repository: WMSInventoryTransactionRepository, + method: "getInventoryMovementReport", + chaos: + wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.get_movement_report"]!, + }, + "wms.inventory.transaction.get_history": { + repository: WMSInventoryTransactionRepository, + method: "getTransactionHistory", + chaos: wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.get_history"]!, + }, + "wms.inventory.transaction.get_adjustments": { + repository: WMSInventoryTransactionRepository, + method: "getInventoryAdjustments", + chaos: wmsInventoryTransactionToolChaosConfigs["wms.inventory.transaction.get_adjustments"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/inventory_check.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/inventory_check.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..d47a7c3a41d3f2b8863e8ed448598760423fd6c9 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/inventory_check.tool.od.ts @@ -0,0 +1,43 @@ +import { WMSInventoryTransactionRepository } from "../../../repository/wms/inventory_transaction.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; + +export const wmsInventoryCheckToolChaosConfigs: Record = { + "wms.inventory.ensure_available": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "TIMEOUT"], + delayMs: { min: 50, max: 200 }, + }, +}; + +const ensureAvailable = async (args: { + worldId: string; + productId: string; + quantity: number; + warehouseId?: string; +}) => { + const { worldId, productId, quantity, warehouseId } = args; + + console.log(`[Inventory Check] Checking availability for World: ${worldId}, Product: ${productId}, Warehouse: ${warehouseId}`); + const repo = WMSInventoryTransactionRepository(worldId); + const { totalAvailable } = await repo.getInventoryTotalByProduct(productId, warehouseId); + console.log(`[Inventory Check] Total available for ${productId}: ${totalAvailable}`); + + if (totalAvailable < quantity) { + throw new Error( + `[Inventory Outage] Product ${productId} has insufficient stock (Available: ${totalAvailable}, Required: ${quantity}). 
This has been flagged for replenishment.` + ); + } + + return { available: totalAvailable, required: quantity, status: "SUFFICIENT" }; +}; + +export const wmsInventoryCheckTools = { + "wms.inventory.ensure_available": { + helper: { + ensureAvailable, + }, + helperMethod: "ensureAvailable", + chaos: wmsInventoryCheckToolChaosConfigs["wms.inventory.ensure_available"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/outbound-order.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/outbound-order.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..76fe91b0efc7ca04784ad4bbb62a71f1a3414b13 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/outbound-order.tool.od.ts @@ -0,0 +1,140 @@ +import { WMSOutboundOrderRepository } from "../../../repository/wms/outbound_order.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; + +export interface WmsOutboundOrderToolDefinition { + repository: typeof WMSOutboundOrderRepository; + method: string; + chaos: ChaosConfig; +} + +export const wmsOutboundOrderToolChaosConfigs: Record = { + "wms.outbound.order.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + conditionalFailures: { + orderNumber: (value: string) => value?.includes("CHAOS"), + }, + }, + "wms.outbound.order.get_by_status": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: { + limit: (value: number) => value > 1000, + }, + }, + "wms.outbound.order.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "wms.outbound.order.get_by_number": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR"], + delayMs: { min: 0, max: 50 }, + conditionalFailures: {}, + }, + "wms.outbound.order.update_status": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 100 }, + conditionalFailures: { + status: (value: string) => value === "CHAOS", + }, + }, + "wms.outbound.order.allocate_line": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "wms.outbound.order.update_picking_progress": { + enabled: false, + failureRate: 0.04, + errorTypes: ["DATABASE_ERROR", "NOT_FOUND_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 120 }, + conditionalFailures: {}, + }, + "wms.outbound.order.get_orders_ready_for_picking": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: {}, + }, + "wms.outbound.order.get_fulfillment_metrics": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 300 }, + conditionalFailures: {}, + }, + "wms.outbound.order.get_orders_by_customer": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + conditionalFailures: {}, + }, +}; + +export const wmsOutboundOrderTools: Record = { + "wms.outbound.order.create": { + repository: WMSOutboundOrderRepository, + method: "createOutboundOrder", + 
chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.create"]!, + }, + "wms.outbound.order.get_by_status": { + repository: WMSOutboundOrderRepository, + method: "getOutboundOrdersByStatus", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.get_by_status"]!, + }, + "wms.outbound.order.get_by_id": { + repository: WMSOutboundOrderRepository, + method: "getOutboundOrderById", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.get_by_id"]!, + }, + "wms.outbound.order.get_by_number": { + repository: WMSOutboundOrderRepository, + method: "getOutboundOrderByNumber", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.get_by_number"]!, + }, + "wms.outbound.order.update_status": { + repository: WMSOutboundOrderRepository, + method: "updateOrderStatus", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.update_status"]!, + }, + "wms.outbound.order.allocate_line": { + repository: WMSOutboundOrderRepository, + method: "allocateOrderLine", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.allocate_line"]!, + }, + "wms.outbound.order.update_picking_progress": { + repository: WMSOutboundOrderRepository, + method: "updatePickingProgress", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.update_picking_progress"]!, + }, + "wms.outbound.order.get_orders_ready_for_picking": { + repository: WMSOutboundOrderRepository, + method: "getOrdersReadyForPicking", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.get_orders_ready_for_picking"]!, + }, + "wms.outbound.order.get_fulfillment_metrics": { + repository: WMSOutboundOrderRepository, + method: "getOrderFulfillmentMetrics", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.get_fulfillment_metrics"]!, + }, + "wms.outbound.order.get_orders_by_customer": { + repository: WMSOutboundOrderRepository, + method: "getOrdersByCustomer", + chaos: wmsOutboundOrderToolChaosConfigs["wms.outbound.order.get_orders_by_customer"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/outbound-shipment.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/outbound-shipment.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..55457bbc353f37c86426308eff6c78862c247965 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/outbound-shipment.tool.od.ts @@ -0,0 +1,165 @@ +import { WMSOutboundShipmentRepository } from "../../../repository/wms/outbound_shipment.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSOutboundShipmentToolDefinition { + repository: typeof WMSOutboundShipmentRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsOutboundShipmentToolChaosConfigs: Record = { + "wms.outbound_shipment.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.outbound_shipment.get_by_status": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.outbound_shipment.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.outbound_shipment.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 
150 }, + }, + "wms.outbound_shipment.get_by_warehouse": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.outbound_shipment.add_event": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.outbound_shipment.get_metrics": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.outbound_shipment.get_ready": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.outbound_shipment.get_by_tracking": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, +}; + +export const wmsOutboundShipmentToolDefaultChaos: Record = { + "wms.outbound_shipment.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Invalid shipment data", + config: { + corruptionType: "invalid_format", + corruptFields: ["warehouseId", "toAddress"], + }, + }, + ], + }, + "wms.outbound_shipment.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Shipment not found", + config: { + missingRecords: true, + }, + }, + ], + }, + "wms.outbound_shipment.get_by_tracking": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Tracking number not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsOutboundShipmentTools: Record = { + "wms.outbound_shipment.create": { + repository: WMSOutboundShipmentRepository, + method: "createShipment", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.create"]!, + defaultChaosPolicy: wmsOutboundShipmentToolDefaultChaos["wms.outbound_shipment.create"], + }, + "wms.outbound_shipment.get_by_status": { + repository: WMSOutboundShipmentRepository, + method: "getShipmentsByStatus", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.get_by_status"]!, + }, + "wms.outbound_shipment.get_by_id": { + repository: WMSOutboundShipmentRepository, + method: "getShipmentById", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.get_by_id"]!, + defaultChaosPolicy: wmsOutboundShipmentToolDefaultChaos["wms.outbound_shipment.get_by_id"], + }, + "wms.outbound_shipment.update_status": { + repository: WMSOutboundShipmentRepository, + method: "updateShipmentStatus", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.update_status"]!, + }, + "wms.outbound_shipment.get_by_warehouse": { + repository: WMSOutboundShipmentRepository, + method: "getShipmentsByWarehouse", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.get_by_warehouse"]!, + }, + "wms.outbound_shipment.add_event": { + repository: WMSOutboundShipmentRepository, + method: "addTrackingEvent", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.add_event"]!, + }, + "wms.outbound_shipment.get_metrics": { + repository: WMSOutboundShipmentRepository, + method: "getShipmentMetrics", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.get_metrics"]!, + }, + "wms.outbound_shipment.get_ready": { + repository: WMSOutboundShipmentRepository, + method: "getShipmentsReadyToShip", + chaos: 
wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.get_ready"]!, + }, + "wms.outbound_shipment.get_by_tracking": { + repository: WMSOutboundShipmentRepository, + method: "getShipmentsByTrackingNumber", + chaos: wmsOutboundShipmentToolChaosConfigs["wms.outbound_shipment.get_by_tracking"]!, + defaultChaosPolicy: + wmsOutboundShipmentToolDefaultChaos["wms.outbound_shipment.get_by_tracking"], + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/replenishment.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/replenishment.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..d6badbf5464dcfdf1ed2794023b00d047bd0911d --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/replenishment.tool.od.ts @@ -0,0 +1,149 @@ +import { WMSReplenishmentRepository } from "../../../repository/wms/replenishment.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSReplenishmentToolDefinition { + repository: typeof WMSReplenishmentRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsReplenishmentToolChaosConfigs: Record = { + "wms.replenishment.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.replenishment.get_by_status": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.replenishment.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.replenishment.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.replenishment.get_by_product": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.replenishment.get_by_bin": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.replenishment.approve": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.replenishment.get_metrics": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.replenishment.cancel": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, +}; + +export const wmsReplenishmentToolDefaultChaos: Record = { + "wms.replenishment.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "data_corruption", + weight: 1, + description: "Invalid replenishment data", + config: { + corruptionType: "invalid_format", + corruptFields: ["warehouseId", "productId", "fromBin", "toBin"], + }, + }, + ], + }, + "wms.replenishment.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Replenishment not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsReplenishmentTools: Record = { + "wms.replenishment.create": { + repository: WMSReplenishmentRepository, + method: "createReplenishment", + chaos: 
wmsReplenishmentToolChaosConfigs["wms.replenishment.create"]!, + defaultChaosPolicy: wmsReplenishmentToolDefaultChaos["wms.replenishment.create"], + }, + "wms.replenishment.get_by_status": { + repository: WMSReplenishmentRepository, + method: "getReplenishmentsByStatus", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.get_by_status"]!, + }, + "wms.replenishment.get_by_id": { + repository: WMSReplenishmentRepository, + method: "getReplenishmentById", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.get_by_id"]!, + defaultChaosPolicy: wmsReplenishmentToolDefaultChaos["wms.replenishment.get_by_id"], + }, + "wms.replenishment.update_status": { + repository: WMSReplenishmentRepository, + method: "updateReplenishmentStatus", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.update_status"]!, + }, + "wms.replenishment.get_by_product": { + repository: WMSReplenishmentRepository, + method: "getReplenishmentsByProduct", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.get_by_product"]!, + }, + "wms.replenishment.get_by_bin": { + repository: WMSReplenishmentRepository, + method: "getReplenishmentsByBin", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.get_by_bin"]!, + }, + "wms.replenishment.approve": { + repository: WMSReplenishmentRepository, + method: "approveReplenishment", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.approve"]!, + }, + "wms.replenishment.get_metrics": { + repository: WMSReplenishmentRepository, + method: "getReplenishmentMetrics", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.get_metrics"]!, + }, + "wms.replenishment.cancel": { + repository: WMSReplenishmentRepository, + method: "cancelReplenishment", + chaos: wmsReplenishmentToolChaosConfigs["wms.replenishment.cancel"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/task.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/task.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..3b31087f60a0deaa92928aa4ab3afae74067a65c --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/task.tool.od.ts @@ -0,0 +1,137 @@ +import { WMSTaskRepository } from "../../../repository/wms/task.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSTaskToolDefinition { + repository: typeof WMSTaskRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsTaskToolChaosConfigs: Record = { + "wms.task.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.task.get_logs": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.task.get_by_user": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.task.get_timestamps": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.task.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.task.get_metrics": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { 
min: 0, max: 200 }, + }, + "wms.task.get_active": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.task.add_scan": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, +}; + +export const wmsTaskToolDefaultChaos: Record = { + "wms.task.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Task already exists", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "wms.task.update_status": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "invalid_state", + weight: 2, + description: "Invalid task status transition", + config: { + invalidStates: ["COMPLETED"], + }, + }, + ], + }, +}; + +export const wmsTaskTools: Record = { + "wms.task.create": { + repository: WMSTaskRepository, + method: "createTask", + chaos: wmsTaskToolChaosConfigs["wms.task.create"]!, + defaultChaosPolicy: wmsTaskToolDefaultChaos["wms.task.create"], + }, + "wms.task.get_logs": { + repository: WMSTaskRepository, + method: "getTaskLogs", + chaos: wmsTaskToolChaosConfigs["wms.task.get_logs"]!, + }, + "wms.task.get_by_user": { + repository: WMSTaskRepository, + method: "getTasksByUser", + chaos: wmsTaskToolChaosConfigs["wms.task.get_by_user"]!, + }, + "wms.task.get_timestamps": { + repository: WMSTaskRepository, + method: "getTaskTimestamps", + chaos: wmsTaskToolChaosConfigs["wms.task.get_timestamps"]!, + }, + "wms.task.update_status": { + repository: WMSTaskRepository, + method: "updateTaskStatus", + chaos: wmsTaskToolChaosConfigs["wms.task.update_status"]!, + defaultChaosPolicy: wmsTaskToolDefaultChaos["wms.task.update_status"], + }, + "wms.task.get_metrics": { + repository: WMSTaskRepository, + method: "getTaskPerformanceMetrics", + chaos: wmsTaskToolChaosConfigs["wms.task.get_metrics"]!, + }, + "wms.task.get_active": { + repository: WMSTaskRepository, + method: "getActiveTasks", + chaos: wmsTaskToolChaosConfigs["wms.task.get_active"]!, + }, + "wms.task.add_scan": { + repository: WMSTaskRepository, + method: "addTaskScan", + chaos: wmsTaskToolChaosConfigs["wms.task.add_scan"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/warehouse.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/warehouse.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..8d56bb40a2d1f6c5d89ee65f51aee593cded1de5 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/warehouse.tool.od.ts @@ -0,0 +1,160 @@ +import { WMSWarehouseRepository } from "../../../repository/wms/warehouse.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSWarehouseToolDefinition { + repository: typeof WMSWarehouseRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsWarehouseToolChaosConfigs: Record = { + "wms.warehouse.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.warehouse.get_all": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.warehouse.get_by_code": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", 
"TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.warehouse.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.warehouse.update": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.warehouse.get_by_type": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.warehouse.get_active": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.warehouse.update_status": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.warehouse.search": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, + "wms.warehouse.get_by_timezone": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, +}; + +export const wmsWarehouseToolDefaultChaos: Record = { + "wms.warehouse.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Warehouse already exists", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "wms.warehouse.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Warehouse not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsWarehouseTools: Record = { + "wms.warehouse.create": { + repository: WMSWarehouseRepository, + method: "createWarehouse", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.create"]!, + defaultChaosPolicy: wmsWarehouseToolDefaultChaos["wms.warehouse.create"], + }, + "wms.warehouse.get_all": { + repository: WMSWarehouseRepository, + method: "getAllWarehouses", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.get_all"]!, + }, + "wms.warehouse.get_by_code": { + repository: WMSWarehouseRepository, + method: "getWarehouseByCode", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.get_by_code"]!, + defaultChaosPolicy: wmsWarehouseToolDefaultChaos["wms.warehouse.get_by_code"], + }, + "wms.warehouse.get_by_id": { + repository: WMSWarehouseRepository, + method: "getWarehouseById", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.get_by_id"]!, + defaultChaosPolicy: wmsWarehouseToolDefaultChaos["wms.warehouse.get_by_id"], + }, + "wms.warehouse.update": { + repository: WMSWarehouseRepository, + method: "updateWarehouse", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.update"]!, + }, + "wms.warehouse.get_by_type": { + repository: WMSWarehouseRepository, + method: "getWarehousesByType", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.get_by_type"]!, + }, + "wms.warehouse.get_active": { + repository: WMSWarehouseRepository, + method: "getActiveWarehouses", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.get_active"]!, + }, + "wms.warehouse.update_status": { + repository: WMSWarehouseRepository, + method: "updateWarehouseStatus", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.update_status"]!, + }, + "wms.warehouse.search": { + repository: WMSWarehouseRepository, + method: "searchWarehouses", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.search"]!, + }, + 
"wms.warehouse.get_by_timezone": { + repository: WMSWarehouseRepository, + method: "getWarehousesByTimezone", + chaos: wmsWarehouseToolChaosConfigs["wms.warehouse.get_by_timezone"]!, + }, +}; diff --git a/packages/controlmart/src/operational-descriptor/tools/wms/zone.tool.od.ts b/packages/controlmart/src/operational-descriptor/tools/wms/zone.tool.od.ts new file mode 100644 index 0000000000000000000000000000000000000000..62a6ab2b648aa704f278e56b54932ba34b41d256 --- /dev/null +++ b/packages/controlmart/src/operational-descriptor/tools/wms/zone.tool.od.ts @@ -0,0 +1,149 @@ +import { WMSZoneRepository } from "../../../repository/wms/zone.wms.repository"; +import type { ChaosConfig } from "../registry.tool"; +import type { ChaosPolicy } from "../../../types/od.type"; + +export interface WMSZoneToolDefinition { + repository: typeof WMSZoneRepository; + method: keyof ReturnType; + chaos: ChaosConfig; + defaultChaosPolicy?: ChaosPolicy; +} + +export const wmsZoneToolChaosConfigs: Record = { + "wms.zone.create": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DUPLICATE_ERROR", "DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.zone.get_by_warehouse": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.zone.get_by_code": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.zone.get_by_id": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.zone.update": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.zone.get_by_type": { + enabled: false, + failureRate: 0.02, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 100 }, + }, + "wms.zone.add_aisle": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.zone.remove_aisle": { + enabled: false, + failureRate: 0.05, + errorTypes: ["DATABASE_ERROR", "VALIDATION_ERROR"], + delayMs: { min: 0, max: 150 }, + }, + "wms.zone.get_utilization": { + enabled: false, + failureRate: 0.03, + errorTypes: ["DATABASE_ERROR", "TIMEOUT_ERROR"], + delayMs: { min: 0, max: 200 }, + }, +}; + +export const wmsZoneToolDefaultChaos: Record = { + "wms.zone.create": { + enabled: false, + probability: 0.05, + scenarios: [ + { + type: "duplicate_data", + weight: 2, + description: "Zone already exists", + config: { + corruptionType: "invalid_format", + }, + }, + ], + }, + "wms.zone.get_by_id": { + enabled: false, + probability: 0.02, + scenarios: [ + { + type: "missing_data", + weight: 1, + description: "Zone not found", + config: { + missingRecords: true, + }, + }, + ], + }, +}; + +export const wmsZoneTools: Record = { + "wms.zone.create": { + repository: WMSZoneRepository, + method: "createZone", + chaos: wmsZoneToolChaosConfigs["wms.zone.create"]!, + defaultChaosPolicy: wmsZoneToolDefaultChaos["wms.zone.create"], + }, + "wms.zone.get_by_warehouse": { + repository: WMSZoneRepository, + method: "getZonesByWarehouse", + chaos: wmsZoneToolChaosConfigs["wms.zone.get_by_warehouse"]!, + }, + "wms.zone.get_by_code": { + repository: WMSZoneRepository, + method: "getZoneByCode", + chaos: wmsZoneToolChaosConfigs["wms.zone.get_by_code"]!, + defaultChaosPolicy: 
wmsZoneToolDefaultChaos["wms.zone.get_by_code"], + }, + "wms.zone.get_by_id": { + repository: WMSZoneRepository, + method: "getZoneById", + chaos: wmsZoneToolChaosConfigs["wms.zone.get_by_id"]!, + defaultChaosPolicy: wmsZoneToolDefaultChaos["wms.zone.get_by_id"], + }, + "wms.zone.update": { + repository: WMSZoneRepository, + method: "updateZone", + chaos: wmsZoneToolChaosConfigs["wms.zone.update"]!, + }, + "wms.zone.get_by_type": { + repository: WMSZoneRepository, + method: "getZonesByType", + chaos: wmsZoneToolChaosConfigs["wms.zone.get_by_type"]!, + }, + "wms.zone.add_aisle": { + repository: WMSZoneRepository, + method: "addAisleToZone", + chaos: wmsZoneToolChaosConfigs["wms.zone.add_aisle"]!, + }, + "wms.zone.remove_aisle": { + repository: WMSZoneRepository, + method: "removeAisleFromZone", + chaos: wmsZoneToolChaosConfigs["wms.zone.remove_aisle"]!, + }, + "wms.zone.get_utilization": { + repository: WMSZoneRepository, + method: "getZoneCapacityUtilization", + chaos: wmsZoneToolChaosConfigs["wms.zone.get_utilization"]!, + }, +}; diff --git a/packages/controlmart/src/personas/catalog.ts b/packages/controlmart/src/personas/catalog.ts new file mode 100644 index 0000000000000000000000000000000000000000..d493629777ce4c79c1ae86b4f1076fa455dfaf3e --- /dev/null +++ b/packages/controlmart/src/personas/catalog.ts @@ -0,0 +1,119 @@ +/** + * Persona Catalog + * + * Defines supply chain personas and their associated capabilities. + * Starting with 5 representative personas covering different role types. + */ + +import type { Persona } from '../types/persona.type'; + +/** + * Catalog of all available personas + */ +export const personaCatalog: Persona[] = [ + { + id: 'store-manager', + name: 'Store Manager', + description: + 'Manages store operations including inventory management, ordering, and customer service. Responsible for ensuring product availability and store performance.', + role: 'management', + department: 'store-operations', + capabilityIds: [ + 'inventory-check', + 'shipment-tracking', + ], + metadata: { + accessLevel: 'advanced', + tags: ['retail', 'inventory', 'management', 'customer-facing'], + priority: 1, + permissions: ['read:inventory', 'read:orders', 'read:shipments'], + }, + }, + { + id: 'warehouse-worker', + name: 'Warehouse Worker', + description: + 'Performs day-to-day warehouse operations including picking, packing, receiving, and cycle counting. Frontline operational role.', + role: 'operational', + department: 'warehouse', + capabilityIds: [ + 'inventory-check', + 'equipment-availability-check', + ], + metadata: { + accessLevel: 'basic', + tags: ['warehouse', 'operational', 'picking', 'inventory'], + priority: 3, + permissions: ['read:inventory', 'read:equipment'], + }, + }, + { + id: 'customer-service-rep', + name: 'Customer Service Representative', + description: + 'Handles customer inquiries about order status, shipment tracking, and issue resolution. Primary customer interface for order-related questions.', + role: 'specialist', + department: 'customer-service', + capabilityIds: [ + 'inventory-check', + 'shipment-tracking', + ], + metadata: { + accessLevel: 'basic', + tags: ['customer-service', 'support', 'tracking'], + priority: 4, + permissions: ['read:orders', 'read:shipments', 'read:inventory'], + }, + }, + { + id: 'warehouse-manager', + name: 'Warehouse Manager', + description: + 'Oversees all warehouse operations including receiving, storage, picking, packing, and shipping. 
Responsible for warehouse performance, resource allocation, and process optimization.', + role: 'management', + department: 'warehouse', + capabilityIds: [ + 'inventory-check', + 'shipment-tracking', + 'equipment-availability-check', + 'dock-appointment-scheduling', + ], + metadata: { + accessLevel: 'advanced', + tags: ['warehouse', 'management', 'operations', 'optimization'], + priority: 2, + permissions: [ + 'read:inventory', + 'read:shipments', + 'read:equipment', + 'read:dock-schedule', + 'write:dock-schedule', + ], + }, + }, + { + id: 'system-administrator', + name: 'System Administrator', + description: + 'Manages system configuration, user access, integrations, and monitoring. Full system access for administrative and troubleshooting purposes.', + role: 'system', + department: 'warehouse', + capabilityIds: [ + 'inventory-check', + 'shipment-tracking', + 'equipment-availability-check', + 'dock-appointment-scheduling', + ], + metadata: { + accessLevel: 'admin', + tags: ['admin', 'system', 'configuration', 'monitoring'], + priority: 10, + permissions: [ + 'admin:*', + 'read:*', + 'write:*', + 'config:*', + ], + }, + }, +]; diff --git a/packages/controlmart/src/repository/audit.repository.ts b/packages/controlmart/src/repository/audit.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..b79fbdc064544c2050113a5379c81c4b7d782efe --- /dev/null +++ b/packages/controlmart/src/repository/audit.repository.ts @@ -0,0 +1,41 @@ +import { AuditLog, type TAuditLogModel, type TQueryableAuditFields } from "../models/audit.model"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; + +const getAuditLogs = async (query: TQueryableAuditFields) => { + try { + const filters: Record = { + "after.worldRef.worldId": query.worldId, + }; + query.model && (filters.model = query.model); + query.documentId && (filters.documentId = query.documentId); + + if (query.dateStart && query.dateEnd) { + filters.createdAt = { + $gte: query.dateStart, + $lte: query.dateEnd, + }; + } else if (query.dateStart) { + filters.createdAt = { + $gte: query.dateStart, + }; + } else if (query.dateEnd) { + filters.createdAt = { + $lte: query.dateEnd, + }; + } + + const loadedAuditLog: TAuditLogModel[] = await AuditLog.find(filters).sort({ + createdAt: -1, + }); + return loadedAuditLog; + } catch (error) { + throw new RepositoryError( + `Failed to load audit logs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const AuditRecordRepository = { + getAuditLogs, +}; diff --git a/packages/controlmart/src/repository/capability.repository.ts b/packages/controlmart/src/repository/capability.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..015272f8aca08d2d4aae29354692508e7afcd48f --- /dev/null +++ b/packages/controlmart/src/repository/capability.repository.ts @@ -0,0 +1,275 @@ +import { Capability } from "../models/capability.model"; +import type { TCapabilityModel, TCapabilityInput } from "../models/capability.model.type"; +import type { CapabilityFilter } from "../types/capability.type"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; +import { + applyOffsetPagination, + buildOffsetMeta, + DEFAULT_LIMITS, + type OffsetPaginationOptions, + type OffsetPaginationMeta +} from "../utils/pagination.util"; + +/** + * Create a new capability + * @param data Capability data (without _id, timestamps) + * @returns Created capability document + * @throws RepositoryError with VALIDATION_ERROR or DATABASE_ERROR + */ +const 
create = async (data: TCapabilityInput): Promise => { + try { + // Validation + if (!data.id || !data.name || !data.odId) { + throw new RepositoryError( + "Capability ID, name, and odId are required", + "VALIDATION_ERROR" + ); + } + + const capability = await Capability.create(data); + return capability; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create capability: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Find capability by ID + * @param id Capability ID + * @returns Capability document or null if not found + * @throws RepositoryError with VALIDATION_ERROR or DATABASE_ERROR + */ +const findById = async (id: string): Promise => { + try { + if (!id) { + throw new RepositoryError("Capability ID is required", "VALIDATION_ERROR"); + } + return await (Capability as any).findOne({ id }).exec(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find capability: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Find multiple capabilities by IDs + * @param ids Array of capability IDs + * @returns Array of capability documents + * @throws RepositoryError with DATABASE_ERROR + */ +const findByIds = async (ids: string[]): Promise => { + try { + if (!ids || ids.length === 0) return []; + return await (Capability as any).find({ id: { $in: ids } }).exec(); + } catch (error) { + throw new RepositoryError( + `Failed to find capabilities: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Get all capabilities with optional filters and pagination + * @param filters Optional filter criteria + * @param pagination Optional pagination options + * @returns Object with capabilities array and pagination metadata + * @throws RepositoryError with DATABASE_ERROR + */ +const getAll = async ( + filters?: CapabilityFilter, + pagination?: OffsetPaginationOptions +): Promise<{ data: TCapabilityModel[]; pagination?: OffsetPaginationMeta }> => { + try { + const query = filters ? 
buildFilterQuery(filters) : {}; + + // If pagination is provided, apply it + if (pagination) { + const page = pagination.page || 1; + const limit = pagination.limit || DEFAULT_LIMITS.capabilities; + const { skip, limit: finalLimit } = applyOffsetPagination(page, limit); + + // Get total count for pagination metadata + const total = await (Capability as any).countDocuments(query).exec(); + + // Get paginated results + const data = await (Capability as any) + .find(query) + .sort({ name: 1 }) + .skip(skip) + .limit(finalLimit) + .exec(); + + // Build pagination metadata + const paginationMeta = buildOffsetMeta(total, page, limit); + + return { data, pagination: paginationMeta }; + } + + // No pagination - return all results (backward compatible) + const data = await (Capability as any).find(query).sort({ name: 1 }).exec(); + return { data }; + } catch (error) { + throw new RepositoryError( + `Failed to get capabilities: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Search capabilities by text query + * Searches in name, description, tags.domain, and tags.services + * @param query Search query string + * @returns Array of matching capability documents + * @throws RepositoryError with DATABASE_ERROR + */ +const search = async (query: string): Promise => { + try { + if (!query) return []; + + const regex = new RegExp(query, "i"); + return await (Capability as any) + .find({ + $or: [ + { name: regex }, + { description: regex }, + { "tags.domain": regex }, + { "tags.services": regex } + ] + }) + .sort({ name: 1 }) + .exec(); + } catch (error) { + throw new RepositoryError( + `Failed to search capabilities: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Update capability by ID + * @param id Capability ID + * @param data Partial capability data to update + * @returns Updated capability document + * @throws RepositoryError with VALIDATION_ERROR, NOT_FOUND_ERROR, or DATABASE_ERROR + */ +const update = async ( + id: string, + data: Partial +): Promise => { + try { + if (!id) { + throw new RepositoryError("Capability ID is required", "VALIDATION_ERROR"); + } + + // Separate fields to set vs unset (following world.repository.ts pattern) + const setFields: any = { updatedAt: new Date() }; + const unsetFields: any = {}; + + for (const [key, value] of Object.entries(data)) { + if (value === undefined) { + unsetFields[key] = ''; + } else { + setFields[key] = value; + } + } + + const updateOps: any = { $set: setFields }; + if (Object.keys(unsetFields).length > 0) { + updateOps.$unset = unsetFields; + } + + const updated = await (Capability as any).findOneAndUpdate( + { id }, + updateOps, + { new: true } + ).exec(); + + if (!updated) { + throw new RepositoryError("Capability not found", "NOT_FOUND_ERROR"); + } + + return updated; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update capability: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Delete capability by ID + * @param id Capability ID + * @returns True if deleted, false if not found + * @throws RepositoryError with VALIDATION_ERROR or DATABASE_ERROR + */ +const deleteCapability = async (id: string): Promise => { + try { + if (!id) { + throw new RepositoryError("Capability ID is required", "VALIDATION_ERROR"); + } + + const result = await Capability.deleteOne({ id }).exec(); + return result.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete capability: ${getErrorMessage(error)}`, 
+ "DATABASE_ERROR" + ); + } +}; + +/** + * Build MongoDB query from filter criteria + * @param filters Filter criteria + * @returns MongoDB query object + */ +const buildFilterQuery = (filters: CapabilityFilter): any => { + const query: any = {}; + + if (filters.domain && filters.domain.length > 0) { + query['tags.domain'] = { $in: filters.domain }; + } + if (filters.complexity) { + query['tags.complexity'] = filters.complexity; + } + if (filters.services && filters.services.length > 0) { + query['tags.services'] = { $in: filters.services }; + } + if (filters.personas && filters.personas.length > 0) { + query['tags.personas'] = { $in: filters.personas }; + } + if (filters.patterns && filters.patterns.length > 0) { + query['tags.patterns'] = { $in: filters.patterns }; + } + + return query; +}; + +/** + * Capability Repository + * Provides CRUD operations for capabilities (global resources) + */ +export const CapabilityRepository = { + create, + findById, + findByIds, + getAll, + search, + filter: getAll, // Alias for getAll with filters + update, + delete: deleteCapability +}; + +export type TCapabilityRepository = typeof CapabilityRepository; diff --git a/packages/controlmart/src/repository/edi/edi.repository.ts b/packages/controlmart/src/repository/edi/edi.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..c0d90af7c11d17bdd8cbf1e9a79515028967bc88 --- /dev/null +++ b/packages/controlmart/src/repository/edi/edi.repository.ts @@ -0,0 +1,796 @@ +import crypto from "crypto"; + +import { + EdiTransaction, + type TEdiTransactionInput, + type TEdiTransactionModel, +} from "../../models/edi/transactions.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { Edi810Parser, Edi850Parser, Edi856Parser } from "../../utils/edi"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const generateIdempotencyKey = (data: TEdiTransactionInput, world: TWorldRefModel): string => { + const keySource = [ + world.worldId, + data.partnerId, + data.docType, + data.direction, + data.fileName || data.flowId || data.transactionId || "", + ].join("|"); + + return crypto.createHash("sha256").update(keySource).digest("hex"); +}; + +const getDollarAmountFromEdi = ( + rawEdi: TEdiTransactionInput, + docType: TEdiTransactionInput["docType"], +): number => { + try { + if (docType === "850") { + const parser = Edi850Parser.parse(rawEdi.rawEdi!); + return parser.totalAmount; + } + if (docType === "810") { + const parser = Edi810Parser.parse(rawEdi.rawEdi!); + return parser.totalAmount; + } + if (docType === "856") { + const parser = Edi856Parser.parse(rawEdi.rawEdi!); + return parser.totalAmount; + } + return 0; + } catch { + return 0; + } +}; + +const buildDateFilter = (dateStart?: Date, dateEnd?: Date) => { + if (!dateStart && !dateEnd) return undefined; + const filter: any = {}; + if (dateStart) filter.$gte = dateStart; + if (dateEnd) filter.$lte = dateEnd; + return filter; +}; + +const createEdiTransaction = async ( + world: TWorldRefModel, + data: TEdiTransactionInput, +): Promise => { + try { + const transactionId = data.transactionId || crypto.randomUUID(); + // Use the generated transactionId in idempotency key to ensure uniqueness + const dataWithTransactionId = { ...data, transactionId }; + const _idempotencyKey = generateIdempotencyKey(dataWithTransactionId, world); + const ediAmount = 
getDollarAmountFromEdi(data, data.docType); + const upsertEdi = await EdiTransaction.findOneAndUpdate( + { _idempotencyKey }, + { + $setOnInsert: { + ...data, + dollarValue: data.dollarValue ?? (ediAmount && !isNaN(ediAmount) ? ediAmount : undefined), + transactionId, + worldRef: world, + timestamp: data.timestamp || new Date(), + status: data.status || (data.direction === "INBOUND" ? "RECEIVED" : "QUEUED"), + _idempotencyKey, + }, + }, + { upsert: true, new: true }, + ); + + const jsonified = upsertEdi.toJSON(); + return jsonified as TEdiTransactionModel; + } catch (error: any) { + if (error.code === 11000) { + throw new RepositoryError("Duplicate EDI transaction detected", "DUPLICATE_ERROR"); + } + throw new RepositoryError( + `Failed to create EDI transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getEdiTransactionById = async ( + worldId: string, + transactionId: string, +): Promise => { + try { + if (!transactionId) { + throw new RepositoryError("Transaction ID is required", "VALIDATION_ERROR"); + } + + const transaction = await EdiTransaction.findOne({ + "worldRef.worldId": worldId, + transactionId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TEdiTransactionModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to find EDI transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllEdiTransactions = async ( + worldId: string, + filters?: { + partnerId?: string; + customerId?: string; + docType?: string; + direction?: "INBOUND" | "OUTBOUND"; + status?: string; + dateStart?: Date; + dateEnd?: Date; + flowId?: string; + limit?: number; + cursor?: string | null; + }, +): Promise> => { + try { + const query: Record = { "worldRef.worldId": worldId }; + + if (filters?.partnerId) query.partnerId = filters.partnerId; + if (filters?.customerId) query.customerId = filters.customerId; + if (filters?.docType) query.docType = filters.docType; + if (filters?.direction) query.direction = filters.direction; + if (filters?.status) query.status = filters.status; + if (filters?.flowId) query.flowId = filters.flowId; + if (filters?.cursor) { + query._id = { $lt: filters.cursor }; + } + + if (filters?.dateStart || filters?.dateEnd) { + query.timestamp = { + ...(filters.dateStart && { $gte: filters.dateStart }), + ...(filters.dateEnd && { $lte: filters.dateEnd }), + }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const transactions = await EdiTransaction.find(query) + .sort({ timestamp: -1 }) + .limit(limit + 1); + + const hasMore = transactions.length > limit; + const results = hasMore ? transactions.slice(0, limit) : transactions; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((t) => t.toJSON() as TEdiTransactionModel), + nextCursor, + totalCount: hasMore ? 
await EdiTransaction.countDocuments(query) : results.length, + hasMore, + limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve EDI transactions: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateEdiTransaction = async ( + worldId: string, + transactionId: string, + updateData: Partial, +): Promise => { + try { + const updated = await EdiTransaction.findOneAndUpdate( + { "worldRef.worldId": worldId, transactionId }, + { + $set: { + ...updateData, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = updated?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("EDI transaction not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TEdiTransactionModel; + } catch (error) { + throw new RepositoryError( + `Failed to update EDI transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateEdiStatus = async ( + worldId: string, + transactionId: string, + status: TEdiTransactionModel["status"], + errorReason?: string, + errorDetails?: any, +): Promise => { + try { + const updateFields: Record = { + status, + updatedAt: new Date(), + }; + if (errorReason) updateFields.errorReason = errorReason; + if (errorDetails) updateFields.errorDetails = errorDetails; + + const updated = await EdiTransaction.findOneAndUpdate( + { "worldRef.worldId": worldId, transactionId }, + { $set: updateFields }, + { new: true }, + ); + + const jsonified = updated?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("EDI transaction not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TEdiTransactionModel; + } catch (error) { + throw new RepositoryError( + `Failed to update EDI transaction status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteEdiTransaction = async (worldId: string, transactionId: string): Promise => { + try { + const transaction = await EdiTransaction.deleteOne({ + "worldRef.worldId": worldId, + transactionId, + }); + + const jsonified = transaction; + return jsonified.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete EDI transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const requeueEdiTransaction = async ( + worldId: string, + transactionId: string, +): Promise => { + try { + const updated = await EdiTransaction.findOneAndUpdate( + { + "worldRef.worldId": worldId, + transactionId, + status: "ERRORED", + }, + { + $set: { + status: "QUEUED", + errorReason: null, + errorDetails: null, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = updated?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Only errored transactions can be requeued", "VALIDATION_ERROR"); + } + + return jsonified as TEdiTransactionModel; + } catch (error) { + throw new RepositoryError( + `Failed to requeue EDI transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const ediInvoiceStatistics = async ( + worldId: string, + dateStart?: Date, + dateEnd?: Date, +): Promise<{ + total: number; + errored: number; + rejectionRate: number; + totalExposureDollar: number; + firstPassAcceptanceRate: number; +}> => { + try { + const dateFilter: any = {}; + if (dateStart) dateFilter.$gte = dateStart; + if (dateEnd) dateFilter.$lte = dateEnd; + + const matchStage: Record = { + "worldRef.worldId": worldId, + docType: "810", + ...(Object.keys(dateFilter).length > 0 ? 
{ timestamp: dateFilter } : {}), + }; + + const total = await EdiTransaction.countDocuments(matchStage); + + const erroredEdi = await EdiTransaction.find({ + ...matchStage, + status: "ERRORED", + }); + + const erroredTransactions = erroredEdi.map((t) => t.toJSON() as TEdiTransactionModel); + + const rejectionRate = total > 0 ? (erroredTransactions.length / total) * 100 : 0; + + const exposureAmounts: number = erroredTransactions + .map((edi: TEdiTransactionModel) => { + try { + const parser = Edi810Parser.parse(edi.rawEdi!); + return parser.totalAmount; + } catch { + return 0; + } + }) + .reduce((acc: number, val: number) => acc + val, 0); + + const firstPassAcceptanceRate = + total > 0 ? ((total - erroredTransactions.length) / total) * 100 : 0; + + return { + total, + errored: erroredTransactions.length, + rejectionRate, + totalExposureDollar: exposureAmounts || 0, + firstPassAcceptanceRate, + }; + } catch (error) { + throw new RepositoryError( + `Failed to calculate EDI invoice rejection rate: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTopEdiErrorsStatsByDocType = async ( + worldId: string, + topLimit: number = 10, + dateStart?: Date, + dateEnd?: Date, +): Promise> => { + try { + const dateFilter = buildDateFilter(dateStart, dateEnd); + + const matchStage: any = { + "worldRef.worldId": worldId, + }; + if (dateFilter) matchStage.createdAt = dateFilter; + + const results = await EdiTransaction.aggregate([ + { $match: matchStage }, + { + $group: { + _id: "$docType", + total: { $sum: 1 }, + }, + }, + { + $project: { + _id: 0, + docType: "$_id", + }, + }, + { + $lookup: { + from: "editransactions", + let: { dt: "$docType" }, + pipeline: [ + { + $match: { + $expr: { + $and: [ + { $eq: ["$docType", "$$dt"] }, + { $eq: ["$worldRef.worldId", worldId] }, + { $eq: ["$status", "ERRORED"] }, + ...(dateFilter + ? 
[{ $gte: ["$createdAt", dateStart] }, { $lte: ["$createdAt", dateEnd] }] + : []), + ], + }, + }, + }, + { + $group: { + _id: null, + count: { $sum: 1 }, + }, + }, + ], + as: "errorStats", + }, + }, + { + $addFields: { + count: { + $ifNull: [{ $arrayElemAt: ["$errorStats.count", 0] }, 0], + }, + }, + }, + { $sort: { count: -1 } }, + { $limit: topLimit }, + { + $project: { + docType: 1, + count: 1, + _id: 0, + }, + }, + ]); + + return results; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve top EDI errors by document type: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +// the sheer suddennes of this implementation scares me +// bhai humne partnerID ki jagah customerID use kiya hai edi ko simulate karte waqt +// ab maine yaha daal diya hai customer id partner id ki jagah + +const getTopEdiErrorStatsByPartners = async ( + worldId: string, + topLimit: number, + dateStart?: Date, + dateEnd?: Date, +): Promise> => { + try { + const dateFilter = buildDateFilter(dateStart, dateEnd); + + const matchStage: any = { + "worldRef.worldId": worldId, + status: "ERRORED", + }; + if (dateFilter) matchStage.createdAt = dateFilter; + + const results = await EdiTransaction.aggregate([ + { $match: matchStage }, + { + $group: { + _id: "$customerId", // since you’re simulating partnerId + count: { $sum: 1 }, + }, + }, + { $sort: { count: -1 } }, + { $limit: topLimit }, + { + $project: { + partnerId: "$_id", + count: 1, + _id: 0, + }, + }, + ]); + + return results; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve top EDI errors by partner: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getEdiTransactionsByPageNumber = async ( + worldId: string, + filters?: { + partnerId?: string; + customerId?: string; + docType?: string; + direction?: "INBOUND" | "OUTBOUND"; + status?: string; + dateStart?: Date; + dateEnd?: Date; + flowId?: string; + page?: number; + pageSize?: number; + }, +): Promise> => { + try { + const query: Record = { "worldRef.worldId": worldId }; + + if (filters?.partnerId) query.partnerId = filters.partnerId; + if (filters?.customerId) query.customerId = filters.customerId; + if (filters?.docType) query.docType = filters.docType; + if (filters?.direction) query.direction = filters.direction; + if (filters?.status) query.status = filters.status; + if (filters?.flowId) query.flowId = filters.flowId; + + if (filters?.dateStart || filters?.dateEnd) { + query.timestamp = { + ...(filters.dateStart && { $gte: filters.dateStart }), + ...(filters.dateEnd && { $lte: filters.dateEnd }), + }; + } + + const pageSize = filters?.pageSize || GLOBAL_PAGE_LIMIT; + const page = filters?.page || 1; + + const results = await EdiTransaction.find(query) + .sort({ timestamp: -1 }) + .skip((page - 1) * pageSize) + .limit(pageSize); + + const totalCount = await EdiTransaction.countDocuments(query); + + return { + items: results.map((t) => t.toJSON() as TEdiTransactionModel), + totalCount, + limit: pageSize, + hasMore: page * pageSize < totalCount, + nextCursor: undefined, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve EDI transactions by page number: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getEdiDollarAmountExposureByPartners = async ( + worldId: string, + topLimit: number, + dateStart?: Date, + dateEnd?: Date, +): Promise> => { + try { + const dateFilter = buildDateFilter(dateStart, dateEnd); + + const matchStage: any = { + "worldRef.worldId": worldId, + status: "ERRORED", + }; + if 
(dateFilter) matchStage.createdAt = dateFilter; + + const results = await EdiTransaction.aggregate([ + { $match: matchStage }, + { + $group: { + _id: "$customerId", + totalDollarAmount: { $sum: "$dollarValue" }, + }, + }, + { $sort: { totalDollarAmount: -1 } }, + { $limit: topLimit ?? 10 }, + { + $project: { + partnerId: "$_id", + totalDollarAmount: 1, + _id: 0, + }, + }, + ]); + + return results; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve EDI dollar amount exposure by partner: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getEdiDollarAmountExposureByDocumentType = async ( + worldId: string, + topLimit: number, + dateStart?: Date, + dateEnd?: Date, +): Promise> => { + try { + const dateFilter = buildDateFilter(dateStart, dateEnd); + + const matchStage: any = { + "worldRef.worldId": worldId, + status: "ERRORED", + }; + if (dateFilter) matchStage.createdAt = dateFilter; + + const results = await EdiTransaction.aggregate([ + { $match: matchStage }, + { + $group: { + _id: "$docType", + totalDollarAmount: { $sum: "$dollarValue" }, + }, + }, + { $sort: { totalDollarAmount: -1 } }, + { $limit: topLimit ?? 10 }, + { + $project: { + docType: "$_id", + totalDollarAmount: 1, + _id: 0, + }, + }, + ]); + + return results; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve EDI dollar amount exposure by document type: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +/** + * Get EDI dashboard metrics + */ +const getEdiDashboard = async ( + worldId: string, +): Promise<{ + total: number; + todayCount: number; + byDirection: { INBOUND: number; OUTBOUND: number }; + byDocType: Record; + byStatus: Record; + errorCount: number; +}> => { + try { + const baseQuery = { "worldRef.worldId": worldId }; + + // Get total count + const total = await EdiTransaction.countDocuments(baseQuery); + + // Get today's count + const todayStart = new Date(); + todayStart.setHours(0, 0, 0, 0); + const todayCount = await EdiTransaction.countDocuments({ + ...baseQuery, + timestamp: { $gte: todayStart }, + }); + + // Get counts by direction + const directionAgg = await EdiTransaction.aggregate([ + { $match: baseQuery }, + { + $group: { + _id: "$direction", + count: { $sum: 1 }, + }, + }, + ]); + const byDirection = { INBOUND: 0, OUTBOUND: 0 }; + directionAgg.forEach((item) => { + if (item._id === "INBOUND" || item._id === "IN") byDirection.INBOUND = item.count; + if (item._id === "OUTBOUND" || item._id === "OUT") byDirection.OUTBOUND = item.count; + }); + + // Get counts by docType + const docTypeAgg = await EdiTransaction.aggregate([ + { $match: baseQuery }, + { + $group: { + _id: "$docType", + count: { $sum: 1 }, + }, + }, + ]); + const byDocType: Record = {}; + docTypeAgg.forEach((item) => { + byDocType[item._id] = item.count; + }); + + // Get counts by status + const statusAgg = await EdiTransaction.aggregate([ + { $match: baseQuery }, + { + $group: { + _id: "$status", + count: { $sum: 1 }, + }, + }, + ]); + const byStatus: Record = {}; + statusAgg.forEach((item) => { + byStatus[item._id] = item.count; + }); + + // Get error count + const errorCount = await EdiTransaction.countDocuments({ + ...baseQuery, + status: { $in: ["ERROR", "ERRORED", "REJECTED"] }, + }); + + return { + total, + todayCount, + byDirection, + byDocType, + byStatus, + errorCount, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve EDI dashboard metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const 
EdiTransactionRepository = (worldId: string) => ({ + createEdiTransaction: (data: TEdiTransactionInput) => + createEdiTransaction({ worldId } as TWorldRefModel, data), + // Get methods + getEdiTransactionById: (id: string) => getEdiTransactionById(worldId, id), + + getAllEdiTransactions: (filters?: { + partnerId?: string; + customerId?: string; + docType?: string; + direction?: "INBOUND" | "OUTBOUND"; + status?: string; + dateStart?: Date; + dateEnd?: Date; + flowId?: string; + cursor?: string; + limit?: number; + }) => getAllEdiTransactions(worldId, filters), + + updateEdiTransaction: (id: string, data: Partial) => + updateEdiTransaction(worldId, id, data), + + updateEdiStatus: ( + id: string, + status: TEdiTransactionModel["status"], + errorReason?: string, + errorDetails?: any, + ) => updateEdiStatus(worldId, id, status, errorReason, errorDetails), + + requeueEdiTransaction: (id: string) => requeueEdiTransaction(worldId, id), + + deleteEdiTransaction: (id: string) => deleteEdiTransaction(worldId, id), + ediInvoiceStatistics: (dateStart?: Date, dateEnd?: Date) => + ediInvoiceStatistics(worldId, dateStart, dateEnd), + + getTopEdiErrorsStatsByDocType: (topLimit: number = 10, dateStart?: Date, dateEnd?: Date) => + getTopEdiErrorsStatsByDocType(worldId, topLimit, dateStart, dateEnd), + getTopEdiErrorStatsByPartners: (topLimit: number = 10, dateStart?: Date, dateEnd?: Date) => + getTopEdiErrorStatsByPartners(worldId, topLimit, dateStart, dateEnd), + getEdiTransactionsByPageNumber: (filters?: { + partnerId?: string; + customerId?: string; + docType?: string; + direction?: "INBOUND" | "OUTBOUND"; + status?: string; + dateStart?: Date; + dateEnd?: Date; + flowId?: string; + cursor?: string; + page?: number; + pageSize?: number; + }) => getEdiTransactionsByPageNumber(worldId, filters), + getEdiDollarAmountExposureByPartners: (topLimit: number, dateStart?: Date, dateEnd?: Date) => + getEdiDollarAmountExposureByPartners(worldId, topLimit, dateStart, dateEnd), + getEdiDollarAmountExposureByDocumentType: (topLimit: number, dateStart?: Date, dateEnd?: Date) => + getEdiDollarAmountExposureByDocumentType(worldId, topLimit, dateStart, dateEnd), + getEdiDashboard: () => getEdiDashboard(worldId), +}); + +export type TEdiTransactionModelRepository = ReturnType; diff --git a/packages/controlmart/src/repository/erp/company.repository.ts b/packages/controlmart/src/repository/erp/company.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..11b2870cefdd65228f6af79608a2adfcd5e7c993 --- /dev/null +++ b/packages/controlmart/src/repository/erp/company.repository.ts @@ -0,0 +1,344 @@ +import { + Company, + type TCompanyModel, + type TCompanyInput, +} from "../../models/erp/company.erp.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createCompany = async ( + world: TWorldRefModel, + data: TCompanyInput, +): Promise => { + try { + if (!data.companyId || !data.name) { + throw new RepositoryError("Company ID and name are required", "VALIDATION_ERROR"); + } + + const existing = await Company.findOne({ + "worldRef.worldId": world.worldId, + ...(data.companyId ? 
{ companyId: data.companyId } : {}), + }); + + if (existing) { + throw new RepositoryError( + `Company with ID ${data.companyId} already exists in this world`, + "DUPLICATE_ERROR", + ); + } + + if (data.isMpcCompany) { + await Company.updateMany( + { "worldRef.worldId": world.worldId }, + { $set: { isMpcCompany: false } }, + ); + } + + const transaction = await Company.create({ + ...data, + worldRef: world, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TCompanyModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create company: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCompanyById = async ( + worldId: string, + companyId: string, +): Promise => { + try { + if (!companyId) { + throw new RepositoryError("Company ID is required", "VALIDATION_ERROR"); + } + + const transaction = await Company.findOne({ + "worldRef.worldId": worldId, + companyId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCompanyModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find company: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCompanyByDunsNumber = async ( + worldId: string, + dunsNumber: string, +): Promise => { + try { + if (!dunsNumber) { + throw new RepositoryError("DUNS number is required", "VALIDATION_ERROR"); + } + + const transaction = await Company.findOne({ + "worldRef.worldId": worldId, + duns: dunsNumber, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCompanyModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find company by DUNS number: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllCompanies = async ( + worldId: string, + filters?: { + status?: string; + companyType?: string; + currency?: string; + isMpcCompany?: boolean; + search?: string; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { "worldRef.worldId": worldId }; + + if (filters?.status) query.status = filters.status; + if (filters?.companyType) query.companyType = filters.companyType; + if (filters?.currency) query.currency = filters.currency; + if (filters?.isMpcCompany !== undefined) query.isMpcCompany = filters.isMpcCompany; + + if (filters?.search) { + query.$or = [ + { name: new RegExp(filters.search, "i") }, + { dunsNumber: new RegExp(filters.search, "i") }, + ]; + } + + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const companies = await Company.find(query) + .sort({ createdAt: -1 }) + .limit(limit + 1); + + const hasMore = companies.length > limit; + const results = hasMore ? companies.slice(0, limit) : companies; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((c) => c.toJSON() as TCompanyModel), + nextCursor, + totalCount: hasMore ? 
await Company.countDocuments(query) : results.length, + hasMore, + limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve companies: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateCompany = async ( + worldId: string, + companyId: string, + updateData: Partial, +): Promise => { + try { + if (updateData.isMpcCompany) { + await Company.updateMany({ "worldRef.worldId": worldId }, { $set: { isMpcCompany: false } }); + } + + const transaction = await Company.findOneAndUpdate( + { "worldRef.worldId": worldId, companyId }, + { + $set: { + ...updateData, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Company not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TCompanyModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update company: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteCompany = async (worldId: string, companyId: string): Promise => { + try { + const res = await Company.deleteOne({ + "worldRef.worldId": worldId, + companyId, + }); + return res.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete company: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getMpcCompany = async (worldId: string): Promise => { + try { + const transaction = await Company.findOne({ + "worldRef.worldId": worldId, + isMpcCompany: true, + }); + const jsonified = transaction?.toJSON() || null; + return jsonified as TCompanyModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to get MPC company: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const bulkUpsertCompanies = async ( + world: TWorldRefModel, + companies: TCompanyInput[], +): Promise => { + try { + const bulkOps = companies.map((c) => ({ + updateOne: { + filter: { "worldRef.worldId": world.worldId, companyId: c.companyId }, + update: { + $set: { ...c, worldRef: world, updatedAt: new Date() }, + }, + upsert: true, + }, + })); + + const result = await Company.bulkWrite(bulkOps); + return result.modifiedCount + result.upsertedCount; + } catch (error) { + throw new RepositoryError( + `Failed to bulk upsert companies: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getRandomCompany = async ( + worldId: string, + type: "npc" | "mpc" = "npc", +): Promise => { + try { + const query = { + "worldRef.worldId": worldId, + isMpcCompany: type === "mpc", + }; + const count = await Company.countDocuments(query); + if (count === 0) return null; + + const random = Math.floor(Math.random() * count); + const company = await Company.findOne(query).skip(random); + + return (company?.toJSON() as TCompanyModel) || null; + } catch (error) { + throw new RepositoryError( + `Failed to get random company: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCustomerCompanies = async ( + worldId: string, +): Promise => { + try { + const query = { + "worldRef.worldId": worldId, + isMpcCompany: false, + companyType: "CUSTOMER", + }; + const companies = await Company.find(query); + return companies.map((c) => c.toJSON() as TCompanyModel); + } catch (error) { + throw new RepositoryError( + `Failed to get customer companies: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getRandomCustomer = async (worldId: string): Promise => { + try { + const query = { + 
"worldRef.worldId": worldId, + isMpcCompany: false, + companyType: "CUSTOMER", + }; + const count = await Company.countDocuments(query); + if (count === 0) return null; + + const random = Math.floor(Math.random() * count); + const company = await Company.findOne(query).skip(random); + + return (company?.toJSON() as TCompanyModel) || null; + } catch (error) { + throw new RepositoryError( + `Failed to get random customer: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const CompanyRepository = (worldId: string) => ({ + createCompany: (data: TCompanyInput) => createCompany({ worldId } as TWorldRefModel, data), + getCompanyById: (args: string | { companyId: string }) => + getCompanyById(worldId, typeof args === "string" ? args : args.companyId), + getCompanyByDunsNumber: (dunsNumber: string) => getCompanyByDunsNumber(worldId, dunsNumber), + getAllCompanies: (filters?: { + status?: string; + companyType?: string; + currency?: string; + isMpcCompany?: boolean; + search?: string; + cursor?: string; + limit?: number; + }) => getAllCompanies(worldId, filters), + updateCompany: (companyId: string, updateData: Partial) => + updateCompany(worldId, companyId, updateData), + deleteCompany: (companyId: string) => deleteCompany(worldId, companyId), + getMpcCompany: () => getMpcCompany(worldId), + bulkUpsertCompanies: (companies: TCompanyInput[]) => + bulkUpsertCompanies({ worldId } as TWorldRefModel, companies), + getRandomCompany: (type: "npc" | "mpc" = "npc") => getRandomCompany(worldId, type), + getRandomCustomer: () => getRandomCustomer(worldId), + getCustomerCompanies: () => getCustomerCompanies(worldId), +}); + +export type TCompanyRepository = ReturnType; diff --git a/packages/controlmart/src/repository/erp/index.ts b/packages/controlmart/src/repository/erp/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..dd065dc3d20379ec1947c5c6ad5edc2aeaf97782 --- /dev/null +++ b/packages/controlmart/src/repository/erp/index.ts @@ -0,0 +1,6 @@ +export * from "./company.repository"; +export * from "./product.repository"; +export * from "./invoice.repository"; +export * from "./order.repository"; +export * from "./shipment.repository"; +export * from "./payment.repository"; diff --git a/packages/controlmart/src/repository/erp/invoice.repository.ts b/packages/controlmart/src/repository/erp/invoice.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..b21c9eb6ea34815f3afe3dec3a330859fe4bde6c --- /dev/null +++ b/packages/controlmart/src/repository/erp/invoice.repository.ts @@ -0,0 +1,236 @@ +import { + Invoice, + type TInvoiceModel, + type TInvoiceInput, +} from "../../models/erp/invoice.erp.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createInvoice = async ( + world: TWorldRefModel, + data: TInvoiceInput, +): Promise => { + try { + // Only check for duplicates if invoiceId is explicitly provided + if (data.invoiceId) { + const existingInvoice = await Invoice.findOne({ + "worldRef.worldId": world.worldId, + invoiceId: data.invoiceId, + }); + + if (existingInvoice) { + throw new RepositoryError( + `Invoice ${data.invoiceId} already exists in this world`, + "DUPLICATE_ERROR", + ); + } + } + + const transaction = await Invoice.create({ + ...data, + worldRef: world, + }); + + const jsonified 
= transaction.toJSON(); + return jsonified as TInvoiceModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create invoice: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getInvoiceById = async ( + worldId: string, + invoiceId: string, +): Promise => { + try { + if (!invoiceId) { + throw new RepositoryError("Invoice ID is required", "VALIDATION_ERROR"); + } + + const transaction = await Invoice.findOne({ + "worldRef.worldId": worldId, + invoiceId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TInvoiceModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find invoice by number: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllInvoices = async ( + worldId: string, + filters?: { + status?: string; + customerId?: string; + partnerId?: string; + invoiceId?: string; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.status) query.status = filters.status; + if (filters?.customerId) query.customerId = filters.customerId; + if (filters?.partnerId) query.partnerId = filters.partnerId; + if (filters?.invoiceId) query.invoiceId = filters.invoiceId; + if (filters?.dateStart || filters?.dateEnd) { + query.issueDate = {}; + if (filters.dateStart) query.issueDate.$gte = filters.dateStart; + if (filters.dateEnd) query.issueDate.$lte = filters.dateEnd; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const invoices = await Invoice.find(query) + .sort({ issueDate: -1 }) + .limit(limit + 1); + + const hasMore = invoices.length > limit; + const results = hasMore ? invoices.slice(0, limit) : invoices; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((i) => i.toJSON() as TInvoiceModel), + nextCursor, + totalCount: hasMore ? 
await Invoice.countDocuments(query) : results.length, + hasMore, + limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve invoices: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateInvoice = async ( + worldId: string, + invoiceId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await Invoice.findOneAndUpdate( + { "worldRef.worldId": worldId, invoiceId }, + { + $set: { + ...updateData, + "audit.updatedAt": new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Invoice not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TInvoiceModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update invoice: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateInvoiceStatus = async ( + worldId: string, + invoiceId: string, + status: TInvoiceModel["status"], +): Promise => { + try { + const transaction = await Invoice.findOneAndUpdate( + { "worldRef.worldId": worldId, invoiceId }, + { + $set: { + status, + "audit.updatedAt": new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Invoice not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TInvoiceModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update invoice status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteInvoice = async (worldId: string, invoiceId: string): Promise => { + try { + const transaction = await Invoice.deleteOne({ + "worldRef.worldId": worldId, + invoiceId, + }); + + const jsonified = transaction; + return jsonified.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete invoice: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const InvoiceRepository = (worldId: string) => ({ + createInvoice: (data: TInvoiceInput) => createInvoice({ worldId } as TWorldRefModel, data), + getInvoiceById: (invoiceId: string) => getInvoiceById(worldId, invoiceId), + getAllInvoices: (filters?: { + status?: string; + customerId?: string; + partnerId?: string; + invoiceId?: string; + cursor?: string; + limit?: number; + dateStart?: Date; + dateEnd?: Date; + }) => getAllInvoices(worldId, filters), + updateInvoice: ( + args: string | ({ invoiceId: string } & Partial), + updateData?: Partial, + ) => + typeof args === "string" + ? updateInvoice(worldId, args, updateData!) 
+ : updateInvoice(worldId, args.invoiceId, args), + updateInvoiceStatus: (invoiceId: string, status: TInvoiceModel["status"]) => + updateInvoiceStatus(worldId, invoiceId, status), + deleteInvoice: (invoiceId: string) => deleteInvoice(worldId, invoiceId), +}); + +export type TInvoiceRepository = ReturnType; diff --git a/packages/controlmart/src/repository/erp/order.repository.ts b/packages/controlmart/src/repository/erp/order.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..0492938a1c8d8d500971609a504b5492ed07a9ca --- /dev/null +++ b/packages/controlmart/src/repository/erp/order.repository.ts @@ -0,0 +1,242 @@ +import { + PurchaseOrder, + type TOrderModel, + type TOrderInputModel, +} from "../../models/erp/orders.erp.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createOrder = async (world: TWorldRefModel, data: TOrderInputModel): Promise => { + try { + if (data.orderId) { + const existingOrder = await PurchaseOrder.findOne({ + "worldRef.worldId": world.worldId, + orderId: data.orderId, + }); + + if (existingOrder) { + throw new RepositoryError( + `Order ${data.orderId} already exists in this world`, + "DUPLICATE_ERROR", + ); + } + } + + const transaction = await PurchaseOrder.create({ + ...data, + worldRef: world, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TOrderModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create order: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOrderById = async (worldId: string, orderId: string): Promise => { + try { + if (!orderId) { + throw new RepositoryError("Order ID is required", "VALIDATION_ERROR"); + } + + const transaction = await PurchaseOrder.findOne({ + "worldRef.worldId": worldId, + orderId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TOrderModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError(`Failed to find order: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const getAllOrders = async ( + worldId: string, + filters?: { + status?: string; + customerId?: string; + partnerId?: string; + orderId?: string; + direction?: string; + orderDateStart?: Date; + orderDateEnd?: Date; + requestedDateStart?: Date; + requestedDateEnd?: Date; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + if (filters?.status) query.status = filters.status; + if (filters?.customerId) query.customerId = filters.customerId; + if (filters?.partnerId) query.partnerId = filters.partnerId; + if (filters?.orderId) query.orderId = filters.orderId; + if (filters?.direction) query.direction = filters.direction; + if (filters?.orderDateStart || filters?.orderDateEnd) { + query.orderDate = {}; + if (filters.orderDateStart) query.orderDate.$gte = filters.orderDateStart; + if (filters.orderDateEnd) query.orderDate.$lte = filters.orderDateEnd; + } + if (filters?.requestedDateStart || filters?.requestedDateEnd) { + query.requestedDate = {}; + if (filters.requestedDateStart) query.requestedDate.$gte = filters.requestedDateStart; + if (filters.requestedDateEnd) query.requestedDate.$lte = 
filters.requestedDateEnd; + } + + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const orders = await PurchaseOrder.find(query) + .sort({ orderDate: -1 }) + .limit(limit + 1); + + const hasMore = orders.length > limit; + const results = hasMore ? orders.slice(0, limit) : orders; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((o) => o.toJSON() as TOrderModel), + nextCursor, + totalCount: hasMore ? await PurchaseOrder.countDocuments(query) : results.length, + hasMore, + limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve orders: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateOrder = async ( + worldId: string, + orderId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await PurchaseOrder.findOneAndUpdate( + { "worldRef.worldId": worldId, orderId }, + { + $set: { + ...updateData, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Order not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TOrderModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update order: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateOrderStatus = async ( + worldId: string, + orderId: string, + status: TOrderModel["status"], +): Promise => { + try { + const transaction = await PurchaseOrder.findOneAndUpdate( + { "worldRef.worldId": worldId, orderId }, + { + $set: { + status, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Order not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TOrderModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update order status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteOrder = async (worldId: string, orderId: string): Promise => { + try { + const res = await PurchaseOrder.deleteOne({ + "worldRef.worldId": worldId, + orderId, + }); + return res.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete order: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const OrderRepository = (worldId: string) => ({ + createOrder: (data: TOrderInputModel) => createOrder({ worldId } as TWorldRefModel, data), + getOrderById: (args: string | { orderId: string }) => + getOrderById(worldId, typeof args === "string" ? args : args.orderId), + getAllOrders: (filters?: { + status?: string; + customerId?: string; + partnerId?: string; + orderId?: string; + direction?: string; + cursor?: string; + limit?: number; + orderDateStart?: Date; + orderDateEnd?: Date; + requestedDateStart?: Date; + requestedDateEnd?: Date; + }) => getAllOrders(worldId, filters), + updateOrder: ( + args: string | ({ orderId: string } & Partial), + updateData?: Partial, + ) => + typeof args === "string" + ? updateOrder(worldId, args, updateData!) + : updateOrder(worldId, args.orderId, args), + updateOrderStatus: ( + args: string | { orderId: string; status: TOrderModel["status"] }, + status?: TOrderModel["status"], + ) => + typeof args === "string" + ? updateOrderStatus(worldId, args, status!) 
+ : updateOrderStatus(worldId, args.orderId, args.status), + deleteOrder: (orderId: string) => deleteOrder(worldId, orderId), +}); + +export type TOrderRepository = ReturnType; diff --git a/packages/controlmart/src/repository/erp/payment.repository.ts b/packages/controlmart/src/repository/erp/payment.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..217a0e671d48e4f5186996ae0be40bd4a43d521e --- /dev/null +++ b/packages/controlmart/src/repository/erp/payment.repository.ts @@ -0,0 +1,275 @@ +import { + Payment, + type TPaymentModel, + type TPaymentInput, + type TPaymentAllocationModel, +} from "../../models/erp/payment.erp.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createPayment = async ( + world: TWorldRefModel, + data: TPaymentInput, +): Promise => { + try { + // Only check for duplicates if paymentId is explicitly provided + if (data.paymentId) { + const existing = await Payment.findOne({ + "worldRef.worldId": world.worldId, + paymentId: data.paymentId, + }); + + if (existing) { + throw new RepositoryError( + `Payment ${data.paymentId} already exists in this world`, + "DUPLICATE_ERROR", + ); + } + } + + const transaction = await Payment.create({ + ...data, + worldRef: world, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TPaymentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create payment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getPaymentById = async ( + worldId: string, + paymentId: string, +): Promise => { + try { + if (!paymentId) { + throw new RepositoryError("Payment ID is required", "VALIDATION_ERROR"); + } + + const transaction = await Payment.findOne({ + "worldRef.worldId": worldId, + paymentId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TPaymentModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find payment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllPayments = async ( + worldId: string, + filters?: { + status?: string; + customerId?: string; + partnerId?: string; + dateStart?: Date; + dateEnd?: Date; + productId?: string; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.status) query.status = filters.status; + if (filters?.customerId) query.customerId = filters.customerId; + if (filters?.partnerId) query.partnerId = filters.partnerId; + if (filters?.dateStart || filters?.dateEnd) { + query.paymentDate = {}; + if (filters.dateStart) query.paymentDate.$gte = filters.dateStart; + if (filters.dateEnd) query.paymentDate.$lte = filters.dateEnd; + } + + if (filters?.productId) query["appliedProducts.productId"] = filters.productId; + + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const payments = await Payment.find(query) + .sort({ paymentDate: -1 }) + .limit(limit + 1); + + const hasMore = payments.length > limit; + const results = hasMore ? payments.slice(0, limit) : payments; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((p) => p.toJSON() as TPaymentModel), + nextCursor, + totalCount: hasMore ? await Payment.countDocuments(query) : results.length, + hasMore, + limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve payments: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updatePayment = async ( + worldId: string, + paymentId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await Payment.findOneAndUpdate( + { "worldRef.worldId": worldId, paymentId }, + { + $set: { + ...updateData, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Payment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TPaymentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update payment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updatePaymentStatus = async ( + worldId: string, + paymentId: string, + status: TPaymentModel["status"], +): Promise => { + try { + const transaction = await Payment.findOneAndUpdate( + { "worldRef.worldId": worldId, paymentId }, + { + $set: { status, updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Payment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TPaymentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update payment status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const applyAllocations = async ( + worldId: string, + paymentId: string, + allocations: TPaymentAllocationModel[], +): Promise => { + try { + const payment = await Payment.findOne({ + "worldRef.worldId": worldId, + paymentId, + }); + + if (!payment) { + throw new RepositoryError("Payment not found", "NOT_FOUND_ERROR"); + } + + const appliedTotal = allocations.reduce((sum, a) => sum + (a.appliedAmount || 0), 0); + + if (appliedTotal > (payment.totalAmount || 0)) { + throw new RepositoryError("Applied amount exceeds payment total", "VALIDATION_ERROR"); + } + + const transaction = await Payment.findOneAndUpdate( + { "worldRef.worldId": worldId, paymentId }, + { + $push: { allocations: { $each: allocations } }, + $set: { status: "APPLIED", updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TPaymentModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to apply allocations: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deletePayment = async (worldId: string, paymentId: string): Promise => { + try { + const res = await Payment.deleteOne({ + "worldRef.worldId": worldId, + paymentId, + }); + return res.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete payment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +/** + * World-scoped Payment Repository factory. 
+ */ +export const PaymentRepository = (worldId: string) => ({ + createPayment: (data: TPaymentInput) => createPayment({ worldId } as TWorldRefModel, data), + getPaymentById: (paymentId: string) => getPaymentById(worldId, paymentId), + getAllPayments: (filters?: { + status?: string; + customerId?: string; + partnerId?: string; + dateStart?: Date; + dateEnd?: Date; + productId?: string; + cursor?: string; + limit?: number; + }) => getAllPayments(worldId, filters), + updatePayment: (paymentId: string, updateData: Partial) => + updatePayment(worldId, paymentId, updateData), + updatePaymentStatus: (paymentId: string, status: TPaymentModel["status"]) => + updatePaymentStatus(worldId, paymentId, status), + applyAllocations: (paymentId: string, allocations: TPaymentAllocationModel[]) => + applyAllocations(worldId, paymentId, allocations), + deletePayment: (paymentId: string) => deletePayment(worldId, paymentId), +}); + +export type TPaymentRepository = ReturnType; diff --git a/packages/controlmart/src/repository/erp/product.repository.ts b/packages/controlmart/src/repository/erp/product.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..002096b7b8b7a213005213573f1a1192390d3e87 --- /dev/null +++ b/packages/controlmart/src/repository/erp/product.repository.ts @@ -0,0 +1,348 @@ +import { + Product, + type TProductModel, + type TProductInput, +} from "../../models/erp/product.erp.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createProduct = async ( + world: TWorldRefModel, + data: TProductInput, +): Promise => { + try { + const existing = await Product.findOne({ + "worldRef.worldId": world.worldId, + ...(data.productId ? 
{ productId: data.productId } : {}), + }); + + if (existing) { + throw new RepositoryError( + `Product with SKU ${data.productId} already exists in this world`, + "DUPLICATE_ERROR", + ); + } + + const transaction = await Product.create({ + ...data, + worldRef: world, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TProductModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getProductById = async ( + worldId: string, + productId: string, +): Promise => { + try { + if (!productId) { + throw new RepositoryError("Product ID is required", "VALIDATION_ERROR"); + } + + const transaction = await Product.findOne({ + "worldRef.worldId": worldId, + productId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TProductModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllProducts = async ( + worldId: string, + filters?: { + status?: string; + inventoryTracking?: boolean; + minPrice?: number; + maxPrice?: number; + searchText?: string; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.status) query.status = filters.status; + if (filters?.inventoryTracking !== undefined) { + query.inventoryTracking = filters.inventoryTracking; + } + + if (filters?.minPrice || filters?.maxPrice) { + query.price = {}; + if (filters.minPrice) query.price.$gte = filters.minPrice; + if (filters.maxPrice) query.price.$lte = filters.maxPrice; + } + + if (filters?.searchText) { + query.$or = [ + { name: new RegExp(filters.searchText, "i") }, + { description: new RegExp(filters.searchText, "i") }, + { productId: new RegExp(filters.searchText, "i") }, + ]; + } + + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const products = await Product.find(query) + .sort({ createdAt: -1 }) + .limit(limit + 1); + + const hasMore = products.length > limit; + const results = hasMore ? products.slice(0, limit) : products; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((p) => p.toJSON() as TProductModel), + nextCursor, + totalCount: hasMore ? 
await Product.countDocuments(query) : results.length, + hasMore, + limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve products: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateProduct = async ( + worldId: string, + productId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await Product.findOneAndUpdate( + { "worldRef.worldId": worldId, productId }, + { + $set: { + ...updateData, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Product not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TProductModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateProductStatus = async ( + worldId: string, + productId: string, + status: TProductModel["status"], +): Promise => { + try { + const transaction = await Product.findOneAndUpdate( + { "worldRef.worldId": worldId, productId }, + { + $set: { status, updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Product not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TProductModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update product status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateProductPricing = async ( + worldId: string, + productId: string, + price: { currency: string; amount: number }, + cost?: { currency: string; amount: number }, +): Promise => { + try { + const transaction = await Product.findOneAndUpdate( + { "worldRef.worldId": worldId, productId }, + { + $set: { + price, + ...(cost ? 
{ cost } : {}), + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Product not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TProductModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update product pricing: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const toggleInventoryTracking = async ( + worldId: string, + productId: string, + enabled: boolean, +): Promise => { + try { + const transaction = await Product.findOneAndUpdate( + { "worldRef.worldId": worldId, productId }, + { + $set: { inventoryTracking: enabled, updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Product not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TProductModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to toggle inventory tracking: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const bulkUpsertProducts = async ( + world: TWorldRefModel, + products: TProductInput[], +): Promise => { + try { + const bulkOps = products.map((p) => ({ + updateOne: { + filter: { "worldRef.worldId": world.worldId, productId: p.productId }, + update: { $set: { ...p, worldRef: world, updatedAt: new Date() } }, + upsert: true, + }, + })); + + const result = await Product.bulkWrite(bulkOps); + return result.modifiedCount + result.upsertedCount; + } catch (error) { + throw new RepositoryError( + `Failed to bulk upsert products: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteProduct = async (worldId: string, productId: string): Promise => { + try { + const res = await Product.deleteOne({ + "worldRef.worldId": worldId, + productId, + }); + return res.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getRandomProduct = async (worldId: string): Promise => { + try { + const filter = { "worldRef.worldId": worldId, "customFields.billOfMaterials": { $exists: true } }; + const count = await Product.countDocuments(filter); + if (count === 0) return null; + + const random = Math.floor(Math.random() * count); + const product = await Product.findOne(filter).skip(random); + + return (product?.toJSON() as TProductModel) || null; + } catch (error) { + throw new RepositoryError( + `Failed to get random product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const ProductRepository = (worldId: string) => ({ + createProduct: (data: TProductInput) => createProduct({ worldId } as TWorldRefModel, data), + getProductById: (args: string | { productId: string }) => + getProductById(worldId, typeof args === "string" ? 
args : args.productId), + getAllProducts: (filters?: { + status?: string; + inventoryTracking?: boolean; + minPrice?: number; + maxPrice?: number; + searchText?: string; + cursor?: string; + limit?: number; + }) => getAllProducts(worldId, filters), + updateProduct: (productId: string, updateData: Partial) => + updateProduct(worldId, productId, updateData), + updateProductStatus: (productId: string, status: TProductModel["status"]) => + updateProductStatus(worldId, productId, status), + updateProductPricing: ( + productId: string, + price: { currency: string; amount: number }, + cost?: { currency: string; amount: number }, + ) => updateProductPricing(worldId, productId, price, cost), + toggleInventoryTracking: (productId: string, enabled: boolean) => + toggleInventoryTracking(worldId, productId, enabled), + bulkUpsertProducts: (products: TProductInput[]) => + bulkUpsertProducts({ worldId } as TWorldRefModel, products), + deleteProduct: (productId: string) => deleteProduct(worldId, productId), + getRandomProduct: () => getRandomProduct(worldId), +}); + +export type TProductRepository = ReturnType; diff --git a/packages/controlmart/src/repository/erp/shipment.repository.ts b/packages/controlmart/src/repository/erp/shipment.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..008ae4e8e09976f61d99c313191e980fc185c54e --- /dev/null +++ b/packages/controlmart/src/repository/erp/shipment.repository.ts @@ -0,0 +1,394 @@ +import { + Shipment, + type TShipmentModel, + type TShipmentInput, + type TShipmentLineModel, +} from "../../models/erp/shipment.erp.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createShipment = async ( + world: TWorldRefModel, + data: TShipmentInput, +): Promise => { + try { + const existing = await Shipment.findOne({ + "worldRef.worldId": world.worldId, + ...(data.shipmentId ? 
{ shipmentId: data.shipmentId } : {}), + }); + + if (existing) { + throw new RepositoryError( + `Shipment ${data.shipmentId} already exists in this world`, + "DUPLICATE_ERROR", + ); + } + + const transaction = await Shipment.create({ + ...data, + worldRef: world, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TShipmentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create shipment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentById = async ( + worldId: string, + shipmentId: string, +): Promise => { + try { + if (!shipmentId) { + throw new RepositoryError("Shipment ID is required", "VALIDATION_ERROR"); + } + + const transaction = await Shipment.findOne({ + "worldRef.worldId": worldId, + shipmentId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TShipmentModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find shipment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllShipments = async ( + worldId: string, + filters?: { + status?: string; + productId?: string; + carrierName?: string; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + shipmentId?: string; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.status) query.status = filters.status; + if (filters?.productId) query.productId = filters.productId; + if (filters?.carrierName) query["carrier.name"] = new RegExp(filters.carrierName, "i"); + + if (filters?.dateStart || filters?.dateEnd) { + query.shipDate = {}; + if (filters.dateStart) query.shipDate.$gte = filters.dateStart; + if (filters.dateEnd) query.shipDate.$lte = filters.dateEnd; + } + + if (filters?.shipmentId) { + query.shipmentId = filters.shipmentId; + } + + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const shipments = await Shipment.find(query) + .sort({ shipDate: -1 }) + .limit(limit + 1); + + const hasMore = shipments.length > limit; + const results = hasMore ? shipments.slice(0, limit) : shipments; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((s) => s.toJSON() as TShipmentModel), + nextCursor, + totalCount: hasMore ? 
await Shipment.countDocuments(query) : results.length, + hasMore, + limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve shipments: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateShipment = async ( + worldId: string, + shipmentId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $set: { + ...updateData, + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Shipment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TShipmentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update shipment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateShipmentStatus = async ( + worldId: string, + shipmentId: string, + status: TShipmentModel["status"], +): Promise => { + try { + const transaction = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $set: { status, updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Shipment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TShipmentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update shipment status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateTrackingDetails = async ( + worldId: string, + shipmentId: string, + carrier: { name?: string; scac?: string; mode?: string }, + trackingNumber?: string, +): Promise => { + try { + const transaction = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $set: { + carrier, + ...(trackingNumber ? 
{ trackingNumber } : {}), + updatedAt: new Date(), + }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Shipment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TShipmentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update tracking details: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addShipmentEvent = async ( + worldId: string, + shipmentId: string, + event: { ts: Date; location: string; status: string; note?: string }, +): Promise => { + try { + const transaction = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $push: { events: event }, + $set: { updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Shipment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TShipmentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to add shipment event: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addShipmentDocument = async ( + worldId: string, + shipmentId: string, + documentUrl: string, +): Promise => { + try { + const transaction = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $push: { documents: documentUrl }, + $set: { updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Shipment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TShipmentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to add shipment document: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateShipmentLines = async ( + worldId: string, + shipmentId: string, + lines: TShipmentLineModel[], +): Promise => { + try { + const transaction = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $set: { lines, updatedAt: new Date() }, + }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("Shipment not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TShipmentModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update shipment lines: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const bulkUpsertShipments = async ( + world: TWorldRefModel, + shipments: TShipmentInput[], +): Promise => { + try { + const bulkOps = shipments.map((s) => ({ + updateOne: { + filter: { "worldRef.worldId": world.worldId, shipmentId: s.shipmentId }, + update: { $set: { ...s, worldRef: world, updatedAt: new Date() } }, + upsert: true, + }, + })); + + const result = await Shipment.bulkWrite(bulkOps); + return result.modifiedCount + result.upsertedCount; + } catch (error) { + throw new RepositoryError( + `Failed to bulk upsert shipments: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteShipment = async (worldId: string, shipmentId: string): Promise => { + try { + const res = await Shipment.deleteOne({ + "worldRef.worldId": worldId, + shipmentId, + }); + return res.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete shipment: 
${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const ERPShipmentRepository = (worldId: string) => ({ + createShipment: (data: TShipmentInput) => createShipment({ worldId } as TWorldRefModel, data), + getShipmentById: (shipmentId: string) => getShipmentById(worldId, shipmentId), + getAllShipments: (filters?: { + status?: string; + poNumber?: string; + carrierName?: string; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + shipmentId?: string; + }) => getAllShipments(worldId, filters), + updateShipment: (shipmentId: string, updateData: Partial) => + updateShipment(worldId, shipmentId, updateData), + updateShipmentStatus: (shipmentId: string, status: TShipmentModel["status"]) => + updateShipmentStatus(worldId, shipmentId, status), + updateTrackingDetails: ( + shipmentId: string, + carrier: { name?: string; scac?: string; mode?: string }, + trackingNumber?: string, + ) => updateTrackingDetails(worldId, shipmentId, carrier, trackingNumber), + addShipmentEvent: ( + shipmentId: string, + event: { ts: Date; location: string; status: string; note?: string }, + ) => addShipmentEvent(worldId, shipmentId, event), + addShipmentDocument: (shipmentId: string, docUrl: string) => + addShipmentDocument(worldId, shipmentId, docUrl), + updateShipmentLines: (shipmentId: string, lines: TShipmentLineModel[]) => + updateShipmentLines(worldId, shipmentId, lines), + bulkUpsertShipments: (shipments: TShipmentInput[]) => + bulkUpsertShipments({ worldId } as TWorldRefModel, shipments), + deleteShipment: (shipmentId: string) => deleteShipment(worldId, shipmentId), +}); + +export type TERPShipmentRepository = ReturnType; diff --git a/packages/controlmart/src/repository/finance/finance.repository.ts b/packages/controlmart/src/repository/finance/finance.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..359cd2ff2d858836c1ee8ba9c7da1d4eac9910ff --- /dev/null +++ b/packages/controlmart/src/repository/finance/finance.repository.ts @@ -0,0 +1,536 @@ +import { + FinanceTransaction, + type TFinanceTransactionModel, + type TFinanceTransactionInput, +} from "../../models/finance/finance.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +/** + * Validates transaction data before database operations + */ +const validateTransactionData = (data: TFinanceTransactionInput): void => { + if (!data.type || !data.amount || !data.sourceType || !data.sourceId) { + throw new RepositoryError( + "Missing required fields: type, amount, sourceType, and sourceId are required", + "VALIDATION_ERROR", + ); + } + + if (data.amount <= 0) { + throw new RepositoryError("Amount must be greater than 0", "VALIDATION_ERROR"); + } + + if (!["payment_in", "payment_out"].includes(data.type)) { + throw new RepositoryError( + "Type must be either 'payment_in' or 'payment_out'", + "VALIDATION_ERROR", + ); + } + + if (!["invoice", "bill", "manual", "interest", "payment"].includes(data.sourceType)) { + throw new RepositoryError( + "SourceType must be one of: invoice, bill, manual, interest, payment", + "VALIDATION_ERROR", + ); + } +}; + +const createFinanceTransaction = async ( + world: TWorldRefModel, + data: TFinanceTransactionInput, +): Promise => { + try { + // Validate input data + validateTransactionData(data); + + // Let the model generate the transactionId if not provided + const transactionData = data.transactionId + 
? { ...data, worldRef: world } + : { ...data, worldRef: world }; + + const transaction = await FinanceTransaction.create(transactionData); + + const jsonified = transaction.toJSON(); + return jsonified; + } catch (error) { + if (error instanceof RepositoryError) { + throw error; + } + throw new RepositoryError( + `Failed to create finance transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const bulkInsertTransactions = async ( + world: TWorldRefModel, + transactions: TFinanceTransactionInput[], +): Promise => { + try { + if (!Array.isArray(transactions) || transactions.length === 0) { + throw new RepositoryError( + "Transactions array is required and cannot be empty", + "VALIDATION_ERROR", + ); + } + + // Validate all transactions before inserting + transactions.forEach((transaction, index) => { + try { + validateTransactionData(transaction); + } catch (error) { + throw new RepositoryError( + `Validation failed for transaction at index ${index}: ${getErrorMessage(error)}`, + "VALIDATION_ERROR", + ); + } + }); + + // Let the model generate transactionIds for each transaction + const enriched = transactions.map((t) => ({ + ...t, + worldRef: world, + // Only set transactionId if it's provided, otherwise let the model default handle it + ...(t.transactionId ? { transactionId: t.transactionId } : {}), + })); + + const result = await FinanceTransaction.insertMany(enriched, { + ordered: false, + }); + return result.length; + } catch (error) { + if (error instanceof RepositoryError) { + throw error; + } + throw new RepositoryError( + `Failed to bulk insert transactions: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getFinanceTransactionModels = async ( + worldId: string, + filters?: { + partnerId?: string; + type?: "payment_in" | "payment_out"; + sourceType?: "invoice" | "bill" | "manual" | "interest" | "payment"; + sourceId?: string; + amountMin?: number; + amountMax?: number; + dateStart?: Date; + dateEnd?: Date; + search?: string; + limit?: number; + cursor?: string | null; + }, +): Promise<{ transactions: TFinanceTransactionModel[]; nextCursor?: string }> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + // Build query conditions efficiently + if (filters?.partnerId) query.partnerId = filters.partnerId; + if (filters?.type) query.type = filters.type; + if (filters?.sourceType) query.sourceType = filters.sourceType; + if (filters?.sourceId) query.sourceId = filters.sourceId; + + if (filters?.amountMin !== undefined || filters?.amountMax !== undefined) { + query.amount = {}; + if (filters.amountMin !== undefined) query.amount.$gte = filters.amountMin; + if (filters.amountMax !== undefined) query.amount.$lte = filters.amountMax; + } + + if (filters?.dateStart || filters?.dateEnd) { + query.createdAt = {}; // Use createdAt instead of timestamp for better performance with default indexes + if (filters.dateStart) query.createdAt.$gte = filters.dateStart; + if (filters.dateEnd) query.createdAt.$lte = filters.dateEnd; + } + + if (filters?.cursor) { + query._id = { $lt: filters.cursor }; + } + + // Handle search functionality - only add if search term is provided + if (filters?.search && filters.search.trim()) { + const searchRegex = new RegExp( + filters.search.trim().replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), + "i", + ); + query.$or = [ + { partnerId: { $regex: searchRegex } }, + { sourceId: { $regex: searchRegex } }, + { transactionId: { $regex: searchRegex } }, + { "metadata.description": { $regex: searchRegex } }, + { 
"metadata.reference": { $regex: searchRegex } }, + ]; + } + + const limit = Math.min(filters?.limit ?? GLOBAL_PAGE_LIMIT, 1000); // Cap limit to prevent abuse + const transactions = await FinanceTransaction.find(query) + .sort({ createdAt: -1, _id: -1 }) // Compound sort for consistency + .limit(limit + 1) + .lean(); // Use lean() for better performance when we don't need Mongoose documents + + const hasMore = transactions.length > limit; + const results = hasMore ? transactions.slice(0, limit) : transactions; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + transactions: results as TFinanceTransactionModel[], + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve finance transactions: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTransactionById = async ( + worldId: string, + transactionId: string, +): Promise => { + try { + const transaction = await FinanceTransaction.findOne({ + "worldRef.worldId": worldId, + transactionId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTransactionsBySource = async ( + worldId: string, + sourceType: string, + sourceId: string, + filters?: { + limit?: number; + cursor?: string; + }, +): Promise<{ transactions: TFinanceTransactionModel[]; nextCursor?: string }> => { + try { + // Input validation + if (!sourceType || !sourceId) { + throw new RepositoryError("sourceType and sourceId are required", "VALIDATION_ERROR"); + } + + const query: Record = { + "worldRef.worldId": worldId, + sourceType, + sourceId, + }; + + if (filters?.cursor) { + query._id = { $lt: filters.cursor }; + } + + const limit = Math.min(filters?.limit ?? GLOBAL_PAGE_LIMIT, 1000); + const transactions = await FinanceTransaction.find(query) + .sort({ createdAt: -1, _id: -1 }) + .limit(limit + 1) + .lean(); + + const hasMore = transactions.length > limit; + const results = hasMore ? transactions.slice(0, limit) : transactions; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + transactions: results as TFinanceTransactionModel[], + nextCursor, + }; + } catch (error) { + if (error instanceof RepositoryError) { + throw error; + } + throw new RepositoryError( + `Failed to retrieve transactions by source: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateFinanceTransaction = async ( + worldId: string, + transactionId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await FinanceTransaction.findOneAndUpdate( + { + "worldRef.worldId": worldId, + transactionId, + }, + { $set: updateData }, + { new: true, runValidators: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to update transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteFinanceTransaction = async ( + worldId: string, + transactionId: string, +): Promise => { + try { + const result = await FinanceTransaction.deleteOne({ + "worldRef.worldId": worldId, + transactionId, + }); + return result.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const aggregateTransactionsByType = async ( + worldId: string, + filters?: { + partnerId?: string; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise> => { + try { + const match: Record = { "worldRef.worldId": worldId }; + + if (filters?.partnerId) match.partnerId = filters.partnerId; + if (filters?.dateEnd || filters?.dateStart) { + match.createdAt = {}; // Use createdAt for better index performance + if (filters.dateStart) match.createdAt.$gte = filters.dateStart; + if (filters.dateEnd) match.createdAt.$lte = filters.dateEnd; + } + + const results = await FinanceTransaction.aggregate([ + { $match: match }, + { + $group: { + _id: "$type", + count: { $sum: 1 }, + totalAmount: { $sum: "$amount" }, + avgAmount: { $avg: "$amount" }, // Add average for better insights + }, + }, + { $sort: { totalAmount: -1 } }, // Sort by total amount descending + ]); + + const stats: Record = {}; + for (const r of results) { + stats[r._id] = { + count: r.count, + totalAmount: Math.round(r.totalAmount * 100) / 100, // Round to 2 decimal places + avgAmount: Math.round(r.avgAmount * 100) / 100, + }; + } + return stats; + } catch (error) { + throw new RepositoryError( + `Failed to aggregate transactions by type: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const aggregateTransactionsByPartner = async ( + worldId: string, + filters?: { + type?: "payment_in" | "payment_out"; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + }, +): Promise> => { + try { + const match: Record = { + "worldRef.worldId": worldId, + partnerId: { $exists: true, $ne: null }, + }; + + if (filters?.type) match.type = filters.type; + if (filters?.dateStart || filters?.dateEnd) { + match.createdAt = {}; + if (filters.dateStart) match.createdAt.$gte = filters.dateStart; + if (filters.dateEnd) match.createdAt.$lte = filters.dateEnd; + } + + const pipeline = [ + { $match: match }, + { + $group: { + _id: "$partnerId", + count: { $sum: 1 }, + totalAmount: { $sum: "$amount" }, + avgAmount: { $avg: "$amount" }, + }, + }, + { $sort: { totalAmount: -1 as -1 } }, + { $limit: Math.min(filters?.limit || 100, 500) }, // Cap limit to prevent abuse + { + $project: { + partnerId: "$_id", + count: 1, + totalAmount: { $round: ["$totalAmount", 2] }, + 
avgAmount: { $round: ["$avgAmount", 2] }, + _id: 0, + }, + }, + ]; + + const results = await FinanceTransaction.aggregate(pipeline); + + return results; + } catch (error) { + throw new RepositoryError( + `Failed to aggregate transactions by partner: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getFinancialSummary = async ( + worldId: string, + filters?: { + partnerId?: string; + dateEnd?: Date; + dateStart?: Date; + }, +): Promise<{ + totalIncoming: number; + totalOutgoing: number; + netBalance: number; + transactionCount: number; + avgTransactionAmount: number; +}> => { + try { + const match: Record = { "worldRef.worldId": worldId }; + + if (filters?.partnerId) match.partnerId = filters.partnerId; + if (filters?.dateStart || filters?.dateEnd) { + match.createdAt = {}; // Use createdAt for better index performance + if (filters.dateStart) match.createdAt.$gte = filters.dateStart; + if (filters.dateEnd) match.createdAt.$lte = filters.dateEnd; + } + + const results = await FinanceTransaction.aggregate([ + { $match: match }, + { + $group: { + _id: null, + totalIncoming: { + $sum: { + $cond: [{ $eq: ["$type", "payment_in"] }, "$amount", 0], + }, + }, + totalOutgoing: { + $sum: { + $cond: [{ $eq: ["$type", "payment_out"] }, "$amount", 0], + }, + }, + transactionCount: { $sum: 1 }, + avgAmount: { $avg: "$amount" }, + }, + }, + { + $project: { + totalIncoming: { $round: ["$totalIncoming", 2] }, + totalOutgoing: { $round: ["$totalOutgoing", 2] }, + netBalance: { + $round: [{ $subtract: ["$totalIncoming", "$totalOutgoing"] }, 2], + }, + transactionCount: 1, + avgTransactionAmount: { $round: ["$avgAmount", 2] }, + }, + }, + ]); + + if (results.length === 0) { + return { + totalIncoming: 0, + totalOutgoing: 0, + netBalance: 0, + transactionCount: 0, + avgTransactionAmount: 0, + }; + } + + const summary = results[0]; + return { + totalIncoming: summary.totalIncoming || 0, + totalOutgoing: summary.totalOutgoing || 0, + netBalance: summary.netBalance || 0, + transactionCount: summary.transactionCount || 0, + avgTransactionAmount: summary.avgTransactionAmount || 0, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get financial summary: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const FinanceRepository = (worldId: string) => ({ + createTransaction: (data: TFinanceTransactionInput) => + createFinanceTransaction({ worldId } as TWorldRefModel, data), + bulkInsertTransactions: (transactions: TFinanceTransactionInput[]) => + bulkInsertTransactions({ worldId } as TWorldRefModel, transactions), + getTransactions: (filters?: { + partnerId?: string; + type?: "payment_in" | "payment_out"; + sourceType?: "invoice" | "bill" | "manual" | "interest" | "payment"; + sourceId?: string; + amountMin?: number; + amountMax?: number; + dateEnd?: Date; + dateStart?: Date; + search?: string; + limit?: number; + cursor?: string | null; + }) => getFinanceTransactionModels(worldId, filters), + getTransactionById: (transactionId: string) => getTransactionById(worldId, transactionId), + getTransactionsBySource: ( + sourceType: string, + sourceId: string, + filters?: { limit?: number; cursor?: string }, + ) => getTransactionsBySource(worldId, sourceType, sourceId, filters), + updateTransaction: (transactionId: string, updateData: Partial) => + updateFinanceTransaction(worldId, transactionId, updateData), + deleteTransaction: (transactionId: string) => deleteFinanceTransaction(worldId, transactionId), + + aggregateByType: (filters?: { partnerId?: string; dateStart?: 
Date; dateEnd?: Date }) => + aggregateTransactionsByType(worldId, filters), + aggregateByPartner: (filters?: { + type?: "payment_in" | "payment_out"; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + }) => aggregateTransactionsByPartner(worldId, filters), + getFinancialSummary: (filters?: { partnerId?: string; dateStart?: Date; dateEnd?: Date }) => + getFinancialSummary(worldId, filters), +}); + +export type TFinanceRepository = ReturnType; diff --git a/packages/controlmart/src/repository/finance/ledger.repository.ts b/packages/controlmart/src/repository/finance/ledger.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..655d2a7e75d448a3fbc2bc06168a62b5d1900920 --- /dev/null +++ b/packages/controlmart/src/repository/finance/ledger.repository.ts @@ -0,0 +1,335 @@ +import { + CompanyLedger, + type TCompanyLedgerModel, + type TCompanyLedgerInput, +} from "../../models/finance/ledger.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; + +const createOrUpsertLedger = async ( + world: TWorldRefModel, + data: TCompanyLedgerInput, +): Promise => { + try { + const transaction = await CompanyLedger.findOneAndUpdate( + { + "worldRef.worldId": world.worldId, + }, + { + $set: { + ...data, + worldRef: world, + }, + }, + { new: true, upsert: true, runValidators: true }, + ); + + const jsonified = transaction.toJSON(); + return jsonified as TCompanyLedgerModel; + } catch (error) { + throw new RepositoryError( + `Failed to create/update company ledger: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const findLedger = async (worldId: string): Promise => { + try { + const transaction = await CompanyLedger.findOne({ + "worldRef.worldId": worldId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCompanyLedgerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to find ledger by company ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllLedgers = async ( + worldId: string, + filters?: { + minCash?: number; + maxCash?: number; + minReceivables?: number; + maxReceivables?: number; + minPayables?: number; + maxPayables?: number; + positiveNetPosition?: boolean; + negativeNetPosition?: boolean; + sortOrder?: "asc" | "desc"; + sortBy?: "cash" | "totalReceivables" | "totalPayables" | "netPosition" | "updatedAt"; + limit?: number; + cursor?: string | null; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.minCash !== undefined || filters?.maxCash !== undefined) { + query.cash = {}; + if (filters.minCash !== undefined) query.cash.$gte = filters.minCash; + if (filters.maxCash !== undefined) query.cash.$lte = filters.maxCash; + } + + if (filters?.minReceivables !== undefined || filters?.maxReceivables !== undefined) { + query.totalReceivables = {}; + if (filters.minReceivables !== undefined) + query.totalReceivables.$gte = filters.minReceivables; + if (filters.maxReceivables !== undefined) + query.totalReceivables.$lte = filters.maxReceivables; + } + + if (filters?.minPayables !== undefined || filters?.maxPayables !== undefined) { + query.totalPayables = {}; + if (filters.minPayables !== undefined) query.totalPayables.$gte = filters.minPayables; + if (filters.maxPayables !== undefined) query.totalPayables.$lte = filters.maxPayables; + } + + if (filters?.positiveNetPosition === true) { + query.netPosition = { $gt: 0 }; + } + if 
(filters?.negativeNetPosition === true) { + query.netPosition = { $lt: 0 }; + } + + if (filters?.cursor) { + query._id = { $lt: filters.cursor }; + } + + const transactions = await CompanyLedger.find(query) + .sort({ + [filters?.sortBy || "netPosition"]: filters?.sortOrder === "asc" ? 1 : -1, + }) + .limit(filters?.limit || 200); + + const jsonified = transactions.map((t) => t.toJSON() as TCompanyLedgerModel); + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve company ledgers: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateLedger = async ( + worldId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await CompanyLedger.findOneAndUpdate( + { + "worldRef.worldId": worldId, + }, + { $set: updateData }, + { new: true, runValidators: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCompanyLedgerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update ledger: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const incrementBalances = async ( + worldId: string, + deltas: { + cashDelta?: number; + receivablesDelta?: number; + payablesDelta?: number; + }, +): Promise => { + try { + const updateOperations: Record = {}; + + if (deltas.cashDelta !== undefined) { + updateOperations.$inc = { + ...updateOperations.$inc, + cash: deltas.cashDelta, + }; + } + if (deltas.receivablesDelta !== undefined) { + updateOperations.$inc = { + ...updateOperations.$inc, + totalReceivables: deltas.receivablesDelta, + }; + } + if (deltas.payablesDelta !== undefined) { + updateOperations.$inc = { + ...updateOperations.$inc, + totalPayables: deltas.payablesDelta, + }; + } + + const transaction = await CompanyLedger.findOneAndUpdate( + { + "worldRef.worldId": worldId, + }, + updateOperations, + { new: true, runValidators: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCompanyLedgerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to increment ledger balances: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteLedger = async (worldId: string): Promise => { + try { + const transaction = await CompanyLedger.deleteOne({ + "worldRef.worldId": worldId, + }); + + const jsonified = transaction; + return jsonified.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete ledger: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getLedgersSummary = async ( + worldId: string, + filters?: { + positiveNetPositionOnly?: boolean; + }, +): Promise<{ + totalCash: number; + totalReceivables: number; + totalPayables: number; + totalNetPosition: number; + ledgerCount: number; + positiveLedgers: number; + negativeLedgers: number; +}> => { + try { + const match: Record = { "worldRef.worldId": worldId }; + + if (filters?.positiveNetPositionOnly === true) { + match.netPosition = { $gt: 0 }; + } + + const results = await CompanyLedger.aggregate([ + { $match: match }, + { + $group: { + _id: null, + totalCash: { $sum: "$cash" }, + totalReceivables: { $sum: "$totalReceivables" }, + totalPayables: { $sum: "$totalPayables" }, + totalNetPosition: { $sum: "$netPosition" }, + ledgerCount: { $sum: 1 }, + positiveLedgers: { + $sum: { $cond: [{ $gt: ["$netPosition", 0] }, 1, 0] }, + }, + negativeLedgers: { + $sum: { $cond: [{ $lt: ["$netPosition", 0] }, 1, 0] }, + }, + }, + }, + ]); + + if (results.length === 0) { + return { + totalCash: 0, + 
totalReceivables: 0, + totalPayables: 0, + totalNetPosition: 0, + ledgerCount: 0, + positiveLedgers: 0, + negativeLedgers: 0, + }; + } + + return results[0]; + } catch (error) { + throw new RepositoryError( + `Failed to get ledgers summary: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTopLedgersByNetPosition = async ( + worldId: string, + limit = 10, + direction: "positive" | "negative" = "positive", +): Promise => { + try { + const query: Record = { "worldRef.worldId": worldId }; + + if (direction === "positive") { + query.netPosition = { $gt: 0 }; + } else { + query.netPosition = { $lt: 0 }; + } + + const sortOrder = direction === "positive" ? -1 : 1; + + const transactions = await CompanyLedger.find(query) + .sort({ netPosition: sortOrder }) + .limit(limit); + const jsonified = transactions.map((t) => t.toJSON() as TCompanyLedgerModel); + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to get top ledgers: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const bulkUpdateLedgers = async ( + world: TWorldRefModel, + updates: Array<{ data: Partial }>, +): Promise => { + try { + const bulkOps = updates.map((update) => ({ + updateOne: { + filter: { + "worldRef.worldId": world.worldId, + }, + update: { $set: { ...update.data, worldRef: world } }, + upsert: true, + }, + })); + + const result = await CompanyLedger.bulkWrite(bulkOps); + return result.modifiedCount + result.upsertedCount; + } catch (error) { + throw new RepositoryError( + `Failed to bulk update ledgers: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const CompanyLedgerRepository = (worldId: string) => ({ + ensure: (data: TCompanyLedgerInput) => createOrUpsertLedger({ worldId } as TWorldRefModel, data), + get: () => findLedger(worldId), + update: (updateData: Partial) => updateLedger(worldId, updateData), + increment: (deltas: { cashDelta?: number; receivablesDelta?: number; payablesDelta?: number }) => + incrementBalances(worldId, deltas), + delete: () => deleteLedger(worldId), + summary: () => getLedgersSummary(worldId), +}); + +export type TCompanyLedgerRepository = ReturnType; diff --git a/packages/controlmart/src/repository/index.ts b/packages/controlmart/src/repository/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..62532d29d7fe74d4acda4034537f6a8b7f841398 --- /dev/null +++ b/packages/controlmart/src/repository/index.ts @@ -0,0 +1,6 @@ +export * from "./world.repository"; +export * from "./erp"; +export * from "./logs.repository"; +export * from "./wms"; +export * from "./tms"; +export * from "./audit.repository"; diff --git a/packages/controlmart/src/repository/knowledge-graph.repository.ts b/packages/controlmart/src/repository/knowledge-graph.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..6c68286b6cc0f5625a366861b3bf05f04ea73d7d --- /dev/null +++ b/packages/controlmart/src/repository/knowledge-graph.repository.ts @@ -0,0 +1,209 @@ +import { KnowledgeGraphNode } from "../models/knowledge-graph-node.model"; +import { KnowledgeGraphEdge } from "../models/knowledge-graph-edge.model"; +import type { TKnowledgeGraphNodeInput } from "../models/knowledge-graph-node.model.type"; +import type { TKnowledgeGraphEdgeInput } from "../models/knowledge-graph-edge.model.type"; +import type { GraphNode, GraphEdge, NodeType, EdgeType } from "../types/knowledge-graph.type"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; + +/** + * Transform GraphNode to 
database format + * Maps: label → name, type (uppercase) → type (lowercase) + */ +const transformNodeToDB = (node: GraphNode): TKnowledgeGraphNodeInput => { + return { + id: node.id, + type: node.type.toLowerCase() as any, // PERSONA → persona, CAPABILITY → capability, etc. + name: node.label, // label → name + metadata: node.metadata + }; +}; + +/** + * Transform database node to GraphNode format + * Maps: name → label, type (lowercase) → type (uppercase) + */ +const transformNodeFromDB = (dbNode: any): GraphNode => { + return { + id: dbNode.id, + type: dbNode.type.toUpperCase() as NodeType, // persona → PERSONA, capability → CAPABILITY, etc. + label: dbNode.name, // name → label + metadata: dbNode.metadata + }; +}; + +/** + * Transform GraphEdge to database format + * Maps: from → source, to → target + */ +const transformEdgeToDB = (edge: GraphEdge): TKnowledgeGraphEdgeInput => { + return { + source: edge.from, // from → source + target: edge.to, // to → target + type: edge.type, + metadata: edge.metadata + }; +}; + +/** + * Transform database edge to GraphEdge format + * Maps: source → from, target → to + */ +const transformEdgeFromDB = (dbEdge: any): GraphEdge => { + return { + from: dbEdge.source, // source → from + to: dbEdge.target, // target → to + type: dbEdge.type as EdgeType, + metadata: dbEdge.metadata + }; +}; + +/** + * Save complete knowledge graph to database + * Replaces existing graph with new nodes and edges + * @param nodes Array of graph nodes + * @param edges Array of graph edges + * @throws RepositoryError with DATABASE_ERROR + */ +const saveGraph = async (nodes: GraphNode[], edges: GraphEdge[]): Promise => { + try { + const startTime = new Date(); + + // 1. Upsert Nodes + if (nodes.length > 0) { + const nodeOps = nodes.map(node => ({ + updateOne: { + filter: { id: node.id }, + update: { + $set: { + ...transformNodeToDB(node), + updatedAt: startTime // Force update time + } + }, + upsert: true + } + })); + await KnowledgeGraphNode.bulkWrite(nodeOps); + } + + // 2. Upsert Edges + if (edges.length > 0) { + const edgeOps = edges.map(edge => { + const dbEdge = transformEdgeToDB(edge); + return { + updateOne: { + filter: { + source: dbEdge.source, + target: dbEdge.target, + type: dbEdge.type + }, + update: { + $set: { + ...dbEdge, + updatedAt: startTime // Force update time + } + }, + upsert: true + } + }; + }); + await KnowledgeGraphEdge.bulkWrite(edgeOps); + } + + // 3. Prune stale data (anything not written by this save). + // Deleting documents with updatedAt < startTime is safe: every node and edge in this save + // was just stamped with updatedAt >= startTime, so only documents absent from the new graph + // are removed. If two saves of the same graph overlap (e.g. auto-seed running twice), both + // write the same valid set, so an item that is stale for one run is stale for the other. 
+ + await KnowledgeGraphNode.deleteMany({ updatedAt: { $lt: startTime } }).exec(); + await KnowledgeGraphEdge.deleteMany({ updatedAt: { $lt: startTime } }).exec(); + + console.log(`[KnowledgeGraphRepository] Saved ${nodes.length} nodes and ${edges.length} edges (pruned stale)`); + } catch (error) { + throw new RepositoryError( + `Failed to save knowledge graph: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Load complete knowledge graph from database + * @returns Object containing arrays of nodes and edges + * @throws RepositoryError with DATABASE_ERROR + */ +const loadGraph = async (): Promise<{ nodes: GraphNode[]; edges: GraphEdge[] }> => { + try { + // Load all nodes and edges from DB + const dbNodes = await (KnowledgeGraphNode as any).find({}).exec(); + const dbEdges = await (KnowledgeGraphEdge as any).find({}).exec(); + + // Transform to graph format + const nodes = dbNodes.map(transformNodeFromDB); + const edges = dbEdges.map(transformEdgeFromDB); + + console.log(`[KnowledgeGraphRepository] Loaded ${nodes.length} nodes and ${edges.length} edges`); + + return { nodes, edges }; + } catch (error) { + throw new RepositoryError( + `Failed to load knowledge graph: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Clear all knowledge graph data from database + * Deletes all nodes and edges + * @throws RepositoryError with DATABASE_ERROR + */ +const clearGraph = async (): Promise => { + try { + await KnowledgeGraphNode.deleteMany({}).exec(); + await KnowledgeGraphEdge.deleteMany({}).exec(); + + console.log(`[KnowledgeGraphRepository] Cleared all knowledge graph data`); + } catch (error) { + throw new RepositoryError( + `Failed to clear knowledge graph: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Knowledge Graph Repository + * Provides persistence operations for knowledge graph nodes and edges + * + * Handles transformation between GraphNode/GraphEdge types and MongoDB schema: + * - GraphNode.label ↔ DB.name + * - GraphEdge.from/to ↔ DB.source/target + * - GraphNode.type (uppercase) ↔ DB.type (lowercase) + */ +export const KnowledgeGraphRepository = { + saveGraph, + loadGraph, + clearGraph +}; + +export type TKnowledgeGraphRepository = typeof KnowledgeGraphRepository; diff --git a/packages/controlmart/src/repository/log-queue.repository.ts b/packages/controlmart/src/repository/log-queue.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..4ffb957176b3fa6a928ff319e9013010b2d01d3c --- /dev/null +++ b/packages/controlmart/src/repository/log-queue.repository.ts @@ -0,0 +1,101 @@ +import { LogQueue, type TLogQueueInput, type TLogQueueModel } from "../models/log-queue.model"; +import type { TWorldRefModel } from "../models/shared.model"; +import { loadEnv } from "../utils/env.util"; + +const createLogQueue = async (world: TWorldRefModel, input: TLogQueueInput): Promise => { + return await LogQueue.create({ + ...input, + worldRef: world, + }); +}; + +const updateLogQueueStatus = async ( + worldId: string, + runId: string, + status: "queued" | "consumed" | "failed", +): Promise => { + const update: Partial = { status }; + + if (status === "consumed") { + update.consumedAt = new Date(); + } + + return await LogQueue.findOneAndUpdate( + { runId, "worldRef.worldId": worldId }, + update, + { new: true } + ); +}; + +const findLogQueueByRunId = async (worldId: string, runId: string): Promise => { + return await LogQueue.findOne({ runId, "worldRef.worldId": worldId }); +}; + +const 
findPendingTicketCandidates = async (worldId: string, limit: number = 10): Promise => { + return await LogQueue.find({ + "worldRef.worldId": worldId, + status: "queued", + isTicketCandidate: true, + }) + .sort({ createdAt: 1 }) + .limit(limit); +}; + + +export const findGlobalPendingTicketCandidates = async (limit: number = 0): Promise => { + const { MAX_TICKET_RETRIES } = loadEnv(); + + const query = LogQueue.find({ + $or: [ + { status: "queued" }, + { + status: "failed", + retryCount: { $lt: MAX_TICKET_RETRIES } + } + ], + isTicketCandidate: true, + }) + .sort({ createdAt: 1 }); + + if (limit > 0) { + query.limit(limit); + } + + return await query; +}; + +/** + * Increment retry count and update status + */ +export const incrementLogQueueRetry = async (runId: string): Promise => { + return await LogQueue.findOneAndUpdate( + { runId }, + { + $inc: { retryCount: 1 }, + $set: { status: "failed" } + }, + { new: true } + ); +}; + +/** + * Delete consumed ticket candidates older than a specific date + */ +export const deleteGlobalConsumedLogQueueEntries = async (olderThan: Date): Promise => { + const result = await LogQueue.deleteMany({ + status: "consumed", + updatedAt: { $lt: olderThan }, + }); + return result.deletedCount; +}; + +export const LogQueueRepository = (worldId: string) => ({ + createLogQueue: (input: TLogQueueInput) => createLogQueue({ worldId } as TWorldRefModel, input), + updateLogQueueStatus: (runId: string, status: "queued" | "consumed" | "failed") => + updateLogQueueStatus(worldId, runId, status), + findLogQueueByRunId: (runId: string) => findLogQueueByRunId(worldId, runId), + findPendingTicketCandidates: (limit?: number) => findPendingTicketCandidates(worldId, limit), + incrementLogQueueRetry: (runId: string) => incrementLogQueueRetry(runId), +}); + + diff --git a/packages/controlmart/src/repository/logs.repository.ts b/packages/controlmart/src/repository/logs.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f817506298d428a7f91007dcb5c5704d2bdabe2 --- /dev/null +++ b/packages/controlmart/src/repository/logs.repository.ts @@ -0,0 +1,295 @@ +import { WorldLog, type TWorldLogModel, type TWorldLogInput } from "../models/logs.model"; +import type { TWorldRefModel } from "../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../utils/http.util"; + +const createLog = async (world: TWorldRefModel, data: TWorldLogInput): Promise => { + try { + if (!data.level || !data.serviceType) { + throw new RepositoryError("Level and serviceType are required", "VALIDATION_ERROR"); + } + + const transaction = await WorldLog.create({ + ...data, + worldRef: world, + timestamp: data.timestamp || new Date(), + }); + const jsonified = transaction.toJSON(); + return jsonified as TWorldLogModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError(`Failed to create log: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const bulkInsertLogs = async ( + world: TWorldRefModel, + logs: TWorldLogInput[], +): Promise => { + try { + if (!logs.length) { + throw new RepositoryError("Logs array cannot be empty", "VALIDATION_ERROR"); + } + + const MAX_BATCH_SIZE = 1000; + if (logs.length > MAX_BATCH_SIZE) { + throw new RepositoryError( + `Batch size cannot exceed ${MAX_BATCH_SIZE} logs`, + "VALIDATION_ERROR", + ); + } + + // Validate each log has required fields + const 
invalidLogs = logs.filter((log) => !log.level || !log.serviceType); + if (invalidLogs.length > 0) { + throw new RepositoryError("All logs must have level and serviceType", "VALIDATION_ERROR"); + } + + const enriched = logs.map((l) => ({ + ...l, + worldRef: world, + timestamp: l.timestamp || new Date(), + })); + const transaction = await WorldLog.insertMany(enriched, { ordered: false }); + const jsonified = transaction.map((t) => t.toJSON() as TWorldLogModel); + return jsonified; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to bulk insert logs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getLogs = async ( + worldId: string, + filters?: { + serviceType?: string; + level?: string; + searchText?: string; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + cursor?: string | null; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.serviceType) query.serviceType = filters.serviceType; + if (filters?.level) query.level = filters.level; + if (filters?.cursor) { + query._id = { $lt: filters.cursor }; + } + + if (filters?.dateStart || filters?.dateEnd) { + query.timestamp = {}; + if (filters.dateStart) query.timestamp.$gte = filters.dateStart; + if (filters.dateEnd) query.timestamp.$lte = filters.dateEnd; + } + + // Optimized full-text search with score sorting + if (filters?.searchText) { + query.$text = { + $search: filters.searchText, + $caseSensitive: false, + $diacriticSensitive: false, + }; + } + + const limit = Math.min(filters?.limit ?? GLOBAL_PAGE_LIMIT, GLOBAL_PAGE_LIMIT); + + // Get total count for pagination metadata + const totalCount = await WorldLog.countDocuments(query); + + const logs = await WorldLog.find( + query, + filters?.searchText ? { score: { $meta: "textScore" } } : {}, + ) + .sort( + filters?.searchText ? { score: { $meta: "textScore" }, timestamp: -1 } : { timestamp: -1 }, + ) + .limit(limit + 1); + + const hasMore = logs.length > limit; + const results = hasMore ? logs.slice(0, limit) : logs; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((l) => l.toJSON() as TWorldLogModel), + totalCount, + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve logs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getLogsByTransaction = async ( + worldId: string, + transactionId: string, + filters?: { + limit?: number; + cursor?: string; + }, +): Promise<{ logs: TWorldLogModel[]; nextCursor?: string }> => { + try { + if (!transactionId) throw new RepositoryError("Transaction ID is required", "VALIDATION_ERROR"); + + const query: Record = { + "worldRef.worldId": worldId, + transaction_id: transactionId, + }; + + if (filters?.cursor) { + query._id = { $lt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const logs = await WorldLog.find(query) + .sort({ timestamp: -1 }) + .limit(limit + 1); + + const hasMore = logs.length > limit; + const results = hasMore ? logs.slice(0, limit) : logs; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + logs: results.map((l) => l.toJSON() as TWorldLogModel), + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to fetch logs by transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const aggregateLogLevels = async ( + worldId: string, + filters?: { serviceType?: string; dateStart?: Date; dateEnd?: Date }, +): Promise> => { + try { + const match: Record = { "worldRef.worldId": worldId }; + if (filters?.serviceType) match.serviceType = filters.serviceType; + if (filters?.dateStart || filters?.dateEnd) { + match.timestamp = {}; + if (filters.dateStart) match.timestamp.$gte = filters.dateStart; + if (filters.dateEnd) match.timestamp.$lte = filters.dateEnd; + } + + const results = await WorldLog.aggregate([ + { $match: match }, + { $group: { _id: "$level", count: { $sum: 1 } } }, + ]); + + const levelCounts: Record = {}; + for (const r of results) levelCounts[r._id] = r.count; + return levelCounts; + } catch (error) { + throw new RepositoryError( + `Failed to aggregate log levels: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const cleanupOldLogs = async (worldId: string, days: number): Promise => { + try { + if (days < 1) { + throw new RepositoryError("Days must be at least 1", "VALIDATION_ERROR"); + } + + const MILLISECONDS_PER_DAY = 24 * 60 * 60 * 1000; + const cutoff = new Date(Date.now() - days * MILLISECONDS_PER_DAY); + + const res = await WorldLog.deleteMany({ + "worldRef.worldId": worldId, + timestamp: { $lt: cutoff }, + }); + return res.deletedCount || 0; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to cleanup logs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getLatestLogs = async ( + worldId: string, + filters?: { + serviceType?: string; + limit?: number; + cursor?: string; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + if (filters?.serviceType) query.serviceType = filters.serviceType; + if (filters?.cursor) { + query._id = { $lt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const logs = await WorldLog.find(query) + .sort({ timestamp: -1 }) + .limit(limit + 1); + + const hasMore = logs.length > limit; + const results = hasMore ? logs.slice(0, limit) : logs; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((l) => l.toJSON() as TWorldLogModel), + totalCount: await WorldLog.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get latest logs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WorldLogRepository = (worldId: string) => ({ + createLog: (data: TWorldLogInput) => createLog({ worldId } as TWorldRefModel, data), + bulkInsertLogs: (logs: TWorldLogInput[]) => bulkInsertLogs({ worldId } as TWorldRefModel, logs), + getLogs: (filters?: { + serviceType?: string; + level?: string; + searchText?: string; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + cursor?: string | null; + }) => getLogs(worldId, filters), + getLogsByTransaction: (transactionId: string, filters?: { limit?: number; cursor?: string }) => + getLogsByTransaction(worldId, transactionId, filters), + aggregateLogLevels: (filters?: { serviceType?: string; dateStart?: Date; dateEnd?: Date }) => + aggregateLogLevels(worldId, filters), + getLatestLogs: (filters?: { serviceType?: string; limit?: number; cursor?: string }) => + getLatestLogs(worldId, filters), + cleanupOldLogs: (days: number) => cleanupOldLogs(worldId, days), +}); + +export type TWorldLogRepository = ReturnType; diff --git a/packages/controlmart/src/repository/manufacturing/production-run.repository.ts b/packages/controlmart/src/repository/manufacturing/production-run.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..25033870711e85e97af899abd3269301d43419d0 --- /dev/null +++ b/packages/controlmart/src/repository/manufacturing/production-run.repository.ts @@ -0,0 +1,304 @@ +import type { TWorldRefModel } from "../../models/shared.model"; +import { + ProductionRun, + type TProductionRunInput, + type TProductionRunModel, +} from "../../models/manufacturing"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createProductionRun = async ( + world: TWorldRefModel, + data: TProductionRunInput, +): Promise => { + try { + if (!data.productionOrderId) { + throw new RepositoryError("Production Order ID is required", "VALIDATION_ERROR"); + } + + // Only check for duplicates if productionRunId is explicitly provided + if (data.productionRunId) { + const existing = await ProductionRun.findOne({ + "worldRef.worldId": world.worldId, + productionRunId: data.productionRunId, + }); + + if (existing) { + throw new RepositoryError( + `Production run with ID ${data.productionRunId} already exists`, + "DUPLICATE_ERROR", + ); + } + } + + const productionRun = await ProductionRun.create({ + ...data, + worldRef: world, + }); + + return productionRun.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create production run: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getProductionRunById = async ( + worldId: string, + productionRunId: string, +): Promise => { + try { + const productionRun = await ProductionRun.findOne({ + "worldRef.worldId": worldId, + productionRunId, + }); + + return productionRun ? 
productionRun.toJSON() : null; + } catch (error) { + throw new RepositoryError( + `Failed to get production run: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getProductionRunsByOrderId = async ( + worldId: string, + productionOrderId: string, +): Promise => { + try { + const productionRuns = await ProductionRun.find({ + "worldRef.worldId": worldId, + productionOrderId, + }).sort({ createdAt: -1 }); + + return productionRuns.map((run) => run.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get production runs by order ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getActiveProductionRuns = async ( + worldId: string, + filters?: { + status?: string[]; + workCenterId?: string; + equipmentId?: string; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } else { + // Default to active statuses + query.status = { $in: ["CREATED", "IN_PROGRESS"] }; + } + + if (filters?.workCenterId) { + query.workCenterId = filters.workCenterId; + } + + if (filters?.equipmentId) { + query.equipmentId = filters.equipmentId; + } + + const productionRuns = await ProductionRun.find(query).sort({ startTime: -1, createdAt: -1 }); + + return productionRuns.map((run) => run.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get active production runs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateProductionRunStatus = async ( + worldId: string, + productionRunId: string, + status: "CREATED" | "IN_PROGRESS" | "COMPLETED" | "FAILED" | "CANCELLED", + updates?: { + startTime?: Date; + endTime?: Date; + finishedGoodsProduced?: TProductionRunModel["finishedGoodsProduced"]; + qualityChecks?: TProductionRunModel["qualityChecks"]; + notes?: string; + }, +): Promise => { + try { + const updateData: Record = { + status, + }; + + if (status === "IN_PROGRESS" && !updates?.startTime) { + updateData.startTime = new Date(); + } + + if (status === "COMPLETED" || status === "FAILED" || status === "CANCELLED") { + if (!updates?.endTime) { + updateData.endTime = new Date(); + } + } + + if (updates?.startTime) { + updateData.startTime = updates.startTime; + } + + if (updates?.endTime) { + updateData.endTime = updates.endTime; + } + + if (updates?.finishedGoodsProduced) { + updateData.finishedGoodsProduced = updates.finishedGoodsProduced; + } + + if (updates?.qualityChecks) { + updateData.qualityChecks = updates.qualityChecks; + } + + if (updates?.notes !== undefined) { + updateData.notes = updates.notes; + } + + const productionRun = await ProductionRun.findOneAndUpdate( + { + "worldRef.worldId": worldId, + productionRunId, + }, + { $set: updateData }, + { new: true }, + ); + + return productionRun ? 
productionRun.toJSON() : null; + } catch (error) { + throw new RepositoryError( + `Failed to update production run status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllProductionRuns = async ( + worldId: string, + filters?: { + status?: string[]; + productionOrderId?: string; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + cursor?: string; + }, +): Promise<{ + items: TProductionRunModel[]; + totalCount: number; + hasMore: boolean; + nextCursor?: string; +}> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + + if (filters?.productionOrderId) { + query.productionOrderId = filters.productionOrderId; + } + + if (filters?.dateStart && filters?.dateEnd) { + query.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const limit = Math.min(filters?.limit || GLOBAL_PAGE_LIMIT, GLOBAL_PAGE_LIMIT); + + let skip = 0; + if (filters?.cursor) { + const cursorDoc = await ProductionRun.findOne({ + "worldRef.worldId": worldId, + productionRunId: filters.cursor, + }); + if (cursorDoc) { + skip = await ProductionRun.countDocuments({ + "worldRef.worldId": worldId, + createdAt: { $lte: cursorDoc.createdAt }, + ...query, + }); + } + } + + const totalCount = await ProductionRun.countDocuments(query); + + const productionRuns = await ProductionRun.find(query) + .sort({ createdAt: -1 }) + .limit(limit + 1) + .skip(skip); + + const hasMore = productionRuns.length > limit; + const items = productionRuns.slice(0, limit).map((run) => run.toJSON()); + + return { + items, + totalCount, + hasMore, + nextCursor: hasMore && items.length > 0 ? items[items.length - 1].productionRunId : undefined, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get all production runs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const ManufacturingProductionRunRepository = (worldId: string) => ({ + createProductionRun: (data: TProductionRunInput) => + createProductionRun({ worldId } as TWorldRefModel, data), + getProductionRunById: (args: string | { productionRunId: string }) => + getProductionRunById(worldId, typeof args === "string" ? args : args.productionRunId), + getProductionRunsByOrderId: (args: string | { productionOrderId: string }) => + getProductionRunsByOrderId(worldId, typeof args === "string" ? args : args.productionOrderId), + getActiveProductionRuns: (filters?: { + status?: string[]; + workCenterId?: string; + equipmentId?: string; + }) => getActiveProductionRuns(worldId, filters), + updateProductionRunStatus: ( + productionRunId: string | { productionRunId: string; status: "CREATED" | "IN_PROGRESS" | "COMPLETED" | "FAILED" | "CANCELLED" }, + status?: "CREATED" | "IN_PROGRESS" | "COMPLETED" | "FAILED" | "CANCELLED", + updates?: { + startTime?: Date; + endTime?: Date; + finishedGoodsProduced?: TProductionRunModel["finishedGoodsProduced"]; + qualityChecks?: TProductionRunModel["qualityChecks"]; + notes?: string; + }, + ) => { + const runId = typeof productionRunId === "string" ? productionRunId : productionRunId.productionRunId; + const runStatus = typeof productionRunId === "string" ? status! 
: productionRunId.status; + return updateProductionRunStatus(worldId, runId, runStatus, updates); + }, + getAllProductionRuns: (filters?: { + status?: string[]; + productionOrderId?: string; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + cursor?: string; + }) => getAllProductionRuns(worldId, filters), +}); + diff --git a/packages/controlmart/src/repository/od.repository.ts b/packages/controlmart/src/repository/od.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..235d7ba823a451842a81937410dd33d0834458ca --- /dev/null +++ b/packages/controlmart/src/repository/od.repository.ts @@ -0,0 +1,151 @@ +import { + OperationalDescriptor, + type TOperationalDescriptorModel, + type TOperationalDescriptorInput, +} from "../models/od.model"; +import type { TWorldRefModel } from "../models/shared.model"; +import type { TRepositoryPaginatedResult } from "../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../utils/http.util"; + +const createOD = async ( + world: TWorldRefModel, + data: TOperationalDescriptorInput, +): Promise => { + try { + const od = await OperationalDescriptor.create({ + ...data, + worldRef: world, + }); + const jsonified = od.toJSON(); + return jsonified as TOperationalDescriptorModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create operational descriptor: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; +const getODs = async ( + worldId: string, + filters: { + odType?: TOperationalDescriptorModel["odType"]; + name?: string; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: any = { worldRef: { worldId } }; + + if (filters.odType) { + query.odType = filters.odType; + } + + if (filters.name) { + query.name = { $regex: new RegExp(filters.name, "i") }; + } + + if (filters.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters.limit && filters.limit > 0 ? filters.limit : GLOBAL_PAGE_LIMIT; + + const ods = await OperationalDescriptor.find(query) + .sort({ _id: 1 }) + .limit(limit + 1); // Fetch one extra to check for next cursor + + const hasNextPage = ods.length > limit; + const results = hasNextPage ? ods.slice(0, -1) : ods; + + const paginatedResult: TRepositoryPaginatedResult = { + items: results.map((od) => od.toJSON() as TOperationalDescriptorModel), + totalCount: await OperationalDescriptor.countDocuments(query), + limit, + hasMore: hasNextPage, + nextCursor: + hasNextPage && results.length > 0 + ? 
results[results.length - 1]?._id?.toString() + : undefined, + }; + + return paginatedResult; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to list operational descriptors: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteODById = async (odId: string, worldId: string): Promise => { + try { + const result = await OperationalDescriptor.deleteOne({ odId, "worldRef.worldId": worldId }); + if (result.deletedCount === 0) { + throw new RepositoryError(`Operational Descriptor ${odId} not found`, "NOT_FOUND_ERROR"); + } + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to delete operational descriptor: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateODById = async ( + odId: string, + worldId: string, + updateData: Partial, +): Promise => { + try { + const result = await OperationalDescriptor.findOneAndUpdate( + { odId, "worldRef.worldId": worldId }, + { $set: updateData }, + { new: true, runValidators: true }, + ); + + if (!result) { + throw new RepositoryError(`Operational Descriptor ${odId} not found`, "NOT_FOUND_ERROR"); + } + + return result.toJSON() as TOperationalDescriptorModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update operational descriptor: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getODById = async ( + odId: string, + worldId: string, +): Promise => { + try { + const od = await OperationalDescriptor.findOne({ odId, "worldRef.worldId": worldId }); + + if (!od) { + return null; + } + + return od.toJSON() as TOperationalDescriptorModel; + } catch (error) { + throw new RepositoryError( + `Failed to get operational descriptor: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const ODRepository = { + createOD, + getODs, + getODById, + updateODById, + deleteODById, +}; diff --git a/packages/controlmart/src/repository/persona.repository.ts b/packages/controlmart/src/repository/persona.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..e372977b826cda395e938a21579e23a18f23b9fe --- /dev/null +++ b/packages/controlmart/src/repository/persona.repository.ts @@ -0,0 +1,221 @@ +import { Persona } from "../models/persona.model"; +import type { TPersonaModel, TPersonaInput } from "../models/persona.model.type"; +import type { PersonaFilter } from "../types/persona.type"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; +import { + applyOffsetPagination, + buildOffsetMeta, + DEFAULT_LIMITS, + type OffsetPaginationOptions, + type OffsetPaginationMeta +} from "../utils/pagination.util"; + +/** + * Create a new persona + * @param data Persona data (without _id, timestamps) + * @returns Created persona document + * @throws RepositoryError with VALIDATION_ERROR or DATABASE_ERROR + */ +const create = async (data: TPersonaInput): Promise => { + try { + // Validation + if (!data.id || !data.name || !data.role) { + throw new RepositoryError( + "Persona ID, name, and role are required", + "VALIDATION_ERROR" + ); + } + + const persona = await Persona.create(data); + return persona; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create persona: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Find persona by ID + * @param id Persona ID + * @returns Persona document or 
null if not found + * @throws RepositoryError with VALIDATION_ERROR or DATABASE_ERROR + */ +const findById = async (id: string): Promise => { + try { + if (!id) { + throw new RepositoryError("Persona ID is required", "VALIDATION_ERROR"); + } + return await (Persona as any).findOne({ id }).exec(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to find persona: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Get all personas with optional filters and pagination + * @param filters Optional filter criteria + * @param pagination Optional pagination options + * @returns Object with personas array and pagination metadata + * @throws RepositoryError with DATABASE_ERROR + */ +const getAll = async ( + filters?: PersonaFilter, + pagination?: OffsetPaginationOptions +): Promise<{ data: TPersonaModel[]; pagination?: OffsetPaginationMeta }> => { + try { + const query = filters ? buildFilterQuery(filters) : {}; + + // If pagination is provided, apply it + if (pagination) { + const page = pagination.page || 1; + const limit = pagination.limit || DEFAULT_LIMITS.personas; + const { skip, limit: finalLimit } = applyOffsetPagination(page, limit); + + // Get total count for pagination metadata + const total = await (Persona as any).countDocuments(query).exec(); + + // Get paginated results + const data = await (Persona as any) + .find(query) + .sort({ name: 1 }) + .skip(skip) + .limit(finalLimit) + .exec(); + + // Build pagination metadata + const paginationMeta = buildOffsetMeta(total, page, limit); + + return { data, pagination: paginationMeta }; + } + + // No pagination - return all results (backward compatible) + const data = await (Persona as any).find(query).sort({ name: 1 }).exec(); + return { data }; + } catch (error) { + throw new RepositoryError( + `Failed to get personas: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Update persona by ID + * @param id Persona ID + * @param data Partial persona data to update + * @returns Updated persona document + * @throws RepositoryError with VALIDATION_ERROR, NOT_FOUND_ERROR, or DATABASE_ERROR + */ +const update = async ( + id: string, + data: Partial +): Promise => { + try { + if (!id) { + throw new RepositoryError("Persona ID is required", "VALIDATION_ERROR"); + } + + // Separate fields to set vs unset (following world.repository.ts pattern) + const setFields: any = { updatedAt: new Date() }; + const unsetFields: any = {}; + + for (const [key, value] of Object.entries(data)) { + if (value === undefined) { + unsetFields[key] = ''; + } else { + setFields[key] = value; + } + } + + const updateOps: any = { $set: setFields }; + if (Object.keys(unsetFields).length > 0) { + updateOps.$unset = unsetFields; + } + + const updated = await (Persona as any).findOneAndUpdate( + { id }, + updateOps, + { new: true } + ).exec(); + + if (!updated) { + throw new RepositoryError("Persona not found", "NOT_FOUND_ERROR"); + } + + return updated; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update persona: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Delete persona by ID + * @param id Persona ID + * @returns True if deleted, false if not found + * @throws RepositoryError with VALIDATION_ERROR or DATABASE_ERROR + */ +const deletePersona = async (id: string): Promise => { + try { + if (!id) { + throw new RepositoryError("Persona ID is required", "VALIDATION_ERROR"); + } + + const 
result = await Persona.deleteOne({ id }).exec(); + return result.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete persona: ${getErrorMessage(error)}`, + "DATABASE_ERROR" + ); + } +}; + +/** + * Build MongoDB query from filter criteria + * @param filters Filter criteria + * @returns MongoDB query object + */ +const buildFilterQuery = (filters: PersonaFilter): any => { + const query: any = {}; + + if (filters.role) { + query.role = filters.role; + } + if (filters.department) { + query.department = filters.department; + } + if (filters.accessLevel) { + query['metadata.accessLevel'] = filters.accessLevel; + } + if (filters.tags && filters.tags.length > 0) { + query['metadata.tags'] = { $in: filters.tags }; + } + + return query; +}; + +/** + * Persona Repository + * Provides CRUD operations for personas (global resources) + */ +export const PersonaRepository = { + create, + findById, + getAll, + filter: getAll, // Alias for getAll with filters + update, + delete: deletePersona +}; + +export type TPersonaRepository = typeof PersonaRepository; diff --git a/packages/controlmart/src/repository/tickets.repository.ts b/packages/controlmart/src/repository/tickets.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..5894ce4c270c960d5c874019d099b38b3b6fb99a --- /dev/null +++ b/packages/controlmart/src/repository/tickets.repository.ts @@ -0,0 +1,219 @@ +import type { TWorldRefModel } from "../models/shared.model"; +import { WorldItsmTicket } from "../models/tickets.model"; +import type { TWorldItsmTicketModel, TWorldItsmTicketInput } from "../models/tickets.model"; +import type { TRepositoryPaginatedResult } from "../types/repository.type"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../utils/http.util"; + +const createTicket = async ( + world: TWorldRefModel, + ticketData: TWorldItsmTicketInput, +): Promise => { + try { + const transaction = await WorldItsmTicket.create({ + worldRef: world, + ...ticketData, + }); + const jsonified = transaction.toJSON(); + return jsonified as TWorldItsmTicketModel; + } catch (error) { + throw new RepositoryError( + `Failed to create ticket: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTicketById = async ( + worldId: string, + ticketId: string, +): Promise => { + try { + const transaction = await WorldItsmTicket.findOne({ + _id: ticketId, + "worldRef.worldId": worldId, + }); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldItsmTicketModel | null; + } catch (error) { + throw new RepositoryError(`Failed to get ticket: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const getTickets = async ( + worldId: string, + filters?: { + status?: TWorldItsmTicketModel["status"]; + priority?: TWorldItsmTicketModel["priority"]; + department?: string; + assignedTo?: string; + impact?: TWorldItsmTicketModel["impact"]; + urgency?: TWorldItsmTicketModel["urgency"]; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + cursor?: string | null; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters) { + if (filters.status) query.status = filters.status; + if (filters.priority) query.priority = filters.priority; + if (filters.department) query.department = filters.department; + if (filters.assignedTo) query.assignedTo = filters.assignedTo; + if (filters.impact) query.impact = filters.impact; + if (filters.urgency) query.urgency = 
filters.urgency; + if (filters.dateStart || filters.dateEnd) { + query.createdAt = {}; + if (filters.dateStart) query.createdAt.$gte = filters.dateStart; + if (filters.dateEnd) query.createdAt.$lte = filters.dateEnd; + } + if (filters.cursor) { + query._id = { $lt: filters.cursor }; + } + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const tickets = await WorldItsmTicket.find(query) + .sort({ createdAt: -1 }) + .limit(limit + 1); + + const hasMore = tickets.length > limit; + const results = hasMore ? tickets.slice(0, limit) : tickets; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((t) => t.toJSON() as TWorldItsmTicketModel), + totalCount: await WorldItsmTicket.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError(`Failed to get tickets: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +// Update the functions to include worldId validation +const updateTicket = async ( + worldId: string, + ticketId: string, + updateData: Partial, +): Promise => { + try { + const transaction = await WorldItsmTicket.findOneAndUpdate( + { + _id: ticketId, + "worldRef.worldId": worldId, + }, + updateData, + { new: true }, + ); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldItsmTicketModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update ticket: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateTicketWorkNotes = async ( + worldId: string, + ticketId: string, + workNotes: TWorldItsmTicketInput["workNotes"], +): Promise => { + try { + const transaction = await WorldItsmTicket.findOneAndUpdate( + { + _id: ticketId, + "worldRef.worldId": worldId, + }, + { workNotes }, + { new: true }, + ); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldItsmTicketModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update ticket work notes: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateTicketStatus = async ( + worldId: string, + ticketId: string, + status: TWorldItsmTicketModel["status"], +): Promise => { + try { + const transaction = await WorldItsmTicket.findOneAndUpdate( + { + _id: ticketId, + "worldRef.worldId": worldId, + }, + { status }, + { new: true }, + ); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldItsmTicketModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update ticket status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteTicket = async ( + worldId: string, + ticketId: string, +): Promise => { + try { + const transaction = await WorldItsmTicket.findOneAndDelete({ + _id: ticketId, + "worldRef.worldId": worldId, + }); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldItsmTicketModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to delete ticket: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const TicketRepository = (worldId: string) => ({ + createTicket: (ticketData: TWorldItsmTicketInput) => + createTicket({ worldId } as TWorldRefModel, ticketData), + getTicketById: (ticketId: string) => getTicketById(worldId, ticketId), + getTickets: (filters?: { + status?: TWorldItsmTicketModel["status"]; + priority?: TWorldItsmTicketModel["priority"]; + department?: string; + assignedTo?: string; + impact?: 
TWorldItsmTicketModel["impact"]; + urgency?: TWorldItsmTicketModel["urgency"]; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + cursor?: string | null; + }) => getTickets(worldId, filters), + updateTicket: (ticketId: string, updateData: Partial) => + updateTicket(worldId, ticketId, updateData), + updateTicketWorkNotes: (ticketId: string, workNotes: TWorldItsmTicketInput["workNotes"]) => + updateTicketWorkNotes(worldId, ticketId, workNotes), + updateTicketStatus: (ticketId: string, status: TWorldItsmTicketModel["status"]) => + updateTicketStatus(worldId, ticketId, status), + deleteTicket: (ticketId: string) => deleteTicket(worldId, ticketId), +}); + +export type TTicketRepository = ReturnType<typeof TicketRepository>; diff --git a/packages/controlmart/src/repository/tms/carrier.tms.repository.ts b/packages/controlmart/src/repository/tms/carrier.tms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..84e2e73fd7edbbdf880819225f0c75b67befb942 --- /dev/null +++ b/packages/controlmart/src/repository/tms/carrier.tms.repository.ts @@ -0,0 +1,426 @@ +import { + Carrier, + type TCarrierModel, + type TCarrierInput, +} from "../../models/tms/carrier.tms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createCarrier = async ( + world: TWorldRefModel, + data: TCarrierInput, +): Promise<TCarrierModel> => { + try { + if (!data.carrierId || !data.carrierCode) { + throw new RepositoryError("Carrier ID and code are required", "VALIDATION_ERROR"); + } + + const existing = await Carrier.findOne({ + "worldRef.worldId": world.worldId, + $or: [{ carrierId: data.carrierId }, { carrierCode: data.carrierCode }], + }); + + const existingJsonified = existing?.toJSON() || null; + + if (existingJsonified) { + throw new RepositoryError( + `Carrier with ID ${data.carrierId} or code ${data.carrierCode} already exists`, + "DUPLICATE_ERROR", + ); + } + + const transaction = await Carrier.create({ + ...data, + worldRef: world, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TCarrierModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create carrier: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCarrierById = async ( + worldId: string, + carrierId: string, +): Promise<TCarrierModel | null> => { + try { + const transaction = await Carrier.findOne({ + "worldRef.worldId": worldId, + carrierId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCarrierModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to get carrier by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCarrierByCode = async ( + worldId: string, + carrierCode: string, +): Promise<TCarrierModel | null> => { + try { + const transaction = await Carrier.findOne({ + "worldRef.worldId": worldId, + carrierCode, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCarrierModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to get carrier by code: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getActiveCarriers = async ( + worldId: string, + filters?: { + carrierType?: string; + serviceRegion?: string; + smartWayCertified?: boolean; + cursor?: string; + limit?: number; + }, +): Promise<TRepositoryPaginatedResult<TCarrierModel>> => { + try {
const query: any = { + "worldRef.worldId": worldId, + status: "ACTIVE", + }; + + if (filters?.carrierType) { + query.carrierType = filters.carrierType; + } + if (filters?.serviceRegion) { + query.serviceRegions = { $in: [filters.serviceRegion] }; + } + if (filters?.smartWayCertified !== undefined) { + query["compliance.smartWayCertified"] = filters.smartWayCertified; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const carriers = await Carrier.find(query) + .sort({ carrierName: 1 }) + .limit(limit + 1); + + const hasMore = carriers.length > limit; + const results = hasMore ? carriers.slice(0, limit) : carriers; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((c) => c.toJSON()), + limit, + hasMore, + nextCursor, + totalCount: await Carrier.countDocuments(query), + }; + } catch (error) { + throw new RepositoryError( + `Failed to get active carriers: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateCarrierPerformance = async ( + worldId: string, + carrierId: string, + performanceData: { + onTimeDeliveryRate?: number; + damageClaimRate?: number; + averageTransitTime?: number; + totalShipmentsCompleted?: number; + }, +): Promise => { + try { + const updateData: any = { + "performance.lastPerformanceUpdate": new Date(), + }; + + if (performanceData.onTimeDeliveryRate !== undefined) { + updateData["performance.onTimeDeliveryRate"] = performanceData.onTimeDeliveryRate; + } + if (performanceData.damageClaimRate !== undefined) { + updateData["performance.damageClaimRate"] = performanceData.damageClaimRate; + } + if (performanceData.averageTransitTime !== undefined) { + updateData["performance.averageTransitTime"] = performanceData.averageTransitTime; + } + if (performanceData.totalShipmentsCompleted !== undefined) { + updateData["performance.totalShipmentsCompleted"] = performanceData.totalShipmentsCompleted; + } + + const transaction = await Carrier.findOneAndUpdate( + { "worldRef.worldId": worldId, carrierId }, + { $set: updateData }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCarrierModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update carrier performance: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateCarrierStatus = async ( + worldId: string, + carrierId: string, + status: string, +): Promise => { + try { + const transaction = await Carrier.findOneAndUpdate( + { "worldRef.worldId": worldId, carrierId }, + { $set: { status } }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TCarrierModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update carrier status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCarriersByPerformance = async ( + worldId: string, + criteria: { + minOnTimeRate?: number; + maxDamageRate?: number; + maxTransitTime?: number; + carrierType?: string; + cursor?: string; + limit?: number; + }, +): Promise<{ carriers: TCarrierModel[]; nextCursor?: string }> => { + try { + const query: any = { + "worldRef.worldId": worldId, + status: "ACTIVE", + }; + + if (criteria.minOnTimeRate !== undefined) { + query["performance.onTimeDeliveryRate"] = { $gte: criteria.minOnTimeRate }; + } + if (criteria.maxDamageRate !== undefined) { + query["performance.damageClaimRate"] = { $lte: 
criteria.maxDamageRate }; + } + if (criteria.maxTransitTime !== undefined) { + query["performance.averageTransitTime"] = { $lte: criteria.maxTransitTime }; + } + if (criteria.carrierType) { + query.carrierType = criteria.carrierType; + } + if (criteria.cursor) { + query._id = { $gt: criteria.cursor }; + } + + const limit = criteria.limit ?? GLOBAL_PAGE_LIMIT; + const carriers = await Carrier.find(query) + .sort({ + "performance.onTimeDeliveryRate": -1, + "performance.damageClaimRate": 1, + }) + .limit(limit + 1); + + const hasMore = carriers.length > limit; + const results = hasMore ? carriers.slice(0, limit) : carriers; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + carriers: results.map((c) => c.toJSON()), + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get carriers by performance: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCarrierMetrics = async ( + worldId: string, + carrierId: string, +): Promise<{ + carrier: TCarrierModel | null; + metrics: { + currentPerformance: any; + recentTrends: any; + complianceStatus: any; + }; +}> => { + try { + const transaction = await Carrier.findOne({ + "worldRef.worldId": worldId, + carrierId, + }); + + const carrier = transaction?.toJSON() as TCarrierModel | null; + + if (!carrier) { + return { + carrier: null, + metrics: { + currentPerformance: null, + recentTrends: null, + complianceStatus: null, + }, + }; + } + + const metrics = { + currentPerformance: { + onTimeDeliveryRate: carrier.performance?.onTimeDeliveryRate || 0, + damageClaimRate: carrier.performance?.damageClaimRate || 0, + averageTransitTime: carrier.performance?.averageTransitTime || 0, + totalShipmentsCompleted: carrier.performance?.totalShipmentsCompleted || 0, + }, + recentTrends: { + lastPerformanceUpdate: carrier.performance?.lastPerformanceUpdate, + }, + complianceStatus: { + dotNumber: carrier.compliance?.dotNumber, + mcNumber: carrier.compliance?.mcNumber, + scacCode: carrier.compliance?.scacCode, + smartWayCertified: carrier.compliance?.smartWayCertified || false, + insuranceExpiry: carrier.compliance?.insuranceExpiry, + safetyRating: carrier.compliance?.safetyRating, + }, + }; + + return { carrier, metrics }; + } catch (error) { + throw new RepositoryError( + `Failed to get carrier metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const searchCarriers = async ( + worldId: string, + searchTerm: string, + filters?: { + carrierType?: string; + status?: string; + serviceRegion?: string; + cursor?: string; + limit?: number; + }, +): Promise<{ carriers: TCarrierModel[]; nextCursor?: string }> => { + try { + const query: any = { + "worldRef.worldId": worldId, + $or: [ + { carrierName: { $regex: searchTerm, $options: "i" } }, + { carrierCode: { $regex: searchTerm, $options: "i" } }, + { "compliance.scacCode": { $regex: searchTerm, $options: "i" } }, + ], + }; + + if (filters?.carrierType) { + query.carrierType = filters.carrierType; + } + if (filters?.status) { + query.status = filters.status; + } + if (filters?.serviceRegion) { + query.serviceRegions = { $in: [filters.serviceRegion] }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = Math.min(filters?.limit ?? GLOBAL_PAGE_LIMIT, 50); // Cap at 50 for search + const carriers = await Carrier.find(query) + .sort({ carrierName: 1 }) + .limit(limit + 1); + + const hasMore = carriers.length > limit; + const results = hasMore ? 
carriers.slice(0, limit) : carriers; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + carriers: results.map((c) => c.toJSON()), + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to search carriers: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const TmsCarrierRepository = (worldId: string) => ({ + createCarrier: (data: TCarrierInput) => createCarrier({ worldId } as TWorldRefModel, data), + getCarrierById: (carrierId: string) => getCarrierById(worldId, carrierId), + getCarrierByCode: (carrierCode: string) => getCarrierByCode(worldId, carrierCode), + getActiveCarriers: (filters?: { + carrierType?: string; + serviceRegion?: string; + smartWayCertified?: boolean; + cursor?: string; + limit?: number; + }) => getActiveCarriers(worldId, filters), + updateCarrierPerformance: ( + carrierId: string, + performanceData: { + onTimeDeliveryRate?: number; + damageClaimRate?: number; + averageTransitTime?: number; + totalShipmentsCompleted?: number; + }, + ) => updateCarrierPerformance(worldId, carrierId, performanceData), + updateCarrierStatus: (carrierId: string, status: string) => + updateCarrierStatus(worldId, carrierId, status), + getCarriersByPerformance: (criteria: { + minOnTimeRate?: number; + maxDamageRate?: number; + maxTransitTime?: number; + carrierType?: string; + cursor?: string; + limit?: number; + }) => getCarriersByPerformance(worldId, criteria), + getCarrierMetrics: (carrierId: string) => getCarrierMetrics(worldId, carrierId), + searchCarriers: ( + searchTerm: string, + filters?: { + carrierType?: string; + status?: string; + serviceRegion?: string; + cursor?: string; + limit?: number; + }, + ) => searchCarriers(worldId, searchTerm, filters), +}); + +export type TTmsCarrierRepository = ReturnType; diff --git a/packages/controlmart/src/repository/tms/inbound_trailer.tms.repository.ts b/packages/controlmart/src/repository/tms/inbound_trailer.tms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..2836b4b5dd8c2c3abb8d297e2f259b5e9f5c6738 --- /dev/null +++ b/packages/controlmart/src/repository/tms/inbound_trailer.tms.repository.ts @@ -0,0 +1,549 @@ +import { + InboundTrailer, + type TInboundTrailerModel, + type TInboundTrailerInput, +} from "../../models/tms/inbound_trailer.tms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createInboundTrailer = async ( + world: TWorldRefModel, + data: TInboundTrailerInput, +): Promise => { + try { + if (!data.trailerId) { + throw new RepositoryError("Trailer ID is required", "VALIDATION_ERROR"); + } + + const existing = await InboundTrailer.findOne({ + "worldRef.worldId": world.worldId, + trailerId: data.trailerId, + }); + + const existingJsonified = existing?.toJSON() || null; + + if (existingJsonified) { + throw new RepositoryError( + `Inbound trailer with ID ${data.trailerId} already exists`, + "DUPLICATE_ERROR", + ); + } + + const transaction = await InboundTrailer.create({ + ...data, + worldRef: world, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TInboundTrailerModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create inbound trailer: 
${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const scheduleTrailerAppointment = async ( + worldId: string, + trailerId: string, + appointmentData: { + appointmentId?: string; + scheduledArrival: Date; + scheduledDeparture?: Date; + dockDoor?: string; + dcId: string; + facilityName?: string; + }, +): Promise => { + try { + const trailer = await InboundTrailer.findOneAndUpdate( + { + "worldRef.worldId": worldId, + trailerId, + status: { $in: ["SCHEDULED", "EN_ROUTE"] }, + }, + { + $set: { + status: "SCHEDULED", + "appointmentInfo.appointmentId": appointmentData.appointmentId || `APPT-${Date.now()}`, + "appointmentInfo.scheduledArrival": appointmentData.scheduledArrival, + "appointmentInfo.scheduledDeparture": appointmentData.scheduledDeparture, + "appointmentInfo.dockDoor": appointmentData.dockDoor, + "facilityInfo.dcId": appointmentData.dcId, + "facilityInfo.facilityName": appointmentData.facilityName, + }, + }, + { new: true }, + ); + + const jsonified = trailer?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to schedule trailer appointment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateTrailerStatus = async ( + worldId: string, + trailerId: string, + status: string, + eventData?: { + actualArrival?: Date; + actualDeparture?: Date; + estimatedArrival?: Date; + dockDoor?: string; + }, +): Promise => { + try { + const updateData: any = { status }; + + if (eventData) { + if (eventData.actualArrival) { + updateData["appointmentInfo.actualArrival"] = eventData.actualArrival; + } + if (eventData.actualDeparture) { + updateData["appointmentInfo.actualDeparture"] = eventData.actualDeparture; + } + if (eventData.estimatedArrival) { + updateData["appointmentInfo.estimatedArrival"] = eventData.estimatedArrival; + } + if (eventData.dockDoor) { + updateData["appointmentInfo.dockDoor"] = eventData.dockDoor; + } + } + + const trailer = await InboundTrailer.findOneAndUpdate( + { "worldRef.worldId": worldId, trailerId }, + { $set: updateData }, + { new: true }, + ); + + const jsonified = trailer?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update trailer status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const checkInTrailer = async ( + worldId: string, + trailerId: string, + checkInData: { + actualArrival: Date; + driverName?: string; + driverPhone?: string; + sealNumber?: string; + dockDoor?: string; + }, +): Promise => { + try { + const trailer = await InboundTrailer.findOneAndUpdate( + { + "worldRef.worldId": worldId, + trailerId, + status: { $in: ["SCHEDULED", "EN_ROUTE"] }, + }, + { + $set: { + status: "CHECKED_IN", + "appointmentInfo.actualArrival": checkInData.actualArrival, + "appointmentInfo.dockDoor": checkInData.dockDoor, + "carrierInfo.driverName": checkInData.driverName, + "carrierInfo.driverPhone": checkInData.driverPhone, + "cargo.sealNumber": checkInData.sealNumber, + }, + }, + { new: true }, + ); + + const jsonified = trailer?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to check in trailer: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const assignToDock = async ( + worldId: string, + trailerId: string, + dockDoor: string, +): Promise => { + try { + const trailer = await InboundTrailer.findOneAndUpdate( + { + "worldRef.worldId": worldId, + trailerId, + 
status: "CHECKED_IN", + }, + { + $set: { + status: "AT_DOCK", + "appointmentInfo.dockDoor": dockDoor, + }, + }, + { new: true }, + ); + + const jsonified = trailer?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to assign trailer to dock: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const startUnloading = async ( + worldId: string, + trailerId: string, +): Promise => { + try { + const trailer = await InboundTrailer.findOneAndUpdate( + { + "worldRef.worldId": worldId, + trailerId, + status: "AT_DOCK", + }, + { $set: { status: "UNLOADING" } }, + { new: true }, + ); + + const jsonified = trailer?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to start unloading: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const completeUnloading = async ( + worldId: string, + trailerId: string, + unloadingData: { + actualPallets?: number; + completionTime: Date; + }, +): Promise => { + try { + const trailer = await InboundTrailer.findOneAndUpdate( + { + "worldRef.worldId": worldId, + trailerId, + status: "UNLOADING", + }, + { + $set: { + status: "UNLOADED", + "cargo.actualPallets": unloadingData.actualPallets, + }, + }, + { new: true }, + ); + + const jsonified = trailer?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to complete unloading: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addDelay = async ( + worldId: string, + trailerId: string, + delayData: { + delayType: string; + reason: string; + reportedAt: Date; + estimatedDelay: number; + }, +): Promise => { + try { + const trailer = await InboundTrailer.findOneAndUpdate( + { "worldRef.worldId": worldId, trailerId }, + { + $push: { delays: delayData }, + $set: { status: "DELAYED" }, + }, + { new: true }, + ); + + const jsonified = trailer?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError(`Failed to add delay: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const getTrailersByStatus = async ( + worldId: string, + statuses: string[], + filters?: { + dcId?: string; + carrierId?: string; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: any = { + "worldRef.worldId": worldId, + status: { $in: statuses }, + }; + + if (filters?.dcId) { + query["facilityInfo.dcId"] = filters.dcId; + } + if (filters?.carrierId) { + query["carrierInfo.carrierId"] = filters.carrierId; + } + if (filters?.dateStart && filters?.dateEnd) { + query["appointmentInfo.scheduledArrival"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const trailers = await InboundTrailer.find(query) + .sort({ "appointmentInfo.scheduledArrival": 1 }) + .limit(limit + 1); + + const hasMore = trailers.length > limit; + const results = hasMore ? trailers.slice(0, limit) : trailers; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((t) => t.toJSON()), + limit, + hasMore, + nextCursor, + totalCount: await InboundTrailer.countDocuments(query), + }; + } catch (error) { + throw new RepositoryError( + `Failed to get trailers by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTrailersByAppointmentDate = async ( + worldId: string, + dcId: string, + appointmentDate: Date, + filters?: { + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const startOfDay = new Date(appointmentDate); + startOfDay.setHours(0, 0, 0, 0); + + const endOfDay = new Date(appointmentDate); + endOfDay.setHours(23, 59, 59, 999); + + const query: any = { + "worldRef.worldId": worldId, + "facilityInfo.dcId": dcId, + "appointmentInfo.scheduledArrival": { + $gte: startOfDay, + $lte: endOfDay, + }, + }; + + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const trailers = await InboundTrailer.find(query) + .sort({ "appointmentInfo.scheduledArrival": 1 }) + .limit(limit + 1); + + const hasMore = trailers.length > limit; + const results = hasMore ? trailers.slice(0, limit) : trailers; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((t) => t.toJSON()), + limit, + hasMore, + nextCursor, + totalCount: await InboundTrailer.countDocuments(query), + }; + } catch (error) { + throw new RepositoryError( + `Failed to get trailers by appointment date: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAvailableDockDoors = async ( + worldId: string, + dcId: string, + timeSlot: { + startTime: Date; + endTime: Date; + }, +): Promise => { + try { + const occupiedDoors = await InboundTrailer.find({ + "worldRef.worldId": worldId, + "facilityInfo.dcId": dcId, + status: { $in: ["AT_DOCK", "UNLOADING"] }, + $or: [ + { + "appointmentInfo.scheduledArrival": { $lte: timeSlot.endTime }, + "appointmentInfo.scheduledDeparture": { $gte: timeSlot.startTime }, + }, + { + "appointmentInfo.scheduledArrival": { $lte: timeSlot.endTime }, + "appointmentInfo.scheduledDeparture": null, + }, + ], + }).distinct("appointmentInfo.dockDoor"); + + const allDoors = Array.from( + { length: 20 }, + (_, i) => `DOCK-${(i + 1).toString().padStart(2, "0")}`, + ); + + return allDoors.filter((door) => !occupiedDoors.includes(door)); + } catch (error) { + throw new RepositoryError( + `Failed to get available dock doors: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTrailerById = async ( + worldId: string, + trailerId: string, +): Promise => { + try { + const transaction = await InboundTrailer.findOne({ + "worldRef.worldId": worldId, + trailerId, + }); + + const jsonified = transaction?.toJSON() || null; + return jsonified as TInboundTrailerModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to get trailer by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const TmsInboundTrailerRepository = (worldId: string) => ({ + createInboundTrailer: (data: TInboundTrailerInput) => + createInboundTrailer({ worldId } as TWorldRefModel, data), + scheduleTrailerAppointment: ( + trailerId: string, + appointmentData: { + appointmentId?: string; + scheduledArrival: Date; + scheduledDeparture?: Date; + dockDoor?: string; + dcId: string; + facilityName?: string; + }, + ) => scheduleTrailerAppointment(worldId, trailerId, 
appointmentData), + updateTrailerStatus: ( + trailerId: string, + status: string, + eventData?: { + actualArrival?: Date; + actualDeparture?: Date; + estimatedArrival?: Date; + dockDoor?: string; + }, + ) => updateTrailerStatus(worldId, trailerId, status, eventData), + checkInTrailer: ( + trailerId: string, + checkInData: { + actualArrival: Date; + driverName?: string; + driverPhone?: string; + sealNumber?: string; + dockDoor?: string; + }, + ) => checkInTrailer(worldId, trailerId, checkInData), + assignToDock: (trailerId: string, dockDoor: string) => assignToDock(worldId, trailerId, dockDoor), + startUnloading: (trailerId: string) => startUnloading(worldId, trailerId), + completeUnloading: ( + trailerId: string, + unloadingData: { + actualPallets?: number; + completionTime: Date; + }, + ) => completeUnloading(worldId, trailerId, unloadingData), + addDelay: ( + trailerId: string, + delayData: { + delayType: string; + reason: string; + reportedAt: Date; + estimatedDelay: number; + }, + ) => addDelay(worldId, trailerId, delayData), + getTrailersByStatus: ( + statuses: string[], + filters?: { + dcId?: string; + carrierId?: string; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }, + ) => getTrailersByStatus(worldId, statuses, filters), + getTrailersByAppointmentDate: ( + dcId: string, + appointmentDate: Date, + filters?: { + cursor?: string; + limit?: number; + }, + ) => getTrailersByAppointmentDate(worldId, dcId, appointmentDate, filters), + getAvailableDockDoors: ( + dcId: string, + timeSlot: { + startTime: Date; + endTime: Date; + }, + ) => getAvailableDockDoors(worldId, dcId, timeSlot), + getTrailerById: (trailerId: string) => getTrailerById(worldId, trailerId), +}); + +export type TTmsInboundTrailerRepository = ReturnType; diff --git a/packages/controlmart/src/repository/tms/index.ts b/packages/controlmart/src/repository/tms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..9eca3f0f81cc21308453f4072696bcc17a0849cb --- /dev/null +++ b/packages/controlmart/src/repository/tms/index.ts @@ -0,0 +1,3 @@ +export * from "./shipment.tms.repository"; +export * from "./inbound_trailer.tms.repository"; +export * from "./carrier.tms.repository"; diff --git a/packages/controlmart/src/repository/tms/shipment.tms.repository.ts b/packages/controlmart/src/repository/tms/shipment.tms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..01ba26debbba68b97ba00cc81acecd15b31c592c --- /dev/null +++ b/packages/controlmart/src/repository/tms/shipment.tms.repository.ts @@ -0,0 +1,691 @@ +import { + Shipment, + type TShipmentModel, + type TShipmentInput, +} from "../../models/tms/shipment.tms.model"; +import { + ShipmentStatusEvent, + type TShipmentStatusEventModel, + type TShipmentStatusEventInput, +} from "../../models/tms/shipment_status_event.tms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createShipment = async ( + world: TWorldRefModel, + data: TShipmentInput, +): Promise => { + try { + // if (!data.shipmentId) { + // throw new RepositoryError("Shipment ID is required", "VALIDATION_ERROR"); + // } + console.log("TMS: Create Shipment Data", data); + + const existing = await Shipment.findOne({ + "worldRef.worldId": world.worldId, + shipmentId: data.shipmentId, + }); + + 
console.log("TMS: Create Shipment Existing", existing); + const existingJsonified = existing?.toJSON() || null; + + if (existingJsonified) { + throw new RepositoryError( + `Shipment with ID ${data.shipmentId} already exists`, + "DUPLICATE_ERROR", + ); + } + + const transaction = await Shipment.create({ + ...data, + worldRef: world, + }); + + const savedShipment = transaction.toJSON(); + console.log("TMS: Create Shipment Saved", savedShipment); + return savedShipment as TShipmentModel; + } catch (error) { + console.log("TMS: Create Shipment Error", error); + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create shipment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const tenderShipment = async ( + worldId: string, + shipmentId: string, + carrierInfo: { + carrierId: string; + carrierName: string; + carrierCode: string; + scacCode: string; + }, +): Promise => { + try { + console.log("TMS: Tender Shipment Data", shipmentId, carrierInfo); + console.log("TMS: Tender Shipment Data", worldId); + const shipment = await Shipment.findOneAndUpdate( + { + "worldRef.worldId": worldId, + shipmentId, + status: "PLANNED", + }, + { + $set: { + status: "TENDERED", + "carrierInfo.carrierId": carrierInfo.carrierId, + "carrierInfo.carrierName": carrierInfo.carrierName, + "carrierInfo.carrierCode": carrierInfo.carrierCode, + "carrierInfo.scacCode": carrierInfo.scacCode, + }, + }, + { new: true }, + ); + + const jsonified = shipment?.toJSON() || null; + + if (jsonified) { + await createStatusEvent(worldId, shipmentId, { + eventType: "STATUS_CHANGE", + eventTime: new Date(), + eventId: `${shipmentId}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + statusInfo: { + previousStatus: "PLANNED", + newStatus: "TENDERED", + }, + source: "API", + }); + } + + return jsonified as TShipmentModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to tender shipment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const acceptShipment = async ( + worldId: string, + shipmentId: string, + carrierAcceptanceData?: { + proNumber?: string; + trackingNumber?: string; + estimatedPickupDate?: Date; + }, +): Promise => { + try { + const updateData: any = { + status: "ACCEPTED", + }; + + if (carrierAcceptanceData) { + if (carrierAcceptanceData.proNumber) { + updateData["carrierInfo.proNumber"] = carrierAcceptanceData.proNumber; + } + if (carrierAcceptanceData.trackingNumber) { + updateData["carrierInfo.trackingNumber"] = carrierAcceptanceData.trackingNumber; + } + if (carrierAcceptanceData.estimatedPickupDate) { + updateData["dates.actualPickupDate"] = carrierAcceptanceData.estimatedPickupDate; + } + } + + const shipment = await Shipment.findOneAndUpdate( + { + "worldRef.worldId": worldId, + shipmentId, + status: "TENDERED", + }, + { $set: updateData }, + { new: true }, + ); + + const jsonified = shipment?.toJSON() || null; + + if (jsonified) { + await createStatusEvent(worldId, shipmentId, { + eventType: "STATUS_CHANGE", + eventTime: new Date(), + statusInfo: { + previousStatus: "TENDERED", + newStatus: "ACCEPTED", + }, + source: "API", + eventId: `${shipmentId}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + }); + } + + return jsonified as TShipmentModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to accept shipment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateShipmentLocation = async ( + worldId: string, + shipmentId: string, + locationData: { + latitude: 
number; + longitude: number; + city?: string; + state?: string; + timestamp: Date; + source?: "API" | "EDI" | "MANUAL" | "GPS" | "CARRIER_PORTAL"; + }, +): Promise => { + try { + const shipment = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $set: { + "currentLocation.lastKnownPosition.latitude": locationData.latitude, + "currentLocation.lastKnownPosition.longitude": locationData.longitude, + "currentLocation.lastUpdateTime": locationData.timestamp, + "currentLocation.currentCity": locationData.city, + "currentLocation.currentState": locationData.state, + }, + }, + { new: true }, + ); + + const jsonified = shipment?.toJSON() || null; + + if (jsonified) { + await createStatusEvent(worldId, shipmentId, { + eventType: "LOCATION_UPDATE", + eventTime: locationData.timestamp, + locationInfo: { + latitude: locationData.latitude, + longitude: locationData.longitude, + city: locationData.city, + state: locationData.state, + }, + source: locationData.source || "GPS", + eventId: `${shipmentId}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + }); + } + + return jsonified as TShipmentModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to update shipment location: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const processEdi214Update = async ( + worldId: string, + shipmentId: string, + ediData: { + status: string; + locationCode?: string; + city?: string; + state?: string; + timestamp: Date; + equipmentId?: string; + estimatedDeliveryDate?: Date; + rawEdiData: any; + }, +): Promise => { + try { + const updateData: any = { + status: ediData.status, + }; + + if (ediData.estimatedDeliveryDate) { + updateData["dates.estimatedDeliveryDate"] = ediData.estimatedDeliveryDate; + } + + const shipment = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { $set: updateData }, + { new: true }, + ); + + const jsonified = shipment?.toJSON() || null; + + if (jsonified) { + await createStatusEvent(worldId, shipmentId, { + eventType: "STATUS_CHANGE", + eventTime: ediData.timestamp, + statusInfo: { + previousStatus: jsonified.status, + newStatus: ediData.status, + }, + locationInfo: { + city: ediData.city, + state: ediData.state, + facility: ediData.locationCode, + }, + etaInfo: ediData.estimatedDeliveryDate + ? 
{ + newETA: ediData.estimatedDeliveryDate, + previousETA: jsonified.dates?.estimatedDeliveryDate, + } + : undefined, + source: "EDI", + rawData: ediData.rawEdiData, + eventId: `${shipmentId}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + }); + } + + return jsonified as TShipmentModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to process EDI 214 update: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateShipmentStatus = async ( + worldId: string, + shipmentId: string, + status: string, + eventData?: { + timestamp?: Date; + location?: string; + note?: string; + source?: string; + }, +): Promise => { + try { + const currentShipment = await Shipment.findOne({ + "worldRef.worldId": worldId, + shipmentId, + }); + + const currentJsonified = currentShipment?.toJSON() || null; + + if (!currentJsonified) { + throw new RepositoryError(`Shipment with ID ${shipmentId} not found`, "NOT_FOUND_ERROR"); + } + + const shipment = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { $set: { status } }, + { new: true }, + ); + + const jsonified = shipment?.toJSON() || null; + + if (jsonified) { + await createStatusEvent(worldId, shipmentId, { + eventType: "STATUS_CHANGE", + eventTime: eventData?.timestamp || new Date(), + statusInfo: { + previousStatus: currentJsonified.status, + newStatus: status, + }, + source: (eventData?.source as "API" | "EDI" | "MANUAL" | "GPS" | "CARRIER_PORTAL") || "API", + eventId: `${shipmentId}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + }); + } + + return jsonified as TShipmentModel | null; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update shipment status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const createStatusEvent = async ( + worldId: string, + shipmentId: string, + eventData: Omit, +): Promise => { + try { + const transaction = await ShipmentStatusEvent.create({ + ...eventData, + shipmentId, + eventId: `${shipmentId}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + worldRef: { worldId }, + }); + + const jsonified = transaction.toJSON(); + return jsonified as TShipmentStatusEventModel; + } catch (error) { + throw new RepositoryError( + `Failed to create status event: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentWithEvents = async ( + worldId: string, + shipmentId: string, +): Promise<{ + shipment: TShipmentModel | null; + events: TShipmentStatusEventModel[]; +}> => { + try { + const [shipmentTransaction, eventsTransactions] = await Promise.all([ + Shipment.findOne({ + "worldRef.worldId": worldId, + shipmentId, + }), + ShipmentStatusEvent.find({ + "worldRef.worldId": worldId, + shipmentId, + }).sort({ eventTime: -1 }), + ]); + + const shipment = shipmentTransaction?.toJSON() || null; + const events = eventsTransactions.map((e) => e.toJSON() as TShipmentStatusEventModel); + + return { shipment: shipment as TShipmentModel | null, events }; + } catch (error) { + throw new RepositoryError( + `Failed to get shipment with events: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentsByStatus = async ( + worldId: string, + statuses: string[], + filters?: { + carrierId?: string; + dateStart?: Date; + dateEnd?: Date; + shipmentType?: string; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: any = { + "worldRef.worldId": worldId, + status: { $in: statuses }, + }; + + if 
(filters?.carrierId) { + query["carrierInfo.carrierId"] = filters.carrierId; + } + if (filters?.shipmentType) { + query.shipmentType = filters.shipmentType; + } + if (filters?.dateStart && filters?.dateEnd) { + query["dates.plannedPickupDate"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const shipments = await Shipment.find(query) + .sort({ "dates.plannedPickupDate": -1 }) + .limit(limit + 1); + + const hasMore = shipments.length > limit; + const results = hasMore ? shipments.slice(0, limit) : shipments; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((s) => s.toJSON()), + limit, + hasMore, + nextCursor, + totalCount: await Shipment.countDocuments(query), + }; + } catch (error) { + throw new RepositoryError( + `Failed to get shipments by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getInTransitShipments = async ( + worldId: string, + filters?: { + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: any = { + "worldRef.worldId": worldId, + status: { $in: ["PICKED_UP", "IN_TRANSIT", "OUT_FOR_DELIVERY"] }, + }; + + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const shipments = await Shipment.find(query) + .sort({ "dates.plannedDeliveryDate": 1 }) + .limit(limit + 1); + + const hasMore = shipments.length > limit; + const results = hasMore ? shipments.slice(0, limit) : shipments; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((s) => s.toJSON()), + nextCursor, + limit, + hasMore, + totalCount: await Shipment.countDocuments(query), + }; + } catch (error) { + throw new RepositoryError( + `Failed to get in-transit shipments: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentsByCarrier = async ( + worldId: string, + carrierId: string, + filters?: { + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: any = { + "worldRef.worldId": worldId, + "carrierInfo.carrierId": carrierId, + }; + + if (filters?.dateStart && filters?.dateEnd) { + query["dates.plannedPickupDate"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const shipments = await Shipment.find(query) + .sort({ "dates.plannedPickupDate": -1 }) + .limit(limit + 1); + + const hasMore = shipments.length > limit; + const results = hasMore ? shipments.slice(0, limit) : shipments; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((s) => s.toJSON()), + limit, + hasMore, + nextCursor, + totalCount: await Shipment.countDocuments(query), + }; + } catch (error) { + throw new RepositoryError( + `Failed to get shipments by carrier: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addDelay = async ( + worldId: string, + shipmentId: string, + delayData: { + delayType: string; + reason: string; + startTime: Date; + estimatedDelay: number; + endTime?: Date; + }, +): Promise => { + try { + const shipment = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $push: { + delays: delayData, + }, + $set: { + status: "DELAYED", + }, + }, + { new: true }, + ); + + const jsonified = shipment?.toJSON() || null; + + if (jsonified) { + await createStatusEvent(worldId, shipmentId, { + eventType: "DELAY", + eventTime: delayData.startTime, + exceptionInfo: { + exceptionType: delayData.delayType, + severity: "MEDIUM", + description: delayData.reason, + }, + source: "API", + eventId: `${shipmentId}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`, + }); + } + + return jsonified as TShipmentModel | null; + } catch (error) { + throw new RepositoryError(`Failed to add delay: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +export const TmsShipmentRepository = (worldId: string) => ({ + createShipment: (data: TShipmentInput) => createShipment({ worldId } as TWorldRefModel, data), + tenderShipment: ( + shipmentId: string, + carrierInfo: { + carrierId: string; + carrierName: string; + carrierCode: string; + scacCode: string; + }, + ) => tenderShipment(worldId, shipmentId, carrierInfo), + acceptShipment: ( + shipmentId: string, + carrierAcceptanceData?: { + proNumber?: string; + trackingNumber?: string; + estimatedPickupDate?: Date; + }, + ) => acceptShipment(worldId, shipmentId, carrierAcceptanceData), + updateShipmentLocation: ( + shipmentId: string, + locationData: { + latitude: number; + longitude: number; + city?: string; + state?: string; + timestamp: Date; + source?: "EDI" | "MANUAL" | "GPS" | "CARRIER_PORTAL"; + }, + ) => updateShipmentLocation(worldId, shipmentId, locationData), + processEdi214Update: ( + shipmentId: string, + ediData: { + status: string; + locationCode?: string; + city?: string; + state?: string; + timestamp: Date; + equipmentId?: string; + estimatedDeliveryDate?: Date; + rawEdiData: any; + }, + ) => processEdi214Update(worldId, shipmentId, ediData), + updateShipmentStatus: ( + shipmentId: string, + status: string, + eventData?: { + timestamp?: Date; + location?: string; + note?: string; + source?: string; + }, + ) => updateShipmentStatus(worldId, shipmentId, status, eventData), + createStatusEvent: ( + shipmentId: string, + eventData: Omit, + ) => createStatusEvent(worldId, shipmentId, eventData), + getShipmentWithEvents: (shipmentId: string) => getShipmentWithEvents(worldId, shipmentId), + getShipmentsByStatus: ( + statuses: string[], + filters?: { + carrierId?: string; + dateStart?: Date; + dateEnd?: Date; + shipmentType?: string; + cursor?: string; + limit?: number; + }, + ) => getShipmentsByStatus(worldId, statuses, filters), + getInTransitShipments: (filters?: { cursor?: string; limit?: number }) => + getInTransitShipments(worldId, filters), + getShipmentsByCarrier: ( + carrierId: string, + filters?: { + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }, + ) => getShipmentsByCarrier(worldId, carrierId, filters), + addDelay: ( + 
shipmentId: string, + delayData: { + delayType: string; + reason: string; + startTime: Date; + estimatedDelay: number; + endTime?: Date; + }, + ) => addDelay(worldId, shipmentId, delayData), +}); + +export type TTmsShipmentRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/bin.wms.repository.ts b/packages/controlmart/src/repository/wms/bin.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..34ee7c0da9e5fa38e50e3f729bf81f19a0db23b1 --- /dev/null +++ b/packages/controlmart/src/repository/wms/bin.wms.repository.ts @@ -0,0 +1,529 @@ +import { Bin, type TBinModel, type TBinInput } from "../../models/wms/bin.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createBin = async (world: TWorldRefModel, data: TBinInput): Promise => { + try { + if (!data.binCode || !data.zoneId || !data.warehouseId) { + throw new RepositoryError( + "Bin code, zone ID, and warehouse ID are required", + "VALIDATION_ERROR", + ); + } + + const existing = await Bin.findOne({ + "worldRef.worldId": world.worldId, + $or: [{ binId: data.binId }, { binCode: data.binCode, warehouseId: data.warehouseId }], + }); + + if (existing) { + throw new RepositoryError( + `Bin with code ${data.binCode} already exists in warehouse`, + "DUPLICATE_ERROR", + ); + } + + const bin = await Bin.create({ + ...data, + worldRef: world, + }); + + return bin.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError(`Failed to create bin: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const getBinsByZone = async ( + worldId: string, + zoneId: string, + filters?: { + status?: string[]; + binType?: string[]; + locationType?: string[]; + abcClassification?: string[]; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + zoneId, + }; + + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + if (filters?.binType?.length) { + query.binType = { $in: filters.binType }; + } + if (filters?.locationType?.length) { + query.locationType = { $in: filters.locationType }; + } + if (filters?.abcClassification?.length) { + query.abcClassification = { $in: filters.abcClassification }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const bins = await Bin.find(query) + .sort({ + "location.aisle": 1, + "location.bay": 1, + "location.level": 1, + }) + .limit(limit + 1); + + const hasMore = bins.length > limit; + const results = hasMore ? bins.slice(0, limit) : bins; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((bin) => bin.toJSON()), + totalCount: await Bin.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get bins by zone: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getBinsByWarehouse = async ( + worldId: string, + warehouseId: string, + filters?: { + status?: string[]; + zoneIds?: string[]; + binType?: string[]; + locationType?: string[]; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + }; + + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + if (filters?.zoneIds?.length) { + query.zoneId = { $in: filters.zoneIds }; + } + if (filters?.binType?.length) { + query.binType = { $in: filters.binType }; + } + if (filters?.locationType?.length) { + query.locationType = { $in: filters.locationType }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const bins = await Bin.find(query) + .sort({ createdAt: -1 }) + .limit(limit + 1); + + const hasMore = bins.length > limit; + const results = hasMore ? bins.slice(0, limit) : bins; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((bin) => bin.toJSON()), + totalCount: await Bin.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get bins by warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getBinByCode = async ( + worldId: string, + binCode: string, + warehouseId?: string, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + binCode, + }; + + if (warehouseId) { + query.warehouseId = warehouseId; + } + + const bin = await Bin.findOne(query); + return bin?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get bin by code: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateBinStatus = async ( + worldId: string, + binId: string, + status: string, + blockReason?: string, +): Promise => { + try { + const updateData: Record = { status }; + + if (status === "BLOCKED" && blockReason) { + updateData.blockReason = blockReason; + } else if (status !== "BLOCKED") { + updateData.$unset = { blockReason: "" }; + } + + const bin = await Bin.findOneAndUpdate({ "worldRef.worldId": worldId, binId }, updateData, { + new: true, + }); + + return bin?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update bin status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAvailableBins = async ( + worldId: string, + filters?: { + warehouseId?: string; + zoneIds?: string[]; + binType?: string[]; + locationType?: string[]; + minCapacity?: { + weight?: number; + volume?: number; + pallets?: number; + }; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + status: "AVAILABLE", + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + + if (filters?.zoneIds?.length) { + query.zoneId = { $in: filters.zoneIds }; + } + if (filters?.binType?.length) { + query.binType = { $in: filters.binType }; + } + if (filters?.locationType?.length) { + query.locationType = { $in: 
filters.locationType }; + } + + // Add capacity filters if specified + if (filters?.minCapacity?.weight) { + query["capacity.maxWeightLbs"] = { $gte: filters.minCapacity.weight }; + } + if (filters?.minCapacity?.volume) { + query["capacity.maxCubicFeet"] = { $gte: filters.minCapacity.volume }; + } + if (filters?.minCapacity?.pallets) { + query["capacity.maxPallets"] = { $gte: filters.minCapacity.pallets }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const bins = await Bin.find(query) + .sort({ + abcClassification: 1, + "location.aisle": 1, + "location.bay": 1, + "location.level": 1, + }) + .limit(limit + 1); + + const hasMore = bins.length > limit; + const results = hasMore ? bins.slice(0, limit) : bins; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((bin) => bin.toJSON()), + totalCount: await Bin.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get available bins: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getBinUtilization = async ( + worldId: string, + filters?: { + warehouseId?: string; + zoneIds?: string[]; + }, +): Promise<{ + totalBins: number; + availableBins: number; + occupiedBins: number; + blockedBins: number; + utilizationPercentage: number; + zoneUtilization: Array<{ + zoneId: string; + totalBins: number; + availableBins: number; + utilizationPercentage: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + + if (filters?.zoneIds?.length) { + matchStage.zoneId = { $in: filters.zoneIds }; + } + + const result = await Bin.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalBins: { $sum: 1 }, + availableBins: { $sum: { $cond: [{ $eq: ["$status", "AVAILABLE"] }, 1, 0] } }, + occupiedBins: { $sum: { $cond: [{ $eq: ["$status", "OCCUPIED"] }, 1, 0] } }, + blockedBins: { $sum: { $cond: [{ $eq: ["$status", "BLOCKED"] }, 1, 0] } }, + zoneData: { + $push: { + zoneId: "$zoneId", + status: "$status", + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalBins: 0, + availableBins: 0, + occupiedBins: 0, + blockedBins: 0, + utilizationPercentage: 0, + zoneUtilization: [], + }; + } + + const data = result[0]; + const utilizationPercentage = + data.totalBins > 0 ? ((data.totalBins - data.availableBins) / data.totalBins) * 100 : 0; + + // Calculate zone utilization + const zoneMap = new Map(); + data.zoneData.forEach((item: any) => { + const existing = zoneMap.get(item.zoneId) || { total: 0, available: 0 }; + existing.total += 1; + if (item.status === "AVAILABLE") { + existing.available += 1; + } + zoneMap.set(item.zoneId, existing); + }); + + const zoneUtilization = Array.from(zoneMap.entries()).map(([zoneId, stats]: [string, any]) => ({ + zoneId, + totalBins: stats.total, + availableBins: stats.available, + utilizationPercentage: + stats.total > 0 ? 
((stats.total - stats.available) / stats.total) * 100 : 0, + })); + + return { + totalBins: data.totalBins, + availableBins: data.availableBins, + occupiedBins: data.occupiedBins, + blockedBins: data.blockedBins, + utilizationPercentage: Math.round(utilizationPercentage * 100) / 100, + zoneUtilization, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get bin utilization: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateBinCapacity = async ( + worldId: string, + binId: string, + capacity: { + maxWeightLbs?: number; + maxCubicFeet?: number; + maxPallets?: number; + }, +): Promise => { + try { + const updateData: Record = {}; + + if (capacity.maxWeightLbs !== undefined) { + updateData["capacity.maxWeightLbs"] = capacity.maxWeightLbs; + } + if (capacity.maxCubicFeet !== undefined) { + updateData["capacity.maxCubicFeet"] = capacity.maxCubicFeet; + } + if (capacity.maxPallets !== undefined) { + updateData["capacity.maxPallets"] = capacity.maxPallets; + } + + const bin = await Bin.findOneAndUpdate( + { "worldRef.worldId": worldId, binId }, + { $set: updateData }, + { new: true }, + ); + + return bin?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update bin capacity: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSBinRepository = (worldId: string) => ({ + createBin: (data: TBinInput) => createBin({ worldId } as TWorldRefModel, data), + getBinsByZone: ( + args: + | string + | { + zoneId: string; + filters?: { + status?: string[]; + binType?: string[]; + locationType?: string[]; + abcClassification?: string[]; + }; + }, + filters?: { + status?: string[]; + binType?: string[]; + locationType?: string[]; + abcClassification?: string[]; + }, + ) => + typeof args === "string" + ? getBinsByZone(worldId, args, filters) + : getBinsByZone(worldId, args.zoneId, args.filters), + getBinsByWarehouse: ( + args: + | string + | { + warehouseId: string; + filters?: { + status?: string[]; + zoneIds?: string[]; + binType?: string[]; + locationType?: string[]; + }; + }, + filters?: { + status?: string[]; + zoneIds?: string[]; + binType?: string[]; + locationType?: string[]; + }, + ) => + typeof args === "string" + ? getBinsByWarehouse(worldId, args, filters) + : getBinsByWarehouse(worldId, args.warehouseId, args.filters), + getBinByCode: ( + args: string | { binCode: string; warehouseId?: string }, + warehouseId?: string, + ) => + typeof args === "string" + ? getBinByCode(worldId, args, warehouseId) + : getBinByCode(worldId, args.binCode, args.warehouseId), + updateBinStatus: ( + args: string | { binId: string; status: string; blockReason?: string }, + status?: string, + blockReason?: string, + ) => + typeof args === "string" + ? updateBinStatus(worldId, args, status!, blockReason) + : updateBinStatus(worldId, args.binId, args.status, args.blockReason), + getAvailableBins: (filters?: { + warehouseId?: string; + zoneIds?: string[]; + binType?: string[]; + locationType?: string[]; + minCapacity?: { + weight?: number; + volume?: number; + pallets?: number; + }; + }) => getAvailableBins(worldId, filters), + getBinUtilization: (filters?: { warehouseId?: string; zoneIds?: string[] }) => + getBinUtilization(worldId, filters), + updateBinCapacity: ( + args: + | string + | { + binId: string; + capacity: { + maxWeightLbs?: number; + maxCubicFeet?: number; + maxPallets?: number; + }; + }, + capacity?: { + maxWeightLbs?: number; + maxCubicFeet?: number; + maxPallets?: number; + }, + ) => + typeof args === "string" + ? 
updateBinCapacity(worldId, args, capacity!) + : updateBinCapacity(worldId, args.binId, args.capacity), +}); + +export type TWMSBinRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/cycle_count.wms.repository.ts b/packages/controlmart/src/repository/wms/cycle_count.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..76e410a3261c17fcabe6ff2072aa7dea3fec0ed0 --- /dev/null +++ b/packages/controlmart/src/repository/wms/cycle_count.wms.repository.ts @@ -0,0 +1,481 @@ +import { + CycleCount, + type TCycleCountModel, + type TCycleCountInput, +} from "../../models/wms/cycle_count.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createCycleCount = async ( + world: TWorldRefModel, + data: TCycleCountInput, +): Promise => { + try { + if (!data.warehouseId || !data.countType) { + throw new RepositoryError("Warehouse ID and count type are required", "VALIDATION_ERROR"); + } + + const cycleCount = await CycleCount.create({ + ...data, + worldRef: world, + }); + + return cycleCount.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create cycle count: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCycleCountsByStatus = async ( + worldId: string, + status: string[], + filters?: { + warehouseId?: string; + countType?: string[]; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + countStatus: { $in: status }, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.countType?.length) { + query.countType = { $in: filters.countType }; + } + if (filters?.dateStart && filters?.dateEnd) { + query["schedule.scheduledDate"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const cycleCounts = await CycleCount.find(query) + .sort({ "schedule.scheduledDate": 1 }) + .limit(limit + 1); + + const hasMore = cycleCounts.length > limit; + const results = hasMore ? cycleCounts.slice(0, limit) : cycleCounts; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((count) => count.toJSON()), + totalCount: await CycleCount.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get cycle counts by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCycleCountById = async ( + worldId: string, + cycleCountId: string, +): Promise => { + try { + const cycleCount = await CycleCount.findOne({ + "worldRef.worldId": worldId, + cycleCountId, + }); + + return cycleCount?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get cycle count by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateCycleCountStatus = async ( + worldId: string, + cycleCountId: string, + status: string, + completedBy?: string, +): Promise => { + try { + const updateData: Record = { + countStatus: status, + }; + + const now = new Date(); + + switch (status) { + case "IN_PROGRESS": + updateData["schedule.startDate"] = now; + break; + case "COMPLETED": + updateData["schedule.completedDate"] = now; + if (completedBy) { + updateData.completedBy = completedBy; + } + break; + } + + const cycleCount = await CycleCount.findOneAndUpdate( + { "worldRef.worldId": worldId, cycleCountId }, + { $set: updateData }, + { new: true }, + ); + + return cycleCount?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update cycle count status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const assignUserToCycleCount = async ( + worldId: string, + cycleCountId: string, + assignment: { + userId: string; + userName: string; + assignedBins: string[]; + }, +): Promise => { + try { + const cycleCount = await CycleCount.findOneAndUpdate( + { "worldRef.worldId": worldId, cycleCountId }, + { + $push: { + assignments: assignment, + }, + }, + { new: true }, + ); + + return cycleCount?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to assign user to cycle count: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addCountToResult = async ( + worldId: string, + cycleCountId: string, + countResult: { + binId: string; + productId: string; + sku: string; + expectedQuantity: number; + actualQuantity: number; + variance: number; + countedBy: string; + countedAt: Date; + notes?: string; + }, +): Promise => { + try { + const cycleCount = await CycleCount.findOneAndUpdate( + { "worldRef.worldId": worldId, cycleCountId }, + { + $push: { + results: countResult, + }, + }, + { new: true }, + ); + + return cycleCount?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to add count result: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCycleCountsByWarehouse = async ( + worldId: string, + warehouseId: string, + filters?: { + status?: string[]; + countType?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + }; + + if (filters?.status?.length) { + query.countStatus = { $in: filters.status }; + } + if (filters?.countType?.length) { + query.countType = { $in: filters.countType }; + } + if (filters?.dateStart && filters?.dateEnd) { + query["schedule.scheduledDate"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const cycleCounts = await CycleCount.find(query).sort({ "schedule.scheduledDate": -1 }); + return cycleCounts.map((count) => 
count.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get cycle counts by warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getCycleCountVarianceReport = async ( + worldId: string, + filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + countType?: string[]; + }, +): Promise<{ + totalCounts: number; + completedCounts: number; + totalVariances: number; + significantVariances: number; + accuracyPercentage: number; + variancesByProduct: Array<{ + productId: string; + sku: string; + totalVariance: number; + countFrequency: number; + lastCountDate: Date; + }>; + variancesByZone: Array<{ + zoneId: string; + totalVariance: number; + countFrequency: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + countStatus: "COMPLETED", + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.countType?.length) { + matchStage.countType = { $in: filters.countType }; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage["schedule.completedDate"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await CycleCount.aggregate([ + { $match: matchStage }, + { $unwind: "$results" }, + { + $group: { + _id: null, + totalCounts: { $sum: 1 }, + totalVariances: { $sum: { $abs: "$results.variance" } }, + significantVariances: { + $sum: { $cond: [{ $gte: [{ $abs: "$results.variance" }, 5] }, 1, 0] }, + }, + zeroVariances: { + $sum: { $cond: [{ $eq: ["$results.variance", 0] }, 1, 0] }, + }, + productVariances: { + $push: { + productId: "$results.productId", + sku: "$results.sku", + variance: "$results.variance", + countedAt: "$results.countedAt", + zoneId: "$scope.zoneId", + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalCounts: 0, + completedCounts: 0, + totalVariances: 0, + significantVariances: 0, + accuracyPercentage: 0, + variancesByProduct: [], + variancesByZone: [], + }; + } + + const data = result[0]; + const accuracyPercentage = + data.totalCounts > 0 ? 
(data.zeroVariances / data.totalCounts) * 100 : 0; + + // Process product variances + const productMap = new Map(); + data.productVariances.forEach((item: any) => { + const existing = productMap.get(item.productId) || { + productId: item.productId, + sku: item.sku, + totalVariance: 0, + countFrequency: 0, + lastCountDate: item.countedAt, + }; + existing.totalVariance += Math.abs(item.variance); + existing.countFrequency += 1; + if (item.countedAt > existing.lastCountDate) { + existing.lastCountDate = item.countedAt; + } + productMap.set(item.productId, existing); + }); + + // Process zone variances + const zoneMap = new Map(); + data.productVariances.forEach((item: any) => { + if (item.zoneId) { + const existing = zoneMap.get(item.zoneId) || { + zoneId: item.zoneId, + totalVariance: 0, + countFrequency: 0, + }; + existing.totalVariance += Math.abs(item.variance); + existing.countFrequency += 1; + zoneMap.set(item.zoneId, existing); + } + }); + + return { + totalCounts: data.totalCounts, + completedCounts: data.totalCounts, + totalVariances: data.totalVariances, + significantVariances: data.significantVariances, + accuracyPercentage: Math.round(accuracyPercentage * 100) / 100, + variancesByProduct: Array.from(productMap.values()), + variancesByZone: Array.from(zoneMap.values()), + }; + } catch (error) { + throw new RepositoryError( + `Failed to get cycle count variance report: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getScheduledCycleCounts = async ( + worldId: string, + dateStart: Date, + dateEnd: Date, + warehouseId?: string, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + countStatus: "SCHEDULED", + "schedule.scheduledDate": { + $gte: dateStart, + $lte: dateEnd, + }, + }; + + if (warehouseId) { + query.warehouseId = warehouseId; + } + + const cycleCounts = await CycleCount.find(query).sort({ "schedule.scheduledDate": 1 }); + return cycleCounts.map((count) => count.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get scheduled cycle counts: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSCycleCountRepository = (worldId: string) => ({ + createCycleCount: (data: TCycleCountInput) => + createCycleCount({ worldId } as TWorldRefModel, data), + getCycleCountsByStatus: ( + status: string[], + filters?: { + warehouseId?: string; + countType?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, + ) => getCycleCountsByStatus(worldId, status, filters), + getCycleCountById: (cycleCountId: string) => getCycleCountById(worldId, cycleCountId), + updateCycleCountStatus: (cycleCountId: string, status: string, completedBy?: string) => + updateCycleCountStatus(worldId, cycleCountId, status, completedBy), + assignUserToCycleCount: ( + cycleCountId: string, + assignment: { + userId: string; + userName: string; + assignedBins: string[]; + }, + ) => assignUserToCycleCount(worldId, cycleCountId, assignment), + addCountToResult: ( + cycleCountId: string, + countResult: { + binId: string; + productId: string; + sku: string; + expectedQuantity: number; + actualQuantity: number; + variance: number; + countedBy: string; + countedAt: Date; + notes?: string; + }, + ) => addCountToResult(worldId, cycleCountId, countResult), + getCycleCountsByWarehouse: ( + warehouseId: string, + filters?: { + status?: string[]; + countType?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, + ) => getCycleCountsByWarehouse(worldId, warehouseId, filters), + getCycleCountVarianceReport: (filters?: { + warehouseId?: 
string; + dateStart?: Date; + dateEnd?: Date; + countType?: string[]; + }) => getCycleCountVarianceReport(worldId, filters), + getScheduledCycleCounts: (dateStart: Date, dateEnd: Date, warehouseId?: string) => + getScheduledCycleCounts(worldId, dateStart, dateEnd, warehouseId), +}); + +export type TWMSCycleCountRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/daily_metrics.wms.repository.ts b/packages/controlmart/src/repository/wms/daily_metrics.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..14bc9c818f9dd35ced231effe1fee69e756e0ee9 --- /dev/null +++ b/packages/controlmart/src/repository/wms/daily_metrics.wms.repository.ts @@ -0,0 +1,413 @@ +import { + DailyMetrics, + type TDailyMetricsModel, + type TDailyMetricsInput, +} from "../../models/wms/daily_metrics.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; + +const createDailyMetrics = async ( + world: TWorldRefModel, + data: TDailyMetricsInput, +): Promise => { + try { + if (!data.warehouseId || !data.date) { + throw new RepositoryError("Warehouse ID and date are required", "VALIDATION_ERROR"); + } + + // Check for existing metrics for the same date/warehouse + const existing = await DailyMetrics.findOne({ + "worldRef.worldId": world.worldId, + warehouseId: data.warehouseId, + date: data.date, + shift: data.shift || null, + zoneId: data.zoneId || null, + }); + + if (existing) { + throw new RepositoryError( + `Daily metrics already exist for this date/warehouse/shift combination`, + "DUPLICATE_ERROR", + ); + } + + const metrics = await DailyMetrics.create({ + ...data, + worldRef: world, + }); + + return metrics.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create daily metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDailyMetricsByDateRange = async ( + worldId: string, + warehouseId: string, + dateStart: Date, + dateEnd: Date, + filters?: { + shift?: string; + zoneId?: string; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + date: { + $gte: dateStart, + $lte: dateEnd, + }, + }; + + if (filters?.shift) { + query.shift = filters.shift; + } + if (filters?.zoneId) { + query.zoneId = filters.zoneId; + } + + const metrics = await DailyMetrics.find(query).sort({ date: -1 }); + return metrics.map((metric) => metric.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get daily metrics by date range: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDailyMetricsById = async ( + worldId: string, + metricId: string, +): Promise => { + try { + const metric = await DailyMetrics.findOne({ + "worldRef.worldId": worldId, + metricId, + }); + + return metric?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get daily metric by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateDailyMetrics = async ( + worldId: string, + metricId: string, + data: Partial, +): Promise => { + try { + const metric = await DailyMetrics.findOneAndUpdate( + { "worldRef.worldId": worldId, metricId }, + { $set: data }, + { new: true }, + ); + + return metric?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update daily metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getMetricsSummary = 
async ( + worldId: string, + filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + zoneId?: string; + }, +): Promise<{ + totalDays: number; + averageMetrics: { + inbound: { + avgPoReceived: number; + avgUnitsReceived: number; + avgUnitsPerHour: number; + }; + picking: { + avgOrdersShipped: number; + avgLinesPicked: number; + avgLinesPerHour: number; + avgPickAccuracy: number; + }; + packing: { + avgOrdersPacked: number; + avgPackingHours: number; + avgOrdersPerHour: number; + }; + inventory: { + avgAccuracy: number; + avgTurnover: number; + }; + }; + trends: { + date: string; + unitsReceived: number; + unitsShipped: number; + pickAccuracy: number; + }[]; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.zoneId) { + matchStage.zoneId = filters.zoneId; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage.date = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await DailyMetrics.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalDays: { $sum: 1 }, + avgPoReceived: { $avg: "$inbound.poReceived" }, + avgUnitsReceived: { $avg: "$inbound.unitsReceived" }, + avgUnitsPerHourInbound: { $avg: "$inbound.unitsPerHour" }, + avgOrdersShipped: { $avg: "$picking.ordersShipped" }, + avgLinesPicked: { $avg: "$picking.linesPicked" }, + avgLinesPerHour: { $avg: "$picking.linesPerHour" }, + avgPickAccuracy: { $avg: "$picking.pickAccuracy" }, + avgOrdersPacked: { $avg: "$packing.ordersPacked" }, + avgPackingHours: { $avg: "$packing.packingHours" }, + avgOrdersPerHour: { $avg: "$packing.ordersPerHour" }, + avgInventoryAccuracy: { $avg: "$inventory.accuracy" }, + avgInventoryTurnover: { $avg: "$inventory.turnover" }, + dailyData: { + $push: { + date: "$date", + unitsReceived: "$inbound.unitsReceived", + unitsShipped: { $multiply: ["$picking.unitsPicked", 1] }, + pickAccuracy: "$picking.pickAccuracy", + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalDays: 0, + averageMetrics: { + inbound: { + avgPoReceived: 0, + avgUnitsReceived: 0, + avgUnitsPerHour: 0, + }, + picking: { + avgOrdersShipped: 0, + avgLinesPicked: 0, + avgLinesPerHour: 0, + avgPickAccuracy: 0, + }, + packing: { + avgOrdersPacked: 0, + avgPackingHours: 0, + avgOrdersPerHour: 0, + }, + inventory: { + avgAccuracy: 0, + avgTurnover: 0, + }, + }, + trends: [], + }; + } + + const data = result[0]; + + const trends = data.dailyData + .map((day: any) => ({ + date: day.date.toISOString().split("T")[0], + unitsReceived: day.unitsReceived || 0, + unitsShipped: day.unitsShipped || 0, + pickAccuracy: day.pickAccuracy || 0, + })) + .sort((a: any, b: any) => a.date.localeCompare(b.date)); + + return { + totalDays: data.totalDays, + averageMetrics: { + inbound: { + avgPoReceived: Math.round((data.avgPoReceived || 0) * 100) / 100, + avgUnitsReceived: Math.round((data.avgUnitsReceived || 0) * 100) / 100, + avgUnitsPerHour: Math.round((data.avgUnitsPerHourInbound || 0) * 100) / 100, + }, + picking: { + avgOrdersShipped: Math.round((data.avgOrdersShipped || 0) * 100) / 100, + avgLinesPicked: Math.round((data.avgLinesPicked || 0) * 100) / 100, + avgLinesPerHour: Math.round((data.avgLinesPerHour || 0) * 100) / 100, + avgPickAccuracy: Math.round((data.avgPickAccuracy || 0) * 100) / 100, + }, + packing: { + avgOrdersPacked: Math.round((data.avgOrdersPacked || 0) * 100) / 100, + avgPackingHours: Math.round((data.avgPackingHours || 
0) * 100) / 100, + avgOrdersPerHour: Math.round((data.avgOrdersPerHour || 0) * 100) / 100, + }, + inventory: { + avgAccuracy: Math.round((data.avgInventoryAccuracy || 0) * 100) / 100, + avgTurnover: Math.round((data.avgInventoryTurnover || 0) * 100) / 100, + }, + }, + trends, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get metrics summary: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getPerformanceTrends = async ( + worldId: string, + warehouseId: string, + metricType: "inbound" | "picking" | "packing" | "inventory", + dateStart: Date, + dateEnd: Date, +): Promise< + Array<{ + date: string; + metrics: Record; + }> +> => { + try { + const metrics = await DailyMetrics.find({ + "worldRef.worldId": worldId, + warehouseId, + date: { $gte: dateStart, $lte: dateEnd }, + }).sort({ date: 1 }); + + return metrics.map((metric) => ({ + date: metric.date?.toISOString().split("T")[0] || "", + metrics: (metric[metricType] as Record) || {}, + })); + } catch (error) { + throw new RepositoryError( + `Failed to get performance trends: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getZonePerformanceComparison = async ( + worldId: string, + warehouseId: string, + dateStart: Date, + dateEnd: Date, +): Promise< + Array<{ + zoneId: string; + metrics: { + totalOrders: number; + totalLines: number; + averagePickTime: number; + accuracy: number; + }; + }> +> => { + try { + const result = await DailyMetrics.aggregate([ + { + $match: { + "worldRef.worldId": worldId, + warehouseId, + date: { $gte: dateStart, $lte: dateEnd }, + zoneId: { $exists: true, $ne: null }, + }, + }, + { + $group: { + _id: "$zoneId", + totalOrders: { $sum: "$picking.ordersShipped" }, + totalLines: { $sum: "$picking.linesPicked" }, + totalPickingHours: { $sum: "$picking.pickingHours" }, + avgAccuracy: { $avg: "$picking.pickAccuracy" }, + dayCount: { $sum: 1 }, + }, + }, + { + $project: { + zoneId: "$_id", + metrics: { + totalOrders: "$totalOrders", + totalLines: "$totalLines", + averagePickTime: { + $cond: [ + { $gt: ["$totalLines", 0] }, + { $divide: ["$totalPickingHours", "$totalLines"] }, + 0, + ], + }, + accuracy: "$avgAccuracy", + }, + }, + }, + ]); + + return result; + } catch (error) { + throw new RepositoryError( + `Failed to get zone performance comparison: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSDailyMetricsRepository = (worldId: string) => ({ + createDailyMetrics: (data: TDailyMetricsInput) => + createDailyMetrics({ worldId } as TWorldRefModel, data), + getDailyMetricsByDateRange: ( + warehouseId: string, + dateStart: Date, + dateEnd: Date, + filters?: { + shift?: string; + zoneId?: string; + }, + ) => getDailyMetricsByDateRange(worldId, warehouseId, dateStart, dateEnd, filters), + getDailyMetricsById: (metricId: string) => getDailyMetricsById(worldId, metricId), + updateDailyMetrics: (metricId: string, data: Partial) => + updateDailyMetrics(worldId, metricId, data), + getMetricsSummary: (filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + zoneId?: string; + }) => getMetricsSummary(worldId, filters), + getPerformanceTrends: ( + warehouseId: string, + metricType: "inbound" | "picking" | "packing" | "inventory", + dateStart: Date, + dateEnd: Date, + ) => getPerformanceTrends(worldId, warehouseId, metricType, dateStart, dateEnd), + getZonePerformanceComparison: (warehouseId: string, dateStart: Date, dateEnd: Date) => + getZonePerformanceComparison(worldId, warehouseId, dateStart, dateEnd), +}); + +export 
type TWMSDailyMetricsRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/dc.wms.repository.ts b/packages/controlmart/src/repository/wms/dc.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..5b34f707ae65e66eea73be1d06e51c33c87851d3 --- /dev/null +++ b/packages/controlmart/src/repository/wms/dc.wms.repository.ts @@ -0,0 +1,309 @@ +import { + DistributionCenter, + type TDistributionCenterModel, + type TDistributionCenterInput, +} from "../../models/wms/dc.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; + +const createDistributionCenter = async ( + world: TWorldRefModel, + data: TDistributionCenterInput, +): Promise => { + try { + if (!data.warehouseId || !data.dcName) { + throw new RepositoryError("Warehouse ID and DC name are required", "VALIDATION_ERROR"); + } + + const existing = await DistributionCenter.findOne({ + "worldRef.worldId": world.worldId, + $or: [{ dcId: data.dcId }, { dcName: data.dcName, warehouseId: data.warehouseId }], + }); + + if (existing) { + throw new RepositoryError( + `Distribution Center with name ${data.dcName} already exists in warehouse`, + "DUPLICATE_ERROR", + ); + } + + const dc = await DistributionCenter.create({ + ...data, + worldRef: world, + }); + + return dc.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create distribution center: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDistributionCentersByStatus = async ( + worldId: string, + status: string[], + filters?: { + warehouseId?: string; + dcType?: string[]; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + operationalStatus: { $in: status }, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.dcType?.length) { + query.dcType = { $in: filters.dcType }; + } + + const dcs = await DistributionCenter.find(query).sort({ dcName: 1 }); + return dcs.map((dc) => dc.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get distribution centers by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDistributionCenterById = async ( + worldId: string, + dcId: string, +): Promise => { + try { + const dc = await DistributionCenter.findOne({ + "worldRef.worldId": worldId, + dcId, + }); + + return dc?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get distribution center by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDistributionCentersByWarehouse = async ( + worldId: string, + warehouseId: string, +): Promise => { + try { + const dcs = await DistributionCenter.find({ + "worldRef.worldId": worldId, + warehouseId, + }).sort({ dcName: 1 }); + + return dcs.map((dc) => dc.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get distribution centers by warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateDistributionCenter = async ( + worldId: string, + dcId: string, + data: Partial, +): Promise => { + try { + const dc = await DistributionCenter.findOneAndUpdate( + { "worldRef.worldId": worldId, dcId }, + { $set: data }, + { new: true }, + ); + + return dc?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update distribution center: ${getErrorMessage(error)}`, + 
"DATABASE_ERROR", + ); + } +}; + +const updateOperationalStatus = async ( + worldId: string, + dcId: string, + status: string, + reason?: string, +): Promise => { + try { + const updateData: Record = { + operationalStatus: status, + }; + + if (reason) { + updateData.statusChangeReason = reason; + updateData.statusChangedAt = new Date(); + } + + const dc = await DistributionCenter.findOneAndUpdate( + { "worldRef.worldId": worldId, dcId }, + { $set: updateData }, + { new: true }, + ); + + return dc?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update operational status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDistributionCenterCapacity = async ( + worldId: string, + dcId: string, +): Promise<{ + dcId: string; + dcName: string; + totalSqFootage: number; + utilizationMetrics: { + totalZones: number; + totalBins: number; + occupiedBins: number; + utilizationPercentage: number; + }; + operationalHours: any; +}> => { + try { + const dc = await DistributionCenter.findOne({ + "worldRef.worldId": worldId, + dcId, + }); + + if (!dc) { + throw new RepositoryError("Distribution Center not found", "DATABASE_ERROR"); + } + + // In a real implementation, you'd join with Zone and Bin collections to get actual utilization + // For now, returning structure with placeholder data + return { + dcId: dc.dcId, + dcName: dc.dcName, + totalSqFootage: dc.totalSqFootage || 0, + utilizationMetrics: { + totalZones: 0, // Would calculate from zones + totalBins: 0, // Would calculate from bins + occupiedBins: 0, // Would calculate from inventory + utilizationPercentage: 0, + }, + operationalHours: dc.operatingHours || {}, + }; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to get distribution center capacity: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllDistributionCenters = async ( + worldId: string, + filters?: { + warehouseId?: string; + dcType?: string[]; + operationalStatus?: string[]; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.dcType?.length) { + query.dcType = { $in: filters.dcType }; + } + if (filters?.operationalStatus?.length) { + query.operationalStatus = { $in: filters.operationalStatus }; + } + + const dcs = await DistributionCenter.find(query).sort({ dcName: 1 }); + return dcs.map((dc) => dc.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get all distribution centers: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const isOperationalAtTime = async ( + worldId: string, + dcId: string, + checkTime: Date, +): Promise => { + try { + const dc = await DistributionCenter.findOne({ + "worldRef.worldId": worldId, + dcId, + operationalStatus: "ACTIVE", + }); + + if (!dc || !dc.operatingHours) { + return false; + } + + const dayOfWeek = checkTime + .toLocaleDateString("en-US", { weekday: "long" }) + .toLowerCase() as keyof typeof dc.operatingHours; + const dayHours = dc.operatingHours[dayOfWeek]; + + if (!dayHours || !dayHours.open || !dayHours.close) { + return false; + } + + const currentTime = checkTime.toTimeString().substring(0, 5); // HH:MM format + return currentTime >= dayHours.open && currentTime <= dayHours.close; + } catch (error) { + throw new RepositoryError( + `Failed to check operational status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + 
+export const WMSDistributionCenterRepository = (worldId: string) => ({ + createDistributionCenter: (data: TDistributionCenterInput) => + createDistributionCenter({ worldId } as TWorldRefModel, data), + getDistributionCentersByStatus: ( + status: string[], + filters?: { + warehouseId?: string; + dcType?: string[]; + }, + ) => getDistributionCentersByStatus(worldId, status, filters), + getDistributionCenterById: (dcId: string) => getDistributionCenterById(worldId, dcId), + getDistributionCentersByWarehouse: (warehouseId: string) => + getDistributionCentersByWarehouse(worldId, warehouseId), + updateDistributionCenter: (dcId: string, data: Partial) => + updateDistributionCenter(worldId, dcId, data), + updateOperationalStatus: (dcId: string, status: string, reason?: string) => + updateOperationalStatus(worldId, dcId, status, reason), + getDistributionCenterCapacity: (dcId: string) => getDistributionCenterCapacity(worldId, dcId), + getAllDistributionCenters: (filters?: { + warehouseId?: string; + dcType?: string[]; + operationalStatus?: string[]; + }) => getAllDistributionCenters(worldId, filters), + isOperationalAtTime: (dcId: string, checkTime: Date) => + isOperationalAtTime(worldId, dcId, checkTime), +}); + +export type TWMSDistributionCenterRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/dock_door.wms.repository.ts b/packages/controlmart/src/repository/wms/dock_door.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..8ff8e7e6d1006330dfc39bde17163b60855c5d6e --- /dev/null +++ b/packages/controlmart/src/repository/wms/dock_door.wms.repository.ts @@ -0,0 +1,488 @@ +import { + DockDoor, + type TDockDoorModel, + type TDockDoorInput, +} from "../../models/wms/dock_door.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createDockDoor = async ( + world: TWorldRefModel, + data: TDockDoorInput, +): Promise => { + try { + if (!data.warehouseId || !data.doorNumber || !data.doorType) { + throw new RepositoryError( + "Warehouse ID, door number, and door type are required", + "VALIDATION_ERROR", + ); + } + + const existing = await DockDoor.findOne({ + "worldRef.worldId": world.worldId, + $or: [ + { dockDoorId: data.dockDoorId }, + { doorNumber: data.doorNumber, warehouseId: data.warehouseId }, + ], + }); + + if (existing) { + throw new RepositoryError( + `Dock door with number ${data.doorNumber} already exists in warehouse`, + "DUPLICATE_ERROR", + ); + } + + const dockDoor = await DockDoor.create({ + ...data, + worldRef: world, + }); + + return dockDoor.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create dock door: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDockDoorsByWarehouse = async ( + worldId: string, + warehouseId: string, + filters?: { + doorType?: string[]; + status?: string[]; + zoneId?: string; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + }; + + if (filters?.doorType?.length) { + query.doorType = { $in: filters.doorType }; + } + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + if (filters?.zoneId) { + query.zoneId = filters.zoneId; + } + if 
(filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const dockDoors = await DockDoor.find(query) + .sort({ doorNumber: 1 }) + .limit(limit + 1); + + const hasMore = dockDoors.length > limit; + const results = hasMore ? dockDoors.slice(0, limit) : dockDoors; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((door) => door.toJSON()), + totalCount: await DockDoor.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get dock doors by warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDockDoorById = async ( + worldId: string, + dockDoorId: string, +): Promise => { + try { + const dockDoor = await DockDoor.findOne({ + "worldRef.worldId": worldId, + dockDoorId, + }); + + return dockDoor?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get dock door by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateDockDoorStatus = async ( + worldId: string, + dockDoorId: string, + status: string, + reason?: string, +): Promise => { + try { + const updateData: Record = { + status, + }; + + if (reason) { + updateData.statusChangeReason = reason; + updateData.statusChangedAt = new Date(); + } + + const dockDoor = await DockDoor.findOneAndUpdate( + { "worldRef.worldId": worldId, dockDoorId }, + { $set: updateData }, + { new: true }, + ); + + return dockDoor?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update dock door status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const assignAppointmentToDoor = async ( + worldId: string, + dockDoorId: string, + appointment: { + appointmentId: string; + carrier: string; + trailerNumber: string; + startTime: Date; + expectedEndTime: Date; + }, +): Promise => { + try { + const dockDoor = await DockDoor.findOneAndUpdate( + { "worldRef.worldId": worldId, dockDoorId, status: "AVAILABLE" }, + { + $set: { + currentAppointment: appointment, + status: "OCCUPIED", + }, + }, + { new: true }, + ); + + if (!dockDoor) { + throw new RepositoryError("Dock door not available for assignment", "VALIDATION_ERROR"); + } + + return dockDoor.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to assign appointment to dock door: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const clearAppointmentFromDoor = async ( + worldId: string, + dockDoorId: string, + completionNotes?: string, +): Promise => { + try { + const updateData: Record = { + status: "AVAILABLE", + $unset: { currentAppointment: "" }, + }; + + if (completionNotes) { + updateData.lastAppointmentNotes = completionNotes; + updateData.lastAppointmentCompleted = new Date(); + } + + const dockDoor = await DockDoor.findOneAndUpdate( + { "worldRef.worldId": worldId, dockDoorId }, + updateData, + { new: true }, + ); + + return dockDoor?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to clear appointment from dock door: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAvailableDockDoors = async ( + worldId: string, + warehouseId: string, + doorType: string, + filters?: { + zoneId?: string; + capabilities?: { + maxTrailerLength?: number; + levelingDock?: boolean; + restraintSystem?: boolean; + }; + timeSlot?: { + start: Date; + 
end: Date; + }; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + doorType, + status: "AVAILABLE", + }; + + if (filters?.zoneId) { + query.zoneId = filters.zoneId; + } + + // Add capability filters + if (filters?.capabilities?.maxTrailerLength) { + query["capabilities.maxTrailerLength"] = { $gte: filters.capabilities.maxTrailerLength }; + } + if (filters?.capabilities?.levelingDock) { + query["capabilities.levelingDock"] = true; + } + if (filters?.capabilities?.restraintSystem) { + query["capabilities.restraintSystem"] = true; + } + + // Note: Time slot availability would require complex appointment scheduling logic + // For now, just returning doors that match capability requirements + + const dockDoors = await DockDoor.find(query).sort({ doorNumber: 1 }); + return dockDoors.map((door) => door.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get available dock doors: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDockDoorUtilization = async ( + worldId: string, + warehouseId: string, + filters?: { + doorType?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise<{ + totalDoors: number; + availableDoors: number; + occupiedDoors: number; + maintenanceDoors: number; + utilizationPercentage: number; + utilizationByType: Array<{ + doorType: string; + totalDoors: number; + occupiedDoors: number; + utilizationPercentage: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + warehouseId, + }; + + if (filters?.doorType?.length) { + matchStage.doorType = { $in: filters.doorType }; + } + + const result = await DockDoor.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalDoors: { $sum: 1 }, + availableDoors: { $sum: { $cond: [{ $eq: ["$status", "AVAILABLE"] }, 1, 0] } }, + occupiedDoors: { $sum: { $cond: [{ $eq: ["$status", "OCCUPIED"] }, 1, 0] } }, + maintenanceDoors: { $sum: { $cond: [{ $eq: ["$status", "MAINTENANCE"] }, 1, 0] } }, + typeBreakdown: { + $push: { + doorType: "$doorType", + status: "$status", + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalDoors: 0, + availableDoors: 0, + occupiedDoors: 0, + maintenanceDoors: 0, + utilizationPercentage: 0, + utilizationByType: [], + }; + } + + const data = result[0]; + const utilizationPercentage = + data.totalDoors > 0 ? (data.occupiedDoors / data.totalDoors) * 100 : 0; + + // Calculate utilization by type + const typeMap = new Map(); + data.typeBreakdown.forEach((item: any) => { + const existing = typeMap.get(item.doorType) || { total: 0, occupied: 0 }; + existing.total += 1; + if (item.status === "OCCUPIED") { + existing.occupied += 1; + } + typeMap.set(item.doorType, existing); + }); + + const utilizationByType = Array.from(typeMap.entries()).map( + ([doorType, stats]: [string, any]) => ({ + doorType: doorType || "UNKNOWN", + totalDoors: stats.total, + occupiedDoors: stats.occupied, + utilizationPercentage: stats.total > 0 ? 
(stats.occupied / stats.total) * 100 : 0, + }), + ); + + return { + totalDoors: data.totalDoors, + availableDoors: data.availableDoors, + occupiedDoors: data.occupiedDoors, + maintenanceDoors: data.maintenanceDoors, + utilizationPercentage: Math.round(utilizationPercentage * 100) / 100, + utilizationByType, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get dock door utilization: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDockDoorSchedule = async ( + worldId: string, + dockDoorId: string, + dateRange: { from: Date; to: Date }, +): Promise< + Array<{ + appointmentId: string; + carrierName: string; + trailerNumber: string; + scheduledArrival: Date; + appointmentType: string; + status: string; + }> +> => { + try { + // This would typically involve joining with an appointments/schedule collection + // For now, returning the current appointment if it exists within the date range + const dockDoor = await DockDoor.findOne({ + "worldRef.worldId": worldId, + dockDoorId, + }); + + if (!dockDoor?.currentAppointment) { + return []; + } + + const appointment = dockDoor.currentAppointment; + const scheduledArrival = appointment.startTime; + + if ( + scheduledArrival && + scheduledArrival >= dateRange.from && + scheduledArrival <= dateRange.to + ) { + return [ + { + appointmentId: appointment.appointmentId || "", + carrierName: appointment.carrier || "", + trailerNumber: appointment.trailerNumber || "", + scheduledArrival: scheduledArrival, + appointmentType: "SCHEDULED", + status: dockDoor.status, + }, + ]; + } + + return []; + } catch (error) { + throw new RepositoryError( + `Failed to get dock door schedule: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSDockDoorRepository = (worldId: string) => ({ + createDockDoor: (data: TDockDoorInput) => createDockDoor({ worldId } as TWorldRefModel, data), + getDockDoorsByWarehouse: ( + warehouseId: string, + filters?: { + doorType?: string[]; + status?: string[]; + zoneId?: string; + }, + ) => getDockDoorsByWarehouse(worldId, warehouseId, filters), + getDockDoorById: (dockDoorId: string) => getDockDoorById(worldId, dockDoorId), + updateDockDoorStatus: (dockDoorId: string, status: string, reason?: string) => + updateDockDoorStatus(worldId, dockDoorId, status, reason), + assignAppointmentToDoor: ( + dockDoorId: string, + appointment: { + appointmentId: string; + carrier: string; + trailerNumber: string; + startTime: Date; + expectedEndTime: Date; + }, + ) => assignAppointmentToDoor(worldId, dockDoorId, appointment), + clearAppointmentFromDoor: (dockDoorId: string, completionNotes?: string) => + clearAppointmentFromDoor(worldId, dockDoorId, completionNotes), + getAvailableDockDoors: ( + warehouseId: string, + doorType: string, + filters?: { + zoneId?: string; + capabilities?: { + maxTrailerLength?: number; + levelingDock?: boolean; + restraintSystem?: boolean; + }; + timeSlot?: { + start: Date; + end: Date; + }; + }, + ) => getAvailableDockDoors(worldId, warehouseId, doorType, filters), + getDockDoorUtilization: ( + warehouseId: string, + filters?: { + doorType?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, + ) => getDockDoorUtilization(worldId, warehouseId, filters), + getDockDoorSchedule: (dockDoorId: string, dateRange: { from: Date; to: Date }) => + getDockDoorSchedule(worldId, dockDoorId, dateRange), +}); + +export type TWMSDockDoorRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/inbound_order.wms.repository.ts 
b/packages/controlmart/src/repository/wms/inbound_order.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..062315f58ff53daf82291c1987eb0d95efe531fc --- /dev/null +++ b/packages/controlmart/src/repository/wms/inbound_order.wms.repository.ts @@ -0,0 +1,643 @@ +import { + InboundOrder, + type TInboundOrderModel, + type TInboundOrderInput, +} from "../../models/wms/inbound_order.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; + +const createInboundOrder = async ( + world: TWorldRefModel, + data: TInboundOrderInput, +): Promise => { + try { + if (!data.warehouseId || !data.lines?.length) { + throw new RepositoryError("Warehouse ID and order lines are required", "VALIDATION_ERROR"); + } + + const existing = await InboundOrder.findOne({ + "worldRef.worldId": world.worldId, + $or: [ + { inboundOrderId: data.inboundOrderId }, + { poNumber: data.poNumber, warehouseId: data.warehouseId }, + ], + }); + + if (existing && data.poNumber) { + throw new RepositoryError( + `Inbound order with PO number ${data.poNumber} already exists`, + "DUPLICATE_ERROR", + ); + } + + const order = await InboundOrder.create({ + ...data, + worldRef: world, + }); + + return order.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create inbound order: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getInboundOrdersByStatus = async ( + worldId: string, + status: string[], + filters?: { + warehouseId?: string; + vendorId?: string; + dateStart?: Date; + dateEnd?: Date; + priority?: string[]; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (status.length > 0) { + query.orderStatus = { $in: status }; + } + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.vendorId) { + query["vendor.vendorId"] = filters.vendorId; + } + if (filters?.dateStart && filters?.dateEnd) { + query.expectedDeliveryDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.priority?.length) { + query.priority = { $in: filters.priority }; + } + + const orders = await InboundOrder.find(query).sort({ expectedDeliveryDate: 1, priority: 1 }); + return orders.map((order) => order.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get inbound orders by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getInboundOrderById = async ( + worldId: string, + inboundOrderId: string, +): Promise => { + try { + const order = await InboundOrder.findOne({ + "worldRef.worldId": worldId, + inboundOrderId, + }); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get inbound order by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getInboundOrderByPoNumber = async ( + worldId: string, + poNumber: string, +): Promise => { + try { + const order = await InboundOrder.findOne({ + "worldRef.worldId": worldId, + poNumber, + }); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get inbound order by PO number: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateOrderStatus = async ( + worldId: string, + inboundOrderId: string, + status: string, + statusDate?: Date, +): Promise => { + try { + const updateData: Record = { + orderStatus: status, + }; + 
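+ // Illustrative note: the update is applied as a single $set, so for status "ARRIVED" the
+ // resulting update document is { $set: { orderStatus: "ARRIVED", "timing.arrivedAt": timestamp } };
+ // the switch below maps each order status to its corresponding timing.* field.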
+ const timestamp = statusDate || new Date(); + + switch (status) { + case "SCHEDULED": + updateData["timing.scheduledAt"] = timestamp; + break; + case "IN_TRANSIT": + updateData["timing.inTransitAt"] = timestamp; + break; + case "ARRIVED": + updateData["timing.arrivedAt"] = timestamp; + break; + case "RECEIVING": + updateData["timing.receivingStartedAt"] = timestamp; + break; + case "RECEIVED": + updateData["timing.receivedAt"] = timestamp; + break; + case "CLOSED": + updateData["timing.closedAt"] = timestamp; + break; + } + + const order = await InboundOrder.findOneAndUpdate( + { "worldRef.worldId": worldId, inboundOrderId }, + { $set: updateData }, + { new: true }, + ); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update order status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateReceivingProgress = async ( + worldId: string, + inboundOrderId: string, + lineNumber: number, + receivedQuantity: number, + lotNumber?: string, + expirationDate?: Date, +): Promise => { + try { + const updateData: Record = { + "lines.$.receivedQuantity": receivedQuantity, + "lines.$.lineStatus": receivedQuantity > 0 ? "RECEIVED" : "RECEIVING", + }; + + if (lotNumber) { + updateData["lines.$.lotNumber"] = lotNumber; + } + if (expirationDate) { + updateData["lines.$.expirationDate"] = expirationDate; + } + + const order = await InboundOrder.findOneAndUpdate( + { + "worldRef.worldId": worldId, + inboundOrderId, + "lines.lineNumber": lineNumber, + }, + { $set: updateData }, + { new: true }, + ); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update receiving progress: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOrdersExpectedToday = async ( + worldId: string, + warehouseId: string, + targetDate?: Date, +): Promise => { + try { + const today = targetDate || new Date(); + const startOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate()); + const endOfDay = new Date(today.getFullYear(), today.getMonth(), today.getDate() + 1); + + const orders = await InboundOrder.find({ + "worldRef.worldId": worldId, + warehouseId, + expectedDeliveryDate: { + $gte: startOfDay, + $lt: endOfDay, + }, + orderStatus: { $in: ["SCHEDULED", "IN_TRANSIT", "ARRIVED"] }, + }).sort({ expectedDeliveryDate: 1, priority: 1 }); + + return orders.map((order) => order.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get orders expected today: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getReceivingMetrics = async ( + worldId: string, + filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + vendorId?: string; + }, +): Promise<{ + totalOrders: number; + completedOrders: number; + averageReceivingTime: number; + onTimeReceipts: number; + lateReceipts: number; + receivingAccuracy: number; + ordersByStatus: Array<{ + status: string; + count: number; + }>; + topVendors: Array<{ + vendorId: string; + vendorName: string; + orderCount: number; + totalLines: number; + onTimePercentage: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.vendorId) { + matchStage["vendor.vendorId"] = filters.vendorId; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await 
InboundOrder.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalOrders: { $sum: 1 }, + completedOrders: { + $sum: { $cond: [{ $eq: ["$orderStatus", "RECEIVED"] }, 1, 0] }, + }, + onTimeReceipts: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$orderStatus", "RECEIVED"] }, + { $lte: ["$timing.receivedAt", "$expectedDeliveryDate"] }, + ], + }, + 1, + 0, + ], + }, + }, + lateReceipts: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$orderStatus", "RECEIVED"] }, + { $gt: ["$timing.receivedAt", "$expectedDeliveryDate"] }, + ], + }, + 1, + 0, + ], + }, + }, + statusBreakdown: { + $push: "$orderStatus", + }, + vendorBreakdown: { + $push: { + vendorId: "$vendor.vendorId", + vendorName: "$vendor.vendorName", + lineCount: { $size: "$lines" }, + onTime: { + $cond: [ + { + $and: [ + { $eq: ["$orderStatus", "RECEIVED"] }, + { $lte: ["$timing.receivedAt", "$expectedDeliveryDate"] }, + ], + }, + 1, + 0, + ], + }, + }, + }, + receivingTimes: { + $push: { + $cond: [ + { $eq: ["$orderStatus", "RECEIVED"] }, + { $subtract: ["$timing.receivedAt", "$timing.receivingStartedAt"] }, + null, + ], + }, + }, + accuracyData: { + $push: { + expectedTotal: { $sum: "$lines.expectedQuantity" }, + receivedTotal: { $sum: "$lines.receivedQuantity" }, + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalOrders: 0, + completedOrders: 0, + averageReceivingTime: 0, + onTimeReceipts: 0, + lateReceipts: 0, + receivingAccuracy: 0, + ordersByStatus: [], + topVendors: [], + }; + } + + const data = result[0]; + + // Calculate average receiving time + const validTimes = data.receivingTimes.filter((time: any) => time !== null); + const averageReceivingTime = + validTimes.length > 0 + ? validTimes.reduce((sum: number, time: number) => sum + time, 0) / + validTimes.length / + (1000 * 60 * 60) // Convert to hours + : 0; + + // Calculate receiving accuracy + let totalExpected = 0; + let totalReceived = 0; + data.accuracyData.forEach((item: any) => { + totalExpected += item.expectedTotal || 0; + totalReceived += item.receivedTotal || 0; + }); + const receivingAccuracy = totalExpected > 0 ? (totalReceived / totalExpected) * 100 : 0; + + // Process status breakdown + const statusMap = new Map(); + data.statusBreakdown.forEach((status: string) => { + statusMap.set(status, (statusMap.get(status) || 0) + 1); + }); + + const ordersByStatus = Array.from(statusMap.entries()).map(([status, count]) => ({ + status, + count, + })); + + // Process vendor breakdown + const vendorMap = new Map(); + data.vendorBreakdown.forEach((item: any) => { + if (item.vendorId) { + const existing = vendorMap.get(item.vendorId) || { + vendorId: item.vendorId, + vendorName: item.vendorName, + orderCount: 0, + totalLines: 0, + onTimeCount: 0, + }; + existing.orderCount += 1; + existing.totalLines += item.lineCount; + existing.onTimeCount += item.onTime; + vendorMap.set(item.vendorId, existing); + } + }); + + const topVendors = Array.from(vendorMap.values()) + .map((vendor: any) => ({ + vendorId: vendor.vendorId, + vendorName: vendor.vendorName, + orderCount: vendor.orderCount, + totalLines: vendor.totalLines, + onTimePercentage: + vendor.orderCount > 0 ? 
(vendor.onTimeCount / vendor.orderCount) * 100 : 0, + })) + .sort((a, b) => b.orderCount - a.orderCount) + .slice(0, 10); + + return { + totalOrders: data.totalOrders, + completedOrders: data.completedOrders, + averageReceivingTime: Math.round(averageReceivingTime * 100) / 100, + onTimeReceipts: data.onTimeReceipts, + lateReceipts: data.lateReceipts, + receivingAccuracy: Math.round(receivingAccuracy * 100) / 100, + ordersByStatus, + topVendors, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get receiving metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOrdersByVendor = async ( + worldId: string, + vendorId: string, + filters?: { + warehouseId?: string; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + "vendor.vendorId": vendorId, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.status?.length) { + query.orderStatus = { $in: filters.status }; + } + if (filters?.dateStart && filters?.dateEnd) { + query.expectedDeliveryDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const orders = await InboundOrder.find(query).sort({ expectedDeliveryDate: -1 }); + return orders.map((order) => order.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get orders by vendor: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSInboundOrderRepository = (worldId: string) => ({ + createInboundOrder: (data: TInboundOrderInput) => + createInboundOrder({ worldId } as TWorldRefModel, data), + getInboundOrdersByStatus: ( + status: string[], + filters?: { + warehouseId?: string; + vendorId?: string; + dateStart?: Date; + dateEnd?: Date; + priority?: string[]; + }, + ) => getInboundOrdersByStatus(worldId, status, filters), + getInboundOrderById: (inboundOrderId: string) => getInboundOrderById(worldId, inboundOrderId), + getInboundOrderByPoNumber: (poNumber: string) => getInboundOrderByPoNumber(worldId, poNumber), + updateOrderStatus: ( + args: string | { inboundOrderId: string; status: string; statusDate?: Date }, + status?: string, + statusDate?: Date, + ) => + typeof args === "string" + ? updateOrderStatus(worldId, args, status!, statusDate) + : updateOrderStatus(worldId, args.inboundOrderId, args.status, args.statusDate), + updateReceivingProgress: ( + args: + | string + | { + inboundOrderId: string; + lineNumber: number; + receivedQuantity: number; + lotNumber?: string; + expirationDate?: Date; + }, + lineNumber?: number, + receivedQuantity?: number, + lotNumber?: string, + expirationDate?: Date, + ) => + typeof args === "string" + ? updateReceivingProgress( + worldId, + args, + lineNumber!, + receivedQuantity!, + lotNumber, + expirationDate, + ) + : updateReceivingProgress( + worldId, + args.inboundOrderId, + args.lineNumber, + args.receivedQuantity, + args.lotNumber, + args.expirationDate, + ), + getOrdersExpectedToday: ( + args: string | { warehouseId: string; targetDate?: Date }, + targetDate?: Date, + ) => + typeof args === "string" + ? 
getOrdersExpectedToday(worldId, args, targetDate) + : getOrdersExpectedToday(worldId, args.warehouseId, args.targetDate), + getReceivingMetrics: (filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + vendorId?: string; + }) => getReceivingMetrics(worldId, filters), + getOrdersByVendor: ( + args: + | string + | { + vendorId: string; + filters?: { + warehouseId?: string; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + }; + }, + filters?: { + warehouseId?: string; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, + ) => + typeof args === "string" + ? getOrdersByVendor(worldId, args, filters) + : getOrdersByVendor(worldId, args.vendorId, args.filters), + patch: (inboundOrderId: string, updates: Record) => + patchInboundOrder(worldId, inboundOrderId, updates), +}); + +/** + * Patch inbound order with partial updates + * Allowed fields: orderStatus, dates.expectedArrival, priority + */ +const patchInboundOrder = async ( + worldId: string, + inboundOrderId: string, + updates: Record, +): Promise => { + const allowedFields = ["orderStatus", "dates", "priority"]; + const $set: Record = { updatedAt: new Date() }; + + for (const [key, value] of Object.entries(updates)) { + if (value === undefined) continue; + + // Handle dot notation keys like "dates.expectedArrival" + const topLevelKey = key.split(".")[0]; + if (allowedFields.includes(topLevelKey!)) { + $set[key] = value; + } + } + + // Auto-set timestamps based on status + if (updates.orderStatus) { + const statusTimestamps: Record = { + SCHEDULED: "timing.scheduledAt", + IN_TRANSIT: "timing.inTransitAt", + ARRIVED: "timing.arrivedAt", + RECEIVING: "timing.receivingStartedAt", + RECEIVED: "timing.receivedAt", + CLOSED: "timing.closedAt", + }; + const timestampField = statusTimestamps[updates.orderStatus]; + if (timestampField) { + $set[timestampField] = new Date(); + } + } + + const order = await InboundOrder.findOneAndUpdate( + { "worldRef.worldId": worldId, inboundOrderId }, + { $set }, + { new: true }, + ); + return order?.toJSON() || null; +}; + +export type TWMSInboundOrderRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/inbound_receiving_transaction.wms.repository.ts b/packages/controlmart/src/repository/wms/inbound_receiving_transaction.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..0977a3d4ce6bacf4225f65082ccd106a20585420 --- /dev/null +++ b/packages/controlmart/src/repository/wms/inbound_receiving_transaction.wms.repository.ts @@ -0,0 +1,448 @@ +import { + ReceivingTransaction, + type TReceivingTransactionModel, + type TReceivingTransactionInput, +} from "../../models/wms/inbound_receiving_transaction.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createReceivingTransaction = async ( + world: TWorldRefModel, + data: TReceivingTransactionInput, +): Promise => { + try { + if (!data.warehouseId || !data.inboundOrderId) { + throw new RepositoryError( + "Warehouse ID and inbound order ID are required", + "VALIDATION_ERROR", + ); + } + + const transaction = await ReceivingTransaction.create({ + ...data, + worldRef: world, + }); + + return transaction.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to 
create receiving transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTransactionById = async ( + worldId: string, + transactionId: string, +): Promise => { + try { + const transaction = await ReceivingTransaction.findOne({ + "worldRef.worldId": worldId, + transactionId, + }); + + return transaction?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get transaction by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllTransactions = async ( + worldId: string, + filters?: { + warehouseId?: string; + inboundOrderId?: string; + status?: string[]; + userId?: string; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) query.warehouseId = filters.warehouseId; + if (filters?.inboundOrderId) query.inboundOrderId = filters.inboundOrderId; + if (filters?.status?.length) query.receivingStatus = { $in: filters.status }; + if (filters?.userId) query.userId = filters.userId; + if (filters?.dateStart && filters?.dateEnd) { + query.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const transactions = await ReceivingTransaction.find(query) + .sort({ createdAt: -1 }) + .limit(limit + 1); + + const hasMore = transactions.length > limit; + const results = hasMore ? transactions.slice(0, limit) : transactions; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((t) => t.toJSON()), + totalCount: await ReceivingTransaction.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get all transactions: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateTransactionStatus = async ( + worldId: string, + transactionId: string, + status: string, + notes?: string, +): Promise => { + try { + const updateData: Record = { + status, + statusUpdatedAt: new Date(), + }; + + if (notes) { + updateData.notes = notes; + } + + const transaction = await ReceivingTransaction.findOneAndUpdate( + { "worldRef.worldId": worldId, transactionId }, + { $set: updateData }, + { new: true }, + ); + + return transaction?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update transaction status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addItemToTransaction = async ( + worldId: string, + transactionId: string, + item: { + sku: string; + productName: string; + expectedQuantity: number; + receivedQuantity: number; + unitOfMeasure: string; + lotNumber?: string; + serialNumbers?: string[]; + condition?: string; + location?: { + binId?: string; + zoneId?: string; + }; + }, +): Promise => { + try { + const transaction = await ReceivingTransaction.findOneAndUpdate( + { "worldRef.worldId": worldId, transactionId }, + { + $push: { + items: item, + }, + }, + { new: true }, + ); + + return transaction?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to add item to transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getReceivingMetrics = async ( + worldId: string, + filters?: { + warehouseId?: string; + userId?: string; + dateStart?: Date; + dateEnd?: Date; + }, +): 
Promise<{ + totalTransactions: number; + completedTransactions: number; + pendingTransactions: number; + discrepancies: number; + totalItemsReceived: number; + totalItemsExpected: number; + receivingAccuracy: number; + averageProcessingTime: number; + userPerformance: Array<{ + userId: string; + transactionCount: number; + accuracy: number; + averageProcessingTime: number; + }>; + dailyVolume: Array<{ + date: string; + transactionCount: number; + itemsReceived: number; + accuracy: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.userId) { + matchStage.userId = filters.userId; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await ReceivingTransaction.aggregate([ + { $match: matchStage }, + { $unwind: { path: "$items", preserveNullAndEmptyArrays: true } }, + { + $group: { + _id: null, + totalTransactions: { $addToSet: "$transactionId" }, + completedTransactions: { + $sum: { $cond: [{ $eq: ["$status", "COMPLETED"] }, 1, 0] }, + }, + pendingTransactions: { + $sum: { $cond: [{ $ne: ["$status", "COMPLETED"] }, 1, 0] }, + }, + totalItemsReceived: { + $sum: { $ifNull: ["$items.receivedQuantity", 0] }, + }, + totalItemsExpected: { + $sum: { $ifNull: ["$items.expectedQuantity", 0] }, + }, + discrepancies: { + $sum: { + $cond: [{ $ne: ["$items.receivedQuantity", "$items.expectedQuantity"] }, 1, 0], + }, + }, + userBreakdown: { + $push: { + userId: "$userId", + transactionId: "$transactionId", + status: "$status", + itemReceived: { $ifNull: ["$items.receivedQuantity", 0] }, + itemExpected: { $ifNull: ["$items.expectedQuantity", 0] }, + processingTime: { + $cond: [ + { $eq: ["$status", "COMPLETED"] }, + { $subtract: ["$statusUpdatedAt", "$createdAt"] }, + null, + ], + }, + discrepancy: { + $cond: [{ $ne: ["$items.receivedQuantity", "$items.expectedQuantity"] }, 1, 0], + }, + }, + }, + dailyBreakdown: { + $push: { + date: { $dateEndString: { format: "%Y-%m-%d", date: "$createdAt" } }, + transactionId: "$transactionId", + itemReceived: { $ifNull: ["$items.receivedQuantity", 0] }, + itemExpected: { $ifNull: ["$items.expectedQuantity", 0] }, + discrepancy: { + $cond: [{ $ne: ["$items.receivedQuantity", "$items.expectedQuantity"] }, 1, 0], + }, + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalTransactions: 0, + completedTransactions: 0, + pendingTransactions: 0, + discrepancies: 0, + totalItemsReceived: 0, + totalItemsExpected: 0, + receivingAccuracy: 0, + averageProcessingTime: 0, + userPerformance: [], + dailyVolume: [], + }; + } + + const data = result[0]; + const totalTransactions = data.totalTransactions.length; + const receivingAccuracy = + data.totalItemsExpected > 0 ? 
(data.totalItemsReceived / data.totalItemsExpected) * 100 : 0; + + // Process user performance + const userMap = new Map(); + data.userBreakdown.forEach((item: any) => { + if (item.userId) { + const existing = userMap.get(item.userId) || { + userId: item.userId, + transactions: new Set(), + totalReceived: 0, + totalExpected: 0, + totalDiscrepancies: 0, + processingTimes: [], + }; + + existing.transactions.add(item.transactionId); + existing.totalReceived += item.itemReceived; + existing.totalExpected += item.itemExpected; + existing.totalDiscrepancies += item.discrepancy; + + if (item.processingTime) { + existing.processingTimes.push(item.processingTime); + } + + userMap.set(item.userId, existing); + } + }); + + const userPerformance = Array.from(userMap.values()).map((user: any) => ({ + userId: user.userId, + transactionCount: user.transactions.size, + accuracy: user.totalExpected > 0 ? (user.totalReceived / user.totalExpected) * 100 : 0, + averageProcessingTime: + user.processingTimes.length > 0 + ? user.processingTimes.reduce((sum: number, time: number) => sum + time, 0) / + user.processingTimes.length / + (1000 * 60) + : 0, + })); + + // Process daily volume + const dailyMap = new Map(); + data.dailyBreakdown.forEach((item: any) => { + const existing = dailyMap.get(item.date) || { + transactions: new Set(), + itemsReceived: 0, + itemsExpected: 0, + discrepancies: 0, + }; + + existing.transactions.add(item.transactionId); + existing.itemsReceived += item.itemReceived; + existing.itemsExpected += item.itemExpected; + existing.discrepancies += item.discrepancy; + + dailyMap.set(item.date, existing); + }); + + const dailyVolume = Array.from(dailyMap.entries()) + .map(([date, stats]: [string, any]) => ({ + date, + transactionCount: stats.transactions.size, + itemsReceived: stats.itemsReceived, + accuracy: stats.itemsExpected > 0 ? (stats.itemsReceived / stats.itemsExpected) * 100 : 0, + })) + .sort((a, b) => a.date.localeCompare(b.date)); + + const allProcessingTimes = data.userBreakdown + .filter((item: any) => item.processingTime) + .map((item: any) => item.processingTime); + + const averageProcessingTime = + allProcessingTimes.length > 0 + ? 
allProcessingTimes.reduce((sum: number, time: number) => sum + time, 0) / + allProcessingTimes.length / + (1000 * 60) + : 0; + + return { + totalTransactions, + completedTransactions: data.completedTransactions, + pendingTransactions: data.pendingTransactions, + discrepancies: data.discrepancies, + totalItemsReceived: data.totalItemsReceived, + totalItemsExpected: data.totalItemsExpected, + receivingAccuracy: Math.round(receivingAccuracy * 100) / 100, + averageProcessingTime: Math.round(averageProcessingTime * 100) / 100, + userPerformance, + dailyVolume, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get receiving metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSInboundReceivingTransactionRepository = (worldId: string) => ({ + createReceivingTransaction: (data: TReceivingTransactionInput) => + createReceivingTransaction({ worldId } as TWorldRefModel, data), + getTransactionById: (transactionId: string) => getTransactionById(worldId, transactionId), + getAllTransactions: (filters?: { + warehouseId?: string; + inboundOrderId?: string; + status?: string[]; + userId?: string; + dateStart?: Date; + dateEnd?: Date; + cursor?: string; + limit?: number; + }) => getAllTransactions(worldId, filters), + updateTransactionStatus: (transactionId: string, status: string, notes?: string) => + updateTransactionStatus(worldId, transactionId, status, notes), + addItemToTransaction: ( + transactionId: string, + item: { + sku: string; + productName: string; + expectedQuantity: number; + receivedQuantity: number; + unitOfMeasure: string; + lotNumber?: string; + serialNumbers?: string[]; + condition?: string; + location?: { + binId?: string; + zoneId?: string; + }; + }, + ) => addItemToTransaction(worldId, transactionId, item), + getReceivingMetrics: (filters?: { + warehouseId?: string; + userId?: string; + dateStart?: Date; + dateEnd?: Date; + }) => getReceivingMetrics(worldId, filters), +}); + +export type TWMSInboundReceivingTransactionRepository = ReturnType< + typeof WMSInboundReceivingTransactionRepository +>; diff --git a/packages/controlmart/src/repository/wms/index.ts b/packages/controlmart/src/repository/wms/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..130336391e5fde72f9ad3b4b0c2541f4465a0020 --- /dev/null +++ b/packages/controlmart/src/repository/wms/index.ts @@ -0,0 +1,27 @@ +// Core WMS Infrastructure & Configuration +export * from "./dc.wms.repository"; +export * from "./warehouse.wms.repository"; +export * from "./dock_door.wms.repository"; + +// Storage & Location Management +export * from "./bin.wms.repository"; +export * from "./zone.wms.repository"; + +// Inventory Management & Tracking +export * from "./inventory_transaction.wms.repository"; +export * from "./cycle_count.wms.repository"; +export * from "./replenishment.wms.repository"; + +// Inbound Operations +export * from "./inbound_order.wms.repository"; +export * from "./inbound_receiving_transaction.wms.repository"; + +// Outbound Operations +export * from "./outbound_order.wms.repository"; +export * from "./outbound_shipment.wms.repository"; + +// Task Management & Operations +export * from "./task.wms.repository"; + +// Analytics & Reporting +export * from "./daily_metrics.wms.repository"; diff --git a/packages/controlmart/src/repository/wms/inventory_transaction.wms.repository.ts b/packages/controlmart/src/repository/wms/inventory_transaction.wms.repository.ts new file mode 100644 index 
0000000000000000000000000000000000000000..460aa770680d7b67ca01024991b20a97c46f194e --- /dev/null +++ b/packages/controlmart/src/repository/wms/inventory_transaction.wms.repository.ts @@ -0,0 +1,511 @@ +import { + InventoryTransaction, + type TInventoryTransactionModel, + type TInventoryTransactionInput, +} from "../../models/wms/inventory_transaction.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; +import { Inventory } from "../../models/wms/inventory.wms.model"; + +const createInventoryTransaction = async ( + world: TWorldRefModel, + data: TInventoryTransactionInput, +): Promise => { + try { + if (!data.warehouseId || !data.transactionType || !data.productId || !data.quantity) { + throw new RepositoryError( + "Warehouse ID, transaction type, product ID, and quantity are required", + "VALIDATION_ERROR", + ); + } + + const transaction = await InventoryTransaction.create({ + ...data, + worldRef: world, + transactionDate: data.transactionDate || new Date(), + }); + + return transaction.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create inventory transaction: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTransactionsByProduct = async ( + worldId: string, + productId: string, + filters?: { + warehouseId?: string; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + binId?: string; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + $or: [{ productId }, { sku: productId }], + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.transactionType?.length) { + query.transactionType = { $in: filters.transactionType }; + } + if (filters?.dateStart && filters?.dateEnd) { + query.transactionDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.binId) { + query.$or = [{ fromBinId: filters.binId }, { toBinId: filters.binId }]; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const transactions = await InventoryTransaction.find(query) + .sort({ transactionDate: -1 }) + .limit(limit + 1); + + const hasMore = transactions.length > limit; + const results = hasMore ? transactions.slice(0, limit) : transactions; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((transaction) => transaction.toJSON()), + totalCount: await InventoryTransaction.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get transactions by product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTransactionsByBin = async ( + worldId: string, + binId: string, + filters?: { + warehouseId?: string; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + productId?: string; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + $or: [{ fromBinId: binId }, { toBinId: binId }], + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.transactionType?.length) { + query.transactionType = { $in: filters.transactionType }; + } + if (filters?.dateStart && filters?.dateEnd) { + query.transactionDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.productId) { + query.productId = filters.productId; + } + + const transactions = await InventoryTransaction.find(query).sort({ transactionDate: -1 }); + return transactions.map((transaction) => transaction.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get transactions by bin: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTransactionsByReference = async ( + worldId: string, + referenceType: string, + referenceId: string, +): Promise => { + try { + const transactions = await InventoryTransaction.find({ + "worldRef.worldId": worldId, + referenceType, + referenceId, + }).sort({ transactionDate: -1 }); + + return transactions.map((transaction) => transaction.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get transactions by reference: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getInventoryMovementReport = async ( + worldId: string, + filters?: { + warehouseId?: string; + productIds?: string[]; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise<{ + totalTransactions: number; + transactionsByType: Array<{ + transactionType: string; + count: number; + totalQuantity: number; + }>; + topMovingProducts: Array<{ + productId: string; + sku: string; + totalQuantity: number; + transactionCount: number; + }>; + movementsByDate: Array<{ + date: string; + transactionCount: number; + totalQuantity: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.productIds?.length) { + matchStage.productId = { $in: filters.productIds }; + } + if (filters?.transactionType?.length) { + matchStage.transactionType = { $in: filters.transactionType }; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage.transactionDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await InventoryTransaction.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalTransactions: { $sum: 1 }, + totalQuantity: { $sum: "$quantity" }, + typeBreakdown: { + $push: { + transactionType: "$transactionType", + quantity: "$quantity", + }, + }, + productBreakdown: { + $push: { + productId: "$productId", + sku: "$sku", + quantity: "$quantity", + }, + }, + dateBreakdown: { + $push: { + date: { $dateEndString: { format: "%Y-%m-%d", date: "$transactionDate" } }, + 
quantity: "$quantity", + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalTransactions: 0, + transactionsByType: [], + topMovingProducts: [], + movementsByDate: [], + }; + } + + const data = result[0]; + + // Process transaction types + const typeMap = new Map(); + data.typeBreakdown.forEach((item: any) => { + const existing = typeMap.get(item.transactionType) || { count: 0, totalQuantity: 0 }; + existing.count += 1; + existing.totalQuantity += Math.abs(item.quantity); + typeMap.set(item.transactionType, existing); + }); + + const transactionsByType = Array.from(typeMap.entries()).map( + ([type, stats]: [string, any]) => ({ + transactionType: type, + count: stats.count, + totalQuantity: stats.totalQuantity, + }), + ); + + // Process products + const productMap = new Map(); + data.productBreakdown.forEach((item: any) => { + const existing = productMap.get(item.productId) || { + productId: item.productId, + sku: item.sku, + transactionCount: 0, + totalQuantity: 0, + }; + existing.transactionCount += 1; + existing.totalQuantity += Math.abs(item.quantity); + productMap.set(item.productId, existing); + }); + + const topMovingProducts = Array.from(productMap.values()) + .sort((a, b) => b.totalQuantity - a.totalQuantity) + .slice(0, 10); + + // Process dates + const dateMap = new Map(); + data.dateBreakdown.forEach((item: any) => { + const existing = dateMap.get(item.date) || { transactionCount: 0, totalQuantity: 0 }; + existing.transactionCount += 1; + existing.totalQuantity += Math.abs(item.quantity); + dateMap.set(item.date, existing); + }); + + const movementsByDate = Array.from(dateMap.entries()) + .map(([date, stats]: [string, any]) => ({ + date, + transactionCount: stats.transactionCount, + totalQuantity: stats.totalQuantity, + })) + .sort((a, b) => a.date.localeCompare(b.date)); + + return { + totalTransactions: data.totalTransactions, + transactionsByType, + topMovingProducts, + movementsByDate, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get inventory movement report: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTransactionHistory = async ( + worldId: string, + filters?: { + warehouseId?: string; + productId?: string; + binId?: string; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.productId) { + query.productId = filters.productId; + } + if (filters?.binId) { + query.$or = [{ fromBinId: filters.binId }, { toBinId: filters.binId }]; + } + if (filters?.transactionType?.length) { + query.transactionType = { $in: filters.transactionType }; + } + if (filters?.dateStart && filters?.dateEnd) { + query.transactionDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + let queryBuilder = InventoryTransaction.find(query).sort({ transactionDate: -1 }); + + if (filters?.limit) { + queryBuilder = queryBuilder.limit(filters.limit); + } + + const transactions = await queryBuilder; + return transactions.map((transaction) => transaction.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get transaction history: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getInventoryAdjustments = async ( + worldId: string, + filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + userId?: string; + }, +): Promise => { + try { + const query: 
Record = { + "worldRef.worldId": worldId, + transactionType: "ADJUST", + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.dateStart && filters?.dateEnd) { + query.transactionDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.userId) { + query.userId = filters.userId; + } + + const adjustments = await InventoryTransaction.find(query).sort({ transactionDate: -1 }); + return adjustments.map((adjustment) => adjustment.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get inventory adjustments: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + + +const getInventoryTotalByProduct = async ( + worldId: string, + productId: string, + warehouseId?: string, +): Promise<{ totalAvailable: number; totalOnHand: number; totalAllocated: number }> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + $or: [{ productId }, { sku: productId }], + }; + + if (warehouseId) { + matchStage.warehouseId = warehouseId; + } + + const result = await Inventory.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalAvailable: { $sum: "$quantityAvailable" }, + totalOnHand: { $sum: "$quantityOnHand" }, + totalAllocated: { $sum: "$quantityAllocated" }, + }, + }, + ]); + + if (result.length === 0) { + return { totalAvailable: 0, totalOnHand: 0, totalAllocated: 0 }; + } + + return { + totalAvailable: result[0].totalAvailable || 0, + totalOnHand: result[0].totalOnHand || 0, + totalAllocated: result[0].totalAllocated || 0, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get inventory total by product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSInventoryTransactionRepository = (worldId: string) => ({ + createInventoryTransaction: (data: TInventoryTransactionInput) => + createInventoryTransaction({ worldId } as TWorldRefModel, data), + getTransactionsByProduct: ( + productId: string, + filters?: { + warehouseId?: string; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + binId?: string; + }, + ) => getTransactionsByProduct(worldId, productId, filters), + getTransactionsByBin: ( + binId: string, + filters?: { + warehouseId?: string; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + productId?: string; + }, + ) => getTransactionsByBin(worldId, binId, filters), + getTransactionsByReference: (referenceType: string, referenceId: string) => + getTransactionsByReference(worldId, referenceType, referenceId), + getInventoryMovementReport: (filters?: { + warehouseId?: string; + productIds?: string[]; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + }) => getInventoryMovementReport(worldId, filters), + getTransactionHistory: (filters?: { + warehouseId?: string; + productId?: string; + binId?: string; + transactionType?: string[]; + dateStart?: Date; + dateEnd?: Date; + limit?: number; + }) => getTransactionHistory(worldId, filters), + getInventoryAdjustments: (filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + userId?: string; + }) => getInventoryAdjustments(worldId, filters), + getInventoryTotalByProduct: (productId: string, warehouseId?: string) => + getInventoryTotalByProduct(worldId, productId, warehouseId), +}); + +export type TWMSInventoryTransactionRepository = ReturnType< + typeof WMSInventoryTransactionRepository +>; diff --git a/packages/controlmart/src/repository/wms/outbound_order.wms.repository.ts 
b/packages/controlmart/src/repository/wms/outbound_order.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..21a2b179c73398d628978fdd67716afed62786f4 --- /dev/null +++ b/packages/controlmart/src/repository/wms/outbound_order.wms.repository.ts @@ -0,0 +1,689 @@ + +import { + OutboundOrder, + type TOutboundOrderModel, + type TOutboundOrderInput, +} from "../../models/wms/outbound_order.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createOutboundOrder = async ( + world: TWorldRefModel, + data: TOutboundOrderInput, +): Promise => { + try { + if (!data.warehouseId || !data.orderNumber || !data.lines?.length) { + throw new RepositoryError( + "Warehouse ID, order number, and order lines are required", + "VALIDATION_ERROR", + ); + } + + // [HARDENING] Strict Address Validation (App Logic) + if (data.shipToAddress) { + const requiredFields = ["street1", "city", "postalCode", "country"]; + const missing = requiredFields.filter((f) => !data.shipToAddress![f as keyof typeof data.shipToAddress]); + + if (missing.length > 0) { + throw new RepositoryError( + `Invalid Ship-To Address. Missing required fields: ${missing.join(", ")}`, + "VALIDATION_ERROR", + ); + } + } + + const existing = await OutboundOrder.findOne({ + "worldRef.worldId": world.worldId, + orderNumber: data.orderNumber, + }); + + if (existing) { + throw new RepositoryError( + `Outbound order with number ${data.orderNumber} already exists`, + "DUPLICATE_ERROR", + ); + } + + const order = await OutboundOrder.create({ + ...data, + worldRef: world, + }); + + return order.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create outbound order: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOutboundOrdersByStatus = async ( + worldId: string, + status: string[], + filters?: { + warehouseId?: string; + customerId?: string; + orderType?: string[]; + dateStart?: Date; + dateEnd?: Date; + priority?: string[]; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (status.length > 0) { + query.orderStatus = { $in: status }; + } + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.customerId) { + query.customerId = filters.customerId; + } + if (filters?.orderType?.length) { + query.orderType = { $in: filters.orderType }; + } + if (filters?.dateStart && filters?.dateEnd) { + query.orderDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.priority?.length) { + query.priority = { $in: filters.priority }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const orders = await OutboundOrder.find(query) + .sort({ priority: 1, orderDate: 1 }) + .limit(limit + 1); + + const hasMore = orders.length > limit; + const results = hasMore ? orders.slice(0, limit) : orders; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((order) => order.toJSON()), + totalCount: await OutboundOrder.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get outbound orders by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOutboundOrderById = async ( + worldId: string, + orderId: string, +): Promise => { + try { + const order = await OutboundOrder.findOne({ + "worldRef.worldId": worldId, + orderId, + }); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get outbound order by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOutboundOrderByNumber = async ( + worldId: string, + orderNumber: string, +): Promise => { + try { + const order = await OutboundOrder.findOne({ + "worldRef.worldId": worldId, + orderNumber, + }); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get outbound order by number: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateOrderStatus = async ( + worldId: string, + orderId: string, + status: string, + statusDate?: Date, +): Promise => { + try { + const updateData: Record = { + orderStatus: status, + }; + + const timestamp = statusDate || new Date(); + + switch (status) { + case "RELEASED": + updateData["timing.releasedAt"] = timestamp; + break; + case "ALLOCATED": + updateData["timing.allocatedAt"] = timestamp; + break; + case "PICKING": + updateData["timing.pickingStartedAt"] = timestamp; + break; + case "PICKED": + updateData["timing.pickedAt"] = timestamp; + break; + case "PACKED": + updateData["timing.packedAt"] = timestamp; + break; + case "SHIPPED": + updateData["dates.actualShipDate"] = timestamp; + break; + } + + const order = await OutboundOrder.findOneAndUpdate( + { "worldRef.worldId": worldId, orderId }, + { $set: updateData }, + { new: true }, + ); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update order status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateOrderPriority = async ( + worldId: string, + orderId: string, + priority: string, +): Promise => { + try { + const validPriorities = ["RUSH", "URGENT", "NORMAL", "STANDARD"]; + if (!validPriorities.includes(priority)) { + throw new Error(`Invalid priority. 
Must be one of: ${validPriorities.join(", ")}`); + } + + const order = await OutboundOrder.findOneAndUpdate( + { "worldRef.worldId": worldId, orderId }, + { $set: { orderPriority: priority, updatedAt: new Date() } }, + { new: true }, + ); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update order priority: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const allocateOrderLine = async ( + worldId: string, + orderId: string, + lineNumber: number, + allocatedQuantity: number, + allocationDetails?: { + binId: string; + quantity: number; + lotNumber?: string; + }[], +): Promise => { + try { + const updateData: Record = { + "lines.$.allocatedQuantity": allocatedQuantity, + "lines.$.lineStatus": "ALLOCATED", + }; + + if (allocationDetails) { + updateData["lines.$.allocations"] = allocationDetails; + } + + const order = await OutboundOrder.findOneAndUpdate( + { + "worldRef.worldId": worldId, + orderId, + "lines.lineNumber": lineNumber, + }, + { $set: updateData }, + { new: true }, + ); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to allocate order line: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updatePickingProgress = async ( + worldId: string, + orderId: string, + lineNumber: number, + pickedQuantity: number, +): Promise => { + try { + const order = await OutboundOrder.findOneAndUpdate( + { + "worldRef.worldId": worldId, + orderId, + "lines.lineNumber": lineNumber, + }, + { + $set: { + "lines.$.pickedQuantity": pickedQuantity, + "lines.$.lineStatus": pickedQuantity > 0 ? "PICKED" : "PICKING", + }, + }, + { new: true }, + ); + + return order?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update picking progress: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOrdersReadyForPicking = async ( + worldId: string, + warehouseId: string, + filters?: { + priority?: string[]; + orderType?: string[]; + customerId?: string; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + orderStatus: { $in: ["RELEASED", "ALLOCATED"] }, + }; + + if (filters?.priority?.length) { + query.priority = { $in: filters.priority }; + } + if (filters?.orderType?.length) { + query.orderType = { $in: filters.orderType }; + } + if (filters?.customerId) { + query.customerId = filters.customerId; + } + + const orders = await OutboundOrder.find(query).sort({ priority: 1, "timing.releasedAt": 1 }); + return orders.map((order) => order.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get orders ready for picking: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOrderFulfillmentMetrics = async ( + worldId: string, + filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + orderType?: string[]; + }, +): Promise<{ + totalOrders: number; + completedOrders: number; + averageFulfillmentTime: number; + onTimeShipments: number; + fulfillmentRate: number; + ordersByStatus: Array<{ + status: string; + count: number; + }>; + topCustomers: Array<{ + customerId: string; + customerName: string; + orderCount: number; + totalLines: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.orderType?.length) { + matchStage.orderType = { $in: filters.orderType }; + } + if (filters?.dateStart && 
filters?.dateEnd) { + matchStage.orderDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await OutboundOrder.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalOrders: { $sum: 1 }, + completedOrders: { + $sum: { $cond: [{ $eq: ["$orderStatus", "SHIPPED"] }, 1, 0] }, + }, + onTimeShipments: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$orderStatus", "SHIPPED"] }, + { $lte: ["$timing.shippedAt", "$requestedShipDate"] }, + ], + }, + 1, + 0, + ], + }, + }, + statusBreakdown: { + $push: "$orderStatus", + }, + customerBreakdown: { + $push: { + customerId: "$customerId", + customerName: "$customerName", + lineCount: { $size: "$lines" }, + }, + }, + fulfillmentTimes: { + $push: { + $cond: [ + { $eq: ["$orderStatus", "SHIPPED"] }, + { $subtract: ["$timing.shippedAt", "$orderDate"] }, + null, + ], + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalOrders: 0, + completedOrders: 0, + averageFulfillmentTime: 0, + onTimeShipments: 0, + fulfillmentRate: 0, + ordersByStatus: [], + topCustomers: [], + }; + } + + const data = result[0]; + + // Calculate average fulfillment time + const validTimes = data.fulfillmentTimes.filter((time: any) => time !== null); + const averageFulfillmentTime = + validTimes.length > 0 + ? validTimes.reduce((sum: number, time: number) => sum + time, 0) / + validTimes.length / + (1000 * 60 * 60) // Convert to hours + : 0; + + // Process status breakdown + const statusMap = new Map(); + data.statusBreakdown.forEach((status: string) => { + statusMap.set(status, (statusMap.get(status) || 0) + 1); + }); + + const ordersByStatus = Array.from(statusMap.entries()).map(([status, count]) => ({ + status, + count, + })); + + // Process customer breakdown + const customerMap = new Map(); + data.customerBreakdown.forEach((item: any) => { + if (item.customerId) { + const existing = customerMap.get(item.customerId) || { + customerId: item.customerId, + customerName: item.customerName, + orderCount: 0, + totalLines: 0, + }; + existing.orderCount += 1; + existing.totalLines += item.lineCount; + customerMap.set(item.customerId, existing); + } + }); + + const topCustomers = Array.from(customerMap.values()) + .sort((a, b) => b.orderCount - a.orderCount) + .slice(0, 10); + + return { + totalOrders: data.totalOrders, + completedOrders: data.completedOrders, + averageFulfillmentTime: Math.round(averageFulfillmentTime * 100) / 100, + onTimeShipments: data.onTimeShipments, + fulfillmentRate: data.totalOrders > 0 ? 
(data.completedOrders / data.totalOrders) * 100 : 0, + ordersByStatus, + topCustomers, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get order fulfillment metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getOrdersByCustomer = async ( + worldId: string, + customerId: string, + filters?: { + warehouseId?: string; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + customerId, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.status?.length) { + query.orderStatus = { $in: filters.status }; + } + if (filters?.dateStart && filters?.dateEnd) { + query.orderDate = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const orders = await OutboundOrder.find(query).sort({ orderDate: -1 }); + return orders.map((order) => order.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get orders by customer: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSOutboundOrderRepository = (worldId: string) => ({ + createOutboundOrder: (data: TOutboundOrderInput) => + createOutboundOrder({ worldId } as TWorldRefModel, data), + getOutboundOrdersByStatus: ( + args: + | string[] + | { + status: string[]; + warehouseId?: string; + customerId?: string; + orderType?: string[]; + dateStart?: Date; + dateEnd?: Date; + priority?: string[]; + limit?: number; + cursor?: string; + }, + filters?: { + warehouseId?: string; + customerId?: string; + orderType?: string[]; + dateStart?: Date; + dateEnd?: Date; + priority?: string[]; + limit?: number; + cursor?: string; + }, + ) => + Array.isArray(args) + ? getOutboundOrdersByStatus(worldId, args, filters) + : getOutboundOrdersByStatus(worldId, args.status, { + warehouseId: args.warehouseId, + customerId: args.customerId, + orderType: args.orderType, + dateStart: args.dateStart, + dateEnd: args.dateEnd, + priority: args.priority, + limit: args.limit, + cursor: args.cursor, + }), + getOutboundOrderById: (orderId: string) => getOutboundOrderById(worldId, orderId), + getOutboundOrderByNumber: (orderNumber: string) => getOutboundOrderByNumber(worldId, orderNumber), + updateOrderStatus: ( + args: string | { orderId: string; status: string; statusDate?: Date }, + status?: string, + statusDate?: Date, + ) => + typeof args === "string" + ? updateOrderStatus(worldId, args, status!, statusDate) + : updateOrderStatus(worldId, args.orderId, args.status, args.statusDate), + updateOrderPriority: ( + args: string | { orderId: string; priority: string }, + priority?: string, + ) => + typeof args === "string" + ? updateOrderPriority(worldId, args, priority!) 
+ : updateOrderPriority(worldId, args.orderId, args.priority), + allocateOrderLine: ( + args: { orderId: string; lineNumber: number; allocatedQuantity: number; allocationDetails?: any[] } + ) => allocateOrderLine(worldId, args.orderId, args.lineNumber, args.allocatedQuantity, args.allocationDetails), + updatePickingProgress: ( + args: { orderId: string; lineNumber: number; pickedQuantity: number } + ) => updatePickingProgress(worldId, args.orderId, args.lineNumber, args.pickedQuantity), + getOrdersReadyForPicking: ( + warehouseId: string, + filters?: { + priority?: string[]; + orderType?: string[]; + customerId?: string; + }, + ) => getOrdersReadyForPicking(worldId, warehouseId, filters), + getOrderFulfillmentMetrics: (filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + orderType?: string[]; + }) => getOrderFulfillmentMetrics(worldId, filters), + getOrdersByCustomer: ( + customerId: string, + filters?: { + warehouseId?: string; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, + ) => getOrdersByCustomer(worldId, customerId, filters), + patch: (orderId: string, updates: Record) => + patchOutboundOrder(worldId, orderId, updates), +}); + +/** + * Patch outbound order with partial updates + * Allowed fields: orderStatus, orderPriority, dates.requiredShipDate + */ +const patchOutboundOrder = async ( + worldId: string, + orderId: string, + updates: Record, +): Promise => { + const allowedFields = ["orderStatus", "orderPriority", "dates"]; + const $set: Record = { updatedAt: new Date() }; + + for (const [key, value] of Object.entries(updates)) { + if (value === undefined) continue; + + // Handle dot notation keys like "dates.requiredShipDate" + const topLevelKey = key.split(".")[0]!; + if (allowedFields.includes(topLevelKey)) { + $set[key] = value; + } + } + + // Auto-set timestamps based on status + if (updates.orderStatus) { + const statusTimestamps: Record = { + RELEASED: "timing.releasedAt", + ALLOCATED: "timing.allocatedAt", + PICKING: "timing.pickingStartedAt", + PICKED: "timing.pickedAt", + PACKING: "timing.packingStartedAt", + PACKED: "timing.packedAt", + SHIPPED: "dates.actualShipDate", + }; + const timestampField = statusTimestamps[updates.orderStatus]; + if (timestampField) { + $set[timestampField] = new Date(); + } + } + + const order = await OutboundOrder.findOneAndUpdate( + { "worldRef.worldId": worldId, orderId }, + { $set }, + { new: true }, + ); + return order?.toJSON() || null; +}; + +export type TWMSOutboundOrderRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/outbound_shipment.wms.repository.ts b/packages/controlmart/src/repository/wms/outbound_shipment.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..c7df23c83f30c640e0f7cd3c2d0119983d6be90a --- /dev/null +++ b/packages/controlmart/src/repository/wms/outbound_shipment.wms.repository.ts @@ -0,0 +1,653 @@ + +import { + Shipment, + type TShipmentModel, + type TShipmentInput, +} from "../../models/wms/outbound_shipment.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; + +const createShipment = async ( + world: TWorldRefModel, + data: TShipmentInput & { orderId?: string; status?: string }, +): Promise => { + try { + // If lines or address are missing but orderId is provided, fetch from order + if (data.orderId && (!data.lines?.length || !data.toAddress || !data.warehouseId)) { + const { OutboundOrder } = await 
import("../../models/wms/outbound_order.wms.model"); + const order = await OutboundOrder.findOne({ + "worldRef.worldId": world.worldId, + orderId: data.orderId, + }); + + if (order) { + if (!data.warehouseId) { + data.warehouseId = order.warehouseId; + } + if (!data.lines?.length) { + data.lines = order.lines.map((l: any) => ({ + lineNumber: l.lineNumber, + orderId: order.orderId, + orderLineId: l.orderLineId, + sku: l.sku, + productName: l.productName, + quantityShipped: l.orderedQuantity, + quantityOrdered: l.orderedQuantity, + unitOfMeasure: l.uom, + })) as any; + } + if (!data.toAddress) { + data.toAddress = order.shipToAddress as any; + } + if (!data.carrier?.name) { + data.carrier = { + ...data.carrier, + name: order.shipping?.carrierName, + } as any; + } + if (!data.serviceLevel) { + data.serviceLevel = order.shipping?.serviceLevel; + } + } else { + console.warn(`[createShipment] Order not found for ID: ${data.orderId} in world: ${world.worldId}`); + } + } + + if (data.status && !data.shipmentStatus) { + (data as any).shipmentStatus = data.status; + } + + if (!data.warehouseId || !data.lines?.length || !data.toAddress) { + console.error("[createShipment] Validation failed:", { + hasWarehouseId: !!data.warehouseId, + linesLength: data.lines?.length, + hasToAddress: !!data.toAddress, + orderId: data.orderId, + }); + throw new RepositoryError( + "Warehouse ID, shipment lines, and to address are required", + "VALIDATION_ERROR", + ); + } + + // [HARDENING] Validate HU Type (Application Logic) + if ( + data.customFields?.huType && + !["STANDARD", "EURO", "CHEP", "GOH"].includes(data.customFields.huType) + ) { + throw new RepositoryError( + `Invalid HU Type: ${data.customFields.huType}. Allowed: STANDARD, EURO, CHEP, GOH`, + "VALIDATION_ERROR", + ); + } + + // [HARDENING] Validate Weight (Max 2000kg) + if (data.customFields?.weight && Number(data.customFields.weight) > 2000) { + throw new RepositoryError( + `Weight exceeds limit of 2000kg per HU`, + "VALIDATION_ERROR", + ); + } + + const shipment = await Shipment.create({ + ...data, + worldRef: world, + }); + + return shipment.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create shipment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentsByStatus = async ( + worldId: string, + status: string[], + filters?: { + warehouseId?: string; + carrierId?: string; + serviceLevel?: string; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + shipmentStatus: { $in: status }, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.carrierId) { + query["carrier.name"] = filters.carrierId; + } + if (filters?.serviceLevel) { + query.serviceLevel = filters.serviceLevel; + } + if (filters?.dateStart && filters?.dateEnd) { + query.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const shipments = await Shipment.find(query).sort({ createdAt: -1 }); + return shipments.map((shipment) => shipment.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get shipments by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentById = async ( + worldId: string, + shipmentId: string, +): Promise => { + try { + const shipment = await Shipment.findOne({ + "worldRef.worldId": worldId, + shipmentId, + }); + + return shipment?.toJSON() || null; + } catch (error) { + throw 
new RepositoryError( + `Failed to get shipment by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateShipmentStatus = async ( + worldId: string, + shipmentId: string, + status: string, + statusDate?: Date, + trackingNumber?: string, +): Promise => { + try { + const updateData: Record = { + shipmentStatus: status, + }; + + const timestamp = statusDate || new Date(); + + switch (status) { + case "MANIFESTED": + updateData["dates.manifestDate"] = timestamp; + break; + case "SHIPPED": + updateData["dates.actualShipTime"] = timestamp; + if (trackingNumber) { + updateData.trackingNumber = trackingNumber; + } + break; + case "IN_TRANSIT": + updateData["dates.actualShipTime"] = updateData["dates.actualShipTime"] || timestamp; + break; + case "DELIVERED": + updateData["dates.actualDeliveryDate"] = timestamp; + break; + } + + const shipment = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { $set: updateData }, + { new: true }, + ); + + return shipment?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update shipment status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentsByWarehouse = async ( + worldId: string, + warehouseId: string, + filters?: { + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + carrier?: string; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + }; + + if (filters?.status?.length) { + query.shipmentStatus = { $in: filters.status }; + } + if (filters?.carrier) { + query["carrier.name"] = filters.carrier; + } + if (filters?.dateStart && filters?.dateEnd) { + query.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const shipments = await Shipment.find(query).sort({ createdAt: -1 }); + return shipments.map((shipment) => shipment.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get shipments by warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addTrackingEvent = async ( + worldId: string, + shipmentId: string, + event: { + eventType: string; + eventDate: Date; + location: string; + description: string; + carrierEventCode?: string; + }, +): Promise => { + try { + const shipment = await Shipment.findOneAndUpdate( + { "worldRef.worldId": worldId, shipmentId }, + { + $push: { + events: event, + }, + }, + { new: true }, + ); + + return shipment?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to add tracking event: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentMetrics = async ( + worldId: string, + filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + carrier?: string[]; + }, +): Promise<{ + totalShipments: number; + shippedShipments: number; + deliveredShipments: number; + exceptionShipments: number; + averageTransitTime: number; + onTimeDeliveryRate: number; + shipmentsByCarrier: Array<{ + carrier: string; + count: number; + onTimeRate: number; + avgTransitTime: number; + }>; + shipmentsByDay: Array<{ + date: string; + shipmentCount: number; + deliveredCount: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.carrier?.length) { + matchStage["carrier.name"] = { $in: filters.carrier }; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage.createdAt = { + $gte: filters.dateStart, + 
$lte: filters.dateEnd, + }; + } + + const result = await Shipment.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalShipments: { $sum: 1 }, + shippedShipments: { + $sum: { $cond: [{ $in: ["$shipmentStatus", ["PICKED_UP", "IN_TRANSIT", "DELIVERED"]] }, 1, 0] }, + }, + deliveredShipments: { + $sum: { $cond: [{ $eq: ["$shipmentStatus", "DELIVERED"] }, 1, 0] }, + }, + exceptionShipments: { + $sum: { $cond: [{ $eq: ["$shipmentStatus", "EXCEPTION"] }, 1, 0] }, + }, + onTimeDeliveries: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$shipmentStatus", "DELIVERED"] }, + { $lte: ["$dates.actualDeliveryDate", "$dates.estimatedDeliveryDate"] }, + ], + }, + 1, + 0, + ], + }, + }, + carrierBreakdown: { + $push: { + carrier: "$carrier.name", + status: "$shipmentStatus", + transitTime: { + $cond: [ + { $eq: ["$shipmentStatus", "DELIVERED"] }, + { $subtract: ["$dates.actualDeliveryDate", "$dates.actualShipTime"] }, + null, + ], + }, + onTime: { + $cond: [ + { + $and: [ + { $eq: ["$shipmentStatus", "DELIVERED"] }, + { $lte: ["$dates.actualDeliveryDate", "$dates.estimatedDeliveryDate"] }, + ], + }, + 1, + 0, + ], + }, + }, + }, + dailyBreakdown: { + $push: { + date: { $dateToString: { format: "%Y-%m-%d", date: "$createdAt" } }, + status: "$shipmentStatus", + }, + }, + transitTimes: { + $push: { + $cond: [ + { $eq: ["$shipmentStatus", "DELIVERED"] }, + { $subtract: ["$dates.actualDeliveryDate", "$dates.actualShipTime"] }, + null, + ], + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalShipments: 0, + shippedShipments: 0, + deliveredShipments: 0, + exceptionShipments: 0, + averageTransitTime: 0, + onTimeDeliveryRate: 0, + shipmentsByCarrier: [], + shipmentsByDay: [], + }; + } + + const data = result[0]; + + // Calculate average transit time + const validTransitTimes = data.transitTimes.filter((time: any) => time !== null); + const averageTransitTime = + validTransitTimes.length > 0 + ? validTransitTimes.reduce((sum: number, time: number) => sum + time, 0) / + validTransitTimes.length / + (1000 * 60 * 60 * 24) // Convert to days + : 0; + + // Calculate on-time delivery rate + const onTimeDeliveryRate = + data.deliveredShipments > 0 ? (data.onTimeDeliveries / data.deliveredShipments) * 100 : 0; + + // Process carrier breakdown + const carrierMap = new Map(); + data.carrierBreakdown.forEach((item: any) => { + if (item.carrier) { + const existing = carrierMap.get(item.carrier) || { + carrier: item.carrier, + count: 0, + deliveredCount: 0, + onTimeCount: 0, + totalTransitTime: 0, + transitTimeCount: 0, + }; + existing.count += 1; + if (item.status === "DELIVERED") { + existing.deliveredCount += 1; + existing.onTimeCount += item.onTime; + if (item.transitTime) { + existing.totalTransitTime += item.transitTime; + existing.transitTimeCount += 1; + } + } + carrierMap.set(item.carrier, existing); + } + }); + + const shipmentsByCarrier = Array.from(carrierMap.values()).map((carrier: any) => ({ + carrier: carrier.carrier, + count: carrier.count, + onTimeRate: + carrier.deliveredCount > 0 ? (carrier.onTimeCount / carrier.deliveredCount) * 100 : 0, + avgTransitTime: + carrier.transitTimeCount > 0 + ?
carrier.totalTransitTime / carrier.transitTimeCount / (1000 * 60 * 60 * 24) + : 0, + })); + + // Process daily breakdown + const dailyMap = new Map(); + data.dailyBreakdown.forEach((item: any) => { + const existing = dailyMap.get(item.date) || { shipmentCount: 0, deliveredCount: 0 }; + existing.shipmentCount += 1; + if (item.status === "DELIVERED") { + existing.deliveredCount += 1; + } + dailyMap.set(item.date, existing); + }); + + const shipmentsByDay = Array.from(dailyMap.entries()) + .map(([date, stats]: [string, any]) => ({ + date, + shipmentCount: stats.shipmentCount, + deliveredCount: stats.deliveredCount, + })) + .sort((a, b) => a.date.localeCompare(b.date)); + + return { + totalShipments: data.totalShipments, + shippedShipments: data.shippedShipments, + deliveredShipments: data.deliveredShipments, + exceptionShipments: data.exceptionShipments, + averageTransitTime: Math.round(averageTransitTime * 100) / 100, + onTimeDeliveryRate: Math.round(onTimeDeliveryRate * 100) / 100, + shipmentsByCarrier, + shipmentsByDay, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get shipment metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentsReadyToShip = async ( + worldId: string, + warehouseId: string, + filters?: { + carrier?: string; + serviceLevel?: string; + priorityOrders?: boolean; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + shipmentStatus: "MANIFESTED", + }; + + if (filters?.carrier) { + query["carrier.name"] = filters.carrier; + } + if (filters?.serviceLevel) { + query.serviceLevel = filters.serviceLevel; + } + + const shipments = await Shipment.find(query).sort({ + priority: -1, + "dates.estimatedDeliveryDate": 1, + createdAt: 1, + }); + + return shipments.map((shipment) => shipment.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get shipments ready to ship: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getShipmentsByTrackingNumber = async ( + worldId: string, + trackingNumber: string, +): Promise => { + try { + const shipment = await Shipment.findOne({ + "worldRef.worldId": worldId, + trackingNumber, + }); + + return shipment?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get shipment by tracking number: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSOutboundShipmentRepository = (worldId: string) => ({ + createShipment: (data: TShipmentInput) => createShipment({ worldId } as TWorldRefModel, data), + getShipmentsByStatus: ( + status: string[], + filters?: { + warehouseId?: string; + carrierId?: string; + serviceLevel?: string; + dateStart?: Date; + dateEnd?: Date; + }, + ) => getShipmentsByStatus(worldId, status, filters), + getShipmentById: (shipmentId: string) => getShipmentById(worldId, shipmentId), + updateShipmentStatus: ( + args: string | { shipmentId: string; status: string; statusDate?: Date; trackingNumber?: string }, + status?: string, + statusDate?: Date, + trackingNumber?: string, + ) => + typeof args === "string" + ? 
updateShipmentStatus(worldId, args, status!, statusDate, trackingNumber) + : updateShipmentStatus( + worldId, + args.shipmentId, + args.status, + args.statusDate, + args.trackingNumber, + ), + getShipmentsByWarehouse: ( + args: + | string + | { + warehouseId: string; + filters?: { + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + carrier?: string; + }; + }, + filters?: { + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + carrier?: string; + }, + ) => + typeof args === "string" + ? getShipmentsByWarehouse(worldId, args, filters) + : getShipmentsByWarehouse(worldId, args.warehouseId, args.filters), + addTrackingEvent: (args: { + shipmentId: string; + event: { + eventType: string; + eventDate: Date; + location: string; + description: string; + carrierEventCode?: string; + }; + }) => addTrackingEvent(worldId, args.shipmentId, args.event), + getShipmentMetrics: (filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + carrier?: string[]; + }) => getShipmentMetrics(worldId, filters), + getShipmentsReadyToShip: ( + args: + | string + | { + warehouseId: string; + filters?: { + carrier?: string; + serviceLevel?: string; + priorityOrders?: boolean; + }; + }, + filters?: { + carrier?: string; + serviceLevel?: string; + priorityOrders?: boolean; + }, + ) => + typeof args === "string" + ? getShipmentsReadyToShip(worldId, args, filters) + : getShipmentsReadyToShip(worldId, args.warehouseId, args.filters), + getShipmentsByTrackingNumber: (trackingNumber: string) => + getShipmentsByTrackingNumber(worldId, trackingNumber), +}); + +export type TWMSOutboundShipmentRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/replenishment.wms.repository.ts b/packages/controlmart/src/repository/wms/replenishment.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..ec3c6cf578777f96eca9fbf064a25433adeacdff --- /dev/null +++ b/packages/controlmart/src/repository/wms/replenishment.wms.repository.ts @@ -0,0 +1,468 @@ +import { + Replenishment, + type TReplenishmentModel, + type TReplenishmentInput, +} from "../../models/wms/replenishment.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; + +const createReplenishment = async ( + world: TWorldRefModel, + data: TReplenishmentInput, +): Promise => { + try { + if (!data.warehouseId || !data.productId || !data.fromBin || !data.toBin) { + throw new RepositoryError( + "Warehouse ID, product ID, from bin, and to bin are required", + "VALIDATION_ERROR", + ); + } + + const replenishment = await Replenishment.create({ + ...data, + worldRef: world, + }); + + return replenishment.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create replenishment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getReplenishmentsByStatus = async ( + worldId: string, + status: string[], + filters?: { + warehouseId?: string; + productId?: string; + replenishmentType?: string[]; + priority?: number; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + status: { $in: status }, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.productId) { + query.productId = filters.productId; + } + if (filters?.replenishmentType?.length) { + query.replenishmentType = { $in: filters.replenishmentType }; + } + if (filters?.priority !== undefined) 
{ + query.priority = { $gte: filters.priority }; + } + + const replenishments = await Replenishment.find(query).sort({ priority: -1, createdAt: 1 }); + return replenishments.map((replenishment) => replenishment.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get replenishments by status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getReplenishmentById = async ( + worldId: string, + replenishmentId: string, +): Promise => { + try { + const replenishment = await Replenishment.findOne({ + "worldRef.worldId": worldId, + replenishmentId, + }); + + return replenishment?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get replenishment by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateReplenishmentStatus = async ( + worldId: string, + replenishmentId: string, + status: string, + taskId?: string, +): Promise => { + try { + const updateData: Record = { status }; + + if (status === "TASK_CREATED" && taskId) { + updateData.taskId = taskId; + } + + const replenishment = await Replenishment.findOneAndUpdate( + { "worldRef.worldId": worldId, replenishmentId }, + { $set: updateData }, + { new: true }, + ); + + return replenishment?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update replenishment status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getReplenishmentsByProduct = async ( + worldId: string, + productId: string, + filters?: { + warehouseId?: string; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + productId, + }; + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + if (filters?.dateStart && filters?.dateEnd) { + query.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const replenishments = await Replenishment.find(query).sort({ createdAt: -1 }); + return replenishments.map((replenishment) => replenishment.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get replenishments by product: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getReplenishmentsByBin = async ( + worldId: string, + binId: string, + type: "source" | "destination", + filters?: { + warehouseId?: string; + status?: string[]; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (type === "source") { + query["fromBin.binId"] = binId; + } else { + query["toBin.binId"] = binId; + } + + if (filters?.warehouseId) { + query.warehouseId = filters.warehouseId; + } + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + + const replenishments = await Replenishment.find(query).sort({ priority: -1, createdAt: 1 }); + return replenishments.map((replenishment) => replenishment.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get replenishments by bin: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const approveReplenishment = async ( + worldId: string, + replenishmentId: string, + approvedBy: string, + approvedQuantity?: number, +): Promise => { + try { + const updateData: Record = { + status: "APPROVED", + approvedBy, + approvedDate: new Date(), + }; + + if (approvedQuantity !== undefined) { + updateData["quantity.approved"] = approvedQuantity; + } + + const replenishment = await 
Replenishment.findOneAndUpdate( + { "worldRef.worldId": worldId, replenishmentId }, + { $set: updateData }, + { new: true }, + ); + + return replenishment?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to approve replenishment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getReplenishmentMetrics = async ( + worldId: string, + filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + replenishmentType?: string[]; + }, +): Promise<{ + totalReplenishments: number; + pendingReplenishments: number; + completedReplenishments: number; + averageCompletionTime: number; + replenishmentsByType: Array<{ + type: string; + count: number; + completionRate: number; + }>; + topReplenishedProducts: Array<{ + productId: string; + sku: string; + replenishmentCount: number; + totalQuantity: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.replenishmentType?.length) { + matchStage.replenishmentType = { $in: filters.replenishmentType }; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage.createdAt = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await Replenishment.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalReplenishments: { $sum: 1 }, + pendingReplenishments: { + $sum: { + $cond: [ + { $in: ["$status", ["SUGGESTED", "APPROVED", "TASK_CREATED", "IN_PROGRESS"]] }, + 1, + 0, + ], + }, + }, + completedReplenishments: { + $sum: { $cond: [{ $eq: ["$status", "COMPLETED"] }, 1, 0] }, + }, + typeBreakdown: { + $push: { + type: "$replenishmentType", + status: "$status", + }, + }, + productBreakdown: { + $push: { + productId: "$productId", + sku: "$sku", + quantity: "$quantity.suggested", + status: "$status", + }, + }, + completionTimes: { + $push: { + $cond: [ + { $eq: ["$status", "COMPLETED"] }, + { $subtract: ["$completedDate", "$createdAt"] }, + null, + ], + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalReplenishments: 0, + pendingReplenishments: 0, + completedReplenishments: 0, + averageCompletionTime: 0, + replenishmentsByType: [], + topReplenishedProducts: [], + }; + } + + const data = result[0]; + + // Calculate average completion time + const validCompletionTimes = data.completionTimes.filter((time: any) => time !== null); + const averageCompletionTime = + validCompletionTimes.length > 0 + ? validCompletionTimes.reduce((sum: number, time: number) => sum + time, 0) / + validCompletionTimes.length / + (1000 * 60 * 60) // Convert to hours + : 0; + + // Process type breakdown + const typeMap = new Map(); + data.typeBreakdown.forEach((item: any) => { + const existing = typeMap.get(item.type) || { total: 0, completed: 0 }; + existing.total += 1; + if (item.status === "COMPLETED") { + existing.completed += 1; + } + typeMap.set(item.type, existing); + }); + + const replenishmentsByType = Array.from(typeMap.entries()).map( + ([type, stats]: [string, any]) => ({ + type: type || "UNKNOWN", + count: stats.total, + completionRate: stats.total > 0 ? 
(stats.completed / stats.total) * 100 : 0, + }), + ); + + // Process product breakdown + const productMap = new Map(); + data.productBreakdown.forEach((item: any) => { + if (item.status === "COMPLETED") { + const existing = productMap.get(item.productId) || { + productId: item.productId, + sku: item.sku, + replenishmentCount: 0, + totalQuantity: 0, + }; + existing.replenishmentCount += 1; + existing.totalQuantity += item.quantity || 0; + productMap.set(item.productId, existing); + } + }); + + const topReplenishedProducts = Array.from(productMap.values()) + .sort((a, b) => b.replenishmentCount - a.replenishmentCount) + .slice(0, 10); + + return { + totalReplenishments: data.totalReplenishments, + pendingReplenishments: data.pendingReplenishments, + completedReplenishments: data.completedReplenishments, + averageCompletionTime: Math.round(averageCompletionTime * 100) / 100, + replenishmentsByType, + topReplenishedProducts, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get replenishment metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const cancelReplenishment = async ( + worldId: string, + replenishmentId: string, + reason: string, + cancelledBy: string, +): Promise => { + try { + const replenishment = await Replenishment.findOneAndUpdate( + { "worldRef.worldId": worldId, replenishmentId }, + { + $set: { + status: "CANCELLED", + cancelReason: reason, + cancelledBy, + cancelledDate: new Date(), + }, + }, + { new: true }, + ); + + return replenishment?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to cancel replenishment: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSReplenishmentRepository = (worldId: string) => ({ + createReplenishment: (data: TReplenishmentInput) => + createReplenishment({ worldId } as TWorldRefModel, data), + getReplenishmentsByStatus: ( + status: string[], + filters?: { + warehouseId?: string; + productId?: string; + replenishmentType?: string[]; + priority?: number; + }, + ) => getReplenishmentsByStatus(worldId, status, filters), + getReplenishmentById: (replenishmentId: string) => getReplenishmentById(worldId, replenishmentId), + updateReplenishmentStatus: (replenishmentId: string, status: string, taskId?: string) => + updateReplenishmentStatus(worldId, replenishmentId, status, taskId), + getReplenishmentsByProduct: ( + productId: string, + filters?: { + warehouseId?: string; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, + ) => getReplenishmentsByProduct(worldId, productId, filters), + getReplenishmentsByBin: ( + binId: string, + type: "source" | "destination", + filters?: { + warehouseId?: string; + status?: string[]; + }, + ) => getReplenishmentsByBin(worldId, binId, type, filters), + approveReplenishment: (replenishmentId: string, approvedBy: string, approvedQuantity?: number) => + approveReplenishment(worldId, replenishmentId, approvedBy, approvedQuantity), + getReplenishmentMetrics: (filters?: { + warehouseId?: string; + dateStart?: Date; + dateEnd?: Date; + replenishmentType?: string[]; + }) => getReplenishmentMetrics(worldId, filters), + cancelReplenishment: (replenishmentId: string, reason: string, cancelledBy: string) => + cancelReplenishment(worldId, replenishmentId, reason, cancelledBy), +}); + +export type TWMSReplenishmentRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/task.wms.repository.ts b/packages/controlmart/src/repository/wms/task.wms.repository.ts new file mode 100644 index 
0000000000000000000000000000000000000000..852c0eed7b36a997bd246d89901ddef6e3a54719 --- /dev/null +++ b/packages/controlmart/src/repository/wms/task.wms.repository.ts @@ -0,0 +1,625 @@ +import type { TWorldRefModel } from "../../models/shared.model"; +import { Task, type TTaskInput, type TTaskModel } from "../../models/wms"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; + +const createTask = async (world: TWorldRefModel, data: TTaskInput): Promise => { + try { + if (!data.taskType) { + throw new RepositoryError("Task type is required", "VALIDATION_ERROR"); + } + // Only check for duplicates if taskId is explicitly provided + if (data.taskId) { + const existing = await Task.findOne({ + "worldRef.worldId": world.worldId, + taskId: data.taskId, + }); + + if (existing) { + throw new RepositoryError(`Task with ID ${data.taskId} already exists`, "DUPLICATE_ERROR"); + } + } + + const transaction = await Task.create({ + ...data, + worldRef: world, + "timing.createdAt": new Date(), + }); + + const jsonified = transaction.toJSON(); + return jsonified; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError(`Failed to create task: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const getTaskLogs = async ( + worldId: string, + filters?: { + taskIds?: string[]; + taskTypes?: string[]; + userIds?: string[]; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + zoneId?: string; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.taskIds?.length) { + query.taskId = { $in: filters.taskIds }; + } + if (filters?.taskTypes?.length) { + query.taskType = { $in: filters.taskTypes }; + } + if (filters?.userIds?.length) { + query["assignment.userId"] = { $in: filters.userIds }; + } + if (filters?.status?.length) { + query.taskStatus = { $in: filters.status }; + } + if (filters?.dateStart && filters?.dateEnd) { + query["timing.createdAt"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + if (filters?.zoneId) { + query.zoneId = filters.zoneId; + } + + const transactions = await Task.find(query).sort({ "timing.createdAt": -1 }); + const jsonified = transactions.map((transaction) => transaction.toJSON()); + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to get task logs: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTasksByUser = async ( + worldId: string, + userId: string, + status?: string[], +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + "assignment.userId": userId, + }; + + if (status?.length) { + query.taskStatus = { $in: status }; + } + + const transactions = await Task.find(query).sort({ priority: -1, "timing.createdAt": 1 }); + const jsonified = transactions.map((transaction) => transaction.toJSON()); + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to get tasks by user: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTaskTimestamps = async ( + worldId: string, + filters?: { + taskId?: string; + taskTypes?: string[]; + userIds?: string[]; + dateStart?: Date; + dateEnd?: Date; + includeHistorical?: boolean; + }, +): Promise< + Array<{ + taskId: string; + taskType: string; + userId?: string; + timestamps: { + createdAt?: Date; + releasedAt?: Date; + assignedAt?: Date; + startedAt?: Date; + completedAt?: Date; + estimatedDuration?: number; + 
actualDuration?: number; + }; + }> +> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.taskId) { + query.taskId = filters.taskId; + } + if (filters?.taskTypes?.length) { + query.taskType = { $in: filters.taskTypes }; + } + if (filters?.userIds?.length) { + query["assignment.userId"] = { $in: filters.userIds }; + } + if (filters?.dateStart && filters?.dateEnd) { + query["timing.createdAt"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + if (!filters?.includeHistorical) { + query.taskStatus = { $ne: "COMPLETED" }; + } + + const tasks = await Task.find(query) + .select("taskId taskType assignment.userId timing") + .sort({ "timing.createdAt": -1 }); + + return tasks.map((task: any) => ({ + taskId: task.taskId, + taskType: task.taskType, + userId: task.assignment?.userId || undefined, + timestamps: { + createdAt: task.timing?.createdAt || undefined, + releasedAt: task.timing?.releasedAt || undefined, + assignedAt: task.timing?.assignedAt || undefined, + startedAt: task.timing?.startedAt || undefined, + completedAt: task.timing?.completedAt || undefined, + estimatedDuration: task.timing?.estimatedDuration || undefined, + actualDuration: task.timing?.actualDuration || undefined, + }, + })); + } catch (error) { + throw new RepositoryError( + `Failed to get task timestamps: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateTaskStatus = async ( + worldId: string, + taskId: string, + status: string, + timestamp?: Date, + userId?: string, +): Promise => { + try { + const updateData: Record = { + taskStatus: status, + }; + + const now = timestamp || new Date(); + + switch (status) { + case "RELEASED": + updateData["timing.releasedAt"] = now; + break; + case "ASSIGNED": + updateData["timing.assignedAt"] = now; + if (userId) { + updateData["assignment.userId"] = userId; + } + break; + case "IN_PROGRESS": + updateData["timing.startedAt"] = now; + break; + case "COMPLETED": + updateData["timing.completedAt"] = now; + if (userId) { + updateData.completedBy = userId; + } + break; + } + + const transaction = await Task.findOneAndUpdate( + { "worldRef.worldId": worldId, taskId }, + { $set: updateData }, + { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to update task status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getTaskPerformanceMetrics = async ( + worldId: string, + filters?: { + taskTypes?: string[]; + userIds?: string[]; + dateStart?: Date; + dateEnd?: Date; + }, +): Promise<{ + totalTasks: number; + completedTasks: number; + averageDuration: number; + onTimeCompletion: number; + productivityByUser: Array<{ + userId: string; + tasksCompleted: number; + averageDuration: number; + unitsPerHour: number; + }>; + taskTypeMetrics: Array<{ + taskType: string; + count: number; + averageDuration: number; + }>; +}> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.taskTypes?.length) { + matchStage.taskType = { $in: filters.taskTypes }; + } + if (filters?.userIds?.length) { + matchStage["assignment.userId"] = { $in: filters.userIds }; + } + if (filters?.dateStart && filters?.dateEnd) { + matchStage["timing.createdAt"] = { + $gte: filters.dateStart, + $lte: filters.dateEnd, + }; + } + + const result = await Task.aggregate([ + { $match: matchStage }, + { + $group: { + _id: null, + totalTasks: { $sum: 1 }, + completedTasks: { + $sum: { $cond: [{ $eq: 
["$taskStatus", "COMPLETED"] }, 1, 0] }, + }, + averageDuration: { $avg: "$timing.actualDuration" }, + onTimeCompletions: { + $sum: { + $cond: [ + { + $and: [ + { $eq: ["$taskStatus", "COMPLETED"] }, + { + $lte: ["$timing.actualDuration", "$timing.estimatedDuration"], + }, + ], + }, + 1, + 0, + ], + }, + }, + userMetrics: { + $push: { + userId: "$assignment.userId", + taskType: "$taskType", + actualDuration: "$timing.actualDuration", + unitsPerHour: "$performance.unitsPerHour", + status: "$taskStatus", + }, + }, + taskTypeMetrics: { + $push: { + taskType: "$taskType", + actualDuration: "$timing.actualDuration", + }, + }, + }, + }, + ]); + + if (result.length === 0) { + return { + totalTasks: 0, + completedTasks: 0, + averageDuration: 0, + onTimeCompletion: 0, + productivityByUser: [], + taskTypeMetrics: [], + }; + } + + const data = result[0]; + + // Process user metrics + const userMap = new Map(); + data.userMetrics.forEach((metric: any) => { + if (metric.userId && metric.status === "COMPLETED") { + const existing = userMap.get(metric.userId) || { + userId: metric.userId, + tasksCompleted: 0, + totalDuration: 0, + totalUnitsPerHour: 0, + }; + existing.tasksCompleted += 1; + existing.totalDuration += metric.actualDuration || 0; + existing.totalUnitsPerHour += metric.unitsPerHour || 0; + userMap.set(metric.userId, existing); + } + }); + + const productivityByUser = Array.from(userMap.values()).map((user: any) => ({ + userId: user.userId, + tasksCompleted: user.tasksCompleted, + averageDuration: user.totalDuration / user.tasksCompleted, + unitsPerHour: user.totalUnitsPerHour / user.tasksCompleted, + })); + + // Process task type metrics + const taskTypeMap = new Map(); + data.taskTypeMetrics.forEach((metric: any) => { + const existing = taskTypeMap.get(metric.taskType) || { + taskType: metric.taskType, + count: 0, + totalDuration: 0, + }; + existing.count += 1; + existing.totalDuration += metric.actualDuration || 0; + taskTypeMap.set(metric.taskType, existing); + }); + + const taskTypeMetrics = Array.from(taskTypeMap.values()).map((taskType: any) => ({ + taskType: taskType.taskType, + count: taskType.count, + averageDuration: taskType.totalDuration / taskType.count, + })); + + return { + totalTasks: data.totalTasks, + completedTasks: data.completedTasks, + averageDuration: data.averageDuration || 0, + onTimeCompletion: data.onTimeCompletions, + productivityByUser, + taskTypeMetrics, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get task performance metrics: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getActiveTasks = async (worldId: string, zoneId?: string): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + taskStatus: { $in: ["CREATED", "RELEASED", "ASSIGNED", "IN_PROGRESS"] }, + }; + + if (zoneId) { + query.zoneId = zoneId; + } + + const transactions = await Task.find(query).sort({ priority: -1, "timing.createdAt": 1 }); + const jsonified = transactions.map((transaction) => transaction.toJSON()); + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to get active tasks: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addTaskScan = async ( + worldId: string, + taskId: string, + scan: { + scanType: string; + scannedValue: string; + expectedValue: string; + scanResult: string; + }, +): Promise => { + try { + const transaction = await Task.findOneAndUpdate( + { "worldRef.worldId": worldId, taskId }, + { + $push: { + scans: { + ...scan, + scannedAt: new Date(), + }, + }, + },
+ { new: true }, + ); + + const jsonified = transaction?.toJSON() || null; + return jsonified; + } catch (error) { + throw new RepositoryError( + `Failed to add task scan: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const assignTask = async ( + worldId: string, + taskId: string, + assignment: { userId: string; userName: string }, +): Promise => { + try { + const task = await Task.findOneAndUpdate( + { "worldRef.worldId": worldId, taskId }, + { + $set: { + assignment: { + userId: assignment.userId, + userName: assignment.userName, + assignedAt: new Date(), + }, + taskStatus: "ASSIGNED", + "timing.assignedAt": new Date(), + }, + }, + { new: true }, + ); + return task?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to assign task: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateTaskPriority = async ( + worldId: string, + taskId: string, + priority: number, +): Promise => { + try { + const task = await Task.findOneAndUpdate( + { "worldRef.worldId": worldId, taskId }, + { $set: { priority } }, + { new: true }, + ); + return task?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update task priority: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +/** + * Patch task with partial updates + * Allowed fields: taskStatus, assignment, priority + */ +const patchTask = async ( + worldId: string, + taskId: string, + updates: Record, +): Promise => { + try { + const allowedFields = ["taskStatus", "assignment", "priority"]; + const $set: Record = { updatedAt: new Date() }; + + for (const [key, value] of Object.entries(updates)) { + if (value === undefined) continue; + + if (allowedFields.includes(key)) { + // Handle nested objects like assignment + if (typeof value === "object" && value !== null && !Array.isArray(value)) { + for (const [nestedKey, nestedValue] of Object.entries(value)) { + if (nestedValue !== undefined) { + $set[`${key}.${nestedKey}`] = nestedValue; + } + } + } else { + $set[key] = value; + } + } + } + + // Auto-set timestamps based on status + if (updates.taskStatus) { + const statusTimestamps: Record = { + RELEASED: "timing.releasedAt", + ASSIGNED: "timing.assignedAt", + IN_PROGRESS: "timing.startedAt", + COMPLETED: "timing.completedAt", + CANCELLED: "timing.cancelledAt", + }; + const timestampField = statusTimestamps[updates.taskStatus]; + if (timestampField) { + $set[timestampField] = new Date(); + } + } + + // Auto-set assignedAt when assignment is provided + if (updates.assignment && !$set["assignment.assignedAt"]) { + $set["assignment.assignedAt"] = new Date(); + } + + const task = await Task.findOneAndUpdate( + { "worldRef.worldId": worldId, taskId }, + { $set }, + { new: true }, + ); + return task?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to patch task: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSTaskRepository = (worldId: string) => ({ + createTask: (data: TTaskInput) => createTask({ worldId } as TWorldRefModel, data), + getTaskLogs: (filters?: { + taskIds?: string[]; + taskTypes?: string[]; + userIds?: string[]; + status?: string[]; + dateStart?: Date; + dateEnd?: Date; + zoneId?: string; + }) => getTaskLogs(worldId, filters), + getTasksByUser: (userId: string, status?: string[]) => getTasksByUser(worldId, userId, status), + getTaskTimestamps: (filters?: { + taskId?: string; + taskTypes?: string[]; + userIds?: string[]; + dateStart?: Date; + dateEnd?: Date; + includeHistorical?: 
boolean; + }) => getTaskTimestamps(worldId, filters), + updateTaskStatus: ( + args: string | { taskId: string; status: string; timestamp?: Date; userId?: string }, + status?: string, + timestamp?: Date, + userId?: string, + ) => + typeof args === "string" + ? updateTaskStatus(worldId, args, status!, timestamp, userId) + : updateTaskStatus(worldId, args.taskId, args.status, args.timestamp, args.userId), + getTaskPerformanceMetrics: (filters?: { + taskTypes?: string[]; + userIds?: string[]; + dateStart?: Date; + dateEnd?: Date; + }) => getTaskPerformanceMetrics(worldId, filters), + getActiveTasks: (args?: string | { zoneId?: string }) => + getActiveTasks(worldId, typeof args === "string" ? args : args?.zoneId), + addTaskScan: (args: { + taskId: string; + scan: { + scanType: string; + scannedValue: string; + expectedValue: string; + scanResult: string; + }; + }) => addTaskScan(worldId, args.taskId, args.scan), + assignTask: (args: { taskId: string; assignment: { userId: string; userName: string } }) => + assignTask(worldId, args.taskId, args.assignment), + updateTaskPriority: (args: { taskId: string; priority: number }) => + updateTaskPriority(worldId, args.taskId, args.priority), + patch: (taskId: string, updates: Record) => + patchTask(worldId, taskId, updates), +}); + +export type TWMSTaskRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/warehouse.wms.repository.ts b/packages/controlmart/src/repository/wms/warehouse.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..83feedaed7ceecb79cf61f01313203bd69e8307b --- /dev/null +++ b/packages/controlmart/src/repository/wms/warehouse.wms.repository.ts @@ -0,0 +1,389 @@ +import { + Warehouse, + type TWarehouseModel, + type TWarehouseInput, +} from "../../models/wms/warehouses.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createWarehouse = async ( + world: TWorldRefModel, + data: TWarehouseInput, +): Promise => { + try { + if (!data.warehouseName || !data.address || !data.timezone) { + throw new RepositoryError( + "Warehouse name, address, and timezone are required", + "VALIDATION_ERROR", + ); + } + + // Check for existing warehouse by name in world (since warehouseCode is auto-generated) + const existing = await Warehouse.findOne({ + "worldRef.worldId": world.worldId, + warehouseName: data.warehouseName, + }); + + if (existing) { + throw new RepositoryError( + `Warehouse with name ${data.warehouseName} already exists`, + "DUPLICATE_ERROR", + ); + } + + const warehouse = await Warehouse.create({ + ...data, + worldRef: world, + }); + + return warehouse.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to create warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllWarehouses = async ( + worldId: string, + filters?: { + warehouseType?: string[]; + status?: string[]; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseType?.length) { + query.warehouseType = { $in: filters.warehouseType }; + } + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } 
+ + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const warehouses = await Warehouse.find(query) + .sort({ warehouseCode: 1 }) + .limit(limit + 1); + + const hasMore = warehouses.length > limit; + const results = hasMore ? warehouses.slice(0, limit) : warehouses; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((warehouse) => warehouse.toJSON()), + totalCount: await Warehouse.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get warehouses: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getWarehouseByCode = async ( + worldId: string, + warehouseCode: string, +): Promise => { + try { + const warehouse = await Warehouse.findOne({ + "worldRef.worldId": worldId, + warehouseCode, + }); + + return warehouse?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get warehouse by code: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getWarehouseById = async ( + worldId: string, + warehouseId: string, +): Promise => { + try { + const warehouse = await Warehouse.findOne({ + "worldRef.worldId": worldId, + warehouseId, + }); + + return warehouse?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get warehouse by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateWarehouse = async ( + worldId: string, + warehouseId: string, + data: Partial, +): Promise => { + try { + const warehouse = await Warehouse.findOneAndUpdate( + { "worldRef.worldId": worldId, warehouseId }, + { $set: data }, + { new: true }, + ); + + return warehouse?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getWarehousesByType = async ( + worldId: string, + warehouseType: string, + filters?: { + status?: string[]; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseType, + }; + + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const warehouses = await Warehouse.find(query) + .sort({ warehouseCode: 1 }) + .limit(limit + 1); + + const hasMore = warehouses.length > limit; + const results = hasMore ? warehouses.slice(0, limit) : warehouses; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((warehouse) => warehouse.toJSON()), + totalCount: await Warehouse.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get warehouses by type: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getActiveWarehouses = async ( + worldId: string, + filters?: { + warehouseType?: string[]; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + status: "ACTIVE", + }; + + if (filters?.warehouseType?.length) { + query.warehouseType = { $in: filters.warehouseType }; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? 
GLOBAL_PAGE_LIMIT; + const warehouses = await Warehouse.find(query) + .sort({ warehouseCode: 1 }) + .limit(limit + 1); + + const hasMore = warehouses.length > limit; + const results = hasMore ? warehouses.slice(0, limit) : warehouses; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((warehouse) => warehouse.toJSON()), + totalCount: await Warehouse.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get active warehouses: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateWarehouseStatus = async ( + worldId: string, + warehouseId: string, + status: "ACTIVE" | "DISABLED" | "ARCHIVED", +): Promise => { + try { + const warehouse = await Warehouse.findOneAndUpdate( + { "worldRef.worldId": worldId, warehouseId }, + { $set: { status } }, + { new: true }, + ); + + return warehouse?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to update warehouse status: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const searchWarehouses = async ( + worldId: string, + searchTerm: string, + filters?: { + warehouseType?: string[]; + status?: string[]; + limit?: number; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + $or: [ + { warehouseName: { $regex: searchTerm, $options: "i" } }, + { warehouseCode: { $regex: searchTerm, $options: "i" } }, + { "address.city": { $regex: searchTerm, $options: "i" } }, + { "address.state": { $regex: searchTerm, $options: "i" } }, + { "address.country": { $regex: searchTerm, $options: "i" } }, + ], + }; + + if (filters?.warehouseType?.length) { + query.warehouseType = { $in: filters.warehouseType }; + } + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const warehouses = await Warehouse.find(query).sort({ warehouseName: 1 }).limit(limit); + + return warehouses.map((warehouse) => warehouse.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to search warehouses: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getWarehousesByTimezone = async ( + worldId: string, + timezone: string, + filters?: { + warehouseType?: string[]; + status?: string[]; + limit?: number; + }, +): Promise => { + try { + const query: Record = { + "worldRef.worldId": worldId, + timezone, + }; + + if (filters?.warehouseType?.length) { + query.warehouseType = { $in: filters.warehouseType }; + } + if (filters?.status?.length) { + query.status = { $in: filters.status }; + } + + const limit = filters?.limit ?? 
GLOBAL_PAGE_LIMIT; + const warehouses = await Warehouse.find(query).sort({ warehouseName: 1 }).limit(limit); + + return warehouses.map((warehouse) => warehouse.toJSON()); + } catch (error) { + throw new RepositoryError( + `Failed to get warehouses by timezone: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSWarehouseRepository = (worldId: string) => ({ + createWarehouse: (data: TWarehouseInput) => createWarehouse({ worldId } as TWorldRefModel, data), + getAllWarehouses: (filters?: { + warehouseType?: string[]; + status?: string[]; + cursor?: string; + limit?: number; + }) => getAllWarehouses(worldId, filters), + getWarehouseByCode: (warehouseCode: string) => getWarehouseByCode(worldId, warehouseCode), + getWarehouseById: (warehouseId: string) => getWarehouseById(worldId, warehouseId), + updateWarehouse: (warehouseId: string, data: Partial) => + updateWarehouse(worldId, warehouseId, data), + getWarehousesByType: ( + warehouseType: string, + filters?: { + status?: string[]; + cursor?: string; + limit?: number; + }, + ) => getWarehousesByType(worldId, warehouseType, filters), + getActiveWarehouses: (filters?: { warehouseType?: string[]; cursor?: string; limit?: number }) => + getActiveWarehouses(worldId, filters), + updateWarehouseStatus: (warehouseId: string, status: "ACTIVE" | "DISABLED" | "ARCHIVED") => + updateWarehouseStatus(worldId, warehouseId, status), + searchWarehouses: ( + searchTerm: string, + filters?: { + warehouseType?: string[]; + status?: string[]; + limit?: number; + }, + ) => searchWarehouses(worldId, searchTerm, filters), + getWarehousesByTimezone: ( + timezone: string, + filters?: { + warehouseType?: string[]; + status?: string[]; + limit?: number; + }, + ) => getWarehousesByTimezone(worldId, timezone, filters), +}); + +export type TWMSWarehouseRepository = ReturnType; diff --git a/packages/controlmart/src/repository/wms/zone.wms.repository.ts b/packages/controlmart/src/repository/wms/zone.wms.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..f2b8a10864791ff42e04adaaa1a25a0ddaa13154 --- /dev/null +++ b/packages/controlmart/src/repository/wms/zone.wms.repository.ts @@ -0,0 +1,330 @@ +import { Zone, type TZoneModel, type TZoneInput } from "../../models/wms/zone.wms.model"; +import type { TWorldRefModel } from "../../models/shared.model"; +import { RepositoryError, getErrorMessage } from "../../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../../types/repository.type"; + +const createZone = async (world: TWorldRefModel, data: TZoneInput): Promise => { + try { + if (!data.zoneName || !data.warehouseId) { + throw new RepositoryError("Zone name and warehouse ID are required", "VALIDATION_ERROR"); + } + + // Check for existing zone by name in warehouse (since zoneCode is auto-generated) + const existing = await Zone.findOne({ + "worldRef.worldId": world.worldId, + zoneName: data.zoneName, + warehouseId: data.warehouseId, + }); + + if (existing) { + throw new RepositoryError( + `Zone with name ${data.zoneName} already exists in warehouse`, + "DUPLICATE_ERROR", + ); + } + + const zone = await Zone.create({ + ...data, + worldRef: world, + }); + + return zone.toJSON(); + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError(`Failed to create zone: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const getZonesByWarehouse = async ( + worldId: string, + warehouseId: string, + filters?: 
{ + zoneType?: string[]; + temperatureControlled?: boolean; + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + warehouseId, + }; + + if (filters?.zoneType?.length) { + query.zoneType = { $in: filters.zoneType }; + } + if (filters?.temperatureControlled !== undefined) { + query.temperatureControlled = filters.temperatureControlled; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const zones = await Zone.find(query) + .sort({ zoneCode: 1 }) + .limit(limit + 1); + + const hasMore = zones.length > limit; + const results = hasMore ? zones.slice(0, limit) : zones; + const nextCursor = + hasMore && results.length > 0 ? results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((zone) => zone.toJSON()), + totalCount: await Zone.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get zones by warehouse: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getZoneByCode = async ( + worldId: string, + warehouseId: string, + zoneCode: string, +): Promise => { + try { + const zone = await Zone.findOne({ + "worldRef.worldId": worldId, + warehouseId, + zoneCode, + }); + + return zone?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get zone by code: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getZoneById = async (worldId: string, zoneId: string): Promise => { + try { + const zone = await Zone.findOne({ + "worldRef.worldId": worldId, + zoneId, + }); + + return zone?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to get zone by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateZone = async ( + worldId: string, + zoneId: string, + data: Partial, +): Promise => { + try { + const zone = await Zone.findOneAndUpdate( + { "worldRef.worldId": worldId, zoneId }, + { $set: data }, + { new: true }, + ); + + return zone?.toJSON() || null; + } catch (error) { + throw new RepositoryError(`Failed to update zone: ${getErrorMessage(error)}`, "DATABASE_ERROR"); + } +}; + +const getZonesByType = async ( + worldId: string, + zoneType: string, + warehouseId?: string, + filters?: { + cursor?: string; + limit?: number; + }, +): Promise> => { + try { + const query: Record = { + "worldRef.worldId": worldId, + zoneType, + }; + + if (warehouseId) { + query.warehouseId = warehouseId; + } + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const zones = await Zone.find(query) + .sort({ warehouseId: 1, zoneCode: 1 }) + .limit(limit + 1); + + const hasMore = zones.length > limit; + const results = hasMore ? zones.slice(0, limit) : zones; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((zone) => zone.toJSON()), + totalCount: await Zone.countDocuments(query), + limit, + hasMore, + nextCursor, + }; + } catch (error) { + throw new RepositoryError( + `Failed to get zones by type: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const addAisleToZone = async ( + worldId: string, + zoneId: string, + aisle: { aisleId: string; aisleCode: string; aisleType: string }, +): Promise => { + try { + const zone = await Zone.findOneAndUpdate( + { "worldRef.worldId": worldId, zoneId }, + { + $push: { + aisles: aisle, + }, + }, + { new: true }, + ); + + return zone?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to add aisle to zone: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const removeAisleFromZone = async ( + worldId: string, + zoneId: string, + aisleId: string, +): Promise => { + try { + const zone = await Zone.findOneAndUpdate( + { "worldRef.worldId": worldId, zoneId }, + { + $pull: { + aisles: { aisleId }, + }, + }, + { new: true }, + ); + + return zone?.toJSON() || null; + } catch (error) { + throw new RepositoryError( + `Failed to remove aisle from zone: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getZoneCapacityUtilization = async ( + worldId: string, + filters?: { + warehouseId?: string; + zoneIds?: string[]; + zoneType?: string[]; + }, +): Promise< + Array<{ + zoneId: string; + zoneCode: string; + zoneName: string; + zoneType: string; + totalCapacity: number; + usedCapacity: number; + utilizationPercentage: number; + binCount: number; + availableBinCount: number; + }> +> => { + try { + const matchStage: Record = { + "worldRef.worldId": worldId, + }; + + if (filters?.warehouseId) { + matchStage.warehouseId = filters.warehouseId; + } + if (filters?.zoneIds?.length) { + matchStage.zoneId = { $in: filters.zoneIds }; + } + if (filters?.zoneType?.length) { + matchStage.zoneType = { $in: filters.zoneType }; + } + + // This would require joining with bins and inventory collections + // For now, returning zone capacity info + const zones = await Zone.find(matchStage).select( + "zoneId zoneCode zoneName zoneType capacityCubicFeet", + ); + + return zones.map((zone) => ({ + zoneId: zone.zoneId, + zoneCode: zone.zoneCode, + zoneName: zone.zoneName, + zoneType: zone.zoneType || "STORAGE", + totalCapacity: zone.capacityCubicFeet || 0, + usedCapacity: 0, // Would need to calculate from inventory + utilizationPercentage: 0, + binCount: 0, // Would need to count from bins + availableBinCount: 0, + })); + } catch (error) { + throw new RepositoryError( + `Failed to get zone capacity utilization: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WMSZoneRepository = (worldId: string) => ({ + createZone: (data: TZoneInput) => createZone({ worldId } as TWorldRefModel, data), + getZonesByWarehouse: ( + warehouseId: string, + filters?: { + zoneType?: string[]; + temperatureControlled?: boolean; + }, + ) => getZonesByWarehouse(worldId, warehouseId, filters), + getZoneByCode: (warehouseId: string, zoneCode: string) => + getZoneByCode(worldId, warehouseId, zoneCode), + getZoneById: (zoneId: string) => getZoneById(worldId, zoneId), + updateZone: (zoneId: string, data: Partial) => updateZone(worldId, zoneId, data), + getZonesByType: (zoneType: string, warehouseId?: string) => + getZonesByType(worldId, zoneType, warehouseId), + addAisleToZone: ( + zoneId: string, + aisle: { aisleId: string; aisleCode: 
string; aisleType: string }, + ) => addAisleToZone(worldId, zoneId, aisle), + removeAisleFromZone: (zoneId: string, aisleId: string) => + removeAisleFromZone(worldId, zoneId, aisleId), + getZoneCapacityUtilization: (filters?: { + warehouseId?: string; + zoneIds?: string[]; + zoneType?: string[]; + }) => getZoneCapacityUtilization(worldId, filters), +}); + +export type TWMSZoneRepository = ReturnType; diff --git a/packages/controlmart/src/repository/world.repository.ts b/packages/controlmart/src/repository/world.repository.ts new file mode 100644 index 0000000000000000000000000000000000000000..d6f29265f0a0e52051132808e545a0efd21fe874 --- /dev/null +++ b/packages/controlmart/src/repository/world.repository.ts @@ -0,0 +1,264 @@ +import slugify from "slugify"; +import { capabilitySamplingService } from "../services/capability-sampling.service"; +import { + applyOffsetPagination, + buildOffsetMeta, + DEFAULT_LIMITS, + type OffsetPaginationOptions, + type OffsetPaginationMeta +} from "../utils/pagination.util"; + +import { World, type TWorldModel, type TWorldInput } from "../models/world.model"; +import { RepositoryError, getErrorMessage } from "../utils/error.util"; +import { GLOBAL_PAGE_LIMIT } from "../utils/http.util"; +import type { TRepositoryPaginatedResult } from "../types/repository.type"; + +export const createWorld = async (data: TWorldInput): Promise => { + if (!data.name) { + throw new RepositoryError("Missing required field: name", "VALIDATION_ERROR"); + } + + const existing = await World.findOne({ name: data.name }); + + if (existing) { + throw new RepositoryError(`World "${data.name}" already exists`, "DUPLICATE_ERROR"); + } + if (data.is_default === true) { + await World.updateMany({}, { $set: { is_default: false } }); + } + // Apply capability sampling if strategy is provided + if (data.samplingStrategy) { + const capabilityIds = capabilitySamplingService.applySamplingStrategy( + data.samplingStrategy, + data.personas + ); + data.capabilityIds = capabilityIds; + } + + const finalData = { + ...data, + url: data.url ? 
data.url : slugify(data.name, { lower: true, strict: true, trim: true }), + }; + + try { + const transaction = await World.create(finalData); + const jsonified = transaction.toJSON(); + return jsonified as TWorldModel; + } catch (err) { + if (err instanceof RepositoryError) throw err; + throw new RepositoryError(`Failed to create world: ${getErrorMessage(err)}`, "DATABASE_ERROR"); + } +}; + +const findWorldById = async (id: string): Promise => { + try { + if (!id) throw new RepositoryError("World ID required", "VALIDATION_ERROR"); + console.log("Fetching world by ID:", id); + const transaction = await World.findById(id); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to fetch world by ID: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const findWorldByName = async (name: string): Promise => { + try { + if (!name) throw new RepositoryError("World name required", "VALIDATION_ERROR"); + const transaction = await World.findOne({ name }); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to fetch world by name: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getAllWorlds = async ( + filters?: { + is_default?: boolean; + mpcCompany?: string; + search?: string; + cursor?: string; + limit?: number; + }, + pagination?: OffsetPaginationOptions +): Promise | { data: TWorldModel[]; pagination?: OffsetPaginationMeta }> => { + try { + const query: Record = {}; + if (typeof filters?.is_default === "boolean") query.is_default = filters.is_default; + if (filters?.mpcCompany) query.mpcCompany = filters.mpcCompany; + if (filters?.search) query.name = new RegExp(filters.search, "i"); + + // If offset pagination is provided, use it (Phase 5 style) + if (pagination) { + const page = pagination.page || 1; + const limit = pagination.limit || DEFAULT_LIMITS.worlds; + const { skip, limit: finalLimit } = applyOffsetPagination(page, limit); + + const total = await World.countDocuments(query); + const data = await World.find(query) + .sort({ createdAt: -1 }) + .skip(skip) + .limit(finalLimit); + + const paginationMeta = buildOffsetMeta(total, page, limit); + return { data: data.map((w) => w.toJSON() as TWorldModel), pagination: paginationMeta }; + } + + // If cursor pagination is provided (research branch style) + if (filters?.cursor) { + query._id = { $gt: filters.cursor }; + } + const limit = filters?.limit ?? GLOBAL_PAGE_LIMIT; + const worlds = await World.find(query) + .sort({ createdAt: -1 }) + .limit(limit + 1); + + const hasMore = worlds.length > limit; + const results = hasMore ? worlds.slice(0, limit) : worlds; + const nextCursor = + hasMore && results.length > 0 ? 
results[results.length - 1]?._id?.toString() : undefined; + + return { + items: results.map((w) => w.toJSON() as TWorldModel), + nextCursor, + totalCount: await World.countDocuments(query), + hasMore: hasMore, + limit: limit, + }; + } catch (error) { + throw new RepositoryError( + `Failed to retrieve worlds: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const updateWorld = async ( + id: string, + updateData: Partial, +): Promise => { + try { + if (updateData.is_default) { + await World.updateMany({}, { $set: { is_default: false } }); + } + + // Separate fields to set vs unset (for proper $unset of undefined fields) + const setFields: any = { updatedAt: new Date() }; + const unsetFields: any = {}; + + for (const [key, value] of Object.entries(updateData)) { + if (value === undefined) { + unsetFields[key] = ''; // MongoDB $unset syntax + } else { + setFields[key] = value; + } + } + + const updateOps: any = { $set: setFields }; + if (Object.keys(unsetFields).length > 0) { + updateOps.$unset = unsetFields; + } + + const transaction = await World.findByIdAndUpdate( + id, + updateOps, + { new: true } + ); + const jsonified = transaction?.toJSON() || null; + + if (!jsonified) { + throw new RepositoryError("World not found", "NOT_FOUND_ERROR"); + } + + return jsonified as TWorldModel; + } catch (error) { + if (error instanceof RepositoryError) throw error; + throw new RepositoryError( + `Failed to update world: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const deleteWorld = async (id: string): Promise => { + try { + const res = await World.deleteOne({ _id: id }); + return res.deletedCount > 0; + } catch (error) { + throw new RepositoryError( + `Failed to delete world: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const getDefaultWorld = async (): Promise => { + try { + const transaction = await World.findOne({ is_default: true }); + const jsonified = transaction?.toJSON() || null; + return jsonified as TWorldModel | null; + } catch (error) { + throw new RepositoryError( + `Failed to fetch default world: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const setDefaultWorld = async (id: string): Promise => { + try { + await World.updateMany({}, { $set: { is_default: false } }); + const updated = await World.findByIdAndUpdate( + id, + { $set: { is_default: true } }, + { new: true }, + ); + + if (!updated) { + throw new RepositoryError("World not found", "NOT_FOUND_ERROR"); + } + + return updated; + } catch (error) { + throw new RepositoryError( + `Failed to set default world: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +const verifyWorldCredentials = async ( + apiKey: string, + apiSecret: string, +): Promise => { + try { + const world = await World.findOne({ apiKey, apiSecret }); + return world; + } catch (error) { + throw new RepositoryError( + `Failed to verify world credentials: ${getErrorMessage(error)}`, + "DATABASE_ERROR", + ); + } +}; + +export const WorldRepository = { + createWorld, + findWorldById, + findWorldByName, + getAllWorlds, + updateWorld, + deleteWorld, + getDefaultWorld, + setDefaultWorld, + verifyWorldCredentials, +}; + +export type TWorldRepository = typeof WorldRepository; diff --git a/packages/controlmart/src/routes/auditlogs.route.ts b/packages/controlmart/src/routes/auditlogs.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..7afb0fd7bdc926497f5d4b893a38935c4694517a --- /dev/null +++ b/packages/controlmart/src/routes/auditlogs.route.ts @@ -0,0 +1,9 @@ +import 
{ Router } from "express"; + +import { getAuditLogsController } from "../controller/auditlog.controller"; + +const auditLogsRouter = Router({ mergeParams: true }); + +auditLogsRouter.get("/", getAuditLogsController); + +export default auditLogsRouter; diff --git a/packages/controlmart/src/routes/business-rules.route.ts b/packages/controlmart/src/routes/business-rules.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..1aa84abe6b69a13db91fe2826d677587917bce89 --- /dev/null +++ b/packages/controlmart/src/routes/business-rules.route.ts @@ -0,0 +1,25 @@ +import { Router } from "express"; + +import { BusinessRulesController } from "../controller/business-rules.controller"; + +const businessRulesRouter = Router({ mergeParams: true }); + +// List and create (non-parameterized) +businessRulesRouter.post("/", BusinessRulesController.createRuleController); +businessRulesRouter.get("/", BusinessRulesController.listRulesController); + +// Testing and bulk operations (specific routes must come before /:ruleId) +businessRulesRouter.post("/test", BusinessRulesController.testRuleController); +businessRulesRouter.post("/bulk", BusinessRulesController.bulkCreateRulesController); +businessRulesRouter.delete("/bulk", BusinessRulesController.bulkDeleteRulesController); + +// Single rule operations (parameterized routes come last) +businessRulesRouter.get("/:ruleId", BusinessRulesController.getRuleController); +businessRulesRouter.put("/:ruleId", BusinessRulesController.updateRuleController); +businessRulesRouter.delete("/:ruleId", BusinessRulesController.deleteRuleController); + +// Enable/Disable +businessRulesRouter.post("/:ruleId/enable", BusinessRulesController.enableRuleController); +businessRulesRouter.post("/:ruleId/disable", BusinessRulesController.disableRuleController); + +export default businessRulesRouter; diff --git a/packages/controlmart/src/routes/capabilities.route.ts b/packages/controlmart/src/routes/capabilities.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..2b94239732427e1d7503865f07e079283965fde9 --- /dev/null +++ b/packages/controlmart/src/routes/capabilities.route.ts @@ -0,0 +1,268 @@ +/** + * Capabilities API Routes + * + * REST endpoints for capability management and execution. 
+ */ + +import { Router, type Request, type Response } from 'express'; +import { capabilityCatalog } from '../services/capability-catalog.service'; +import { capabilityExecutor } from '../services/capability-executor.service'; +import type { CapabilityFilter } from '../types/capability.type'; +import { parseOffsetParams, applyOffsetPagination, buildOffsetMeta } from '../utils/pagination.util'; +import { + validateCapabilityController, + getCapabilityDependenciesController, + getRelatedCapabilitiesController, +} from '../controller/capability-graph.controller'; +import { + createCapabilityController, + updateCapabilityController, + deleteCapabilityController, +} from '../controller/capabilities.controller'; + +const capabilitiesRouter = Router({ mergeParams: true }); + +/** + * GET /api/capabilities + * List all capabilities with optional filtering, search, and pagination + * + * Query Parameters: + * - q: string - Full-text search query across name, description, and tags + * - domain: string[] - Filter by domain(s) + * - complexity: string - Filter by complexity level + * - services: string[] - Filter by service(s) + * - personas: string[] - Filter by persona(s) + * - patterns: string[] - Filter by pattern(s) + * - page: number - Page number for pagination (default: 1) + * - limit: number - Items per page (default: 50, max: 200) + */ +capabilitiesRouter.get('/', (req: Request, res: Response) => { + try { + const { q, domain, complexity, services, personas, patterns, persona } = req.query; + + // Parse pagination parameters + const pagination = parseOffsetParams(req.query); + + // Special case: Filter by single persona using top-level personas field + if (persona && typeof persona === 'string') { + const allCapabilities = capabilityCatalog.filterByPersona(persona); + + // Apply pagination to filtered results + const total = allCapabilities.length; + const { skip, limit } = applyOffsetPagination(pagination.page, pagination.limit); + const data = allCapabilities.slice(skip, skip + limit); + const paginationMeta = buildOffsetMeta(total, pagination.page || 1, pagination.limit || 50); + + return res.json({ + success: true, + count: data.length, + total, + data, + pagination: paginationMeta, + }); + } + + // Build filters from query parameters + const filters: CapabilityFilter & { q?: string } = {}; + + // Add search query if provided + if (q && typeof q === 'string') { + filters.q = q; + } + + if (domain) { + filters.domain = Array.isArray(domain) ? domain as string[] : [domain as string]; + } + + if (complexity) { + if (complexity === 'simple' || complexity === 'medium' || complexity === 'complex') { + filters.complexity = complexity; + } else { + return res.status(400).json({ + error: 'Invalid complexity value. Must be: simple, medium, or complex', + }); + } + } + + if (services) { + filters.services = Array.isArray(services) ? services as string[] : [services as string]; + } + + if (personas) { + filters.personas = Array.isArray(personas) ? personas as string[] : [personas as string]; + } + + if (patterns) { + filters.patterns = Array.isArray(patterns) ? patterns as string[] : [patterns as string]; + } + + // Get capabilities using enhanced filtering (supports search + filters) + const allCapabilities = + Object.keys(filters).length > 0 + ? 
capabilityCatalog.filterEnhanced(filters) + : capabilityCatalog.getAll(); + + // Apply pagination to filtered results + const total = allCapabilities.length; + const { skip, limit } = applyOffsetPagination(pagination.page, pagination.limit); + const data = allCapabilities.slice(skip, skip + limit); + const paginationMeta = buildOffsetMeta(total, pagination.page || 1, pagination.limit || 50); + + res.json({ + success: true, + count: data.length, + total, + data, + pagination: paginationMeta, + }); + } catch (error) { + res.status(500).json({ + success: false, + error: error instanceof Error ? error.message : 'Unknown error occurred', + }); + } +}); + +/** + * POST /api/capabilities + * Create a new capability + * + * Body: TCapabilityInput (validated with Zod) + */ +capabilitiesRouter.post('/', createCapabilityController); + +/** + * GET /api/capabilities/:id + * Get details of a single capability + */ +capabilitiesRouter.get('/:id', (req: Request, res: Response) => { + try { + const id = req.params.id; + + if (!id) { + return res.status(400).json({ + success: false, + error: 'Capability ID is required', + }); + } + + const capability = capabilityCatalog.getById(id); + + if (!capability) { + return res.status(404).json({ + success: false, + error: `Capability not found: ${id}`, + }); + } + + res.json({ + success: true, + data: capability, + }); + } catch (error) { + res.status(500).json({ + success: false, + error: error instanceof Error ? error.message : 'Unknown error occurred', + }); + } +}); + +/** + * PUT /api/capabilities/:id + * Update an existing capability + * + * Body: Partial (validated with Zod) + */ +capabilitiesRouter.put('/:id', updateCapabilityController); + +/** + * DELETE /api/capabilities/:id + * Delete a capability + */ +capabilitiesRouter.delete('/:id', deleteCapabilityController); + +/** + * POST /api/capabilities/:id/execute + * Execute a capability + * + * Body: + * - worldId: string (required) - World context for execution + * - inputs: object (optional) - Input parameters for the capability + */ +capabilitiesRouter.post('/:id/execute', async (req: Request, res: Response) => { + try { + const id = req.params.id; + const { worldId, inputs } = req.body; + + // Validate ID parameter + if (!id) { + return res.status(400).json({ + success: false, + error: 'Capability ID is required', + }); + } + + // Validate required fields + if (!worldId) { + return res.status(400).json({ + success: false, + error: 'worldId is required in request body', + }); + } + + // Check if capability exists + const capability = capabilityCatalog.getById(id); + if (!capability) { + return res.status(404).json({ + success: false, + error: `Capability not found: ${id}`, + }); + } + + // Execute capability + const result = await capabilityExecutor.execute({ + capabilityId: id, + worldId, + inputs, + }); + + // Return result based on status + if (result.status === 'failed') { + return res.status(500).json({ + success: false, + error: result.error, + data: result, + }); + } + + res.json({ + success: true, + data: result, + }); + } catch (error) { + res.status(500).json({ + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }); + } +}); + +/** + * GET /api/capabilities/:id/validate + * Validate if a capability's OD is executable + */ +capabilitiesRouter.get('/:id/validate', validateCapabilityController); + +/** + * GET /api/capabilities/:id/dependencies + * Get comprehensive dependency information for a capability + */ +capabilitiesRouter.get('/:id/dependencies', getCapabilityDependenciesController); + +/** + * GET /api/capabilities/:id/related + * Find capabilities related to the given capability + */ +capabilitiesRouter.get('/:id/related', getRelatedCapabilitiesController); + +export default capabilitiesRouter; diff --git a/packages/controlmart/src/routes/chaos.route.ts b/packages/controlmart/src/routes/chaos.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..15d7919f2e1d982e63ba3e83aeb7ce403cf4c8c7 --- /dev/null +++ b/packages/controlmart/src/routes/chaos.route.ts @@ -0,0 +1,123 @@ +/** + * Chaos Engineering API Routes + * + * REST endpoints for managing chaos presets and checking chaos status. + */ + +import { Router, type Request, type Response } from 'express'; +import { ChaosConfigRegistry } from '../services/chaos-config.registry'; + +const chaosRouter = Router({ mergeParams: true }); + +/** + * GET /api/chaos/presets + * List all available chaos presets with metadata + * + * Response: + * { + * success: true, + * count: number, + * data: PresetMetadata[] + * } + */ +chaosRouter.get('/presets', (req: Request, res: Response) => { + try { + const presets = ChaosConfigRegistry.listPresets(); + + res.json({ + success: true, + count: presets.length, + data: presets, + }); + } catch (error) { + res.status(500).json({ + success: false, + error: error instanceof Error ? error.message : 'Unknown error occurred', + }); + } +}); + +/** + * GET /api/chaos/presets/:id + * Get detailed information about a specific chaos preset + * + * Response: + * { + * success: true, + * data: ChaosPolicy + * } + */ +chaosRouter.get('/presets/:id', (req: Request, res: Response) => { + try { + const { id } = req.params; + + if (!id) { + return res.status(400).json({ + success: false, + error: 'Preset ID is required', + }); + } + + const preset = ChaosConfigRegistry.loadPreset(id); + + if (!preset) { + return res.status(404).json({ + success: false, + error: `Chaos preset not found: ${id}`, + }); + } + + res.json({ + success: true, + data: preset, + }); + } catch (error) { + res.status(500).json({ + success: false, + error: error instanceof Error ? error.message : 'Unknown error occurred', + }); + } +}); + +/** + * GET /api/chaos/status + * Get current chaos system status and configuration + * + * Response: + * { + * success: true, + * data: { + * enabled: boolean, + * activePreset: string | null, + * stats: { + * presetCount: number, + * worldPolicyCount: number, + * capabilityOverrideCount: number, + * odOverrideCount: number + * } + * } + * } + */ +chaosRouter.get('/status', (req: Request, res: Response) => { + try { + const enabled = ChaosConfigRegistry.isChaosEnabled(); + const activePreset = process.env.CHAOS_PRESET || null; + const stats = ChaosConfigRegistry.getStats(); + + res.json({ + success: true, + data: { + enabled, + activePreset, + stats, + }, + }); + } catch (error) { + res.status(500).json({ + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }); + } +}); + +export default chaosRouter; diff --git a/packages/controlmart/src/routes/docs.route.ts b/packages/controlmart/src/routes/docs.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..a01b63230df77524f9c923d262f82aebfaa5b978 --- /dev/null +++ b/packages/controlmart/src/routes/docs.route.ts @@ -0,0 +1,11 @@ + +import { Router } from "express"; +import { getApiReference, getServiceMeshDocs, getSwaggerSpec } from "../controller/docs.controller"; + +const docsRouter = Router({ mergeParams: true }); + +docsRouter.get("/swagger.json", getSwaggerSpec); +docsRouter.get("/mesh", getServiceMeshDocs); +docsRouter.get("/", getApiReference); // Make sure this is last if it catches all, though it's usually specific + +export default docsRouter; diff --git a/packages/controlmart/src/routes/edi.route.ts b/packages/controlmart/src/routes/edi.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..fa54a97c546171d426779d8c219851f737bede42 --- /dev/null +++ b/packages/controlmart/src/routes/edi.route.ts @@ -0,0 +1,33 @@ +import { Router } from "express"; + +import { + getAllEdiTransactionsController, + updateEdiStatusController, + updateEdiTransactionController, + createEdiTransactionController, + deleteEdiTransactionController, + getEdiTransactionByIdController, + requeueEdiTransactionController, + invoiceStatisticsController, + ediErrorStatsAggregationController, + getAllEdiTransactionsControllerByPagesDeprecated, + getEdiDollarAmountStatisticsController, + getEdiDashboardController, +} from "../controller/edi.controller"; + +const ediRouter = Router({ mergeParams: true }); + +ediRouter.get("/dashboard", getEdiDashboardController); +ediRouter.post("/", createEdiTransactionController); +ediRouter.get("/", getAllEdiTransactionsController); +ediRouter.get("/deprecated", getAllEdiTransactionsControllerByPagesDeprecated); +ediRouter.get("/:transactionId", getEdiTransactionByIdController); +ediRouter.patch("/:transactionId", updateEdiTransactionController); +ediRouter.patch("/:transactionId/status", updateEdiStatusController); +ediRouter.post("/:transactionId/requeue", requeueEdiTransactionController); +ediRouter.delete("/:transactionId", deleteEdiTransactionController); +ediRouter.get("/statistics/invoice", invoiceStatisticsController); +ediRouter.get("/statistics/errors", ediErrorStatsAggregationController); +ediRouter.get("/statistics/amount", getEdiDollarAmountStatisticsController); + +export default ediRouter; diff --git a/packages/controlmart/src/routes/erp.routes.ts b/packages/controlmart/src/routes/erp.routes.ts new file mode 100644 index 0000000000000000000000000000000000000000..06312e9d6a7a72264c7dec9ea0f7208956b5e34c --- /dev/null +++ b/packages/controlmart/src/routes/erp.routes.ts @@ -0,0 +1,108 @@ +import { Router } from "express"; + +import { + createERPCompanyController, + getERPCompanyByIdController, + getAllERPCompaniesController, + updateERPCompanyController, + deleteERPCompanyController, + getMpcERPCompanyController, + getRandomERPCompanyController, + bulkUpsertERPCompaniesController, + createERPProductController, + getAllERPProductsController, + updateERPProductController, + deleteERPProductController, + bulkUpsertERPProductsController, + getRandomERPProductController, + createERPOrderController, + getAllERPOrdersController, + updateERPOrderController, + updateERPOrderStatusController, + deleteERPOrderController, + createERPInvoiceController, + getAllERPInvoicesController, + 
updateERPInvoiceController, + updateERPInvoiceStatusController, + deleteERPInvoiceController, + createERPShipmentController, + getERPShipmentByIdController, + getAllERPShipmentsController, + updateERPShipmentController, + updateERPShipmentStatusController, + updateERPShipmentTrackingController, + addERPShipmentEventController, + addERPShipmentDocumentController, + updateERPShipmentLinesController, + bulkUpsertERPShipmentsController, + deleteERPShipmentController, + createERPPaymentController, + getERPPaymentByIdController, + getAllERPPaymentsController, + updateERPPaymentController, + updateERPPaymentStatusController, + applyERPPaymentAllocationsController, + deleteERPPaymentController, + getERPCompanyByDunsNumberController, + getERPProductByIdController, + getERPOrderByIdController, + getERPInvoiceByIdController, + getERPOperationsDashboardController, +} from "../controller/erp"; + +export const erpRouter = Router({ mergeParams: true }); + +// Operations Dashboard +erpRouter.get("/operations-dashboard", getERPOperationsDashboardController); + +erpRouter.post("/companies", createERPCompanyController); +erpRouter.get("/companies", getAllERPCompaniesController); +erpRouter.get("/companies/mpc", getMpcERPCompanyController); +erpRouter.get("/companies/random", getRandomERPCompanyController); +erpRouter.post("/companies/bulk", bulkUpsertERPCompaniesController); +erpRouter.get("/companies/:companyId", getERPCompanyByIdController); +erpRouter.put("/companies/:companyId", updateERPCompanyController); +erpRouter.delete("/companies/:companyId", deleteERPCompanyController); +erpRouter.get("/companies/duns/:dunsNumber", getERPCompanyByDunsNumberController); + +erpRouter.post("/products", createERPProductController); +erpRouter.get("/products", getAllERPProductsController); +erpRouter.get("/products/random", getRandomERPProductController); +erpRouter.post("/products/bulk", bulkUpsertERPProductsController); +erpRouter.get("/products/:productId", getERPProductByIdController); +erpRouter.put("/products/:productId", updateERPProductController); +erpRouter.delete("/products/:productId", deleteERPProductController); + +erpRouter.post("/orders", createERPOrderController); +erpRouter.get("/orders", getAllERPOrdersController); +erpRouter.get("/orders/:orderId", getERPOrderByIdController); +erpRouter.put("/orders/:orderId", updateERPOrderController); +erpRouter.put("/orders/:orderId/status", updateERPOrderStatusController); +erpRouter.delete("/orders/:orderId", deleteERPOrderController); + +erpRouter.post("/invoices", createERPInvoiceController); +erpRouter.get("/invoices", getAllERPInvoicesController); +erpRouter.get("/invoices/:invoiceId", getERPInvoiceByIdController); +erpRouter.put("/invoices/:invoiceId", updateERPInvoiceController); +erpRouter.put("/invoices/:invoiceId/status", updateERPInvoiceStatusController); +erpRouter.delete("/invoices/:invoiceId", deleteERPInvoiceController); + +erpRouter.post("/shipments", createERPShipmentController); +erpRouter.get("/shipments", getAllERPShipmentsController); +erpRouter.post("/shipments/bulk", bulkUpsertERPShipmentsController); +erpRouter.get("/shipments/:shipmentId", getERPShipmentByIdController); +erpRouter.put("/shipments/:shipmentId", updateERPShipmentController); +erpRouter.put("/shipments/:shipmentId/status", updateERPShipmentStatusController); +erpRouter.put("/shipments/:shipmentId/tracking", updateERPShipmentTrackingController); +erpRouter.post("/shipments/:shipmentId/events", addERPShipmentEventController); 
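
As an aside, here is a minimal client sketch showing how the world-scoped ERP shipment endpoints registered above are typically exercised. It is illustrative only, not part of this change: the base URL, `worldId`, `shipmentId`, and the event payload fields are all assumptions (the exact body each controller accepts is not shown in this diff); only the route shape, mounted under `/:worldId/erp` as in `registry.route.ts`, comes from the source.

```typescript
// Illustrative only: a minimal client for the world-scoped ERP shipment routes.
// Assumptions: base URL, worldId, shipmentId, and the payload shape are placeholders;
// the route path itself (/:worldId/erp/shipments/:shipmentId/events) is taken from this diff.
const BASE_URL = "http://localhost:7860"; // assumed host/port
const worldId = "<worldId>";              // placeholder
const shipmentId = "<shipmentId>";        // placeholder

async function addShipmentEvent(): Promise<void> {
  const res = await fetch(`${BASE_URL}/${worldId}/erp/shipments/${shipmentId}/events`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    // Event fields below are assumed for illustration; consult the controller for the real schema.
    body: JSON.stringify({ eventType: "DEPARTED", occurredAt: new Date().toISOString() }),
  });
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  console.log(await res.json());
}

addShipmentEvent().catch(console.error);
```
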
+erpRouter.post("/shipments/:shipmentId/documents", addERPShipmentDocumentController); +erpRouter.put("/shipments/:shipmentId/lines", updateERPShipmentLinesController); +erpRouter.delete("/shipments/:shipmentId", deleteERPShipmentController); + +erpRouter.post("/payments", createERPPaymentController); +erpRouter.get("/payments", getAllERPPaymentsController); +erpRouter.get("/payments/:paymentId", getERPPaymentByIdController); +erpRouter.put("/payments/:paymentId", updateERPPaymentController); +erpRouter.put("/payments/:paymentId/status", updateERPPaymentStatusController); +erpRouter.put("/payments/:paymentId/allocations", applyERPPaymentAllocationsController); +erpRouter.delete("/payments/:paymentId", deleteERPPaymentController); diff --git a/packages/controlmart/src/routes/finance.route.ts b/packages/controlmart/src/routes/finance.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..4f83fc7a66a9c3823a1736e513df90292c5e00e0 --- /dev/null +++ b/packages/controlmart/src/routes/finance.route.ts @@ -0,0 +1,32 @@ +import { Router } from "express"; + +import { + createFinanceTransactionController, + getFinanceTransactionByIdController, + getAllFinanceTransactionsController, + getTransactionsBySourceController, + updateFinanceTransactionController, + deleteFinanceTransactionController, + bulkInsertTransactionsController, + getFinanceStatsController, + getFinancialSummaryController, + getTransactionsByTypeController, + getTransactionsByPartnerController, +} from "../controller/finance/finance.controller"; + +const financeRouter = Router({ mergeParams: true }); + +financeRouter.post("/transactions", createFinanceTransactionController); +financeRouter.post("/transactions/bulk", bulkInsertTransactionsController); +financeRouter.get("/transactions", getAllFinanceTransactionsController); +financeRouter.get("/transactions/:transactionId", getFinanceTransactionByIdController); +financeRouter.patch("/transactions/:transactionId", updateFinanceTransactionController); +financeRouter.delete("/transactions/:transactionId", deleteFinanceTransactionController); +financeRouter.get("/transactions/source/:sourceType/:sourceId", getTransactionsBySourceController); + +financeRouter.get("/stats", getFinanceStatsController); +financeRouter.get("/summary", getFinancialSummaryController); +financeRouter.get("/stats/by-type", getTransactionsByTypeController); +financeRouter.get("/stats/by-partner", getTransactionsByPartnerController); + +export { financeRouter }; diff --git a/packages/controlmart/src/routes/knowledge-graph.route.ts b/packages/controlmart/src/routes/knowledge-graph.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..dddc49c9bf5f3fb7b3abe2aa749e944a75d2803e --- /dev/null +++ b/packages/controlmart/src/routes/knowledge-graph.route.ts @@ -0,0 +1,213 @@ +/** + * Knowledge Graph API Routes + * + * Provides endpoints to retrieve knowledge graph data filtered by world context. 
+ */ + +import { Router, type Request, type Response } from 'express'; +import { WorldRepository } from '../repository/world.repository'; +import { CapabilityRepository } from '../repository/capability.repository'; +import { KnowledgeGraphRepository } from '../repository/knowledge-graph.repository'; +import { capabilityCatalog } from '../services/capability-catalog.service'; +import { worlds } from '../worlds'; +import type { GraphNode, GraphEdge } from '../types/knowledge-graph.type'; +import { getErrorMessage } from '../utils/error.util'; + +const knowledgeGraphRouter = Router({ mergeParams: true }); + +interface GraphStats { + totalNodes: number; + totalEdges: number; + nodesByType: Record; +} + +/** + * Compute statistics for the graph + */ +function computeStats(nodes: GraphNode[], edges: GraphEdge[]): GraphStats { + const nodesByType: Record = {}; + for (const node of nodes) { + nodesByType[node.type] = (nodesByType[node.type] || 0) + 1; + } + return { + totalNodes: nodes.length, + totalEdges: edges.length, + nodesByType, + }; +} + +/** + * Filter graph using bidirectional BFS from OD seed nodes + * This captures the complete connected subgraph including: + * - All ODs assigned to the world + * - All capabilities that use those ODs (incoming: implemented_by) + * - All personas who can perform those capabilities (incoming: can_perform) + * - All tools used by those ODs (outgoing: uses) + * - All services exposing those tools (outgoing: exposed_by) + * - All entities those tools interact with (outgoing: produces/requires/modifies) + */ +function filterGraphByODs( + nodes: GraphNode[], + edges: GraphEdge[], + seedOdIds: Set +): { nodes: GraphNode[]; edges: GraphEdge[] } { + // Build node type lookup to identify OD nodes + const nodeTypeMap = new Map(nodes.map(n => [n.id, n.type])); + + // Build bidirectional adjacency maps for traversal + const outgoing = new Map(); // node → edges leaving it + const incoming = new Map(); // node → edges entering it + + for (const edge of edges) { + if (!outgoing.has(edge.from)) outgoing.set(edge.from, []); + if (!incoming.has(edge.to)) incoming.set(edge.to, []); + outgoing.get(edge.from)!.push(edge); + incoming.get(edge.to)!.push(edge); + } + + // BFS traversal in BOTH directions from OD seeds + const visited = new Set(); + const queue: string[] = []; + + // Initialize with seed ODs that actually exist in the graph + const nodeIdSet = new Set(nodes.map(n => n.id)); + for (const odId of seedOdIds) { + if (nodeIdSet.has(odId)) { + visited.add(odId); + queue.push(odId); + } + } + + // Traverse bidirectionally, but don't enter non-seed OD nodes + // This prevents traversal to ODs from other layouts + while (queue.length > 0) { + const nodeId = queue.shift()!; + + // Traverse OUTGOING edges (OD → TOOL → SERVICE/ENTITY) + const outEdges = outgoing.get(nodeId) || []; + for (const edge of outEdges) { + if (!visited.has(edge.to)) { + // Skip OD nodes that aren't in seed set + if (nodeTypeMap.get(edge.to) === 'OD' && !seedOdIds.has(edge.to)) { + continue; + } + visited.add(edge.to); + queue.push(edge.to); + } + } + + // Traverse INCOMING edges (CAPABILITY → OD, PERSONA → CAPABILITY) + const inEdges = incoming.get(nodeId) || []; + for (const edge of inEdges) { + if (!visited.has(edge.from)) { + // Skip OD nodes that aren't in seed set + if (nodeTypeMap.get(edge.from) === 'OD' && !seedOdIds.has(edge.from)) { + continue; + } + visited.add(edge.from); + queue.push(edge.from); + } + } + } + + // Filter nodes and edges to only include visited nodes + const 
filteredNodes = nodes.filter(n => visited.has(n.id)); + const filteredEdges = edges.filter(e => visited.has(e.from) && visited.has(e.to)); + + return { nodes: filteredNodes, edges: filteredEdges }; +} + +/** + * GET /:worldId/knowledge-graph + * + * Returns the knowledge graph filtered by the world's assigned capabilities/ODs. + * Uses bidirectional BFS to capture the complete connected subgraph. + * + * If the world has no capabilities assigned, returns the full graph. + */ +knowledgeGraphRouter.get('/', async (req: Request, res: Response) => { + try { + const { worldId } = req.params; + + if (!worldId) { + return res.status(400).json({ + success: false, + error: 'World ID is required', + }); + } + + // 1. Get the world + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + return res.status(404).json({ + success: false, + error: `World not found: ${worldId}`, + }); + } + + // 2. Load full graph from database + const { nodes, edges } = await KnowledgeGraphRepository.loadGraph(); + + // 3. Get world's capability IDs (direct + from personas) + const capabilityIdSet = new Set(world.capabilityIds || []); + + // 3a. Get the world's layout ODs to filter capabilities + const layoutConfig = worlds[world.layout as keyof typeof worlds]; + const layoutOdIds = new Set( + (layoutConfig?.ods || []).map(od => od.id) + ); + + // 3b. Add capabilities from allowed personas, filtered by layout ODs + if (world.personas?.allowedPersonas?.length) { + const personaCapabilities = capabilityCatalog.filter({ + personas: world.personas.allowedPersonas + }); + // Only include capabilities whose OD exists in this layout + personaCapabilities + .filter(cap => layoutOdIds.has(cap.odId)) + .forEach(cap => capabilityIdSet.add(cap.id)); + } + + const capabilityIds = Array.from(capabilityIdSet); + + // 4. If no capabilities assigned, return full graph + if (capabilityIds.length === 0) { + return res.json({ + success: true, + data: { + nodes, + edges, + stats: computeStats(nodes, edges), + filtered: false, + message: 'No capabilities assigned to world - showing full graph', + }, + }); + } + + // 5. Get OD IDs from capabilities + const capabilities = await CapabilityRepository.findByIds(capabilityIds); + const seedOdIds = new Set(capabilities.map(c => c.odId)); + + // 6. 
Filter graph using bidirectional BFS + const filtered = filterGraphByODs(nodes, edges, seedOdIds); + + res.json({ + success: true, + data: { + nodes: filtered.nodes, + edges: filtered.edges, + stats: computeStats(filtered.nodes, filtered.edges), + filtered: true, + seedODs: Array.from(seedOdIds), + }, + }); + } catch (error) { + console.error('[knowledge-graph] Error:', error); + res.status(500).json({ + success: false, + error: getErrorMessage(error), + }); + } +}); + +export default knowledgeGraphRouter; diff --git a/packages/controlmart/src/routes/ledger.route.ts b/packages/controlmart/src/routes/ledger.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..0bebe815954200e34272099a971fbd41a791dac2 --- /dev/null +++ b/packages/controlmart/src/routes/ledger.route.ts @@ -0,0 +1,19 @@ +import { Router } from "express"; + +import { + getLedgerByWorldIdController, + updateLedgerController, + upsertLedgerController, + incrementLedgerBalancesController, + getLedgersSummaryController, +} from "../controller/finance/ledger.controller"; + +const ledgerRouter = Router({ mergeParams: true }); + +ledgerRouter.post("/", upsertLedgerController); +ledgerRouter.get("/", getLedgerByWorldIdController); +ledgerRouter.patch("/", updateLedgerController); +ledgerRouter.patch("/increment", incrementLedgerBalancesController); +ledgerRouter.get("/analytics/summary", getLedgersSummaryController); + +export default ledgerRouter; diff --git a/packages/controlmart/src/routes/logs.route.ts b/packages/controlmart/src/routes/logs.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..2a1493210c73287df0473346f7cf615ded1c33b7 --- /dev/null +++ b/packages/controlmart/src/routes/logs.route.ts @@ -0,0 +1,9 @@ +import { Router } from "express"; + +import { getLogsController } from "../controller/logs.controller"; + +const logsRouter = Router({ mergeParams: true }); + +logsRouter.get("/", getLogsController); + +export default logsRouter; diff --git a/packages/controlmart/src/routes/od.route.ts b/packages/controlmart/src/routes/od.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..0e6fdb1c94509f3f1caf574c4175be3a8f641a1c --- /dev/null +++ b/packages/controlmart/src/routes/od.route.ts @@ -0,0 +1,63 @@ +import { Router } from "express"; + +import { + runOdController, + createODController, + getODsController, + getODByIdController, + updateODController, + deleteODController, + executeODController, + scheduleODController, + getODSchedulesController, + cancelODScheduleController, + rescheduleODController, + validateODController, + getODWithSchedulesController, + bulkScheduleODsController, + pauseODScheduleController, + resumeODScheduleController, + pauseWorldSchedulesController, + resumeWorldSchedulesController, + getScheduleStatusController, +} from "../controller/od.controller"; + +const odRouter = Router({ mergeParams: true }); + +// Legacy demo execution endpoint +odRouter.post("/", runOdController); + +// CRUD operations +odRouter.post("/descriptors", createODController); +odRouter.get("/descriptors", getODsController); +odRouter.get("/descriptors/:odId", getODByIdController); +odRouter.get("/descriptors/:odId/with-schedules", getODWithSchedulesController); +odRouter.put("/descriptors/:odId", updateODController); +odRouter.delete("/descriptors/:odId", deleteODController); + +// Validation +odRouter.post("/validate", validateODController); + +// Execution +odRouter.post("/descriptors/:odId/execute", executeODController); + +// Scheduling 
+odRouter.post("/descriptors/:odId/schedule", scheduleODController); +odRouter.get("/descriptors/:odId/schedules", getODSchedulesController); +odRouter.put("/descriptors/:odId/schedules/:jobId", rescheduleODController); +odRouter.delete("/descriptors/:odId/schedules/:jobId", cancelODScheduleController); + +// Bulk operations +odRouter.post("/bulk-schedule", bulkScheduleODsController); + +// Pause/Resume Schedules +odRouter.put("/descriptors/:odId/schedules/:jobId/pause", pauseODScheduleController); +odRouter.put("/descriptors/:odId/schedules/:jobId/resume", resumeODScheduleController); + +// World-level bulk pause/resume +odRouter.put("/schedules/pause", pauseWorldSchedulesController); +odRouter.put("/schedules/resume", resumeWorldSchedulesController); +odRouter.get("/schedules/status", getScheduleStatusController); + + +export default odRouter; diff --git a/packages/controlmart/src/routes/persona.route.ts b/packages/controlmart/src/routes/persona.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..7e08418598b04b0458b9875d5d0639a99e72502e --- /dev/null +++ b/packages/controlmart/src/routes/persona.route.ts @@ -0,0 +1,33 @@ +/** + * Persona Routes + * + * Defines routes for persona-related endpoints. + */ + +import { Router } from 'express'; +import { PersonaController } from '../controller/persona.controller'; + +const router = Router(); + +// List all personas (with optional filtering) +router.get('/', PersonaController.listPersonasController); + +// Create a new persona +router.post('/', PersonaController.createPersonaController); + +// Get single persona by ID +router.get('/:personaId', PersonaController.getPersonaController); + +// Update a persona +router.put('/:personaId', PersonaController.updatePersonaController); + +// Delete a persona +router.delete('/:personaId', PersonaController.deletePersonaController); + +// Get capabilities for a persona +router.get( + '/:personaId/capabilities', + PersonaController.getPersonaCapabilitiesController +); + +export { router as personaRouter }; diff --git a/packages/controlmart/src/routes/registry.route.ts b/packages/controlmart/src/routes/registry.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..e5afd5ce0c26f81a59418cf515ea1d27e6c31018 --- /dev/null +++ b/packages/controlmart/src/routes/registry.route.ts @@ -0,0 +1,52 @@ +import { Router } from "express"; + +import logsRouter from "./logs.route"; +import worldRouter from "./world.route"; +import wmsRouter from "./wms.route"; +import tmsRouter from "./tms.route"; +import ediRouter from "./edi.route"; +import ticketsRouter from "./tickets.route"; +import { swaggerSpec } from "../utils/swagger.util"; +import odRouter from "./od.route"; +// import businessRulesRouter from "./business-rules.route"; // Temporarily disabled +import auditLogsRouter from "./auditlogs.route"; +// Research branch routes +import { erpRouter } from "./erp.routes"; +import docsRouter from "./docs.route"; +import { financeRouter } from "./finance.route"; +import ledgerRouter from "./ledger.route"; +// od-arch branch routes +import capabilitiesRouter from "./capabilities.route"; +import chaosRouter from "./chaos.route"; +import { personaRouter } from "./persona.route"; +import knowledgeGraphRouter from "./knowledge-graph.route"; + +import verificationRouter from "./verification.route"; + +const registryRouter = Router({ mergeParams: true }); + +registryRouter.use("/world", worldRouter); +// registryRouter.use("/rules", businessRulesRouter); // Temporarily disabled +// OD 
routes - support both patterns (worldId-scoped from research, global from od-arch) +registryRouter.use("/:worldId/od", odRouter); +registryRouter.use("/od", odRouter); +// od-arch catalog routes (global) +registryRouter.use("/capabilities", capabilitiesRouter); +registryRouter.use("/chaos", chaosRouter); +registryRouter.use("/personas", personaRouter); +// World-scoped routes +registryRouter.use("/:worldId/logs", logsRouter); +registryRouter.use("/:worldId/audit-logs", auditLogsRouter); +registryRouter.use("/:worldId/tickets", ticketsRouter); +registryRouter.use("/:worldId/verification", verificationRouter); +registryRouter.use("/:worldId/edi", ediRouter); +registryRouter.use("/:worldId/wms", wmsRouter); +registryRouter.use("/:worldId/tms", tmsRouter); +registryRouter.use("/:worldId/erp", erpRouter); +registryRouter.use("/:worldId/finance", financeRouter); +registryRouter.use("/:worldId/ledger", ledgerRouter); +registryRouter.use("/:worldId/knowledge-graph", knowledgeGraphRouter); + +registryRouter.use("/docs", docsRouter); + +export default registryRouter; diff --git a/packages/controlmart/src/routes/tickets.route.ts b/packages/controlmart/src/routes/tickets.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..a1e46d810401c6081e15528b9c9d1c22ca84bad9 --- /dev/null +++ b/packages/controlmart/src/routes/tickets.route.ts @@ -0,0 +1,29 @@ +import { Router } from "express"; + +import { + createTicketController, + getTicketController, + getTicketsController, + updateTicketController, + updateTicketStatusController, + updateTicketWorkNotesController, + deleteTicketController, +} from "../controller/tickets.controller"; + + +const ticketsRouter = Router({ mergeParams: true }); + +ticketsRouter.get("/", getTicketsController); + +ticketsRouter.post("/", createTicketController); + +ticketsRouter.get("/:ticketId", getTicketController); + +ticketsRouter.put("/:ticketId", updateTicketController); + +ticketsRouter.patch("/:ticketId/status", updateTicketStatusController); + +ticketsRouter.patch("/:ticketId/work-notes", updateTicketWorkNotesController); +ticketsRouter.delete("/:ticketId", deleteTicketController); + +export default ticketsRouter; diff --git a/packages/controlmart/src/routes/tms.route.ts b/packages/controlmart/src/routes/tms.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..df1997abfe6c680d8ec8eecf57ded2eee709b51e --- /dev/null +++ b/packages/controlmart/src/routes/tms.route.ts @@ -0,0 +1,78 @@ +import { Router } from "express"; + +import { + createTMSShipmentController, + getTMSShipmentByIdController, + getTMSShipmentsByStatusController, + getTMSInTransitShipmentsController, + getTMSShipmentsByCarrierController, + tenderTMSShipmentController, + acceptTMSShipmentController, + updateTMSShipmentLocationController, + updateTMSShipmentStatusController, + processTMSEdi214UpdateController, + addTMSShipmentDelayController, + createTMSShipmentStatusEventController, + createTMSCarrierController, + getTMSCarrierByIdController, + getTMSCarrierByCodeController, + getTMSActiveCarriersController, + updateTMSCarrierStatusController, + updateTMSCarrierPerformanceController, + getTMSCarrierMetricsController, + searchTMSCarriersController, + getTMSCarriersByPerformanceController, + createTMSInboundTrailerController, + getTMSInboundTrailerByIdController, + scheduleTMSTrailerAppointmentController, + updateTMSTrailerStatusController, + checkInTMSTrailerController, + startTMSTrailerUnloadingController, + completeTMSTrailerUnloadingController, + 
getTMSInboundTrailersByStatusController, + getTMSTrailersByAppointmentDateController, + getTMSAvailableDockDoorsController, + addTMSTrailerDelayController, + assignTMSTrailerToDockController, +} from "../controller/tms"; + +const tmsRouter = Router({ mergeParams: true }); + +tmsRouter.post("/shipments", createTMSShipmentController); +tmsRouter.get("/shipments", getTMSShipmentsByStatusController); +tmsRouter.get("/shipments/in-transit", getTMSInTransitShipmentsController); +tmsRouter.get("/shipments/:shipmentId", getTMSShipmentByIdController); +tmsRouter.get("/shipments/carrier/:carrierId", getTMSShipmentsByCarrierController); +tmsRouter.put("/shipments/:shipmentId/tender", tenderTMSShipmentController); +tmsRouter.put("/shipments/:shipmentId/accept", acceptTMSShipmentController); +tmsRouter.put("/shipments/:shipmentId/location", updateTMSShipmentLocationController); +tmsRouter.put("/shipments/:shipmentId/status", updateTMSShipmentStatusController); +tmsRouter.post("/shipments/:shipmentId/edi214", processTMSEdi214UpdateController); +tmsRouter.post("/shipments/:shipmentId/events", createTMSShipmentStatusEventController); +tmsRouter.post("/shipments/:shipmentId/delays", addTMSShipmentDelayController); + +tmsRouter.post("/carriers", createTMSCarrierController); +tmsRouter.get("/carriers", getTMSActiveCarriersController); +tmsRouter.get("/carriers/search", searchTMSCarriersController); +tmsRouter.get("/carriers/performance", getTMSCarriersByPerformanceController); +tmsRouter.get("/carriers/id/:carrierId", getTMSCarrierByIdController); +tmsRouter.get("/carriers/code/:carrierCode", getTMSCarrierByCodeController); +tmsRouter.put("/carriers/:carrierId/status", updateTMSCarrierStatusController); +tmsRouter.put("/carriers/:carrierId/performance", updateTMSCarrierPerformanceController); +tmsRouter.get("/carriers/:carrierId/metrics", getTMSCarrierMetricsController); + +tmsRouter.post("/trailers", createTMSInboundTrailerController); +tmsRouter.get("/trailers", getTMSInboundTrailersByStatusController); +tmsRouter.get("/trailers/appointments", getTMSTrailersByAppointmentDateController); +tmsRouter.get("/trailers/:trailerId", getTMSInboundTrailerByIdController); +tmsRouter.post("/trailers/:trailerId/schedule", scheduleTMSTrailerAppointmentController); +tmsRouter.put("/trailers/:trailerId/status", updateTMSTrailerStatusController); +tmsRouter.post("/trailers/:trailerId/checkin", checkInTMSTrailerController); +tmsRouter.post("/trailers/:trailerId/assign-dock", assignTMSTrailerToDockController); +tmsRouter.post("/trailers/:trailerId/start-unloading", startTMSTrailerUnloadingController); +tmsRouter.post("/trailers/:trailerId/complete-unloading", completeTMSTrailerUnloadingController); +tmsRouter.post("/trailers/:trailerId/delays", addTMSTrailerDelayController); + +tmsRouter.get("/dock-doors/available", getTMSAvailableDockDoorsController); + +export default tmsRouter; diff --git a/packages/controlmart/src/routes/verification.route.ts b/packages/controlmart/src/routes/verification.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..edd5c717c165a019bdaaabec4683e1cc68cb6b97 --- /dev/null +++ b/packages/controlmart/src/routes/verification.route.ts @@ -0,0 +1,9 @@ +import { Router } from "express"; +import { runEntityVerification, runVerification } from "../controller/verification.controller"; + +const verificationRouter = Router({ mergeParams: true }); + +verificationRouter.post("/verify-entity", runEntityVerification); +verificationRouter.post("/verify-ticket", runVerification); + +export 
default verificationRouter; diff --git a/packages/controlmart/src/routes/wms.route.ts b/packages/controlmart/src/routes/wms.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..9f6aec9bfeff60d5bd2694383f2a14a4873f6638 --- /dev/null +++ b/packages/controlmart/src/routes/wms.route.ts @@ -0,0 +1,324 @@ +import { Router } from "express"; + +import { + // Operations Dashboard Controller + getOperationsDashboardController, + getInventoryListController, + getInboundOrderRelationsController, + getOutboundOrderRelationsController, + updateInventoryStatusController, + patchInventoryController, + + // Bin Controller + createBinController, + getBinByCodeController, + getBinsByZoneController, + updateBinStatusController, + updateBinCapacityController, + getAvailableBinsController, + getBinUtilizationController, + + // Cycle Count Controller + createCycleCountController, + getCycleCountsByStatusController, + getCycleCountByIdController, + updateCycleCountStatusController, + assignUserToCycleCountController, + addCountToResultController, + getCycleCountsByWarehouseController, + getCycleCountVarianceReportController, + getScheduledCycleCountsController, + + // Daily Metrics Controller + createDailyMetricsController, + getDailyMetricsByDateRangeController, + getDailyMetricsByIdController, + updateDailyMetricsController, + getMetricsSummaryController, + getPerformanceTrendsController, + getZonePerformanceComparisonController, + + // Distribution Center Controller + createDistributionCenterController, + getDistributionCenterByIdController, + getDistributionCentersByStatusController, + updateDistributionCenterStatusController, + getDistributionCenterCapacityController, + getAllDistributionCentersController, + + // Dock Door Controller + createDockDoorController, + getDockDoorByIdController, + getDockDoorsByWarehouseController, + updateDockDoorStatusController, + assignAppointmentToDoorController, + clearAppointmentFromDoorController, + getAvailableDockDoorsController, + getDockDoorUtilizationController, + getDockDoorScheduleController, + + // Inbound Order Controller + createInboundOrderController, + getInboundOrdersByStatusController, + getInboundOrderByIdController, + getInboundOrderByPoNumberController, + updateOrderStatusController, + updateReceivingProgressController, + getOrdersExpectedTodayController, + getReceivingMetricsController, + getOrdersByVendorController, + patchInboundOrderController, + + // Inbound Receiving Transaction Controller + createReceivingTransactionController, + getTransactionByIdController, + getAllTransactionsController, + updateTransactionStatusController, + addItemToTransactionController, + getInboundReceivingMetricsController, + + // Inventory Transaction Controller + createInventoryTransactionController, + getTransactionsByProductController, + getTransactionsByBinController, + getTransactionsByReferenceController, + getInventoryMovementReportController, + getTransactionHistoryController, + getInventoryAdjustmentsController, + + // Outbound Order Controller + createOutboundOrderController, + getOutboundOrdersByStatusController, + getOutboundOrderByIdController, + getOutboundOrderByNumberController, + updateOutboundOrderStatusController, + updateOutboundOrderPriorityController, + allocateOrderLineController, + updatePickingProgressController, + getOrdersReadyForPickingController, + getOrderFulfillmentMetricsController, + getOrdersByCustomerController, + patchOutboundOrderController, + + // Outbound Shipment Controller + createShipmentController, + 
getShipmentsByStatusController, + getShipmentByIdController, + updateShipmentStatusController, + getShipmentsByWarehouseController, + addTrackingEventController, + getShipmentMetricsController, + getShipmentsReadyToShipController, + getShipmentsByTrackingNumberController, + + // Replenishment Controller + createReplenishmentController, + getReplenishmentsByStatusController, + getReplenishmentByIdController, + updateReplenishmentStatusController, + getReplenishmentsByProductController, + getReplenishmentsByBinController, + approveReplenishmentController, + getReplenishmentMetricsController, + cancelReplenishmentController, + + // Task Controller + createTaskController, + getTaskLogsController, + getTasksByUserController, + getTaskTimestampsController, + updateTaskStatusController, + getTaskPerformanceMetricsController, + getActiveTasksController, + addTaskScanController, + assignTaskController, + updateTaskPriorityController, + patchTaskController, + + // Warehouse Controller + createWarehouseController, + getAllWarehousesController, + getWarehouseByCodeController, + getWarehouseByIdController, + updateWarehouseController, + getWarehousesByTypeController, + getActiveWarehousesController, + updateWarehouseStatusController, + searchWarehousesController, + getWarehousesByTimezoneController, + + // Zone Controller + createZoneController, + getZonesByWarehouseController, + getZoneByCodeController, + getZoneByIdController, + updateZoneController, + getZonesByTypeController, + addAisleToZoneController, + removeAisleFromZoneController, + getZoneCapacityUtilizationController, +} from "../controller/wms"; + +const wmsRouter = Router({ mergeParams: true }); + +// Operations Dashboard Routes (aggregated metrics for UI) +wmsRouter.get("/operations-dashboard", getOperationsDashboardController); +wmsRouter.get("/inventory", getInventoryListController); +wmsRouter.put("/inventory/:inventoryId/status", updateInventoryStatusController); +wmsRouter.patch("/inventory/:inventoryId", patchInventoryController); + +// Bin Routes +wmsRouter.post("/bins", createBinController); +wmsRouter.get("/bins/code/:binCode", getBinByCodeController); +wmsRouter.get("/bins/zone/:zoneId", getBinsByZoneController); +wmsRouter.put("/bins/:binId/status", updateBinStatusController); +wmsRouter.put("/bins/:binId/capacity", updateBinCapacityController); +wmsRouter.get("/bins/available", getAvailableBinsController); +wmsRouter.get("/bins/utilization", getBinUtilizationController); + +// Cycle Count Routes +wmsRouter.post("/cycle-counts", createCycleCountController); +wmsRouter.get("/cycle-counts/status", getCycleCountsByStatusController); +wmsRouter.get("/cycle-counts/warehouse/:warehouseId", getCycleCountsByWarehouseController); +wmsRouter.get("/cycle-counts/variance-report", getCycleCountVarianceReportController); +wmsRouter.get("/cycle-counts/scheduled", getScheduledCycleCountsController); +wmsRouter.get("/cycle-counts/:cycleCountId", getCycleCountByIdController); +wmsRouter.put("/cycle-counts/:cycleCountId/status", updateCycleCountStatusController); +wmsRouter.put("/cycle-counts/:cycleCountId/assign", assignUserToCycleCountController); +wmsRouter.post("/cycle-counts/:cycleCountId/results", addCountToResultController); + +// Daily Metrics Routes +wmsRouter.post("/daily-metrics", createDailyMetricsController); +wmsRouter.get("/daily-metrics/daterange", getDailyMetricsByDateRangeController); +wmsRouter.get("/daily-metrics/summary", getMetricsSummaryController); +wmsRouter.get("/daily-metrics/trends", getPerformanceTrendsController); 
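+// NOTE: Express matches routes in the order they are registered, so fixed segments such as +// "/daily-metrics/summary" and "/daily-metrics/trends" above come before the parameterized +// "/daily-metrics/:metricsId" route below; otherwise "summary" or "trends" would be captured as a +// :metricsId value. The other route groups in this file follow the same static-before-param ordering.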
+wmsRouter.get("/daily-metrics/zone-comparison", getZonePerformanceComparisonController); +wmsRouter.get("/daily-metrics/:metricsId", getDailyMetricsByIdController); +wmsRouter.put("/daily-metrics/:metricsId", updateDailyMetricsController); + +// Distribution Center Routes +wmsRouter.post("/distribution-centers", createDistributionCenterController); +wmsRouter.get("/distribution-centers", getAllDistributionCentersController); +wmsRouter.get("/distribution-centers/status", getDistributionCentersByStatusController); +wmsRouter.get("/distribution-centers/:dcId", getDistributionCenterByIdController); +wmsRouter.put("/distribution-centers/:dcId/status", updateDistributionCenterStatusController); +wmsRouter.get("/distribution-centers/:dcId/capacity", getDistributionCenterCapacityController); + +// Dock Door Routes +wmsRouter.post("/dock-doors", createDockDoorController); +wmsRouter.get("/dock-doors/warehouse/:warehouseId", getDockDoorsByWarehouseController); +wmsRouter.get("/dock-doors/available", getAvailableDockDoorsController); +wmsRouter.get("/dock-doors/utilization", getDockDoorUtilizationController); +wmsRouter.get("/dock-doors/schedule", getDockDoorScheduleController); +wmsRouter.get("/dock-doors/:doorId", getDockDoorByIdController); +wmsRouter.put("/dock-doors/:doorId/status", updateDockDoorStatusController); +wmsRouter.put("/dock-doors/:doorId/assign", assignAppointmentToDoorController); +wmsRouter.put("/dock-doors/:doorId/clear", clearAppointmentFromDoorController); + +// Inbound Order Routes +wmsRouter.post("/inbound-orders", createInboundOrderController); +wmsRouter.get("/inbound-orders/status", getInboundOrdersByStatusController); +wmsRouter.get("/inbound-orders/po/:poNumber", getInboundOrderByPoNumberController); +wmsRouter.get("/inbound-orders/expected-today", getOrdersExpectedTodayController); +wmsRouter.get("/inbound-orders/metrics", getReceivingMetricsController); +wmsRouter.get("/inbound-orders/vendor/:vendorId", getOrdersByVendorController); +wmsRouter.get("/inbound-orders/:inboundOrderId", getInboundOrderByIdController); +wmsRouter.put("/inbound-orders/:inboundOrderId/status", updateOrderStatusController); +wmsRouter.put("/inbound-orders/:inboundOrderId/receiving-progress", updateReceivingProgressController); +wmsRouter.get("/inbound-orders/:inboundOrderId/relations", getInboundOrderRelationsController); +wmsRouter.patch("/inbound-orders/:inboundOrderId", patchInboundOrderController); + +// Inbound Receiving Transaction Routes +wmsRouter.post("/receiving-transactions", createReceivingTransactionController); +wmsRouter.get("/receiving-transactions", getAllTransactionsController); +wmsRouter.get("/receiving-transactions/metrics", getInboundReceivingMetricsController); +wmsRouter.get("/receiving-transactions/:transactionId", getTransactionByIdController); +wmsRouter.put("/receiving-transactions/:transactionId/status", updateTransactionStatusController); +wmsRouter.post("/receiving-transactions/:transactionId/items", addItemToTransactionController); + +// Inventory Transaction Routes +wmsRouter.post("/inventory-transactions", createInventoryTransactionController); +wmsRouter.get("/inventory-transactions/product/:productId", getTransactionsByProductController); +wmsRouter.get("/inventory-transactions/bin/:binId", getTransactionsByBinController); +wmsRouter.get( + "/inventory-transactions/reference/:referenceType/:referenceId", + getTransactionsByReferenceController, +); +wmsRouter.get("/inventory-transactions/movement-report", getInventoryMovementReportController); 
+wmsRouter.get("/inventory-transactions/history", getTransactionHistoryController); +wmsRouter.get("/inventory-transactions/adjustments", getInventoryAdjustmentsController); + +// Outbound Order Routes +wmsRouter.post("/outbound-orders", createOutboundOrderController); +wmsRouter.get("/outbound-orders/status", getOutboundOrdersByStatusController); +wmsRouter.get("/outbound-orders/number/:orderNumber", getOutboundOrderByNumberController); +wmsRouter.get("/outbound-orders/ready-for-picking/:warehouseId", getOrdersReadyForPickingController); +wmsRouter.get("/outbound-orders/metrics", getOrderFulfillmentMetricsController); +wmsRouter.get("/outbound-orders/customer/:customerId", getOrdersByCustomerController); +wmsRouter.get("/outbound-orders/:orderId", getOutboundOrderByIdController); +wmsRouter.put("/outbound-orders/:orderId/status", updateOutboundOrderStatusController); +wmsRouter.put("/outbound-orders/:orderId/priority", updateOutboundOrderPriorityController); +wmsRouter.put("/outbound-orders/:orderId/lines/:lineId/allocate", allocateOrderLineController); +wmsRouter.put("/outbound-orders/:orderId/picking-progress", updatePickingProgressController); +wmsRouter.get("/outbound-orders/:orderId/relations", getOutboundOrderRelationsController); +wmsRouter.patch("/outbound-orders/:orderId", patchOutboundOrderController); + +// Outbound Shipment Routes +wmsRouter.post("/shipments", createShipmentController); +wmsRouter.get("/shipments/status", getShipmentsByStatusController); +wmsRouter.get("/shipments/warehouse/:warehouseId", getShipmentsByWarehouseController); +wmsRouter.get("/shipments/metrics", getShipmentMetricsController); +wmsRouter.get("/shipments/ready-to-ship/:warehouseId", getShipmentsReadyToShipController); +wmsRouter.get("/shipments/tracking/:trackingNumber", getShipmentsByTrackingNumberController); +wmsRouter.get("/shipments/:shipmentId", getShipmentByIdController); +wmsRouter.put("/shipments/:shipmentId/status", updateShipmentStatusController); +wmsRouter.post("/shipments/:shipmentId/tracking-events", addTrackingEventController); + +// Replenishment Routes +wmsRouter.post("/replenishments", createReplenishmentController); +wmsRouter.get("/replenishments/status", getReplenishmentsByStatusController); +wmsRouter.get("/replenishments/product/:productId", getReplenishmentsByProductController); +wmsRouter.get("/replenishments/bin/:binId", getReplenishmentsByBinController); +wmsRouter.get("/replenishments/metrics", getReplenishmentMetricsController); +wmsRouter.get("/replenishments/:replenishmentId", getReplenishmentByIdController); +wmsRouter.put("/replenishments/:replenishmentId/status", updateReplenishmentStatusController); +wmsRouter.put("/replenishments/:replenishmentId/approve", approveReplenishmentController); +wmsRouter.put("/replenishments/:replenishmentId/cancel", cancelReplenishmentController); + +// Task Routes +wmsRouter.post("/tasks", createTaskController); +wmsRouter.get("/tasks", getTaskLogsController); +wmsRouter.get("/tasks/user/:userId", getTasksByUserController); +wmsRouter.get("/tasks/timestamps", getTaskTimestampsController); +wmsRouter.put("/tasks/:taskId/status", updateTaskStatusController); +wmsRouter.get("/tasks/metrics", getTaskPerformanceMetricsController); +wmsRouter.get("/tasks/active", getActiveTasksController); +wmsRouter.post("/tasks/:taskId/scans", addTaskScanController); +wmsRouter.put("/tasks/:taskId/assign", assignTaskController); +wmsRouter.put("/tasks/:taskId/priority", updateTaskPriorityController); +wmsRouter.patch("/tasks/:taskId", 
patchTaskController); + +// Warehouse Routes +wmsRouter.post("/warehouses", createWarehouseController); +wmsRouter.get("/warehouses", getAllWarehousesController); +wmsRouter.get("/warehouses/code/:warehouseCode", getWarehouseByCodeController); +wmsRouter.get("/warehouses/type/:warehouseType", getWarehousesByTypeController); +wmsRouter.get("/warehouses/active", getActiveWarehousesController); +wmsRouter.get("/warehouses/search", searchWarehousesController); +wmsRouter.get("/warehouses/timezone/:timezone", getWarehousesByTimezoneController); +wmsRouter.get("/warehouses/:warehouseId", getWarehouseByIdController); +wmsRouter.put("/warehouses/:warehouseId", updateWarehouseController); +wmsRouter.put("/warehouses/:warehouseId/status", updateWarehouseStatusController); + +// Zone Routes +wmsRouter.post("/zones", createZoneController); +wmsRouter.get("/zones/warehouse/:warehouseId", getZonesByWarehouseController); +wmsRouter.get("/zones/warehouse/:warehouseId/code/:zoneCode", getZoneByCodeController); +wmsRouter.get("/zones/type/:zoneType", getZonesByTypeController); +wmsRouter.get("/zones/:zoneId", getZoneByIdController); +wmsRouter.put("/zones/:zoneId", updateZoneController); +wmsRouter.post("/zones/:zoneId/aisles", addAisleToZoneController); +wmsRouter.delete("/zones/:zoneId/aisles/:aisleId", removeAisleFromZoneController); +wmsRouter.get("/zones/:zoneId/capacity-utilization", getZoneCapacityUtilizationController); + +export default wmsRouter; diff --git a/packages/controlmart/src/routes/world.route.ts b/packages/controlmart/src/routes/world.route.ts new file mode 100644 index 0000000000000000000000000000000000000000..2a1fb42c73b38023b298c4fc561abb8f20153e5c --- /dev/null +++ b/packages/controlmart/src/routes/world.route.ts @@ -0,0 +1,24 @@ +import { Router } from "express"; + +import { WorldController } from "../controller/world.controller"; + +const worldRouter = Router({ mergeParams: true }); + +worldRouter.post("/", WorldController.createWorldController); +// Research branch routes +worldRouter.post("/act", WorldController.actController); +worldRouter.post("/:worldId/reset", WorldController.resetWorldController); +worldRouter.get("/layouts", WorldController.getWorldLayoutsController); +worldRouter.get("/layouts/:layoutId", WorldController.getWorldLayoutByIdController); +// od-arch branch routes +worldRouter.get("/:worldId/capabilities", WorldController.getWorldCapabilitiesController); +worldRouter.get("/:worldId/chaos", WorldController.getWorldChaosController); +worldRouter.put("/:worldId/chaos", WorldController.updateWorldChaosController); +worldRouter.delete("/:worldId/chaos", WorldController.deleteWorldChaosController); +// Common routes +worldRouter.get("/:worldId", WorldController.getWorldController); +worldRouter.patch("/:worldId", WorldController.updateWorldController); +worldRouter.get("/", WorldController.listWorldsController); +worldRouter.delete("/:worldId", WorldController.deleteWorldController); + +export default worldRouter; diff --git a/packages/controlmart/src/scripts/validate-tool-annotations.ts b/packages/controlmart/src/scripts/validate-tool-annotations.ts new file mode 100644 index 0000000000000000000000000000000000000000..ae898390ad057e27a901d736db998850e7c16a66 --- /dev/null +++ b/packages/controlmart/src/scripts/validate-tool-annotations.ts @@ -0,0 +1,198 @@ +/** + * Tool Annotation Validation Script + * + * Validates that tool annotations are in sync with: + * 1. Actual tool definitions in the registry + * 2. 
Tools used in OD definitions + * + * Run: bun run src/scripts/validate-tool-annotations.ts + */ + +import { getAllToolIds } from '../operational-descriptor/tools/registry.tool'; +import { ODRegistry } from '../services/od-registry.service'; +import { WMS_TOOL_ANNOTATIONS } from '../utils/wms/tool-annotations.wms'; +import { ERP_TOOL_ANNOTATIONS } from '../utils/erp/tool-annotations.erp'; +import { TMS_TOOL_ANNOTATIONS } from '../utils/tms/tool-annotations.tms'; +import { EDI_TOOL_ANNOTATIONS } from '../utils/edi/tool-annotations.edi'; +import { FINANCE_TOOL_ANNOTATIONS } from '../utils/finance/tool-annotations.finance'; +import { MANUFACTURING_TOOL_ANNOTATIONS } from '../utils/manufacturing/tool-annotations.manufacturing'; +import type { OperationalDescriptor, Step } from '../types/od.type'; + +// ANSI colors for terminal output +const colors = { + green: '\x1b[32m', + red: '\x1b[31m', + yellow: '\x1b[33m', + cyan: '\x1b[36m', + reset: '\x1b[0m', + bold: '\x1b[1m', +}; + +function log(message: string) { + console.log(message); +} + +function logSuccess(message: string) { + console.log(`${colors.green}✓${colors.reset} ${message}`); +} + +function logError(message: string) { + console.log(`${colors.red}✗${colors.reset} ${message}`); +} + +function logWarning(message: string) { + console.log(`${colors.yellow}!${colors.reset} ${message}`); +} + +/** + * Get all annotated tool IDs from all annotation files + * Constructs full toolId as `${service}.${toolId}` + */ +function getAnnotatedToolIds(): Set<string> { + const allAnnotations = [ + ...WMS_TOOL_ANNOTATIONS, + ...ERP_TOOL_ANNOTATIONS, + ...TMS_TOOL_ANNOTATIONS, + ...EDI_TOOL_ANNOTATIONS, + ...FINANCE_TOOL_ANNOTATIONS, + ...MANUFACTURING_TOOL_ANNOTATIONS, + ]; + return new Set(allAnnotations.map(a => `${a.service}.${a.toolId}`)); +} + +/** + * Check if a step is an MCP step (has service and tool properties) + */ +function isMcpStep(step: Step): step is Step & { service: string; tool: string } { + return step.type === 'mcp' && 'service' in step && 'tool' in step; +} + +/** + * Extract all tool IDs used in ODs + */ +function getToolsUsedInODs(): Set<string> { + const toolsUsed = new Set<string>(); + const odIds = ODRegistry.getAllODIds(); + + for (const odId of odIds) { + try { + const od = ODRegistry.buildOD(odId); + if (!od) continue; + + for (const step of od.steps) { + if (isMcpStep(step)) { + const toolId = `${step.service}.${step.tool}`; + toolsUsed.add(toolId); + } + } + } catch (error) { + // Some ODs may require config to build, skip them + logWarning(`Could not build OD '${odId}' for validation (may require config)`); + } + } + + return toolsUsed; +} + +/** + * Main validation function + */ +function validateToolAnnotations(): boolean { + log(''); + log(`${colors.bold}Tool Annotation Validation${colors.reset}`); + log('=========================='); + log(''); + + // Collect data + const registeredTools = new Set(getAllToolIds()); + const annotatedTools = getAnnotatedToolIds(); + const odUsedTools = getToolsUsedInODs(); + + // Stats + logSuccess(`${registeredTools.size} tools in registry`); + logSuccess(`${annotatedTools.size} tools annotated`); + logSuccess(`${odUsedTools.size} tools used in ODs`); + log(''); + + let hasErrors = false; + const errors: string[] = []; + + // Check 1: Annotations should reference tools that exist in registry + // This is a WARNING only - annotations may exist for tools being developed + const orphanAnnotations: string[] = []; + for (const toolId of annotatedTools) { + if (!registeredTools.has(toolId)) { +
orphanAnnotations.push(toolId); + } + } + + if (orphanAnnotations.length > 0) { + // NOTE: This is now a warning, not an error. Annotations may exist for: + // - Tools that are being developed + // - Tools that are deprecated but annotations kept for documentation + log(`${colors.yellow}Warning: Annotations exist but tools not in registry:${colors.reset}`); + for (const toolId of orphanAnnotations.sort()) { + log(` - ${toolId}`); + } + log(''); + } + + // Check 2: Tools used in ODs should have annotations + const missingAnnotations: string[] = []; + for (const toolId of odUsedTools) { + if (!annotatedTools.has(toolId)) { + missingAnnotations.push(toolId); + } + } + + if (missingAnnotations.length > 0) { + hasErrors = true; + errors.push(`${colors.red}Tools used in ODs but not annotated:${colors.reset}`); + for (const toolId of missingAnnotations.sort()) { + errors.push(` - ${toolId}`); + } + errors.push(''); + } + + // Check 3 (warning only): Registered tools without annotations + const unannotatedRegistered: string[] = []; + for (const toolId of registeredTools) { + if (!annotatedTools.has(toolId)) { + unannotatedRegistered.push(toolId); + } + } + + if (unannotatedRegistered.length > 0) { + log(`${colors.yellow}Warning: Registered tools without annotations (not used in ODs):${colors.reset}`); + for (const toolId of unannotatedRegistered.sort()) { + log(` - ${toolId}`); + } + log(''); + } + + // Print errors + if (errors.length > 0) { + log('Errors:'); + for (const error of errors) { + log(error); + } + } + + // Summary + if (hasErrors) { + const totalIssues = missingAnnotations.length; + logError(`Validation FAILED - ${totalIssues} issue(s) found`); + return false; + } else { + if (orphanAnnotations.length > 0) { + logSuccess(`Validation PASSED with ${orphanAnnotations.length} warning(s) - annotations are mostly in sync`); + } else { + logSuccess('Validation PASSED - all annotations are in sync'); + } + return true; + } +} + +// Run validation +const isValid = validateToolAnnotations(); +process.exit(isValid ? 0 : 1); diff --git a/packages/controlmart/src/services/__tests__/chaos-cascade.test.ts b/packages/controlmart/src/services/__tests__/chaos-cascade.test.ts new file mode 100644 index 0000000000000000000000000000000000000000..e9cda50319ad1c97ddfe1108f1e9a74f4df50f9b --- /dev/null +++ b/packages/controlmart/src/services/__tests__/chaos-cascade.test.ts @@ -0,0 +1,423 @@ +/** + * Integration tests for Chaos Priority Cascade (MORPH-412) + * + * Tests the complete priority cascade: + * 1. CHAOS_ENABLED env var (master kill-switch) + * 2. Step-level override + * 3. OD-level policy + * 4. Capability-level override + * 5. World-level policy + * 6. Global preset + * 7. 
Default (disabled) + */ + +import { describe, it, expect, beforeEach, afterEach } from 'bun:test'; +import { ChaosConfigRegistry } from '../chaos-config.registry'; +import type { ChaosContext } from '../chaos-config.registry'; +import type { ChaosPolicy } from '../../types/od.type'; + +describe('Chaos Priority Cascade (MORPH-412)', () => { + // Save original env vars + const originalChaosEnabled = process.env.CHAOS_ENABLED; + const originalChaosPreset = process.env.CHAOS_PRESET; + + beforeEach(() => { + // Clear all overrides before each test + ChaosConfigRegistry.clearOverrides(); + // Reset env vars + delete process.env.CHAOS_ENABLED; + delete process.env.CHAOS_PRESET; + }); + + afterEach(() => { + // Restore original env vars + if (originalChaosEnabled !== undefined) { + process.env.CHAOS_ENABLED = originalChaosEnabled; + } else { + delete process.env.CHAOS_ENABLED; + } + if (originalChaosPreset !== undefined) { + process.env.CHAOS_PRESET = originalChaosPreset; + } else { + delete process.env.CHAOS_PRESET; + } + // Clear overrides + ChaosConfigRegistry.clearOverrides(); + }); + + describe('1. Master Kill-Switch (CHAOS_ENABLED env var)', () => { + it('should disable all chaos when CHAOS_ENABLED=false', () => { + process.env.CHAOS_ENABLED = 'false'; + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.enabled).toBe(false); + expect(result.policy.probability).toBe(0); + expect(result.source).toBe('env'); + }); + + it('should override all other levels when CHAOS_ENABLED=false', () => { + process.env.CHAOS_ENABLED = 'false'; + + // Set chaos at all levels + const stepChaos: ChaosPolicy = { enabled: true, probability: 0.9, scenarios: [] }; + const odChaos: ChaosPolicy = { enabled: true, probability: 0.8, scenarios: [] }; + const capChaos: ChaosPolicy = { enabled: true, probability: 0.7, scenarios: [] }; + const worldChaos: ChaosPolicy = { enabled: true, probability: 0.6, scenarios: [] }; + + ChaosConfigRegistry.setODChaos('od-1', odChaos); + ChaosConfigRegistry.setCapabilityChaos('cap-1', capChaos); + ChaosConfigRegistry.setWorldChaosPolicy('world-1', worldChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + stepChaos, + }); + + // Env var still wins + expect(result.policy.enabled).toBe(false); + expect(result.source).toBe('env'); + }); + }); + + describe('2. 
Step-Level Override', () => { + it('should use step-level chaos from context', () => { + const stepChaos: ChaosPolicy = { enabled: true, probability: 0.9, scenarios: [] }; + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + stepChaos, + }); + + expect(result.policy.probability).toBe(0.9); + expect(result.source).toBe('step'); + }); + + it('should use step-level chaos from registry', () => { + const stepChaos: ChaosPolicy = { enabled: true, probability: 0.85, scenarios: [] }; + ChaosConfigRegistry.setStepChaos('od-1', 'step-1', stepChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.probability).toBe(0.85); + expect(result.source).toBe('step'); + }); + + it('should prioritize context stepChaos over registry stepChaos', () => { + const contextStepChaos: ChaosPolicy = { enabled: true, probability: 0.95, scenarios: [] }; + const registryStepChaos: ChaosPolicy = { enabled: true, probability: 0.75, scenarios: [] }; + + ChaosConfigRegistry.setStepChaos('od-1', 'step-1', registryStepChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + stepChaos: contextStepChaos, + }); + + expect(result.policy.probability).toBe(0.95); + expect(result.source).toBe('step'); + }); + + it('should override all lower levels', () => { + const stepChaos: ChaosPolicy = { enabled: true, probability: 0.9, scenarios: [] }; + const odChaos: ChaosPolicy = { enabled: true, probability: 0.8, scenarios: [] }; + const capChaos: ChaosPolicy = { enabled: true, probability: 0.7, scenarios: [] }; + const worldChaos: ChaosPolicy = { enabled: true, probability: 0.6, scenarios: [] }; + + ChaosConfigRegistry.setODChaos('od-1', odChaos); + ChaosConfigRegistry.setCapabilityChaos('cap-1', capChaos); + ChaosConfigRegistry.setWorldChaosPolicy('world-1', worldChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + stepChaos, + }); + + expect(result.policy.probability).toBe(0.9); + expect(result.source).toBe('step'); + }); + }); + + describe('3. 
OD-Level Policy', () => { + it('should use OD-level chaos when no step-level chaos', () => { + const odChaos: ChaosPolicy = { enabled: true, probability: 0.8, scenarios: [] }; + ChaosConfigRegistry.setODChaos('od-1', odChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.probability).toBe(0.8); + expect(result.source).toBe('od'); + }); + + it('should override capability, world, and global levels', () => { + const odChaos: ChaosPolicy = { enabled: true, probability: 0.8, scenarios: [] }; + const capChaos: ChaosPolicy = { enabled: true, probability: 0.7, scenarios: [] }; + const worldChaos: ChaosPolicy = { enabled: true, probability: 0.6, scenarios: [] }; + + ChaosConfigRegistry.setODChaos('od-1', odChaos); + ChaosConfigRegistry.setCapabilityChaos('cap-1', capChaos); + ChaosConfigRegistry.setWorldChaosPolicy('world-1', worldChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.probability).toBe(0.8); + expect(result.source).toBe('od'); + }); + }); + + describe('4. Capability-Level Override', () => { + it('should use capability-level chaos when no step or OD chaos', () => { + const capChaos: ChaosPolicy = { enabled: true, probability: 0.7, scenarios: [] }; + ChaosConfigRegistry.setCapabilityChaos('cap-1', capChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.probability).toBe(0.7); + expect(result.source).toBe('capability'); + }); + + it('should override world and global levels', () => { + const capChaos: ChaosPolicy = { enabled: true, probability: 0.7, scenarios: [] }; + const worldChaos: ChaosPolicy = { enabled: true, probability: 0.6, scenarios: [] }; + + ChaosConfigRegistry.setCapabilityChaos('cap-1', capChaos); + ChaosConfigRegistry.setWorldChaosPolicy('world-1', worldChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.probability).toBe(0.7); + expect(result.source).toBe('capability'); + }); + }); + + describe('5. World-Level Policy', () => { + it('should use world-level chaos when no higher levels set', () => { + const worldChaos: ChaosPolicy = { enabled: true, probability: 0.6, scenarios: [] }; + ChaosConfigRegistry.setWorldChaosPolicy('world-1', worldChaos); + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.probability).toBe(0.6); + expect(result.source).toBe('world'); + }); + + it('should override global preset', () => { + const worldChaos: ChaosPolicy = { enabled: true, probability: 0.6, scenarios: [] }; + ChaosConfigRegistry.setWorldChaosPolicy('world-1', worldChaos); + + // Set global preset via env + process.env.CHAOS_PRESET = 'moderate'; + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.probability).toBe(0.6); + expect(result.source).toBe('world'); + }); + }); + + describe('6. 
Global Preset', () => { + it('should use global preset when no other levels set', () => { + // Set a preset via env var + process.env.CHAOS_PRESET = 'moderate'; + + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + // moderate preset has probability 0.15 + expect(result.policy.enabled).toBe(true); + expect(result.policy.probability).toBeGreaterThan(0); + expect(result.source).toBe('global'); + }); + }); + + describe('7. Default (Disabled)', () => { + it('should return disabled policy when no levels set', () => { + const result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + + expect(result.policy.enabled).toBe(false); + expect(result.policy.probability).toBe(0); + expect(result.source).toBe('global'); + }); + }); + + describe('Source Tracking', () => { + it('should correctly report source for each level', () => { + // Test env source + process.env.CHAOS_ENABLED = 'false'; + let result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'w', odId: 'o', stepId: 's' + }); + expect(result.source).toBe('env'); + + // Reset and test step source + delete process.env.CHAOS_ENABLED; + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'w', + odId: 'o', + stepId: 's', + stepChaos: { enabled: true, probability: 0.5, scenarios: [] } + }); + expect(result.source).toBe('step'); + + // Test OD source + ChaosConfigRegistry.setODChaos('o', { enabled: true, probability: 0.5, scenarios: [] }); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'w', odId: 'o', stepId: 's' + }); + expect(result.source).toBe('od'); + + // Test capability source + ChaosConfigRegistry.clearOverrides(); + ChaosConfigRegistry.setCapabilityChaos('c', { enabled: true, probability: 0.5, scenarios: [] }); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'w', capabilityId: 'c', odId: 'o', stepId: 's' + }); + expect(result.source).toBe('capability'); + + // Test world source + ChaosConfigRegistry.clearOverrides(); + ChaosConfigRegistry.setWorldChaosPolicy('w', { enabled: true, probability: 0.5, scenarios: [] }); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'w', odId: 'o', stepId: 's' + }); + expect(result.source).toBe('world'); + + // Test global source + ChaosConfigRegistry.clearOverrides(); + process.env.CHAOS_PRESET = 'moderate'; + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'w', odId: 'o', stepId: 's' + }); + expect(result.source).toBe('global'); + + // Test default source + delete process.env.CHAOS_PRESET; + ChaosConfigRegistry.clearOverrides(); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'w', odId: 'o', stepId: 's' + }); + expect(result.source).toBe('global'); + }); + }); + + describe('Complete Priority Cascade', () => { + it('should respect complete priority order', () => { + // Set policies at all levels with different probabilities + const policies = { + step: { enabled: true, probability: 0.9, scenarios: [] }, + od: { enabled: true, probability: 0.8, scenarios: [] }, + capability: { enabled: true, probability: 0.7, scenarios: [] }, + world: { enabled: true, probability: 0.6, scenarios: [] }, + global: { enabled: true, probability: 0.5, scenarios: [] }, + }; + + // Set all policies + ChaosConfigRegistry.setODChaos('od-1', policies.od); + ChaosConfigRegistry.setCapabilityChaos('cap-1', policies.capability); + 
ChaosConfigRegistry.setWorldChaosPolicy('world-1', policies.world); + + // Test that step wins + let result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + stepChaos: policies.step, + }); + expect(result.policy.probability).toBe(0.9); + expect(result.source).toBe('step'); + + // Remove step, OD should win + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-1', + stepId: 'step-1', + }); + expect(result.policy.probability).toBe(0.8); + expect(result.source).toBe('od'); + + // Remove OD, capability should win + ChaosConfigRegistry.setODChaos('od-1', { enabled: false, probability: 0, scenarios: [] }); + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-1', + odId: 'od-2', // Different OD to skip OD level + stepId: 'step-1', + }); + expect(result.policy.probability).toBe(0.7); + expect(result.source).toBe('capability'); + + // Remove capability, world should win + result = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: 'world-1', + capabilityId: 'cap-2', // Different capability + odId: 'od-2', + stepId: 'step-1', + }); + expect(result.policy.probability).toBe(0.6); + expect(result.source).toBe('world'); + }); + }); +}); diff --git a/packages/controlmart/src/services/agent.service.ts b/packages/controlmart/src/services/agent.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..c25486b829e03f289725db9eca273860bb781ad7 --- /dev/null +++ b/packages/controlmart/src/services/agent.service.ts @@ -0,0 +1,178 @@ +import { OpenAI } from "openai"; +import { z } from "zod"; +import type { TMessage, TModelConfigs } from "../types/ai.type"; +import { buildFinalPrompt, formatMessagesFormat } from "../utils/ai.util"; +import { validateSchemaSafe } from "../utils/validators/validate-schema"; +import type { ChatCompletion } from "openai/resources"; +import { loadEnv } from "../utils/env.util"; + +let openaiInstance: OpenAI | null = null; + +const getOpenAI = (): OpenAI => { + if (!openaiInstance) { + openaiInstance = new OpenAI({ + apiKey: loadEnv().OPENAI_API_KEY, + }); + } + return openaiInstance; +}; + +export const buildAgent = async ({ + prompt, + agentInput, + schema, + modelConfigs, + conversationHistory = [], +}: { + prompt: string; + agentInput: object; + schema: { + input: z.ZodType; + output: z.ZodType; + }; + modelConfigs: TModelConfigs; + conversationHistory?: TMessage[]; +}): Promise<ChatCompletion> => { + const validatedInput = validateSchemaSafe(schema.input, agentInput); + if (!validatedInput.success) { + throw new Error(`Input validation failed: ${validatedInput.error.message}`); + } + + const messages = buildFinalPrompt(prompt, conversationHistory, agentInput, schema.input); + + // Force stream to false if JSON format is requested + const shouldStream = modelConfigs.stream && modelConfigs.responseFormat !== "json_object"; + + const completion = await getOpenAI().chat.completions.create({ + messages: formatMessagesFormat(messages), + model: modelConfigs.model, + max_tokens: modelConfigs.maxTokens, + temperature: modelConfigs.temperature, + top_p: modelConfigs.topP, + frequency_penalty: modelConfigs.frequencyPenalty, + presence_penalty: modelConfigs.presencePenalty, + stop: modelConfigs.stop.length > 0 ?
modelConfigs.stop : undefined, + stream: false, // Always false to ensure ChatCompletion type + response_format: { + type: modelConfigs.responseFormat, + }, + logprobs: modelConfigs.log, + }); + + return completion; +}; + +export const runAgent = async ({ + prompt, + agentInput, + schema, + modelConfigs, + conversationHistory = [], +}: { + prompt: string; + agentInput: object; + schema: { + input: z.ZodType; + output: z.ZodType; + }; + modelConfigs: TModelConfigs; + conversationHistory?: TMessage[]; +}) => { + const completion = await buildAgent({ + prompt, + agentInput, + schema, + modelConfigs, + conversationHistory, + }); + + const result = completion.choices[0]?.message?.content || ""; + + if (modelConfigs.responseFormat === "json_object") { + try { + const parsedOutput = JSON.parse(result); + const validatedOutput = validateSchemaSafe(schema.output, parsedOutput); + + if (!validatedOutput.success) { + throw new Error(`Output validation failed: ${validatedOutput.error.message}`); + } + + return { + success: true, + data: validatedOutput.data, + raw: result, + usage: completion.usage, + }; + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : "Unknown error", + raw: result, + usage: completion.usage, + }; + } + } + + return { + success: true, + data: result, + raw: result, + usage: completion.usage, + }; +}; + +export const runStreamingAgent = async ({ + prompt, + agentInput, + schema, + modelConfigs, + conversationHistory = [], + onChunk, +}: { + prompt: string; + agentInput: object; + schema: { + input: z.ZodType; + output: z.ZodType; + }; + modelConfigs: TModelConfigs; + conversationHistory?: TMessage[]; + onChunk?: (chunk: string) => void; +}) => { + const validatedInput = validateSchemaSafe(schema.input, agentInput); + if (!validatedInput.success) { + throw new Error(`Input validation failed: ${validatedInput.error.message}`); + } + + const messages = buildFinalPrompt(prompt, conversationHistory, agentInput, schema.input); + + const stream = await getOpenAI().chat.completions.create({ + messages: formatMessagesFormat(messages), + model: modelConfigs.model, + max_tokens: modelConfigs.maxTokens, + temperature: modelConfigs.temperature, + top_p: modelConfigs.topP, + frequency_penalty: modelConfigs.frequencyPenalty, + presence_penalty: modelConfigs.presencePenalty, + stop: modelConfigs.stop.length > 0 ? 
modelConfigs.stop : undefined, + stream: true, + logprobs: modelConfigs.log, + }); + + let fullContent = ""; + + for await (const chunk of stream) { + const content = chunk.choices[0]?.delta?.content || ""; + fullContent += content; + + if (onChunk && content) { + onChunk(content); + } + } + + return { + success: true, + data: fullContent, + raw: fullContent, + }; +}; diff --git a/packages/controlmart/src/services/audit-logger.service.ts b/packages/controlmart/src/services/audit-logger.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..92797181a41845a1ee4aff7675043efe8ca816e3 --- /dev/null +++ b/packages/controlmart/src/services/audit-logger.service.ts @@ -0,0 +1,5 @@ +import { createMongoTransportLogger } from "../utils/logger.util"; + +// Create a singleton instance of the mongo logger +// Using top-level await is supported in Bun/ESM environments +export const auditLogger = await createMongoTransportLogger({}); diff --git a/packages/controlmart/src/services/auto-seed.service.ts b/packages/controlmart/src/services/auto-seed.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..f268c7641d20b950c1a2472a24d677e63cfe5730 --- /dev/null +++ b/packages/controlmart/src/services/auto-seed.service.ts @@ -0,0 +1,118 @@ +/** + * Auto-Seed Service + * + * Automatically seeds the database with initial data on server startup + * if the collections are empty. This provides a seamless development experience. + * + * Seeding order: + * 1. Capabilities (from INITIAL_CAPABILITIES) + * 2. Personas (from personaCatalog) + * 3. Knowledge Graph (built from annotations) + */ + +import type { Logger } from 'pino'; +import { Capability } from '../models/capability.model'; +import { Persona } from '../models/persona.model'; +import { KnowledgeGraphNode } from '../models/knowledge-graph-node.model'; +import { CapabilityRepository } from '../repository/capability.repository'; +import { PersonaRepository } from '../repository/persona.repository'; +import { KnowledgeGraphRepository } from '../repository/knowledge-graph.repository'; +import { INITIAL_CAPABILITIES } from '../capabilities/catalog'; +import { personaCatalog } from '../personas/catalog'; +import { capabilityCatalog } from './capability-catalog.service'; +import { buildKnowledgeGraphFromAnnotations } from './knowledge-graph-builder.service'; + +export interface AutoSeedResult { + capabilities: { seeded: boolean; count: number }; + personas: { seeded: boolean; count: number }; + knowledgeGraph: { seeded: boolean; nodes: number; edges: number }; +} + +export interface AutoSeedOptions { + force?: boolean; +} + +/** + * Auto-seeds the database with initial data if collections are empty. + * Safe to call on every startup - only seeds if data is missing. + * + * Prerequisites: + * - MongoDB must be connected + * - ODRegistry must be initialized (for knowledge graph building) + * + * @param logger - Logger instance for output + * @param options - Optional settings (force: re-seed even if data exists) + * @returns Result object indicating what was seeded + */ +export async function autoSeedIfEmpty( + logger: Logger, + options: AutoSeedOptions = {} +): Promise<AutoSeedResult> { + const { force = false } = options; + + const result: AutoSeedResult = { + capabilities: { seeded: false, count: 0 }, + personas: { seeded: false, count: 0 }, + knowledgeGraph: { seeded: false, nodes: 0, edges: 0 }, + }; + + // ======================================== + // 1.
Seed capabilities if empty (or force) + // ======================================== + if (force) { + await Capability.deleteMany({}); + logger.info('[auto-seed] Force mode: cleared capabilities'); + } + const capCount = await Capability.countDocuments(); + if (capCount === 0) { + logger.info('[auto-seed] Seeding capabilities from catalog...'); + for (const cap of INITIAL_CAPABILITIES) { + await CapabilityRepository.create(cap); + } + result.capabilities = { seeded: true, count: INITIAL_CAPABILITIES.length }; + logger.info({ count: INITIAL_CAPABILITIES.length }, '[auto-seed] Capabilities seeded'); + + // Initialize the catalog so knowledge graph builder can access capabilities + await capabilityCatalog.initialize(); + logger.info('[auto-seed] Capability catalog initialized for graph building'); + } + + // ======================================== + // 2. Seed personas if empty (or force) + // ======================================== + if (force) { + await Persona.deleteMany({}); + logger.info('[auto-seed] Force mode: cleared personas'); + } + const personaCount = await Persona.countDocuments(); + if (personaCount === 0) { + logger.info('[auto-seed] Seeding personas from catalog...'); + for (const persona of personaCatalog) { + await PersonaRepository.create(persona); + } + result.personas = { seeded: true, count: personaCatalog.length }; + logger.info({ count: personaCatalog.length }, '[auto-seed] Personas seeded'); + } + + // ======================================== + // 3. Always rebuild knowledge graph on startup + // ======================================== + // Clear and rebuild to ensure any OD/capability/annotation changes are reflected + await KnowledgeGraphRepository.clearGraph(); + logger.info('[auto-seed] Rebuilding knowledge graph from annotations...'); + + // If capabilities weren't seeded this run but exist, we need to init the catalog + if (!result.capabilities.seeded && capCount > 0) { + await capabilityCatalog.initialize(); + } + + const { nodes, edges } = buildKnowledgeGraphFromAnnotations(logger); + await KnowledgeGraphRepository.saveGraph(nodes, edges); + result.knowledgeGraph = { seeded: true, nodes: nodes.length, edges: edges.length }; + logger.info( + { nodes: nodes.length, edges: edges.length }, + '[auto-seed] Knowledge graph rebuilt and saved' + ); + + return result; +} diff --git a/packages/controlmart/src/services/capability-catalog.service.ts b/packages/controlmart/src/services/capability-catalog.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..8673294a9c0ca2c15e78eac773375998bdf83db6 --- /dev/null +++ b/packages/controlmart/src/services/capability-catalog.service.ts @@ -0,0 +1,305 @@ +/** + * Capability Catalog Service + * + * In-memory service for managing and querying capabilities. + * Loads capabilities from MongoDB and caches them in memory. + * Singleton pattern ensures single source of truth for capability data. 
+ */ + +import type { Capability, CapabilityFilter } from '../types/capability.type'; +import { CapabilityRepository } from '../repository/capability.repository'; + +export class CapabilityCatalog { + private capabilities: Map<string, Capability>; + private initialized = false; + + constructor() { + this.capabilities = new Map(); + } + + /** + * Initialize catalog by loading all capabilities from MongoDB + * Must be called during app startup before using the catalog + */ + async initialize(): Promise<void> { + // Note: getAll() now returns {data, pagination}, but without pagination param it returns all + const result = await CapabilityRepository.getAll(); + const dbCapabilities = result.data; + dbCapabilities.forEach((cap) => { + this.capabilities.set(cap.id, cap); + }); + this.initialized = true; + console.log(`[CapabilityCatalog] Loaded ${this.capabilities.size} capabilities from MongoDB`); + } + + /** + * Reload catalog from database + * Should be called after CRUD operations that modify capabilities + */ + async reload(): Promise<void> { + this.capabilities.clear(); + await this.initialize(); + console.log(`[CapabilityCatalog] Reloaded ${this.capabilities.size} capabilities`); + } + + /** + * Get all capabilities + */ + getAll(): Capability[] { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized. Call initialize() first."); + } + return Array.from(this.capabilities.values()); + } + + /** + * Get a single capability by ID + * @param id - Capability ID + * @returns Capability or null if not found + */ + getById(id: string): Capability | null { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized. Call initialize() first."); + } + return this.capabilities.get(id) || null; + } + + /** + * Filter capabilities based on criteria + * @param filters - Filter criteria + * @returns Array of capabilities matching the filters + */ + filter(filters: CapabilityFilter): Capability[] { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized. Call initialize() first."); + } + return this.getAll().filter((cap) => { + // Filter by domain (any match) + if (filters.domain && filters.domain.length > 0) { + const hasMatchingDomain = filters.domain.some((d) => + cap.tags.domain.includes(d) + ); + if (!hasMatchingDomain) return false; + } + + // Filter by complexity (exact match) + if (filters.complexity && cap.tags.complexity !== filters.complexity) { + return false; + } + + // Filter by services (all must be present) + if (filters.services && filters.services.length > 0) { + const hasAllServices = filters.services.every((s) => + cap.tags.services.includes(s) + ); + if (!hasAllServices) return false; + } + + // Filter by personas (any match) + if (filters.personas && filters.personas.length > 0) { + if (!cap.tags.personas || cap.tags.personas.length === 0) { + return false; + } + const hasMatchingPersona = filters.personas.some((p) => + cap.tags.personas?.includes(p) + ); + if (!hasMatchingPersona) return false; + } + + // Filter by patterns (any match) + if (filters.patterns && filters.patterns.length > 0) { + if (!cap.tags.patterns || cap.tags.patterns.length === 0) { + return false; + } + const hasMatchingPattern = filters.patterns.some((p) => + cap.tags.patterns?.includes(p) + ); + if (!hasMatchingPattern) return false; + } + + return true; + }); + } + + /** + * Get count of capabilities + */ + count(): number { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized.
Call initialize() first."); + } + return this.capabilities.size; + } + + /** + * Check if a capability exists + * @param id - Capability ID + */ + exists(id: string): boolean { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized. Call initialize() first."); + } + return this.capabilities.has(id); + } + + /** + * Filter capabilities by persona + * Returns all capabilities that the given persona can perform + * @param personaId - Persona ID + * @returns Array of capabilities accessible to this persona + */ + filterByPersona(personaId: string): Capability[] { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized. Call initialize() first."); + } + return this.getAll().filter((cap) => + cap.personas && cap.personas.includes(personaId) + ); + } + + /** + * Search capabilities by query string + * Performs full-text search across name, description, and tags + * Returns results ranked by relevance + * + * @param query - Search query string + * @returns Array of capabilities sorted by relevance (highest first) + */ + search(query: string): Capability[] { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized. Call initialize() first."); + } + if (!query || query.trim() === '') { + return this.getAll(); + } + + const lowerQuery = query.toLowerCase().trim(); + const results: Array<{ capability: Capability; score: number }> = []; + + for (const cap of this.getAll()) { + let score = 0; + + // Exact name match (highest priority) + if (cap.name.toLowerCase() === lowerQuery) { + score += 100; + } + // Name contains query + else if (cap.name.toLowerCase().includes(lowerQuery)) { + score += 80; + } + + // Description contains query + if (cap.description.toLowerCase().includes(lowerQuery)) { + score += 60; + } + + // Domain tags match + if (cap.tags.domain.some((d) => d.toLowerCase().includes(lowerQuery))) { + score += 40; + } + + // Service tags match + if (cap.tags.services.some((s) => s.toLowerCase().includes(lowerQuery))) { + score += 40; + } + + // Persona tags match + if (cap.tags.personas?.some((p) => p.toLowerCase().includes(lowerQuery))) { + score += 35; + } + + // Pattern tags match + if (cap.tags.patterns?.some((p) => p.toLowerCase().includes(lowerQuery))) { + score += 30; + } + + // Only include results with a score > 0 + if (score > 0) { + results.push({ capability: cap, score }); + } + } + + // Sort by score descending + results.sort((a, b) => b.score - a.score); + + return results.map((r) => r.capability); + } + + /** + * Enhanced filtering with optional search query + * Combines full-text search with tag-based filtering + * + * @param filters - Filter criteria including optional search query + * @returns Array of capabilities matching both search and filters + */ + filterEnhanced(filters: CapabilityFilter & { q?: string }): Capability[] { + if (!this.initialized) { + throw new Error("CapabilityCatalog not initialized. 
Call initialize() first."); + } + const { q, ...tagFilters } = filters; + + // If search query provided, search first + let capabilities: Capability[]; + if (q && q.trim() !== '') { + capabilities = this.search(q); + } else { + capabilities = this.getAll(); + } + + // If no tag filters, return search results + if (Object.keys(tagFilters).length === 0) { + return capabilities; + } + + // Apply tag filters to search results + return capabilities.filter((cap) => { + // Filter by domain (any match) + if (tagFilters.domain && tagFilters.domain.length > 0) { + const hasMatchingDomain = tagFilters.domain.some((d) => + cap.tags.domain.includes(d) + ); + if (!hasMatchingDomain) return false; + } + + // Filter by complexity (exact match) + if (tagFilters.complexity && cap.tags.complexity !== tagFilters.complexity) { + return false; + } + + // Filter by services (all must be present) + if (tagFilters.services && tagFilters.services.length > 0) { + const hasAllServices = tagFilters.services.every((s) => + cap.tags.services.includes(s) + ); + if (!hasAllServices) return false; + } + + // Filter by personas (any match) + if (tagFilters.personas && tagFilters.personas.length > 0) { + if (!cap.tags.personas || cap.tags.personas.length === 0) { + return false; + } + const hasMatchingPersona = tagFilters.personas.some((p) => + cap.tags.personas?.includes(p) + ); + if (!hasMatchingPersona) return false; + } + + // Filter by patterns (any match) + if (tagFilters.patterns && tagFilters.patterns.length > 0) { + if (!cap.tags.patterns || cap.tags.patterns.length === 0) { + return false; + } + const hasMatchingPattern = tagFilters.patterns.some((p) => + cap.tags.patterns?.includes(p) + ); + if (!hasMatchingPattern) return false; + } + + return true; + }); + } +} + +// Singleton instance +export const capabilityCatalog = new CapabilityCatalog(); diff --git a/packages/controlmart/src/services/capability-executor.service.ts b/packages/controlmart/src/services/capability-executor.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..2da3e4c45353c299ca01bf47ed8f055b70172d2c --- /dev/null +++ b/packages/controlmart/src/services/capability-executor.service.ts @@ -0,0 +1,267 @@ +/** + * Capability Executor Service + * + * Maps capabilities to ODs and executes them. + * Phase 1: Real OD execution with world context and service tools. 
+ */ + +import { capabilityCatalog } from './capability-catalog.service'; +import { ODRegistry } from './od-registry.service'; +import { WorldRepository } from '../repository/world.repository'; +import { initOperationalDescriptor } from '../operational-descriptor/init.od'; +import { executeOperationalDescriptor } from '../operational-descriptor/executor.od'; +import { createAllServiceTools } from './service-tools-factory.service'; +import { ChaosConfigRegistry } from './chaos-config.registry'; +import { knowledgeGraph } from './knowledge-graph.service'; +import { ChaosTelemetryCollector } from './chaos-telemetry.service'; +import { auditLogger } from './audit-logger.service'; +import type { + Capability, + CapabilityExecutionResult, +} from '../types/capability.type'; +import type { ChaosPolicy, ChaosCascadeResolution } from '../types/od.type'; +import type { Logger } from 'pino'; +import { EService } from '../utils/service-mesh.util'; +import { createAppLogger } from '../utils/logger.util'; +import { getIdFromMongoObject } from '../utils/mongo.util'; + +export interface ExecuteCapabilityInput { + capabilityId: string; + worldId: string; + inputs?: any; + logger?: Logger; +} + +export class CapabilityExecutor { + private logger: Logger; + + constructor() { + this.logger = createAppLogger({}); + } + + /** + * Execute a capability by ID + * + * @param input - Execution input parameters + * @returns Execution result + */ + async execute( + input: ExecuteCapabilityInput + ): Promise { + const { capabilityId, worldId, inputs } = input; + const startTime = Date.now(); + + // 1. Get capability from catalog + const capability = capabilityCatalog.getById(capabilityId); + if (!capability) { + throw new Error(`Capability not found: ${capabilityId}`); + } + + // 2. Validate worldId + if (!worldId) { + throw new Error('worldId is required'); + } + + // 3. Check world scope before execution (needed for error path) + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World not found: ${worldId}`); + } + + const capabilityIds = world.capabilityIds || []; + const capabilityInWorld = capabilityIds.length === 0 || capabilityIds.includes(capability.id); + + // 4. Pre-execution validation (permissive mode) + const validation = knowledgeGraph.validateOD(capability.odId); + if (!validation.isExecutable) { + this.logger.warn( + { + capabilityId: capability.id, + odId: capability.odId, + worldId, + warnings: validation.warnings, + missingDependencies: validation.missingDependencies, + requiredServices: validation.requiredServices, + requiredEntities: validation.requiredEntities, + }, + 'Capability has validation warnings - executing anyway (permissive mode)' + ); + } else if (validation.warnings.length > 0) { + this.logger.info( + { + capabilityId: capability.id, + odId: capability.odId, + worldId, + warnings: validation.warnings, + }, + 'Capability has warnings but is executable' + ); + } + + try { + // 4. Execute the OD + // Phase 0: This is a stub. 
In later phases, this will: + // - Map capability.odId to an actual OD builder/definition + // - Initialize OD context with world, tools, logger + // - Execute OD via executeOperationalDescriptor() + // - Return actual results + + const { result, chaosMetadata } = await this.executeOD(capability, worldId, inputs); + + const durationMs = Date.now() - startTime; + + return { + capabilityId, + odId: capability.odId, + worldId, + result, + executedAt: new Date(), + durationMs, + status: 'success', + capabilityInWorld, + chaosMetadata, + }; + } catch (error) { + const durationMs = Date.now() - startTime; + + return { + capabilityId, + odId: capability.odId, + worldId, + result: null, + executedAt: new Date(), + durationMs, + status: 'failed', + error: error instanceof Error ? error.message : String(error), + capabilityInWorld, + // Note: chaosMetadata not available on error since executeOD threw before telemetry could be collected + }; + } + } + + /** + * Execute the underlying OD with real world context + * Phase 1: Real implementation with OD execution + * + * @private + */ + private async executeOD( + capability: Capability, + worldId: string, + inputs?: any + ): Promise { + // 1. Get world by ID + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World not found: ${worldId}`); + } + + // 1.5. Check if capability is in world's scope (permissive enforcement) + const capabilityIds = world.capabilityIds || []; + const isInWorld = capabilityIds.length === 0 || capabilityIds.includes(capability.id); + + if (!isInWorld) { + this.logger.warn( + { + capabilityId: capability.id, + worldId, + worldCapabilities: capabilityIds, + }, + 'Capability not in world scope - executing anyway (permissive mode)' + ); + } + + // 2. Initialize operational descriptor context + // This sets up repositories, company data, etc. + const context = await initOperationalDescriptor( + world.name, + EService.OD, + { lookupByName: true } + ); + + // 3. Resolve chaos policy using priority cascade + // Priority: CHAOS_ENABLED env → step → OD → capability → world → global preset → default + const worldIdStr = getIdFromMongoObject(world) || ''; + const { + policy: chaosPolicy, + source: chaosSource + } = ChaosConfigRegistry.resolveChaosPolicy({ + worldId: worldIdStr, + capabilityId: capability.id, + odId: capability.odId, + stepId: '', // Will be resolved per-step during execution + }); + + this.logger.info( + { + worldId: worldIdStr, + capabilityId: capability.id, + odId: capability.odId, + chaosEnabled: chaosPolicy.enabled, + chaosProbability: chaosPolicy.probability, + scenarioCount: chaosPolicy.scenarios.length, + chaosSource, + }, + 'Resolved chaos policy for capability execution' + ); + + // 3.5. Build cascade resolution metadata for telemetry (MORPH-413) + const cascadeResolution: ChaosCascadeResolution = { + finalSource: chaosSource as ChaosCascadeResolution['finalSource'], + }; + + // Track preset/policy IDs based on source + // Note: Currently we don't have preset IDs stored in policies, so we track source only + // Future enhancement: Store preset IDs in world.chaos, capability.chaos, etc. + + // Create telemetry collector + const chaosTelemetry = new ChaosTelemetryCollector( + cascadeResolution, + chaosPolicy + ); + + // 4. Get OD builder from registry with resolved chaos + const od = ODRegistry.buildOD(capability.odId, { + chaosProbability: chaosPolicy.enabled ? 
chaosPolicy.probability : 0.0, + }); + + if (!od) { + throw new Error(`OD not found in registry: ${capability.odId}`); + } + + // 5. Create service tools + const serviceTools = createAllServiceTools({ + context, + worldId: context.worldId, + logger: this.logger, + flowId: `cap-${capability.id}-${Date.now()}`, + }); + + // 6. Create execution context by merging inputs into context + // The inputs from the API request become available as context variables + const executionContext = { + ...inputs, // Spread inputs so they're available in templates like {{ orderId }} + __world: world, + __startTime: new Date().toISOString(), + }; + + // 7. Execute the OD with capability context for chaos resolution + const runResult = await executeOperationalDescriptor(od, { + world, + tools: serviceTools, + logger: this.logger, + capabilityId: capability.id, // Pass capability ID for chaos resolution + chaosTelemetry, // Pass telemetry collector (MORPH-413) + auditLogger, // Pass audit logger for MongoDB WorldLog persistence + }); + + // 8. Return the run result with chaos metadata + return { + result: runResult, + chaosMetadata: chaosTelemetry.getTelemetry(), + }; + } +} + +// Singleton instance +export const capabilityExecutor = new CapabilityExecutor(); diff --git a/packages/controlmart/src/services/capability-sampling.service.ts b/packages/controlmart/src/services/capability-sampling.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..174225ad0d7d798be0c7b30b574dd7fa97360ec5 --- /dev/null +++ b/packages/controlmart/src/services/capability-sampling.service.ts @@ -0,0 +1,186 @@ +import seedrandom from 'seedrandom'; +import { capabilityCatalog } from './capability-catalog.service'; +import type { SamplingStrategy, CapabilityFilter, PersonaConfig } from '../models/world.model.type'; +import { createAppLogger } from '../utils/logger.util'; + +class CapabilitySamplingService { + private static instance: CapabilitySamplingService; + private logger = createAppLogger({}); + + private constructor() {} + + public static getInstance(): CapabilitySamplingService { + if (!CapabilitySamplingService.instance) { + CapabilitySamplingService.instance = new CapabilitySamplingService(); + } + return CapabilitySamplingService.instance; + } + + /** + * Apply sampling strategy to select capabilities + * @param strategy - The sampling strategy to apply + * @param personaConfig - Optional persona configuration to filter capabilities + * @returns Array of capability IDs + */ + public applySamplingStrategy( + strategy: SamplingStrategy, + personaConfig?: PersonaConfig + ): string[] { + this.logger.info({ strategy, personaConfig }, 'Applying sampling strategy'); + + // First, apply the base sampling strategy + let capabilityIds: string[]; + switch (strategy.type) { + case 'all': + capabilityIds = this.getAllCapabilities(); + break; + + case 'filter': + capabilityIds = this.filterCapabilities(strategy.filter); + break; + + case 'random': + capabilityIds = this.randomSample(strategy.count, strategy.seed); + break; + + case 'seeded': + capabilityIds = this.seededSample(strategy.count, strategy.seed); + break; + + default: + this.logger.error({ strategy }, 'Unknown sampling strategy type'); + throw new Error(`Unknown sampling strategy type: ${(strategy as any).type}`); + } + + // Then, apply persona filtering if provided + if (personaConfig) { + capabilityIds = this.applyPersonaFilter(capabilityIds, personaConfig); + } + + return capabilityIds; + } + + /** + * Filter capabilities based on persona configuration 
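+ *
+ * A capability is kept only when at least one allowed persona can perform it;
+ * if no allowedPersonas are configured, the input list passes through unchanged.
+ *
+ * @example
+ * // Illustrative sketch: IDs are hypothetical, and only 'cap-receive-goods'
+ * // is assumed to be tagged for the 'warehouse-manager' persona.
+ * this.applyPersonaFilter(
+ *   ['cap-receive-goods', 'cap-approve-invoice'],
+ *   { allowedPersonas: ['warehouse-manager'] }
+ * ); // => ['cap-receive-goods']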
+ * @param capabilityIds - Initial set of capability IDs + * @param personaConfig - Persona configuration + * @returns Filtered array of capability IDs + */ + private applyPersonaFilter( + capabilityIds: string[], + personaConfig: PersonaConfig + ): string[] { + // If allowedPersonas is specified, filter to capabilities accessible by those personas + if (personaConfig.allowedPersonas && personaConfig.allowedPersonas.length > 0) { + const originalCount = capabilityIds.length; + const allowedSet = new Set(); + + // For each allowed persona, get their capabilities + for (const personaId of personaConfig.allowedPersonas) { + const personaCapabilities = capabilityCatalog.filterByPersona(personaId); + personaCapabilities.forEach(cap => allowedSet.add(cap.id)); + } + + // Filter to only capabilities that allowed personas can access + capabilityIds = capabilityIds.filter(id => allowedSet.has(id)); + + this.logger.info( + { + allowedPersonas: personaConfig.allowedPersonas, + originalCount, + filteredCount: capabilityIds.length, + }, + 'Applied persona filter' + ); + } + + return capabilityIds; + } + + private getAllCapabilities(): string[] { + const capabilities = capabilityCatalog.getAll(); + const ids = capabilities.map(c => c.id); + + this.logger.info( + { count: ids.length }, + 'Selected all capabilities' + ); + + return ids; + } + + private filterCapabilities(filter: CapabilityFilter): string[] { + const capabilities = capabilityCatalog.filter(filter); + const ids = capabilities.map(c => c.id); + + this.logger.info( + { filter, count: ids.length }, + 'Filtered capabilities' + ); + + return ids; + } + + private randomSample(count: number, seed?: number): string[] { + const allCapabilities = this.getAllCapabilities(); + + if (count >= allCapabilities.length) { + this.logger.info( + { count, available: allCapabilities.length }, + 'Requested count >= available, returning all' + ); + return allCapabilities; + } + + const rng = seed !== undefined ? 
seedrandom(seed.toString()) : Math.random; + const sampled = this.sample(allCapabilities, count, rng); + + this.logger.info( + { count, seed, sampled: sampled.length }, + 'Random sampled capabilities' + ); + + return sampled; + } + + private seededSample(count: number, seed: number): string[] { + const allCapabilities = this.getAllCapabilities(); + + if (count >= allCapabilities.length) { + this.logger.info( + { count, available: allCapabilities.length }, + 'Requested count >= available, returning all' + ); + return allCapabilities; + } + + const rng = seedrandom(seed.toString()); + const sampled = this.sample(allCapabilities, count, rng); + + this.logger.info( + { count, seed, sampled: sampled.length }, + 'Seeded sampled capabilities' + ); + + return sampled; + } + + private sample( + items: string[], + count: number, + rng: () => number + ): string[] { + const result: string[] = []; + const pool = [...items]; + + for (let i = 0; i < count && pool.length > 0; i++) { + const randomIndex = Math.floor(rng() * pool.length); + result.push(pool[randomIndex]); + pool.splice(randomIndex, 1); + } + + return result; + } +} + +export const capabilitySamplingService = CapabilitySamplingService.getInstance(); diff --git a/packages/controlmart/src/services/chaos-config.registry.ts b/packages/controlmart/src/services/chaos-config.registry.ts new file mode 100644 index 0000000000000000000000000000000000000000..40c771ff81a0a79914877b5a6fa863afba4922e1 --- /dev/null +++ b/packages/controlmart/src/services/chaos-config.registry.ts @@ -0,0 +1,431 @@ +/** + * Chaos Configuration Registry + * + * Centralized registry for managing chaos engineering configurations across + * the OD architecture. Implements priority cascade for chaos resolution. + * + * Priority (highest to lowest): + * 1. CHAOS_ENABLED environment variable (master kill-switch) + * 2. Step-level chaos override (from OD definition) + * 3. OD-level chaos policy + * 4. Capability-level override + * 5. World-level chaos policy + * 6. System default preset + */ + +import fs from 'fs'; +import path from 'path'; +import type { ChaosPolicy, ChaosScenario } from '../types/od.type'; +import { createAppLogger } from '../utils/logger.util'; + +import type { TWorldChaosConfig } from '../models/world.model.type'; + +export interface ChaosPreset { + id: string; + name: string; + description: string; + globalProbability: number; + scenarios: ChaosScenario[]; +} + +export interface PresetMetadata { + id: string; + name: string; + description: string; + globalProbability: number; + scenarioCount: number; +} + +export interface ChaosContext { + worldId: string; + capabilityId?: string; + odId: string; + stepId: string; + stepChaos?: ChaosPolicy; // NEW: Step can provide chaos config directly + service?: string; + tool?: string; +} + +/** + * Chaos Config Registry Service + * Singleton service managing chaos configurations + */ +class ChaosConfigRegistryService { + private static instance: ChaosConfigRegistryService; + private logger = createAppLogger({}); + private presets: Map = new Map(); + private worldConfigs: Map = new Map(); + private worldPolicies: Map = new Map(); // Keep for legacy compatibility if needed, or remove? 
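+  // A concrete resolution sketch for the cascade above (all IDs hypothetical):
+  // with CHAOS_ENABLED unset and no step/OD/capability overrides registered,
+  // a world whose stored config has infraChaosEnabled=true resolves to the
+  // 'infra' preset at the world level:
+  //
+  //   const { policy, source } = ChaosConfigRegistry.resolveChaosPolicy({
+  //     worldId: 'world-abc', odId: 'od.some-flow', stepId: '',
+  //   });
+  //   // source === 'world', policy.enabled === true, policy.probability === 0.1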
+ private capabilityOverrides: Map = new Map(); + private odOverrides: Map = new Map(); + private stepOverrides: Map> = new Map(); // odId -> stepId -> policy + + private constructor() { + this.loadPresetsFromDisk(); + } + + public static getInstance(): ChaosConfigRegistryService { + if (!ChaosConfigRegistryService.instance) { + ChaosConfigRegistryService.instance = new ChaosConfigRegistryService(); + } + return ChaosConfigRegistryService.instance; + } + + /** + * Load all chaos presets from config/chaos-presets directory + */ + private loadPresetsFromDisk(): void { + try { + const presetsDir = path.join(__dirname, '../../config/chaos-presets'); + + if (!fs.existsSync(presetsDir)) { + this.logger.warn({ presetsDir }, 'Chaos presets directory not found'); + return; + } + + const files = fs.readdirSync(presetsDir).filter(f => f.endsWith('.json')); + + for (const file of files) { + try { + const filePath = path.join(presetsDir, file); + const content = fs.readFileSync(filePath, 'utf8'); + const preset: ChaosPreset = JSON.parse(content); + + this.presets.set(preset.id, preset); + this.logger.info( + { presetId: preset.id, scenarioCount: preset.scenarios.length }, + `Loaded chaos preset: ${preset.name}` + ); + } catch (error) { + this.logger.error({ file, error }, `Failed to load chaos preset: ${file}`); + } + } + + this.logger.info( + { presetCount: this.presets.size }, + 'Chaos Config Registry initialized' + ); + } catch (error) { + this.logger.error({ error }, 'Failed to load chaos presets'); + } + } + + /** + * Check if chaos is globally enabled via environment variable + */ + public isChaosEnabled(): boolean { + const envFlag = process.env.CHAOS_ENABLED; + if (envFlag !== undefined) { + const enabled = envFlag.toLowerCase() === 'true'; + this.logger.debug({ enabled, source: 'env' }, 'Chaos enabled check'); + return enabled; + } + return true; // Default: enabled + } + + /** + * Load a chaos preset by ID + */ + public loadPreset(presetId: string): ChaosPolicy | null { + const preset = this.presets.get(presetId); + if (!preset) { + this.logger.warn({ presetId }, 'Chaos preset not found'); + return null; + } + + return { + enabled: true, + probability: preset.globalProbability, + scenarios: preset.scenarios, + }; + } + + /** + * List all available presets + */ + public listPresets(): PresetMetadata[] { + return Array.from(this.presets.values()).map(preset => ({ + id: preset.id, + name: preset.name, + description: preset.description, + globalProbability: preset.globalProbability, + scenarioCount: preset.scenarios.length, + })); + } + + /** + * Get world-level chaos policy + */ + public getWorldChaosPolicy(worldId: string): ChaosPolicy | null { + return this.worldPolicies.get(worldId) || null; + } + + /** + * Set world-level chaos policy + */ + public setWorldChaosPolicy(worldId: string, policy: ChaosPolicy): void { + this.worldPolicies.set(worldId, policy); + this.logger.info( + { worldId, probability: policy.probability }, + 'Set world chaos policy' + ); + } + + /** + * Get capability-level chaos override + */ + public getCapabilityChaos(capabilityId: string): ChaosPolicy | null { + return this.capabilityOverrides.get(capabilityId) || null; + } + + /** + * Set capability-level chaos override + */ + public setCapabilityChaos(capabilityId: string, policy: ChaosPolicy): void { + this.capabilityOverrides.set(capabilityId, policy); + this.logger.info( + { capabilityId, probability: policy.probability }, + 'Set capability chaos override' + ); + } + + /** + * Get OD-level chaos policy + */ + public 
getODChaos(odId: string): ChaosPolicy | null { + return this.odOverrides.get(odId) || null; + } + + /** + * Set OD-level chaos policy (runtime override) + */ + public setODChaos(odId: string, policy: ChaosPolicy): void { + this.odOverrides.set(odId, policy); + this.logger.info( + { odId, probability: policy.probability }, + 'Set OD chaos override' + ); + } + + /** + * Get step-level chaos policy + */ + public getStepChaos(odId: string, stepId: string): ChaosPolicy | null { + const odSteps = this.stepOverrides.get(odId); + if (!odSteps) return null; + return odSteps.get(stepId) || null; + } + + /** + * Set step-level chaos policy + */ + public setStepChaos(odId: string, stepId: string, policy: ChaosPolicy): void { + if (!this.stepOverrides.has(odId)) { + this.stepOverrides.set(odId, new Map()); + } + this.stepOverrides.get(odId)!.set(stepId, policy); + this.logger.info( + { odId, stepId, probability: policy.probability }, + 'Set step chaos override' + ); + } + + /** + * Get the global preset from environment variable or default + */ + private getGlobalPreset(): ChaosPolicy | null { + const presetId = process.env.CHAOS_PRESET; + if (presetId) { + const policy = this.loadPreset(presetId); + if (policy) { + this.logger.debug({ presetId, source: 'env' }, 'Using global preset from env'); + return policy; + } + } + return null; + } + + /** + * Get default chaos policy (disabled) + */ + private getDefaultPolicy(): ChaosPolicy { + return { + enabled: false, + probability: 0, + scenarios: [], + }; + } + + /** + * Apply environment variable overrides to a policy + */ + private applyEnvOverrides(policy: ChaosPolicy): ChaosPolicy { + const overrideProbability = process.env.CHAOS_GLOBAL_PROBABILITY; + const overrideSeed = process.env.CHAOS_GLOBAL_SEED; + + if (overrideProbability !== undefined) { + const prob = parseFloat(overrideProbability); + if (!isNaN(prob) && prob >= 0 && prob <= 1) { + policy = { ...policy, probability: prob }; + this.logger.debug({ probability: prob }, 'Applied env probability override'); + } + } + + if (overrideSeed !== undefined) { + policy = { ...policy, seed: overrideSeed }; + this.logger.debug({ seed: overrideSeed }, 'Applied env seed override'); + } + + return policy; + } + + /** + * Set world-level chaos configuration + */ + public setWorldChaosConfiguration(worldId: string, config: TWorldChaosConfig): void { + this.worldConfigs.set(worldId, config); + } + + /** + * Get world-level chaos configuration + */ + public getWorldChaosConfiguration(worldId: string): TWorldChaosConfig | undefined { + return this.worldConfigs.get(worldId); + } + + /** + * Resolve chaos policy using priority cascade + * + * Priority (highest to lowest): + * 1. CHAOS_ENABLED env var (master kill-switch) + * 2. Step-level override (from context.stepChaos or registry) + * 3. OD-level policy + * 4. Capability-level override + * 5. World-level policy + * 6. Global preset (from CHAOS_PRESET env var) + * 7. Default (disabled) + * + * @returns Object containing the resolved policy and the source level + */ + public resolveChaosPolicy(context: ChaosContext): { + policy: ChaosPolicy; + source: 'env' | 'step' | 'od' | 'capability' | 'world' | 'global'; + } { + // 1. Check master kill-switch (CHAOS_ENABLED env var) + if (!this.isChaosEnabled()) { + this.logger.debug({ context }, 'Chaos disabled by CHAOS_ENABLED env var'); + return { + policy: this.getDefaultPolicy(), + source: 'env', + }; + } + + // 2. 
Check Step-level override (highest priority) + // Check context.stepChaos first (direct from step definition), then registry + const stepPolicy = context.stepChaos || this.getStepChaos(context.odId, context.stepId); + if (stepPolicy) { + this.logger.debug( + { odId: context.odId, stepId: context.stepId, probability: stepPolicy.probability }, + 'Using step-level chaos override' + ); + return { policy: this.applyEnvOverrides(stepPolicy), source: 'step' }; + } + + // 3. OD-level policy + const odPolicy = this.odOverrides.get(context.odId); + if (odPolicy) { + this.logger.debug( + { odId: context.odId, probability: odPolicy.probability }, + 'Using OD-level chaos policy' + ); + return { policy: this.applyEnvOverrides(odPolicy), source: 'od' }; + } + + // 4. Capability-level override + if (context.capabilityId) { + const capPolicy = this.capabilityOverrides.get(context.capabilityId); + if (capPolicy) { + this.logger.debug( + { capabilityId: context.capabilityId, probability: capPolicy.probability }, + 'Using capability-level chaos override' + ); + return { policy: this.applyEnvOverrides(capPolicy), source: 'capability' }; + } + } + + // 5. World-level policy (New Structured Config) + const worldConfig = this.worldConfigs.get(context.worldId); + + if (worldConfig) { + const { infraChaosEnabled } = worldConfig; + + // Infra Chaos -> Use 'infra' preset (System faults, network issues) + // Process Chaos is handled entirely by OD inputs/logic, not by the tool registry. + if (infraChaosEnabled) { + this.logger.debug({ worldId: context.worldId }, 'Using world-level chaos: INFRA ONLY'); + const preset = this.loadPreset('infra'); + return { + policy: { + enabled: true, + probability: 0.1, + scenarios: preset?.scenarios || [] + }, + source: 'world' + }; + } + } + + // 5b. World-level policy (Legacy) + const worldPolicy = this.worldPolicies.get(context.worldId); + if (worldPolicy) { + this.logger.debug( + { worldId: context.worldId, probability: worldPolicy.probability }, + 'Using legacy world-level chaos policy' + ); + return { policy: this.applyEnvOverrides(worldPolicy), source: 'world' }; + } + + // 6. Global preset (from CHAOS_PRESET env var) + const globalPreset = this.getGlobalPreset(); + if (globalPreset) { + this.logger.debug( + { probability: globalPreset.probability }, + 'Using global preset chaos policy' + ); + return { policy: this.applyEnvOverrides(globalPreset), source: 'global' }; + } + + // 7. 
Default (disabled) + return { policy: this.getDefaultPolicy(), source: 'global' }; + } + /** + * Clear all runtime overrides (useful for testing) + */ + public clearOverrides(): void { + this.worldPolicies.clear(); + this.capabilityOverrides.clear(); + this.odOverrides.clear(); + this.stepOverrides.clear(); + this.logger.info('Cleared all chaos overrides'); + } + + /** + * Get registry statistics + */ + public getStats(): { + presetCount: number; + worldPolicyCount: number; + capabilityOverrideCount: number; + odOverrideCount: number; + } { + return { + presetCount: this.presets.size, + worldPolicyCount: this.worldPolicies.size, + capabilityOverrideCount: this.capabilityOverrides.size, + odOverrideCount: this.odOverrides.size, + }; + } +} + +// Export singleton instance +export const ChaosConfigRegistry = ChaosConfigRegistryService.getInstance(); \ No newline at end of file diff --git a/packages/controlmart/src/services/chaos-telemetry.service.ts b/packages/controlmart/src/services/chaos-telemetry.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..5ab155d7d0019ebb239a3dd7384d66b56a15dd63 --- /dev/null +++ b/packages/controlmart/src/services/chaos-telemetry.service.ts @@ -0,0 +1,83 @@ +/** + * Chaos Telemetry Service (MORPH-413) + * + * Collects and aggregates chaos injection telemetry across OD execution. + * Tracks every chaos injection with full metadata for observability and debugging. + */ + +import type { + ChaosInjectionMetadata, + ChaosCascadeResolution, + ChaosTelemetry, + ChaosInjectionResult, + ChaosPolicy, +} from '../types/od.type'; + +export class ChaosTelemetryCollector { + private injections: ChaosInjectionMetadata[] = []; + private cascadeResolution: ChaosCascadeResolution; + private enabled: boolean = false; + private probability: number = 0; + private seed?: string; + + constructor( + cascadeResolution: ChaosCascadeResolution, + policy: ChaosPolicy + ) { + this.cascadeResolution = cascadeResolution; + this.enabled = policy.enabled; + this.probability = policy.probability; + this.seed = policy.seed; + } + + /** + * Record a chaos injection that occurred during step execution + */ + recordInjection( + stepId: string, + stepName: string, + chaosResult: ChaosInjectionResult, + configSource: 'step' | 'od' | 'capability' | 'world' | 'global' | 'env' + ): void { + this.injections.push({ + stepId, + stepName, + scenarioType: chaosResult.scenario.type, + scenarioDescription: chaosResult.scenario.description, + configSource, + probability: this.probability, + seed: this.seed, + timestamp: chaosResult.injectedAt, + modifications: chaosResult.modifications, + config: chaosResult.scenario.config, + }); + } + + /** + * Get the complete telemetry data + */ + getTelemetry(): ChaosTelemetry { + return { + enabled: this.enabled, + injectionCount: this.injections.length, + injections: this.injections, + cascadeResolution: this.cascadeResolution, + probability: this.probability, + seed: this.seed, + }; + } + + /** + * Check if chaos is enabled + */ + isEnabled(): boolean { + return this.enabled; + } + + /** + * Get the current injection count + */ + getInjectionCount(): number { + return this.injections.length; + } +} diff --git a/packages/controlmart/src/services/knowledge-graph-builder.service.ts b/packages/controlmart/src/services/knowledge-graph-builder.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..2578f197f1d3c778cdfe6adbda68a6d3078ad803 --- /dev/null +++ b/packages/controlmart/src/services/knowledge-graph-builder.service.ts 
@@ -0,0 +1,216 @@ +/** + * Knowledge Graph Builder Service + * + * Standalone function to build a knowledge graph from tool annotations, + * capabilities, and ODs. Used by auto-seed service during startup. + */ + +import { Graph } from 'graphlib'; +import type { Logger } from 'pino'; +import { createAppLogger } from '../utils/logger.util'; +import { capabilityCatalog } from './capability-catalog.service'; +import { ODRegistry } from './od-registry.service'; +import type { GraphNode, GraphEdge, NodeType, EdgeType } from '../types/knowledge-graph.type'; +import type { Step } from '../types/od.type'; + +// Import all tool annotations +import { WMS_TOOL_ANNOTATIONS } from '../utils/wms/tool-annotations.wms'; +import { ERP_TOOL_ANNOTATIONS } from '../utils/erp/tool-annotations.erp'; +import { TMS_TOOL_ANNOTATIONS } from '../utils/tms/tool-annotations.tms'; +import { EDI_TOOL_ANNOTATIONS } from '../utils/edi/tool-annotations.edi'; +import { FINANCE_TOOL_ANNOTATIONS } from '../utils/finance/tool-annotations.finance'; +import { MANUFACTURING_TOOL_ANNOTATIONS } from '../utils/manufacturing/tool-annotations.manufacturing'; + +/** + * Builds a knowledge graph from tool annotations, capabilities, and ODs. + * This is a standalone function that doesn't require KnowledgeGraphService. + * + * Prerequisites: + * - ODRegistry must be initialized (ODs registered) + * - CapabilityCatalog must be initialized (capabilities loaded from DB) + * + * @param logger - Optional logger instance + * @returns Object containing nodes and edges arrays + */ +export function buildKnowledgeGraphFromAnnotations(logger?: Logger): { + nodes: GraphNode[]; + edges: GraphEdge[]; +} { + const log = logger || createAppLogger({}); + const graph = new Graph({ directed: true, multigraph: true }); + + // ======================================== + // 1. Build from Capability Catalog + // ======================================== + const capabilities = capabilityCatalog.getAll(); + + for (const capability of capabilities) { + // Add CAPABILITY node + graph.setNode(capability.id, { + id: capability.id, + type: 'CAPABILITY' as NodeType, + label: capability.name, + metadata: { + complexity: capability.tags.complexity, + domain: capability.tags.domain, + version: capability.version, + }, + }); + + // Edge: CAPABILITY → OD (implemented_by) + graph.setEdge(capability.id, capability.odId, { type: 'implemented_by' as EdgeType }, 'implemented_by'); + + // Edges: PERSONA → CAPABILITY (can_perform) + if (capability.tags.personas) { + for (const persona of capability.tags.personas) { + if (!graph.node(persona)) { + graph.setNode(persona, { + id: persona, + type: 'PERSONA' as NodeType, + label: persona, + }); + } + graph.setEdge(persona, capability.id, { type: 'can_perform' as EdgeType }, 'can_perform'); + } + } + } + + // ======================================== + // 2. 
Build from OD Registry + // ======================================== + const odIds = ODRegistry.getAllODIds(); + + for (const odId of odIds) { + // Add OD node + graph.setNode(odId, { + id: odId, + type: 'OD' as NodeType, + label: odId, + metadata: {}, + }); + + // Build OD to extract tool references + const od = ODRegistry.buildOD(odId); + if (!od) { + log.warn({ odId }, 'Failed to build OD for graph construction'); + continue; + } + + // Recursively extract tools from steps + const extractTools = (steps: Step[]): void => { + for (const step of steps) { + if (step.type === 'mcp') { + // Edge: OD → TOOL (uses) + const toolId = `${step.service}.${step.tool}`; + graph.setEdge(odId, toolId, { type: 'uses' as EdgeType }, 'uses'); + } + if (step.type === 'map' && step.children) { + extractTools(step.children); + } + } + }; + extractTools(od.steps); + } + + // ======================================== + // 3. Build from Tool Annotations + // ======================================== + const allAnnotations = [ + ...WMS_TOOL_ANNOTATIONS, + ...ERP_TOOL_ANNOTATIONS, + ...TMS_TOOL_ANNOTATIONS, + ...EDI_TOOL_ANNOTATIONS, + ...FINANCE_TOOL_ANNOTATIONS, + ...MANUFACTURING_TOOL_ANNOTATIONS, + ]; + + // Create SERVICE nodes + const services = ['wms', 'erp', 'tms', 'edi', 'finance', 'manufacturing']; + for (const service of services) { + graph.setNode(service, { + id: service, + type: 'SERVICE' as NodeType, + label: service.toUpperCase(), + }); + } + + const entitySet = new Set(); + + for (const annotation of allAnnotations) { + // Add TOOL node + graph.setNode(annotation.toolId, { + id: annotation.toolId, + type: 'TOOL' as NodeType, + label: annotation.toolId, + metadata: { + service: annotation.service, + description: annotation.description, + }, + }); + + // Edge: TOOL → SERVICE (exposed_by) + graph.setEdge(annotation.toolId, annotation.service, { type: 'exposed_by' as EdgeType }, 'exposed_by'); + + // Edges: TOOL → ENTITY (produces) + for (const entity of annotation.produces) { + if (!graph.node(entity)) { + graph.setNode(entity, { id: entity, type: 'ENTITY' as NodeType, label: entity }); + } + graph.setEdge(annotation.toolId, entity, { type: 'produces' as EdgeType }, 'produces'); + entitySet.add(entity); + } + + // Edges: TOOL → ENTITY (requires) + for (const entity of annotation.requires) { + if (!graph.node(entity)) { + graph.setNode(entity, { id: entity, type: 'ENTITY' as NodeType, label: entity }); + } + graph.setEdge(annotation.toolId, entity, { type: 'requires' as EdgeType }, 'requires'); + entitySet.add(entity); + } + + // Edges: TOOL → ENTITY (modifies) + for (const entity of annotation.modifies) { + if (!graph.node(entity)) { + graph.setNode(entity, { id: entity, type: 'ENTITY' as NodeType, label: entity }); + } + graph.setEdge(annotation.toolId, entity, { type: 'modifies' as EdgeType }, 'modifies'); + entitySet.add(entity); + } + + // Edges: TOOL → ENTITY (fetches - read-only retrieval) + for (const entity of annotation.fetches || []) { + if (!graph.node(entity)) { + graph.setNode(entity, { id: entity, type: 'ENTITY' as NodeType, label: entity }); + } + graph.setEdge(annotation.toolId, entity, { type: 'fetches' as EdgeType }, 'fetches'); + entitySet.add(entity); + } + } + + // ======================================== + // 4. 
Extract nodes and edges from graph + // ======================================== + const nodes: GraphNode[] = graph.nodes().map(id => { + const node = graph.node(id); + if (!node) throw new Error(`Node ${id} has undefined data`); + return node; + }); + + const edges: GraphEdge[] = graph.edges().map(e => { + const edgeData = graph.edge(e); + return { + from: e.v, + to: e.w, + type: edgeData.type, + metadata: edgeData.metadata || {}, + }; + }); + + log.info( + { nodeCount: nodes.length, edgeCount: edges.length, entityCount: entitySet.size }, + '[knowledge-graph-builder] Built knowledge graph from annotations' + ); + + return { nodes, edges }; +} diff --git a/packages/controlmart/src/services/knowledge-graph.service.ts b/packages/controlmart/src/services/knowledge-graph.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..61753c97a453f5543dedbc03192e38226e310364 --- /dev/null +++ b/packages/controlmart/src/services/knowledge-graph.service.ts @@ -0,0 +1,869 @@ +/** + * Knowledge Graph Service + * + * Manages the knowledge graph for capability dependency validation and analysis. + * Uses graphlib for in-memory directed graph storage and queries. + * Loads graph from MongoDB on startup (DB-first architecture). + */ + +import { Graph } from 'graphlib'; +import { createAppLogger } from '../utils/logger.util'; +import { capabilityCatalog } from './capability-catalog.service'; +import { ODRegistry } from './od-registry.service'; +import { KnowledgeGraphRepository } from '../repository/knowledge-graph.repository'; +import { WMS_TOOL_ANNOTATIONS } from '../utils/wms/tool-annotations.wms'; +import { ERP_TOOL_ANNOTATIONS } from '../utils/erp/tool-annotations.erp'; +import { TMS_TOOL_ANNOTATIONS } from '../utils/tms/tool-annotations.tms'; +import { EDI_TOOL_ANNOTATIONS } from '../utils/edi/tool-annotations.edi'; +import { FINANCE_TOOL_ANNOTATIONS } from '../utils/finance/tool-annotations.finance'; +import { MANUFACTURING_TOOL_ANNOTATIONS } from '../utils/manufacturing/tool-annotations.manufacturing'; +import type { + GraphNode, + GraphEdge, + EdgeType, + ValidationResult, + DependencyInfo, + RelatedCapability, +} from '../types/knowledge-graph.type'; +import type { Step } from '../types/od.type'; + +class KnowledgeGraphService { + private graph: Graph; + private logger = createAppLogger({}); + private initialized: boolean = false; + + constructor() { + this.graph = new Graph({ directed: true, multigraph: true }); + } + + /** + * Initialize the knowledge graph from MongoDB + * Loads graph from database on startup (DB-first architecture) + */ + public async initialize(): Promise { + if (this.initialized) { + this.logger.warn('Knowledge graph already initialized'); + return; + } + + this.logger.info('Initializing knowledge graph from MongoDB...'); + const startTime = Date.now(); + + try { + // Load from database (primary source of truth) + await this.loadFromDB(); + + const duration = Date.now() - startTime; + const nodeCount = this.graph.nodeCount(); + const edgeCount = this.graph.edgeCount(); + + // Validate graph integrity + const validation = this.validateGraphIntegrity(); + if (!validation.valid) { + this.logger.error({ + invalidNodeCount: validation.invalidNodes.length, + invalidNodes: validation.invalidNodes, + }, 'Graph loaded with invalid nodes'); + throw new Error(`Graph has ${validation.invalidNodes.length} invalid nodes with undefined data`); + } + + this.logger.info({ + nodeCount, + edgeCount, + duration, + source: 'mongodb' + }, 'Knowledge graph initialized from 
database'); + + this.initialized = true; + } catch (error) { + this.logger.error({ error }, 'Failed to initialize knowledge graph'); + throw error; + } + } + + /** + * Load knowledge graph from MongoDB + * Replaces current graph with database content + */ + public async loadFromDB(): Promise { + const { nodes, edges } = await KnowledgeGraphRepository.loadGraph(); + + // Check if database is empty + if (nodes.length === 0) { + throw new Error( + 'Knowledge graph database is empty. Run migration script to populate: ' + + 'bun run scripts/migrate-knowledge-graph.ts' + ); + } + + // Clear existing graph + this.graph = new Graph({ directed: true, multigraph: true }); + + // Load nodes + nodes.forEach(node => { + this.graph.setNode(node.id, node); + }); + + // Load edges (use type as edge name for multigraph support) + edges.forEach(edge => { + this.graph.setEdge(edge.from, edge.to, { + type: edge.type, + metadata: edge.metadata + }, edge.type); + }); + + this.logger.info({ + nodeCount: nodes.length, + edgeCount: edges.length + }, 'Loaded knowledge graph from database'); + } + + /** + * Save current knowledge graph to MongoDB + * Used by migration scripts and admin operations + */ + public async saveToDB(): Promise { + // Extract all nodes from graph + const nodes: GraphNode[] = this.graph.nodes().map(id => { + const node = this.graph.node(id); + if (!node) { + throw new Error(`Node ${id} has undefined data`); + } + return node; + }); + + // Extract all edges from graph + const edges: GraphEdge[] = this.graph.edges().map(e => { + const edgeData = this.graph.edge(e); + return { + from: e.v, + to: e.w, + type: edgeData.type, + metadata: edgeData.metadata || {} + }; + }); + + await KnowledgeGraphRepository.saveGraph(nodes, edges); + + this.logger.info({ + nodeCount: nodes.length, + edgeCount: edges.length + }, 'Saved knowledge graph to database'); + } + + /** + * Build graph from code annotations + * Used by migration scripts to populate database from code + * NOT called during normal application startup + */ + public buildGraphFromAnnotations(): void { + this.logger.info('Building knowledge graph from annotations...'); + + // Clear existing graph + this.graph = new Graph({ directed: true, multigraph: true }); + + // Check if ODRegistry has been initialized + const registeredODs = Array.from(ODRegistry['registry'].keys()); + if (registeredODs.length === 0) { + this.logger.warn('ODRegistry appears empty'); + } + + // Build from all sources + this.buildFromCapabilityCatalog(); + this.buildFromODRegistry(); + this.buildFromToolAnnotations(); + + this.logger.info({ + nodeCount: this.graph.nodeCount(), + edgeCount: this.graph.edgeCount() + }, 'Built knowledge graph from annotations'); + } + + /** + * Add a node to the graph + */ + public addNode(node: GraphNode): void { + this.graph.setNode(node.id, node); + } + + /** + * Add an edge to the graph + * Uses edge type as name for multigraph support (allows multiple edges between same nodes) + */ + public addEdge(from: string, to: string, type: EdgeType, metadata?: any): void { + this.graph.setEdge(from, to, { type, ...metadata }, type); + } + + /** + * Get a node from the graph + */ + public getNode(id: string): GraphNode | null { + return this.graph.node(id) || null; + } + + /** + * Get outgoing edges from a node, optionally filtered by type + */ + public getOutgoingEdges(nodeId: string, type?: EdgeType): GraphEdge[] { + const edges = this.graph.outEdges(nodeId) || []; + + return edges + .map(e => ({ + from: e.v, + to: e.w, + ...this.graph.edge(e), + })) 
+ .filter(e => !type || e.type === type); + } + + /** + * Validate graph integrity + * Checks all nodes have proper data (not undefined) + */ + public validateGraphIntegrity(): { valid: boolean; invalidNodes: string[] } { + const invalidNodes: string[] = []; + + for (const nodeId of this.graph.nodes()) { + const node = this.graph.node(nodeId); + if (!node || !node.type) { + invalidNodes.push(nodeId); + } + } + + return { + valid: invalidNodes.length === 0, + invalidNodes, + }; + } + + /** + * Build graph from capability catalog + * Creates CAPABILITY nodes and implemented_by edges + */ + private buildFromCapabilityCatalog(): void { + const capabilities = capabilityCatalog.getAll(); + + for (const capability of capabilities) { + // Add capability node + this.addNode({ + id: capability.id, + type: 'CAPABILITY', + label: capability.name, + metadata: { + complexity: capability.tags.complexity, + domain: capability.tags.domain, + version: capability.version, + }, + }); + + // Add edge: capability → OD + this.addEdge(capability.id, capability.odId, 'implemented_by'); + + // Add persona edges if personas are defined + if (capability.tags.personas) { + for (const persona of capability.tags.personas) { + // Ensure persona node exists + if (!this.getNode(persona)) { + this.addNode({ + id: persona, + type: 'PERSONA', + label: persona, + }); + } + + // Add edge: persona → capability + this.addEdge(persona, capability.id, 'can_perform'); + } + } + } + + this.logger.debug( + { count: capabilities.length }, + 'Built capability nodes from catalog' + ); + } + + /** + * Build graph from OD registry + * Creates OD nodes and uses edges to tools + */ + private buildFromODRegistry(): void { + // Get all registered OD IDs + const odIds = Array.from(ODRegistry['registry'].keys()); + + for (const odId of odIds) { + // Add OD node + this.addNode({ + id: odId, + type: 'OD', + label: odId, + metadata: {}, + }); + + // Build the OD to inspect its steps and extract tools + const od = ODRegistry.buildOD(odId); + if (!od) { + this.logger.warn({ odId }, 'Failed to build OD for graph construction'); + continue; + } + + // Recursively extract tools from OD steps + const extractTools = (steps: Step[]): void => { + for (const step of steps) { + // Only McpStep has external tool calls + if (step.type === 'mcp') { + // Create OD → TOOL edge + this.addEdge(odId, step.tool, 'uses'); + } + + // Handle MapStep children recursively + if (step.type === 'map' && step.children) { + extractTools(step.children); + } + } + }; + + extractTools(od.steps); + } + + this.logger.debug( + { count: odIds.length }, + 'Built OD nodes from registry' + ); + } + + /** + * Build graph from tool annotations + * Creates SERVICE, TOOL, and ENTITY nodes with relationship edges + * (MORPH-315) + */ + private buildFromToolAnnotations(): void { + // Combine all annotations + const allAnnotations = [ + ...WMS_TOOL_ANNOTATIONS, + ...ERP_TOOL_ANNOTATIONS, + ...TMS_TOOL_ANNOTATIONS, + ...EDI_TOOL_ANNOTATIONS, + ...FINANCE_TOOL_ANNOTATIONS, + ...MANUFACTURING_TOOL_ANNOTATIONS, + ]; + + // Create service nodes + const services = ['wms', 'erp', 'tms', 'edi', 'finance']; + for (const service of services) { + this.addNode({ + id: service, + type: 'SERVICE', + label: service.toUpperCase(), + }); + } + + // Track unique entities for logging + const entitySet = new Set(); + + // Process each annotation + for (const annotation of allAnnotations) { + // Create tool node + this.addNode({ + id: annotation.toolId, + type: 'TOOL', + label: annotation.toolId, + metadata: { + 
service: annotation.service, + description: annotation.description, + }, + }); + + // Tool → Service edge + this.addEdge(annotation.toolId, annotation.service, 'exposed_by'); + + // Tool → Entity edges (produces) + for (const entity of annotation.produces) { + this.addNode({ + id: entity, + type: 'ENTITY', + label: entity, + }); + this.addEdge(annotation.toolId, entity, 'produces'); + entitySet.add(entity); + } + + // Tool → Entity edges (requires) + for (const entity of annotation.requires) { + this.addNode({ + id: entity, + type: 'ENTITY', + label: entity, + }); + this.addEdge(annotation.toolId, entity, 'requires'); + entitySet.add(entity); + } + + // Tool → Entity edges (modifies) + for (const entity of annotation.modifies) { + this.addNode({ + id: entity, + type: 'ENTITY', + label: entity, + }); + this.addEdge(annotation.toolId, entity, 'modifies'); + entitySet.add(entity); + } + + // Tool → Entity edges (fetches - read-only retrieval) + for (const entity of annotation.fetches || []) { + this.addNode({ + id: entity, + type: 'ENTITY', + label: entity, + }); + this.addEdge(annotation.toolId, entity, 'fetches'); + entitySet.add(entity); + } + } + + this.logger.debug( + { + toolCount: allAnnotations.length, + serviceCount: services.length, + entityCount: entitySet.size, + }, + 'Built graph from tool annotations' + ); + } + + // ============================================================ + // Helper Methods for Capability Queries + // ============================================================ + + /** + * Get services used by a capability + * Traverses: CAPABILITY → OD → TOOL → SERVICE + */ + private getCapabilityServices(capabilityId: string): string[] { + // Get OD for this capability + const odEdges = this.getOutgoingEdges(capabilityId, 'implemented_by'); + if (odEdges.length === 0) return []; + + const odId = odEdges[0].to; + + // Get tools used by this OD + const toolEdges = this.getOutgoingEdges(odId, 'uses'); + + // Get services for each tool + const services = new Set(); + for (const toolEdge of toolEdges) { + const serviceEdges = this.getOutgoingEdges(toolEdge.to, 'exposed_by'); + for (const serviceEdge of serviceEdges) { + services.add(serviceEdge.to); + } + } + + return Array.from(services); + } + + /** + * Get entities produced by a capability + * Traverses: CAPABILITY → OD → TOOL → ENTITY (produces + modifies) + */ + private getCapabilityProducedEntities(capabilityId: string): string[] { + const odEdges = this.getOutgoingEdges(capabilityId, 'implemented_by'); + if (odEdges.length === 0) return []; + + const odId = odEdges[0].to; + const toolEdges = this.getOutgoingEdges(odId, 'uses'); + + const entities = new Set(); + for (const toolEdge of toolEdges) { + // Entities produced by tools + const producedEdges = this.getOutgoingEdges(toolEdge.to, 'produces'); + for (const entityEdge of producedEdges) { + entities.add(entityEdge.to); + } + + // Entities modified by tools (also outputs) + const modifiedEdges = this.getOutgoingEdges(toolEdge.to, 'modifies'); + for (const entityEdge of modifiedEdges) { + entities.add(entityEdge.to); + } + } + + return Array.from(entities); + } + + /** + * Get entities required by a capability + * Traverses: CAPABILITY → OD → TOOL → ENTITY (requires) + */ + private getCapabilityRequiredEntities(capabilityId: string): string[] { + const odEdges = this.getOutgoingEdges(capabilityId, 'implemented_by'); + if (odEdges.length === 0) return []; + + const odId = odEdges[0].to; + const toolEdges = this.getOutgoingEdges(odId, 'uses'); + + const entities = new 
Set(); + for (const toolEdge of toolEdges) { + const requiredEdges = this.getOutgoingEdges(toolEdge.to, 'requires'); + for (const entityEdge of requiredEdges) { + entities.add(entityEdge.to); + } + } + + return Array.from(entities); + } + + // ============================================================ + // Query Operations (Stubs for later tickets) + // ============================================================ + + /** + * Validate if an OD can be executed + * (MORPH-316) + */ + public validateOD(odId: string): ValidationResult { + const result: ValidationResult = { + capabilityId: '', // Filled by caller + odId, + isExecutable: true, + warnings: [], + missingDependencies: [], + requiredServices: [], + requiredEntities: [], + }; + + // 1. Get OD definition + const od = ODRegistry.buildOD(odId); + if (!od) { + result.isExecutable = false; + result.warnings.push({ + type: 'missing_tool', + message: `OD not found: ${odId}`, + details: { odId }, + }); + return result; + } + + // 2. Track available entities (from previous steps) + const availableEntities = new Set(); + + // 3. Recursive helper to extract and validate tools + const validateSteps = (steps: Step[]): void => { + for (const step of steps) { + // Only McpStep uses tools + if (step.type === 'mcp') { + const toolId = step.tool; + + // Check if tool exists in graph + const toolNode = this.getNode(toolId); + if (!toolNode) { + result.isExecutable = false; + result.warnings.push({ + type: 'missing_tool', + message: `Tool not found in knowledge graph: ${toolId}`, + details: { + toolId, + stepId: step.id, + stepName: step.name, + service: step.service, + }, + }); + result.missingDependencies.push(toolId); + continue; // Skip further validation for this tool + } + + // Get tool's service + const serviceEdges = this.getOutgoingEdges(toolId, 'exposed_by'); + if (serviceEdges.length > 0) { + const serviceId = serviceEdges[0].to; + if (!result.requiredServices.includes(serviceId)) { + result.requiredServices.push(serviceId); + } + } else { + // Tool has no service mapping (annotation missing exposed_by edge) + this.logger.warn({ toolId }, `Tool has no service mapping in graph`); + } + + // Check required entities + const requiredEdges = this.getOutgoingEdges(toolId, 'requires'); + for (const edge of requiredEdges) { + const entityId = edge.to; + + // Add to requiredEntities list + if (!result.requiredEntities.includes(entityId)) { + result.requiredEntities.push(entityId); + } + + // Check if entity is available from previous steps + if (!availableEntities.has(entityId)) { + // Permissive mode: warn but don't fail + // Entity might be provided as runtime input + result.warnings.push({ + type: 'missing_entity', + message: `Tool '${toolId}' requires entity '${entityId}' which is not available from previous steps`, + details: { + toolId, + entityId, + stepId: step.id, + stepName: step.name, + note: 'This entity might be provided as input at runtime', + }, + }); + } + } + + // Add produced entities to available set + const producedEdges = this.getOutgoingEdges(toolId, 'produces'); + for (const edge of producedEdges) { + availableEntities.add(edge.to); + } + + // Add modified entities to available set + const modifiedEdges = this.getOutgoingEdges(toolId, 'modifies'); + for (const edge of modifiedEdges) { + availableEntities.add(edge.to); + } + } + + // Handle MapStep children recursively + if (step.type === 'map' && step.children) { + validateSteps(step.children); + } + } + }; + + // 4. Validate all steps + validateSteps(od.steps); + + // 5. 
Log validation results + this.logger.debug({ + odId, + isExecutable: result.isExecutable, + warningCount: result.warnings.length, + missingDependencyCount: result.missingDependencies.length, + requiredServices: result.requiredServices, + requiredEntityCount: result.requiredEntities.length, + }, 'OD validation completed'); + + return result; + } + + /** + * Find capabilities related to a given capability + * (MORPH-317) + * + * Finds related capabilities based on: + * - Shared services (relationshipType: 'shares-service') + * - Shared entities (relationshipType: 'shares-entity') + * - Prerequisites (relationshipType: 'prerequisite') + * - Suggested next (relationshipType: 'suggested-next') + */ + public findRelatedCapabilities(capabilityId: string): RelatedCapability[] { + const startTime = Date.now(); + + // 1. Validate capability exists + const capability = this.getNode(capabilityId); + if (!capability || capability.type !== 'CAPABILITY') { + this.logger.warn({ capabilityId }, 'Capability not found for findRelatedCapabilities'); + return []; + } + + // 2. Get capability's services and entities + const myServices = this.getCapabilityServices(capabilityId); + const myProducedEntities = this.getCapabilityProducedEntities(capabilityId); + const myRequiredEntities = this.getCapabilityRequiredEntities(capabilityId); + + // 3. Find all other capabilities + const allCapabilities = this.graph.nodes() + .map(id => this.getNode(id)) + .filter(n => n?.type === 'CAPABILITY' && n.id !== capabilityId) + .filter(n => n !== null) as GraphNode[]; + + // 4. Calculate similarity for each capability + const results: RelatedCapability[] = []; + + for (const otherCap of allCapabilities) { + const otherServices = this.getCapabilityServices(otherCap.id); + const otherProduced = this.getCapabilityProducedEntities(otherCap.id); + const otherRequired = this.getCapabilityRequiredEntities(otherCap.id); + + // Check shared services + const sharedServices = myServices.filter(s => otherServices.includes(s)); + if (sharedServices.length > 0) { + const similarity = sharedServices.length / Math.max(myServices.length, otherServices.length); + results.push({ + id: otherCap.id, + name: otherCap.label, + relationshipType: 'shares-service', + similarity, + reason: `Shares ${sharedServices.length} service(s): ${sharedServices.join(', ')}` + }); + } + + // Check shared entities + const myAllEntities = [...myProducedEntities, ...myRequiredEntities]; + const otherAllEntities = [...otherProduced, ...otherRequired]; + const sharedEntities = myAllEntities.filter(e => otherAllEntities.includes(e)); + const uniqueSharedEntities = [...new Set(sharedEntities)]; + + if (uniqueSharedEntities.length > 0) { + const similarity = uniqueSharedEntities.length / Math.max(myAllEntities.length, otherAllEntities.length); + results.push({ + id: otherCap.id, + name: otherCap.label, + relationshipType: 'shares-entity', + similarity, + reason: `Shares ${uniqueSharedEntities.length} entity/entities: ${uniqueSharedEntities.join(', ')}` + }); + } + + // Check prerequisites (other produces what this requires) + const prerequisites = otherProduced.filter(e => myRequiredEntities.includes(e)); + if (prerequisites.length > 0 && myRequiredEntities.length > 0) { + const similarity = prerequisites.length / myRequiredEntities.length; + results.push({ + id: otherCap.id, + name: otherCap.label, + relationshipType: 'prerequisite', + similarity, + reason: `Produces required entities: ${prerequisites.join(', ')}` + }); + } + + // Check suggested-next (this produces what 
other requires) + const suggestedNext = otherRequired.filter(e => myProducedEntities.includes(e)); + if (suggestedNext.length > 0 && myProducedEntities.length > 0) { + const similarity = suggestedNext.length / myProducedEntities.length; + results.push({ + id: otherCap.id, + name: otherCap.label, + relationshipType: 'suggested-next', + similarity, + reason: `Requires produced entities: ${suggestedNext.join(', ')}` + }); + } + } + + // 5. Sort by similarity descending and return top 10 + const topResults = results + .sort((a, b) => b.similarity - a.similarity) + .slice(0, 10); + + const duration = Date.now() - startTime; + this.logger.debug({ + capabilityId, + resultCount: topResults.length, + duration, + }, 'findRelatedCapabilities completed'); + + return topResults; + } + + /** + * Get dependency information for a capability + * (MORPH-318) + * + * Returns comprehensive dependency information including: + * - Tools used with service and execution order + * - Services required + * - Input entities (required externally, not produced internally) + * - Output entities (produced by the OD) + */ + public getDependencies(capabilityId: string): DependencyInfo { + const startTime = Date.now(); + + // Initialize result with Sets for deduplication + const result: DependencyInfo = { + capabilityId, + odId: '', + tools: [], + services: [], + inputEntities: [], + outputEntities: [], + }; + + // Get capability's OD via implemented_by edge + const odEdges = this.getOutgoingEdges(capabilityId, 'implemented_by'); + if (odEdges.length === 0) { + this.logger.warn({ capabilityId }, 'No OD found for capability'); + return result; + } + + const odId = odEdges[0].to; + result.odId = odId; + + // Get tools used by this OD (order preserved) + const toolEdges = this.getOutgoingEdges(odId, 'uses'); + + // Track entities produced within this OD + const producedInThisOD = new Set(); + const servicesSet = new Set(); + const inputEntitiesSet = new Set(); + const outputEntitiesSet = new Set(); + + // Process each tool in order + for (let i = 0; i < toolEdges.length; i++) { + const toolId = toolEdges[i].to; + + // Get service for this tool + const serviceEdges = this.getOutgoingEdges(toolId, 'exposed_by'); + const service = serviceEdges.length > 0 ? 
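+      // Tools without an exposed_by edge fall back to the 'unknown' service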
serviceEdges[0].to : 'unknown'; + servicesSet.add(service); + + // Add to tools list with metadata + result.tools.push({ + toolId, + service, + stepIndex: i, + required: true, + }); + + // Get required entities for this tool + const requiredEdges = this.getOutgoingEdges(toolId, 'requires'); + for (const edge of requiredEdges) { + const entityId = edge.to; + // If not produced earlier in this OD, it's an external input + if (!producedInThisOD.has(entityId)) { + inputEntitiesSet.add(entityId); + } + } + + // Get produced entities from this tool + const producedEdges = this.getOutgoingEdges(toolId, 'produces'); + for (const edge of producedEdges) { + const entityId = edge.to; + producedInThisOD.add(entityId); + outputEntitiesSet.add(entityId); + } + + // Also track modified entities as outputs + const modifiedEdges = this.getOutgoingEdges(toolId, 'modifies'); + for (const edge of modifiedEdges) { + const entityId = edge.to; + producedInThisOD.add(entityId); + outputEntitiesSet.add(entityId); + } + } + + // Convert Sets to arrays + result.services = Array.from(servicesSet); + result.inputEntities = Array.from(inputEntitiesSet); + result.outputEntities = Array.from(outputEntitiesSet); + + const duration = Date.now() - startTime; + this.logger.debug({ + capabilityId, + odId, + toolCount: result.tools.length, + serviceCount: result.services.length, + inputEntityCount: result.inputEntities.length, + outputEntityCount: result.outputEntities.length, + duration, + }, 'getDependencies completed'); + + return result; + } +} + +// Singleton instance +export const knowledgeGraph = new KnowledgeGraphService(); diff --git a/packages/controlmart/src/services/mongo.service.ts b/packages/controlmart/src/services/mongo.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..4dd63a4fc619f303e6e7d5652962f13a24f0e16b --- /dev/null +++ b/packages/controlmart/src/services/mongo.service.ts @@ -0,0 +1,177 @@ +import mongoose from "mongoose"; + +type TMongoConnectOptions = { + uri: string; + dbName?: string; + log?: boolean; +}; + +const mongoState = { + isConnected: false, + uri: "", + dbName: "", +}; + +export const connectMongo = async ({ uri, dbName, log = true }: TMongoConnectOptions) => { + if (mongoState.isConnected && mongoose.connection.readyState === 1) { + if (log) console.log(`[mongo] Already connected to ${mongoState.dbName}`); + return mongoose.connection; + } + + try { + mongoose.set("strictQuery", false); + const conn = await mongoose.connect(uri, { dbName }); + mongoState.isConnected = true; + mongoState.uri = uri; + mongoState.dbName = dbName ?? conn.connection.name; + + if (log) console.log(`[mongo] Connected → ${conn.connection.host}/${mongoState.dbName}`); + + return conn.connection; + } catch (err) { + console.error("[mongo] Connection error:", err); + throw err; + } +}; + +export const disconnectMongo = async (log = true) => { + if (!mongoState.isConnected) { + if (log) console.log("[mongo] No active connection to close"); + return; + } + await mongoose.disconnect(); + mongoState.isConnected = false; + if (log) console.log("[mongo] Disconnected"); +}; + +export const checkMongoConnection = () => { + const state = mongoose.connection.readyState; + const states: Record = { + 0: "disconnected", + 1: "connected", + 2: "connecting", + 3: "disconnecting", + }; + return { + isConnected: state === 1, + state: states[state] ?? 
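+    // readyState values outside the mapped 0-3 range report as "unknown"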
"unknown", + uri: mongoState.uri, + dbName: mongoState.dbName, + }; +}; + +export const createCollectionsIfNotExist = async ({ + models, + collectionNames = [], + log = true, +}: { + models?: mongoose.Model[]; + collectionNames?: string[]; + log?: boolean; +}) => { + if (!mongoState.isConnected || mongoose.connection.readyState !== 1) { + throw new Error("[mongo] Cannot create collections — not connected"); + } + + const existing = ((await mongoose.connection.db!.listCollections().toArray()) ?? []).map( + (c) => c.name, + ); + + // Handle Mongoose models + if (models) { + for (const model of models) { + const name = model.collection.name; + if (!existing.includes(name)) { + await mongoose.connection.createCollection(name); + if (log) console.log(`[mongo] Created collection: ${name}`); + existing.push(name); // Add to existing list so we don't try to create it again if it's also in collectionNames + } else if (log) { + console.log(`[mongo] Collection already exists: ${name}`); + } + + // Ensure indexes are created + await model.init().catch((err) => { + console.error(`[mongo] Index creation failed for ${name}:`, err); + }); + } + } + + // Handle raw collection names + for (const name of collectionNames) { + if (!existing.includes(name)) { + await mongoose.connection.createCollection(name); + if (log) console.log(`[mongo] Created collection: ${name}`); + existing.push(name); + } else if (log) { + console.log(`[mongo] Collection already exists: ${name}`); + } + } + + if (log) console.log("[mongo] Collection and index check complete"); +}; + +/** + * Synchronizes indexes for all registered models (or specific models if provided), + * removing stale indexes and creating new ones. + * This is useful when schema index definitions change but old indexes remain in MongoDB. + * + * Unlike `model.init()` which only ensures indexes exist, `syncIndexes()` will: + * - Drop indexes that exist in MongoDB but are not defined in the schema + * - Create indexes that are defined in the schema but don't exist in MongoDB + * + * @param models - Optional array of models to sync. If not provided, syncs all registered models. + */ +export const syncModelIndexes = async ({ models, log = true }: { models?: mongoose.Model[]; log?: boolean } = {}) => { + if (!mongoState.isConnected || mongoose.connection.readyState !== 1) { + throw new Error("[mongo] Cannot sync indexes — not connected"); + } + + // Use provided models or default to all registered models + const modelsToSync = models ?? 
Object.values(mongoose.models); + + if (log) console.log(`[mongo] Syncing indexes for ${modelsToSync.length} model(s)...`); + + for (const model of modelsToSync) { + const name = model.collection.name; + try { + const result = await model.syncIndexes(); + if (log && result.length > 0) { + console.log(`[mongo] Synced indexes for ${name}, dropped stale indexes:`, result); + } + } catch (err) { + console.error(`[mongo] Index sync failed for ${name}:`, err); + } + } + + if (log) console.log("[mongo] Index synchronization complete"); +}; + +export const clearWorldData = async (worldId: string) => { + if (!mongoState.isConnected) { + throw new Error("[mongo] Cannot clear world data — not connected"); + } + + const models = mongoose.models; + const deletions: Promise[] = []; + + console.log(`[mongo] Clearing data for world: ${worldId}`); + + for (const modelName of Object.keys(models)) { + if (modelName === "World") continue; // Skip the World definition itself + + const model = models[modelName]; + if (!model) continue; + + // Check if schema has worldRef.worldId + // We can check paths safely + const hasWorldRef = model.schema.path("worldRef.worldId"); + + if (hasWorldRef) { + console.log(`[mongo] Deleting from ${modelName}...`); + deletions.push(model.deleteMany({ "worldRef.worldId": worldId })); + } + } + + await Promise.all(deletions); + console.log(`[mongo] Cleared data for world: ${worldId}`); +}; diff --git a/packages/controlmart/src/services/od-registry.service.ts b/packages/controlmart/src/services/od-registry.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..989929637758623403dba7d7c6686521ff7685e6 --- /dev/null +++ b/packages/controlmart/src/services/od-registry.service.ts @@ -0,0 +1,329 @@ +/** + * OD Registry Service + * + * Centralized registry for mapping capability/OD IDs to their builder factory functions. + * This service manages the registration and retrieval of OD builders. 
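+ * Builders are invoked lazily via buildOD(); static ODs registered with registerOD()
+ * are returned by buildOD() as deep copies so callers can mutate the result safely.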
+ * + * Usage: + * ODRegistry.registerBuilder('order-fulfillment-standard-v1', buildOrderFulfillmentOD); + * const builder = ODRegistry.getBuilder('order-fulfillment-standard-v1'); + * const od = ODRegistry.buildOD('order-fulfillment-standard-v1', config); + */ + +import type { OperationalDescriptor } from "../types/od.type"; +import type { GenericODBuilder, GenericODBuilderConfig } from "../operational-descriptor/generic-builder.od"; +import { createAppLogger } from "../utils/logger.util"; + +/** + * Type definition for OD builder factory function + * Takes optional config and returns a GenericODBuilder instance + */ +export type ODBuilderFactory = (config?: GenericODBuilderConfig) => GenericODBuilder; + +/** + * Type definition for a built OD generator function + * Takes optional config and returns a complete OperationalDescriptor + */ +export type ODGenerator = (config?: GenericODBuilderConfig) => OperationalDescriptor; + +/** + * Registry entry containing either a builder factory or a static OD + */ +interface ODRegistryEntry { + odId: string; + type: 'builder' | 'static'; + builderFactory?: ODBuilderFactory; + staticOD?: OperationalDescriptor; + metadata: { + registeredAt: Date; + description?: string; + version?: string; + category?: string; + }; +} + +/** + * OD Registry Service + * Singleton service for managing OD builders + */ +class ODRegistryService { + private static instance: ODRegistryService; + private registry: Map; + private logger = createAppLogger({}); + + private constructor() { + this.registry = new Map(); + this.logger.info("ODRegistry initialized"); + } + + /** + * Get singleton instance + */ + static getInstance(): ODRegistryService { + if (!ODRegistryService.instance) { + ODRegistryService.instance = new ODRegistryService(); + } + return ODRegistryService.instance; + } + + /** + * Register an OD builder factory + * + * @param odId - Unique identifier for the OD + * @param builderFactory - Factory function that creates a GenericODBuilder + * @param metadata - Optional metadata about the OD + */ + registerBuilder( + odId: string, + builderFactory: ODBuilderFactory, + metadata?: { + description?: string; + version?: string; + category?: string; + } + ): void { + if (this.registry.has(odId)) { + this.logger.warn({ odId }, `OD builder already registered, overwriting: ${odId}`); + } + + this.registry.set(odId, { + odId, + type: 'builder', + builderFactory, + metadata: { + registeredAt: new Date(), + ...metadata, + }, + }); + + this.logger.info({ odId, metadata }, `Registered OD builder: ${odId}`); + } + + /** + * Register multiple OD builders at once + * + * @param builders - Array of builder registration objects + */ + registerBulk( + builders: Array<{ + odId: string; + builderFactory: ODBuilderFactory; + metadata?: { + description?: string; + version?: string; + category?: string; + }; + }> + ): void { + for (const builder of builders) { + this.registerBuilder(builder.odId, builder.builderFactory, builder.metadata); + } + this.logger.info({ count: builders.length }, `Registered ${builders.length} OD builders in bulk`); + } + + /** + * Register a pre-built (static) OperationalDescriptor + * Use this for JSON-based ODs that don't need a builder factory + * + * @param od - The OperationalDescriptor to register + * @param metadata - Optional metadata about the OD + */ + registerOD( + od: OperationalDescriptor, + metadata?: { + description?: string; + version?: string; + category?: string; + } + ): void { + const odId = od.id; + if (this.registry.has(odId)) { + 
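+      // Re-registration is allowed: warn, then overwrite the existing entry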
this.logger.warn({ odId }, `OD already registered, overwriting: ${odId}`); + } + + this.registry.set(odId, { + odId, + type: 'static', + staticOD: od, + metadata: { + registeredAt: new Date(), + description: metadata?.description || od.description, + version: metadata?.version || od.version, + category: metadata?.category || od.namespace, + }, + }); + + this.logger.info({ odId, version: od.version }, `Registered static OD: ${odId}`); + } + + /** + * Register multiple pre-built (static) OperationalDescriptors at once + * Use this for batch registration of JSON-based ODs + * + * @param ods - Array of OperationalDescriptors to register + */ + registerODs(ods: OperationalDescriptor[]): void { + for (const od of ods) { + this.registerOD(od); + } + this.logger.info({ count: ods.length }, `Registered ${ods.length} static ODs in bulk`); + } + + /** + * Get an OD builder factory by ID + * + * @param odId - The OD identifier + * @returns The builder factory function or null if not found + */ + getBuilder(odId: string): ODBuilderFactory | null { + const entry = this.registry.get(odId); + if (!entry) { + this.logger.warn({ odId }, `OD builder not found: ${odId}`); + return null; + } + return entry.builderFactory; + } + + /** + * Build an OperationalDescriptor using a registered builder or return static OD + * + * @param odId - The OD identifier + * @param config - Optional configuration for the builder (ignored for static ODs) + * @returns The built OperationalDescriptor or null if not found + */ + buildOD(odId: string, config?: GenericODBuilderConfig): OperationalDescriptor | null { + const entry = this.registry.get(odId); + if (!entry) { + this.logger.error({ odId }, `Cannot build OD - not found: ${odId}`); + return null; + } + + try { + // For static ODs, return a deep copy to prevent mutation + if (entry.type === 'static' && entry.staticOD) { + const od = JSON.parse(JSON.stringify(entry.staticOD)) as OperationalDescriptor; + this.logger.info({ odId, stepCount: od.steps.length }, `Retrieved static OD: ${odId}`); + return od; + } + + // For builder ODs, use the builder factory + if (entry.type === 'builder' && entry.builderFactory) { + const builder = entry.builderFactory(config); + const od = builder.build(); + this.logger.info({ odId, stepCount: od.steps.length }, `Built OD: ${odId}`); + return od; + } + + this.logger.error({ odId, type: entry.type }, `Invalid registry entry for: ${odId}`); + return null; + } catch (error) { + this.logger.error( + { + odId, + error: error instanceof Error ? 
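+            // Non-Error throwables are stringified so the structured log stays consistent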
error.message : String(error), + }, + `Failed to build OD: ${odId}` + ); + return null; + } + } + + /** + * Get a static OperationalDescriptor directly (without copying) + * Use buildOD() if you need a mutable copy + * + * @param odId - The OD identifier + * @returns The OperationalDescriptor or null if not found/not static + */ + getOD(odId: string): OperationalDescriptor | null { + const entry = this.registry.get(odId); + if (!entry) { + return null; + } + if (entry.type === 'static' && entry.staticOD) { + return entry.staticOD; + } + // For builders, we need to build it + return this.buildOD(odId); + } + + /** + * Check if an OD builder is registered + * + * @param odId - The OD identifier + * @returns True if the builder is registered + */ + hasBuilder(odId: string): boolean { + return this.registry.has(odId); + } + + /** + * Get all registered OD IDs + * + * @returns Array of all registered OD IDs + */ + getAllODIds(): string[] { + return Array.from(this.registry.keys()); + } + + /** + * Get registry statistics + * + * @returns Statistics about the registry + */ + getStats(): { + totalRegistered: number; + byCategory: Record; + registeredIds: string[]; + } { + const byCategory: Record = {}; + + for (const entry of this.registry.values()) { + const category = entry.metadata.category || "uncategorized"; + byCategory[category] = (byCategory[category] || 0) + 1; + } + + return { + totalRegistered: this.registry.size, + byCategory, + registeredIds: this.getAllODIds(), + }; + } + + /** + * Get metadata for a registered OD + * + * @param odId - The OD identifier + * @returns Metadata or null if not found + */ + getMetadata(odId: string): ODRegistryEntry["metadata"] | null { + const entry = this.registry.get(odId); + return entry ? entry.metadata : null; + } + + /** + * Unregister an OD builder (useful for testing) + * + * @param odId - The OD identifier + * @returns True if successfully unregistered + */ + unregister(odId: string): boolean { + const deleted = this.registry.delete(odId); + if (deleted) { + this.logger.info({ odId }, `Unregistered OD builder: ${odId}`); + } + return deleted; + } + + /** + * Clear all registered builders (useful for testing) + */ + clear(): void { + const count = this.registry.size; + this.registry.clear(); + this.logger.warn({ count }, `Cleared all ${count} OD builders from registry`); + } +} + +// Export singleton instance +export const ODRegistry = ODRegistryService.getInstance(); diff --git a/packages/controlmart/src/services/persona-registry.service.ts b/packages/controlmart/src/services/persona-registry.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..c256314adda62fa66ae36c687a2201105bf6476b --- /dev/null +++ b/packages/controlmart/src/services/persona-registry.service.ts @@ -0,0 +1,174 @@ +/** + * Persona Registry Service + * + * Singleton service for loading, querying, and filtering personas. + * Loads personas from MongoDB and caches them in memory. + * Provides bidirectional lookup between personas and capabilities. 
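+ *
+ * Illustrative startup usage (method names match those defined below):
+ *   await personaRegistry.initialize();
+ *   const ops = personaRegistry.filter({ department: "Operations" });
+ *   const capIds = personaRegistry.getCapabilitiesForPersona(ops[0]?.id ?? "");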
+ */ + +import { PersonaRepository } from '../repository/persona.repository'; +import type { Persona, PersonaFilter } from '../types/persona.type'; + +/** + * PersonaRegistry - Singleton service for persona management + */ +class PersonaRegistry { + private personas: Map; + private capabilityPersonaMap: Map; + private initialized = false; + + constructor() { + this.personas = new Map(); + this.capabilityPersonaMap = new Map(); + } + + /** + * Initialize the registry by loading personas from MongoDB + * and building reverse mapping (capability → personas) + * Must be called during app startup before using the registry + */ + async initialize(): Promise { + // Note: getAll() now returns {data, pagination}, but without pagination param it returns all + const result = await PersonaRepository.getAll(); + const dbPersonas = result.data; + + // Clear existing data + this.personas.clear(); + this.capabilityPersonaMap.clear(); + + // Load personas and build reverse mapping + dbPersonas.forEach((persona) => { + this.personas.set(persona.id, persona); + + // Build reverse mapping: capability → personas + persona.capabilityIds.forEach((capId) => { + if (!this.capabilityPersonaMap.has(capId)) { + this.capabilityPersonaMap.set(capId, []); + } + this.capabilityPersonaMap.get(capId)!.push(persona.id); + }); + }); + + this.initialized = true; + console.log(`[PersonaRegistry] Loaded ${this.personas.size} personas from MongoDB`); + } + + /** + * Reload registry from database + * Should be called after CRUD operations that modify personas + */ + async reload(): Promise { + this.personas.clear(); + this.capabilityPersonaMap.clear(); + await this.initialize(); + console.log(`[PersonaRegistry] Reloaded ${this.personas.size} personas`); + } + + /** + * Get all personas + */ + getAll(): Persona[] { + if (!this.initialized) { + throw new Error("PersonaRegistry not initialized. Call initialize() first."); + } + return Array.from(this.personas.values()); + } + + /** + * Get persona by ID + * @returns Persona or null if not found + */ + getById(id: string): Persona | null { + if (!this.initialized) { + throw new Error("PersonaRegistry not initialized. Call initialize() first."); + } + return this.personas.get(id) || null; + } + + /** + * Filter personas by criteria + * @param filter - Filter criteria (role, department, accessLevel, tags) + * @returns Array of personas matching all provided criteria + */ + filter(filter: PersonaFilter): Persona[] { + if (!this.initialized) { + throw new Error("PersonaRegistry not initialized. Call initialize() first."); + } + let results = this.getAll(); + + if (filter.role) { + results = results.filter((p) => p.role === filter.role); + } + + if (filter.department) { + results = results.filter((p) => p.department === filter.department); + } + + if (filter.accessLevel) { + results = results.filter( + (p) => p.metadata?.accessLevel === filter.accessLevel + ); + } + + if (filter.tags && filter.tags.length > 0) { + results = results.filter((p) => + filter.tags!.some((tag) => p.metadata?.tags?.includes(tag)) + ); + } + + return results; + } + + /** + * Get capability IDs for a persona + * @param personaId - Persona ID + * @returns Array of capability IDs (empty if persona not found) + */ + getCapabilitiesForPersona(personaId: string): string[] { + if (!this.initialized) { + throw new Error("PersonaRegistry not initialized. Call initialize() first."); + } + const persona = this.getById(personaId); + return persona ? 
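+    // Unknown persona ids resolve to an empty capability list rather than throwing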
persona.capabilityIds : []; + } + + /** + * Get personas that can perform a capability + * @param capabilityId - Capability ID + * @returns Array of personas (empty if capability not found or no personas) + */ + getPersonasForCapability(capabilityId: string): Persona[] { + if (!this.initialized) { + throw new Error("PersonaRegistry not initialized. Call initialize() first."); + } + const personaIds = this.capabilityPersonaMap.get(capabilityId) || []; + return personaIds + .map((id) => this.getById(id)) + .filter((p) => p !== null) as Persona[]; + } + + /** + * Get count of personas in registry + */ + getCount(): number { + if (!this.initialized) { + throw new Error("PersonaRegistry not initialized. Call initialize() first."); + } + return this.personas.size; + } + + /** + * Check if persona exists + */ + exists(personaId: string): boolean { + if (!this.initialized) { + throw new Error("PersonaRegistry not initialized. Call initialize() first."); + } + return this.personas.has(personaId); + } +} + +/** + * Singleton export + */ +export const personaRegistry = new PersonaRegistry(); diff --git a/packages/controlmart/src/services/scheduler.service.ts b/packages/controlmart/src/services/scheduler.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..bdc312179d0623b70d7403befbb3af7bfea8d0c8 --- /dev/null +++ b/packages/controlmart/src/services/scheduler.service.ts @@ -0,0 +1,331 @@ +import { Agenda, Job } from "@hokify/agenda"; +import { loadEnv } from "../utils/env.util"; +import type { Logger } from "pino"; +import { createAppLogger } from "../utils/logger.util"; +import { getMongoObjectById, isValidObjectId } from "../utils/mongo.util"; + +let SCHEDULER_ENABLED = false; + +const logger = createAppLogger({ service: "scheduler" }); + +const scheduler = new Agenda({ + db: { + address: `${loadEnv().MONGO_URI}/${loadEnv().DB_NAME}`, + collection: "schedules", + }, + processEvery: "1 seconds", + maxConcurrency: 10, + defaultConcurrency: 5, + lockLimit: 0, + defaultLockLimit: 0, + defaultLockLifetime: 1 * 60 * 1000, +}); + +const defineJob = ( + jobName: string, + handler: (job: Job, logger: Logger) => Promise, +): void => { + scheduler.define(jobName, async (job: Job) => { + const jobLogger = logger.child({ + jobId: job.attrs._id, + jobName: job.attrs.name, + }); + + await handler(job, jobLogger); + }); +}; + +const startScheduler = async (): Promise => { + if (SCHEDULER_ENABLED) { + logger.warn("Scheduler is already started"); + return; + } + + try { + await scheduler.start(); + SCHEDULER_ENABLED = true; + logger.info("Scheduler started successfully"); + } catch (error) { + logger.error( + { + error: error instanceof Error ? error.message : String(error), + }, + "Failed to start scheduler", + ); + throw error; + } +}; + +const stopScheduler = async (): Promise => { + if (!SCHEDULER_ENABLED) { + logger.warn("Scheduler is not running"); + return; + } + + try { + await scheduler.stop(); + SCHEDULER_ENABLED = false; + logger.info("Scheduler stopped successfully"); + } catch (error) { + logger.error( + { + error: error instanceof Error ? 
error.message : String(error), + }, + "Failed to stop scheduler", + ); + throw error; + } +}; + +const scheduleJob = async ( + time: string | Date, + jobName: string, + jobData: T, +): Promise> => { + const job = await scheduler.schedule(time, jobName, jobData); + + logger.info( + { + jobName, + scheduledFor: job.attrs.nextRunAt, + jobId: job.attrs._id, + }, + `Scheduled job for one-time execution`, + ); + + return job; +}; + +const scheduleRecurringJob = async ( + interval: string, + jobName: string, + jobData: T, + options?: { skipImmediate?: boolean }, +): Promise> => { + const job = await scheduler.every(interval, jobName, jobData, options); + + logger.info( + { + jobName, + interval, + nextRunAt: job.attrs.nextRunAt, + jobId: job.attrs._id, + }, + `Scheduled job for recurring execution`, + ); + + return job; +}; + +const createRecurringJob = async ( + interval: string, + jobName: string, + jobData: T, + uniqueQuery?: Record, + options?: { nextRunAt?: Date | string }, +): Promise> => { + const job = scheduler.create(jobName, jobData); + job.repeatEvery(interval); + + if (uniqueQuery) { + job.unique(uniqueQuery); + } + + if (options?.nextRunAt) { + job.schedule(options.nextRunAt); + } + + await job.save(); + + logger.info( + { + jobName, + interval, + nextRunAt: job.attrs.nextRunAt, + jobId: job.attrs._id, + uniqueQuery, + options, + }, + `Created recurring job`, + ); + + return job; +}; + +const cancelJob = async (jobId: string): Promise => { + const query = isValidObjectId(jobId) + ? { _id: getMongoObjectById(jobId) } + : { _id: jobId }; + const numRemoved = await scheduler.cancel(query as any); + + logger.info( + { + jobId, + numRemoved, + }, + `Cancelled scheduled job`, + ); + + return numRemoved; +}; + +const cancelJobsByQuery = async (query: Record): Promise => { + const numRemoved = await scheduler.cancel(query as any); + + logger.info( + { + query, + numRemoved, + }, + `Cancelled scheduled jobs by query`, + ); + + return numRemoved; +}; + +const listScheduledJobs = async (query: Record = {}): Promise => { + const jobs = await scheduler.jobs(query); + return jobs; +}; + +const getJobById = async (jobId: string): Promise => { + const isValid = isValidObjectId(jobId); + const query = isValid + ? { _id: getMongoObjectById(jobId) } + : { _id: jobId }; + const jobs = await scheduler.jobs(query as any); + return jobs[0] ?? 
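+  // Return null explicitly when no job matches the given id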
null; +}; + +const rescheduleJob = async (jobId: string, newTime: string | Date): Promise => { + const job = await getJobById(jobId); + if (!job) { + logger.warn({ jobId }, "Job not found for rescheduling"); + return null; + } + + job.schedule(newTime); + await job.save(); + + logger.info( + { + jobId, + newScheduledTime: job.attrs.nextRunAt, + }, + "Rescheduled job", + ); + + return job; +}; + +const pauseJob = async (jobId: string): Promise => { + const job = await getJobById(jobId); + if (!job) { + logger.warn({ jobId }, "Job not found for pausing"); + return null; + } + + job.disable(); + await job.save(); + + logger.info( + { + jobId, + nextRunAt: job.attrs.nextRunAt, + }, + "Paused job", + ); + + return job; +}; + +const resumeJob = async (jobId: string): Promise => { + const job = await getJobById(jobId); + if (!job) { + logger.warn({ jobId }, "Job not found for resuming"); + return null; + } + + job.enable(); + await job.save(); + + logger.info( + { + jobId, + nextRunAt: job.attrs.nextRunAt, + }, + "Resumed job", + ); + + return job; +}; + +const pauseJobsByQuery = async (query: Record): Promise => { + const jobs = await scheduler.jobs(query); + if (jobs.length === 0) return 0; + + const results = await Promise.allSettled( + jobs.map(async (job) => { + job.disable(); + return job.save(); + }), + ); + + const successful = results.filter((r) => r.status === "fulfilled").length; + + logger.info( + { + query, + found: jobs.length, + paused: successful, + }, + "Paused jobs by query", + ); + + return successful; +}; + +const resumeJobsByQuery = async (query: Record): Promise => { + const jobs = await scheduler.jobs(query); + if (jobs.length === 0) return 0; + + const results = await Promise.allSettled( + jobs.map(async (job) => { + job.enable(); + return job.save(); + }), + ); + + const successful = results.filter((r) => r.status === "fulfilled").length; + + logger.info( + { + query, + found: jobs.length, + resumed: successful, + }, + "Resumed jobs by query", + ); + + return successful; +}; + +export { + startScheduler, + stopScheduler, + scheduleJob, + scheduleRecurringJob, + createRecurringJob, + cancelJob, + cancelJobsByQuery, + listScheduledJobs, + getJobById, + rescheduleJob, + pauseJob, + resumeJob, + pauseJobsByQuery, + resumeJobsByQuery, + defineJob, + logger as schedulerLogger, + scheduler, +}; diff --git a/packages/controlmart/src/services/service-now.tickets.service.ts b/packages/controlmart/src/services/service-now.tickets.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..bf9d05ea7e3384a8a7fbc1afee5172f9d93225db --- /dev/null +++ b/packages/controlmart/src/services/service-now.tickets.service.ts @@ -0,0 +1,56 @@ +import type { TWorldItsmTicketInput } from "../models/tickets.model"; +import { createAppLogger } from "../utils/logger.util"; +import { httpRequest } from "../utils/http.util"; +import type { TServiceNowTicketInput } from "../types/servicenow.type"; + +const logger = createAppLogger({ + service: "service-now-ticket-service", +}); + +const createIrisAdapter = (ticket: TServiceNowTicketInput) => { + return { + clientId: "demo", + service: "servicenow", + sessionId: "demo", + tool_name: "create_incident", + params: { + data: ticket, + }, + }; +} + +const worldTicketServiceNowAdaptor = (ticket: TWorldItsmTicketInput): TServiceNowTicketInput => { + return { + short_description: `[SIMULATOR] World: ${ticket.worldRef?.worldId} - ${ticket.title}`, + description: ticket.description, + impact: ticket.impact, + urgency: ticket.urgency, + priority: 
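+    // Priority is passed through to the ServiceNow payload unchanged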
ticket.priority, + category: ticket.category, + caller_id: ticket.requester.toHexString(), + assignment_group: ticket.status, + state: ticket.status, + }; +}; + +export const createTicketOnServiceNow = async (ticket: TWorldItsmTicketInput) => { + try { + const response = await httpRequest("https://iris.production.corp.skyfall.ai/mcp/trigger", { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: createIrisAdapter(worldTicketServiceNowAdaptor(ticket)), + }); + return response; + } catch (error) { + logger.error({ error }, "Failed to create ticket"); + throw error; + } +}; + + + + + + diff --git a/packages/controlmart/src/services/service-tools-factory.service.ts b/packages/controlmart/src/services/service-tools-factory.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..d725dc529f99070f40c5f8c3d305664846ba9d92 --- /dev/null +++ b/packages/controlmart/src/services/service-tools-factory.service.ts @@ -0,0 +1,60 @@ +/** + * Service Tools Factory + * + * Creates service tools for different service types (WMS, TMS, ERP, EDI) + * Used during OD execution to provide service-specific tool functions + */ + +import type { Logger } from "pino"; +import type { IOperationalDescriptorContext } from "../operational-descriptor/init.od"; +// import { createWmsServiceTools } from "../utils/wms/service-tools.wms.util"; // File deleted +import { createTmsServiceTools } from "../utils/tms/service-tools.tms.util"; +import { createServiceTools as createEdiErpServiceTools } from "../utils/edi/service-tools.edi.util"; + +export interface ServiceToolsConfig { + context: IOperationalDescriptorContext; + worldId: string; + logger: Logger; + flowId?: string; +} + +/** + * Create all service tools for OD execution + * + * Returns a nested object with tools organized by service type: + * { + * wms: { tool1: fn, tool2: fn, ... }, + * tms: { tool1: fn, tool2: fn, ... }, + * erp: { tool1: fn, tool2: fn, ... }, + * edi: { tool1: fn, tool2: fn, ... 
} + * } + */ +export function createAllServiceTools( + config: ServiceToolsConfig +): Record Promise>> { + const { context, worldId, logger, flowId = "default-flow" } = config; + + // WMS tools - temporarily disabled (service-tools.wms.util.ts deleted) + const wmsTools = { wms: {} }; + + // Create TMS tools + const tmsTools = createTmsServiceTools({ + context, + worldId, + logger, + }); + + // Create EDI and ERP tools (they come together from the same factory) + const ediErpTools = createEdiErpServiceTools({ + context, + flowId, + logger, + }); + + // Combine all tools + return { + ...wmsTools, + ...tmsTools, + ...ediErpTools, + }; +} diff --git a/packages/controlmart/src/services/ticketing-ai.service.ts b/packages/controlmart/src/services/ticketing-ai.service.ts new file mode 100644 index 0000000000000000000000000000000000000000..6e9506109b7b810c67f7ff06acb4fdad1ed625f1 --- /dev/null +++ b/packages/controlmart/src/services/ticketing-ai.service.ts @@ -0,0 +1,211 @@ +import { z } from "zod"; +import { runAgent } from "./agent.service"; +import type { TLogQueueModel } from "../models/log-queue.model"; +import type { TWorldItsmTicketInput } from "../models/tickets.model"; +import { worlds } from "../worlds"; +import { EServices } from "../utils/service-mesh.util"; + +const TicketOutputSchema = z.object({ + title: z.string().describe("Concise summary of the incident"), + description: z + .string() + .describe( + "Detailed description including root cause (chaos), impact analysis, and affected components" + ), + priority: z.enum(["low", "medium", "high", "critical"]).describe("Priority based on business impact"), + impact: z.enum(["low", "medium", "high"]).describe("Scope of the issue"), + urgency: z.enum(["low", "medium", "high"]).describe("Time sensitivity"), + category: z.enum([...EServices, "infrastructure", "other"] as any).describe("System component (ERP, WMS, TMS, etc.)"), + type: z.enum(["incident", "problem"]).default("incident"), +}); + +const SYSTEM_PROMPT = ` +You are an expert ITSM (IT Service Management) Ticket Generator for "ControlMart", a complex supply chain simulation. + +Your goal is to generate realistic Incident Tickets based on execution logs and chaos events. +CRITICAL INSTRUCTION: You must adopt the PERSONA of the reporter. + +1. DATA SOURCE: + - You have access to "Chaos Events" (the root cause). + - You have access to "Execution Logs" (the symptoms). + - You have access to "Context" (the state of the transaction). + +2. ENTITY EXTRACTION (MANDATORY): + - You MUST extract IDs from the "Context" section (e.g., Shipment IDs, Order IDs, Product SKUs, location codes). + - Look specifically for fields like "id", "_id", "shipmentId", "orderId", "sku", "carrierId" etc whatever is relevant to the failure. + - If a failure involved a specific entity, you MUST include its ID in the description. + - Example Context: { "shipment": { "id": "SH-123", "status": "assigned" } } -> Ticket Description: "Shipment SH-123 failed..." + +3. FILTERING INSTRUCTIONS: + - REMOVE simulator jargon: Do not mention "OD", "Operational Descriptor", "MCP", "Tool", "Step ID", "Run Ids" or "Chaos". + - FOCUS on business impact: Describe the issue in terms of business process failure. + - FILTER metadata: Do not include internal system metadata (e.g., MongoDB ObjectIDs, timestamp strings) unless they are critical business identifiers. + +4. 
PERSONA BEHAVIOR: + - If the Persona is a HUMAN ROLE (e.g., "Store Operator", "Warehouse Manager", "Procurement Officer", "Truck Driver"): + - You DO NOT know technical details (stack traces, JSON objects, error codes). + - You ONLY know that your workflow failed (e.g., "I couldn't create the order", "The truck didn't arrive"). + - Your description must be purely SYMPTOM-BASED. + - You should sound like a user who is frustrated or concerned about the business impact. + + - If the Persona is "System" or "Automated Job": + - You ARE a monitoring tool (e.g., "Dynatrace", "Datadog"). + - You REPORT the technical error but phrased as a system outage or service failure. + - You still avoid simulator jargon and focus on the Service (WMS, ERP, TMS). + +5. OUTPUT FORMAT: + You must return a valid JSON object with the following structure. + The "description" field MUST follow this narrative structure (implicitly or explicitly): + - WHO: Who is reporting / Who is affected? + - WHAT: What business process is broken? + - If a process returned NO results (e.g. "No orders found"), SPECIFY THE SEARCH CRITERIA from the inputs (e.g. "Checked for Orders with status SHIPPED"). + - WHERE: Which specific entity ID or location is affected? (REQUIRED) + - WHEN: (Implied "Just now") + - WHY: What is the visible symptom? + + { + "title": "Short summary (e.g. 'Unable to process Shipment ')", + "description": "Natural language narrative covering the 5Ws above. MUST include Entity IDs found in Context.", + "priority": "low" | "medium" | "high" | "critical", + "impact": "low" | "medium" | "high", + "urgency": "low" | "medium" | "high", + "category": "${EServices.join('" | "')}" | "infrastructure" | "other", + "type": "incident" + } +`; + +export const analyzeLogQueueAndGenerateTicket = async ( + logQueue: TLogQueueModel, + worldLayout: string = "perishables-food-manufacturer" +): Promise | null> => { + const worldConfig = worlds[worldLayout]; + const worldContext = worldConfig + ? `World Context: ${worldConfig.id} \nDescription: ${worldConfig.description} ` + : "Unknown World Context"; + + const chaosEntries = logQueue.entries || []; + const logSnippet = logQueue.logs.slice(-50).join("\n"); + + const persona = logQueue.persona || "System"; + const isSystem = persona === "System"; + + const relevantStepIds = new Set( + chaosEntries + .filter(e => e.chaosType || e.modifications) + .map(e => e.stepId) + ); + const lastEntry = chaosEntries[chaosEntries.length - 1]; + if (lastEntry?.stepId) { + relevantStepIds.add(lastEntry.stepId); + } + + const contextSnapshots = (logQueue.entries || []) + .filter(e => (e.context && Object.keys(e.context).length > 0) || e.data) + .filter(e => relevantStepIds.has(e.stepId)) + .map(e => { + const snapshot: any = { stepId: e.stepId, timestamp: e.timestamp }; + + if (e.context) { + const ctxStr = JSON.stringify(e.context); + snapshot.context = ctxStr.length > 5000 ? "Context too large (>5kb), truncated." : e.context; + } + + if (e.data) { + const dataStr = JSON.stringify(e.data); + snapshot.data = dataStr.length > 5000 ? "Data too large (>5kb), truncated." 
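+          // Mirrors the 5 KB guard applied to snapshot.context above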
: e.data; + snapshot.dataType = e.inputType || e.outputType; + } + + return snapshot; + }) + .slice(-5); // Keep last 5 snapshots to avoid token overflow + + // Extract precise OD Context from World Docs + let odDocContext = ""; + if (worldConfig && worldConfig.docs) { + const docs: any = worldConfig.docs; + const allOds = [...(docs.operationalDescriptors?.standardActors || []), ...(docs.operationalDescriptors?.backgroundActors || [])]; + const matchingOd = allOds.find((od: any) => od.id === logQueue.odId); + + if (matchingOd) { + odDocContext = ` +OD DESIGN CONTEXT (From World Definition): +Name: ${matchingOd.name} +Description: ${matchingOd.description} +Key Steps: ${(matchingOd.keySteps || []).join(", ")} +Inputs: ${(matchingOd.inputs || []).join(", ")} +Outputs: ${(matchingOd.outputs || []).join(", ")} + `; + } + } + + // Prepare metadata for verification + const metadata: Record = { + odId: logQueue.odId, + odName: logQueue.odName, + runId: logQueue.runId, + failedStepId: lastEntry?.stepId, + failureType: lastEntry?.chaosType || "UNKNOWN", + contextSnapshots, + }; + + const prompt = ` +${worldContext} +${odDocContext} + +REPORTING PERSONA: ${persona} +${isSystem ? "(This is an AUTOMATED MONITORING ALERT)" : "(This is a HUMAN USER REPORT. Act dumb about technical details.)"} + +OPERATIONAL DESCRIPTOR: +ID: ${logQueue.odId} +Name: ${logQueue.odName} + +CHAOS EVENTS & ERRORS (ROOT CAUSE - INTERNAL USE ONLY): +${JSON.stringify(chaosEntries.filter(e => e.chaosType || e.modifications).slice(-10), null, 2)} + +CONTEXT (STATE SNAPSHOTS - FOR ENTITY EXTRACTION): +${JSON.stringify(contextSnapshots, null, 2)} + +EXECUTION LOGS (SYMPTOMS): +${logSnippet} + +TASK: +Create an Incident Ticket based on the PERSONA rules defined in the System Prompt. +${!isSystem ? "REMEMBER: The user does NOT know this is chaos. They only see the failure." 
: ""} +`; + + + const result = await runAgent({ + prompt: SYSTEM_PROMPT + "\n\n" + prompt, + agentInput: {}, + schema: { + input: z.object({}), + output: TicketOutputSchema, + }, + modelConfigs: { + model: "gpt-4o", + temperature: 0.2, + maxTokens: 10000, + topP: 1, + frequencyPenalty: 0, + presencePenalty: 0, + stop: [], + responseFormat: "json_object", + stream: false, + log: true, + }, + }); + + if (result.success && result.data) { + return { + ...result.data, + metadata: { + ...metadata, + // AI might extract specific entity IDs, we can merge them here if we wanted to ask AI for them explicitly + } + } as Partial; + } + + console.error("Failed to generate ticket from logs:", result.error); + return null; +}; diff --git a/packages/controlmart/src/types/ai.type.ts b/packages/controlmart/src/types/ai.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..a21e60aeb34cf573eabe94e5a0f6f9c234ede80a --- /dev/null +++ b/packages/controlmart/src/types/ai.type.ts @@ -0,0 +1,29 @@ +import { z } from "zod"; + +export type TMessage = { + role: "system" | "user" | "assistant"; + content: string; +}; + +export type TAgentConfig = { + prompt: string; + agentInput: object; + schema: { + input: z.ZodType; + output: z.ZodType; + }; + modelConfigs: TModelConfigs; +}; + +export type TModelConfigs = { + model: string; + maxTokens: number; + temperature: number; + topP: number; + frequencyPenalty: number; + presencePenalty: number; + stop: string[]; + stream: boolean; + responseFormat: "text" | "json_object"; + log: boolean; +}; diff --git a/packages/controlmart/src/types/capability.type.ts b/packages/controlmart/src/types/capability.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..5c9a60771b0ad1b3b2523d70ccf0a09d507e6cab --- /dev/null +++ b/packages/controlmart/src/types/capability.type.ts @@ -0,0 +1,123 @@ +/** + * Capability Type Definitions + * + * Capabilities represent semantic business functions or processes that personas can perform. + * Each capability is implemented by one or more Operational Descriptors (ODs). 
+ */ + +import type { ChaosPolicy, ChaosTelemetry } from './od.type'; + +export interface Capability { + /** Unique identifier for the capability */ + id: string; + + /** Human-readable name */ + name: string; + + /** Detailed description of what this capability does */ + description: string; + + /** Multi-dimensional tags for organization and filtering */ + tags: CapabilityTags; + + /** Persona IDs that can perform this capability */ + personas: string[]; + + /** The OD that implements this capability */ + odId: string; + + /** Semantic version */ + version: string; + + /** Optional metadata */ + metadata?: CapabilityMetadata; + + /** Optional capability-level chaos configuration */ + chaos?: ChaosPolicy; +} + +export interface CapabilityTags { + /** Business domains this capability belongs to (e.g., "fulfillment", "inventory") */ + domain: string[]; + + /** Complexity level */ + complexity: 'simple' | 'medium' | 'complex'; + + /** Services this capability uses (e.g., "erp", "wms", "tms") */ + services: string[]; + + /** Personas that can perform this capability (e.g., "store-manager", "warehouse-worker") */ + personas?: string[]; + + /** Workflow patterns (e.g., "sequential", "parallel", "compensating") */ + patterns?: string[]; +} + +export interface CapabilityMetadata { + /** Author or team that created this capability */ + author?: string; + + /** When the capability was created */ + createdAt?: Date; + + /** Estimated execution duration in milliseconds */ + estimatedDuration?: number; + + /** Additional custom metadata */ + [key: string]: any; +} + +/** + * Filter criteria for querying capabilities + */ +export interface CapabilityFilter { + /** Filter by domain(s) */ + domain?: string[]; + + /** Filter by complexity level */ + complexity?: 'simple' | 'medium' | 'complex'; + + /** Filter by service(s) */ + services?: string[]; + + /** Filter by persona(s) */ + personas?: string[]; + + /** Filter by pattern(s) */ + patterns?: string[]; +} + +/** + * Result of executing a capability + */ +export interface CapabilityExecutionResult { + /** The capability that was executed */ + capabilityId: string; + + /** The underlying OD that was executed */ + odId: string; + + /** The world context */ + worldId: string; + + /** Execution result from the OD */ + result: any; + + /** When the execution completed */ + executedAt: Date; + + /** Execution duration in milliseconds */ + durationMs?: number; + + /** Execution status */ + status: 'success' | 'failed' | 'partial'; + + /** Error message if failed */ + error?: string; + + /** Whether the capability is in the world's capability scope (permissive mode flag) */ + capabilityInWorld?: boolean; + + /** Enhanced chaos telemetry (MORPH-413) */ + chaosMetadata?: ChaosTelemetry; +} diff --git a/packages/controlmart/src/types/company.type.ts b/packages/controlmart/src/types/company.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..a82adeec262261dfbe829f41224e80fb8ef08dec --- /dev/null +++ b/packages/controlmart/src/types/company.type.ts @@ -0,0 +1,13 @@ +export type TCompany = { + company_name: string; + abbr: string; + default_currency: string; + country: string; + create_chart_of_accounts_based_on: + | "Standard Template" + | "Industry Template" + | "No Chart of Accounts"; + chart_of_accounts: "Standard" | "US GAAP" | "UK GAAP" | "French GAAP" | "German GAAP"; + allow_inter_company_transaction: 0 | 1; + date_of_commencement: string; +}; diff --git a/packages/controlmart/src/types/customer.type.ts 
b/packages/controlmart/src/types/customer.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..21380562639ec019c60f8c9a8dc5d2ac4f114ca8 --- /dev/null +++ b/packages/controlmart/src/types/customer.type.ts @@ -0,0 +1,92 @@ +export interface IDemographics { + age: number; + gender: string; + income_range: string | null; + education: string; + marital_status: string; + location: string; +} + +export interface IPreferences { + favorite_categories: string[]; + brand_loyalty: number; + price_sensitivity: number; + preferred_channel: string; +} + +export interface IBehavior { + purchase_frequency: string; + avg_basket_size: number; + churn_risk: number; + loyalty_score: number; +} + +export interface ITraits { + personality: string; + values: string[]; + interests: string[]; +} + +export type TCustomer = { + customer_id: string; + name: string; + contact: { + email: string; + phone: string; + website?: string; + taxId?: string; + mobile?: string; + }; + demographics: IDemographics; + preferences: IPreferences; + behavior: IBehavior; + traits: ITraits; + tier?: "Bronze" | "Silver" | "Gold" | "Platinum"; +}; + +export interface IOdooManyToOne { + id: number; + name: string; +} + +export type OdooManyToMany = number[]; + +export interface IOdooPartner { + id: number; + ref?: string; + name: string; + company_type: "person" | "company"; + is_company: boolean; + + email?: string; + phone?: string; + mobile?: string; + website?: string; + + street?: string; + street2?: string; + city?: string; + zip?: string; + state_id?: IOdooManyToOne; + country_id?: IOdooManyToOne; + + vat?: string; + company_registry?: string; + + title?: IOdooManyToOne; + function?: string; + additional_info?: string; + + category_id?: OdooManyToMany; + + customer_rank?: number; + supplier_rank?: number; + company_id?: IOdooManyToOne; + + comment?: string; + + active?: boolean; + trust?: "normal" | "trusted" | "blocked"; + create_date?: string; + write_date?: string; +} diff --git a/packages/controlmart/src/types/exployee.type.ts b/packages/controlmart/src/types/exployee.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..a4450f78143d49429372a7d7bc3a3ff82e417911 --- /dev/null +++ b/packages/controlmart/src/types/exployee.type.ts @@ -0,0 +1,65 @@ +export interface Name { + first: string; + last: string; +} + +export interface Contact { + email: string; + phone: string; + location: string; +} + +export interface Salary { + base: number; + bonus: number; + currency: string; +} + +export interface Traits { + personality: string; + work_style: string; + attrition_risk: number; +} + +export type TEmployee = { + employee_id: string; + name: Name; + contact: Contact; + dob: string; + hire_date: string; + department: string; + team: string; + job_title: string; + level: string; + employment_type: string; + salary: Salary; + manager_id: string | null; + skills: string[]; + traits: Traits; + events: any[]; +}; + +export type TDepartment = + | "IT" + | "Sales" + | "Marketing" + | "HR" + | "Finance" + | "Support" + | "Operations"; + +export type TITTeam = "Infrastructure" | "Development" | "QA" | "Security" | "Data"; +export type TSalesTeam = "Domestic" | "International" | "Online" | "Field"; +export type TMarketingTeam = "Content" | "SEO" | "Social Media" | "Events"; +export type THRTeam = "Recruitment" | "Employee Relations" | "Compensation & Benefits"; +export type TFinanceTeam = "Accounting" | "Payroll" | "Financial Planning"; +export type TSupportTeam = "Technical Support" | "Customer 
Service" | "Field Support"; +export type TOperationsTeam = "Logistics" | "Procurement" | "Facilities" | "Stores"; + +export type TJobLevel = { + title: string; + level: string; + salaryRange: { min: number; max: number }; + bonusRange: { min: number; max: number }; + isManager: boolean; +}; diff --git a/packages/controlmart/src/types/generic.type.ts b/packages/controlmart/src/types/generic.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..0999c79e8fe97c23e661770ec0aedcee729b0b91 --- /dev/null +++ b/packages/controlmart/src/types/generic.type.ts @@ -0,0 +1,19 @@ +export type TOdooCountry = { + id: number; + name: string; + code: string; + phone_code: string; +}; + +export type TOdooState = { + id: number; + name: string; + code: string; + country_id: TOdooCountry; +}; + +export type TOdooTag = { + id: number; + name: string; + color: string; +}; diff --git a/packages/controlmart/src/types/http.utils.type.ts b/packages/controlmart/src/types/http.utils.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..6c71e51eb04f52ebbe9b0aaa5f2295ba9e01fde1 --- /dev/null +++ b/packages/controlmart/src/types/http.utils.type.ts @@ -0,0 +1,14 @@ +type HttpMethod = "GET" | "POST" | "PUT" | "PATCH" | "DELETE"; + +export type HttpRequestOptions = { + method?: HttpMethod; + headers?: Record; + queryParams?: Record; + body?: any; + retries?: number; + retryDelayMs?: number; + retryOn?: number[]; + timeoutMs?: number; + backoffFactor?: number; + jitter?: boolean; +}; diff --git a/packages/controlmart/src/types/index.ts b/packages/controlmart/src/types/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..09bbab9d728a203fb4ec4fcafa9c0950cd84abae --- /dev/null +++ b/packages/controlmart/src/types/index.ts @@ -0,0 +1,17 @@ +/** + * Central export point for all type definitions + */ + +// Knowledge Graph Types +export type { + NodeType, + EdgeType, + GraphNode, + GraphEdge, + ValidationResult, + ValidationWarning, + DependencyInfo, + ToolDependency, + RelatedCapability, + ToolAnnotation, +} from './knowledge-graph.type'; diff --git a/packages/controlmart/src/types/knowledge-graph.type.ts b/packages/controlmart/src/types/knowledge-graph.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..1d41e6fc8e4c1e15078bde283369e0dea3ecdc12 --- /dev/null +++ b/packages/controlmart/src/types/knowledge-graph.type.ts @@ -0,0 +1,195 @@ +/** + * Knowledge Graph Type Definitions + * + * Defines the structure of the knowledge graph used for capability validation + * and dependency analysis. 
+ */ + +/** + * Node types in the knowledge graph + */ +export type NodeType = + | 'PERSONA' // Roles/actors (Store Manager, Warehouse Worker) + | 'CAPABILITY' // Business functions (Order Fulfillment, Inventory Check) + | 'OD' // Operational Descriptors (workflow definitions) + | 'TOOL' // API operations (createOrder, allocateInventory) + | 'SERVICE' // System boundaries (ERP, WMS, TMS, EDI) + | 'ENTITY'; // Data objects (Order, Product, Inventory) + +/** + * Edge types representing relationships in the knowledge graph + */ +export type EdgeType = + | 'can_perform' // Persona → Capability: which capabilities a persona can execute + | 'implemented_by' // Capability → OD: how a capability is implemented + | 'uses' // OD → Tool: which tools an OD invokes + | 'exposed_by' // Tool → Service: which service provides a tool + | 'produces' // Tool → Entity: entities created by a tool + | 'fetches' // Tool → Entity: entities retrieved (read-only) by a tool + | 'requires' // Tool → Entity: entities needed as input + | 'modifies' // Tool → Entity: entities mutated by a tool + | 'prerequisite'; // Tool → Tool: sequencing requirements (deferred to Phase 4) + +/** + * Graph node representing an entity in the knowledge graph + */ +export interface GraphNode { + /** Unique identifier */ + id: string; + + /** Type of node */ + type: NodeType; + + /** Human-readable label */ + label: string; + + /** Additional metadata specific to node type */ + metadata?: Record; +} + +/** + * Graph edge representing a relationship between nodes + */ +export interface GraphEdge { + /** Source node ID */ + from: string; + + /** Target node ID */ + to: string; + + /** Type of relationship */ + type: EdgeType; + + /** Additional metadata specific to edge type */ + metadata?: Record; +} + +/** + * Result of validating an OD's executability + * (Stub for MORPH-316) + */ +export interface ValidationResult { + /** ID of the capability being validated */ + capabilityId: string; + + /** ID of the OD being validated */ + odId: string; + + /** Whether the OD can be executed */ + isExecutable: boolean; + + /** List of validation warnings */ + warnings: ValidationWarning[]; + + /** List of missing dependencies (tools or entities) */ + missingDependencies: string[]; + + /** Services required by this OD */ + requiredServices: string[]; + + /** Entities required as input */ + requiredEntities: string[]; +} + +/** + * Warning issued during validation + */ +export interface ValidationWarning { + /** Type of validation issue */ + type: 'missing_tool' | 'missing_entity' | 'service_unavailable'; + + /** Human-readable message */ + message: string; + + /** Additional details about the warning */ + details?: any; +} + +/** + * Dependency information for a capability + * (Stub for MORPH-318) + */ +export interface DependencyInfo { + /** ID of the capability */ + capabilityId: string; + + /** ID of the OD */ + odId: string; + + /** Tools used by this capability */ + tools: ToolDependency[]; + + /** Services required */ + services: string[]; + + /** Entities required as input (not produced internally) */ + inputEntities: string[]; + + /** Entities produced as output */ + outputEntities: string[]; +} + +/** + * Tool dependency information + */ +export interface ToolDependency { + /** Tool identifier */ + toolId: string; + + /** Service providing the tool */ + service: string; + + /** Step index in OD (0-based) */ + stepIndex: number; + + /** Whether this tool is required */ + required: boolean; +} + +/** + * Related capability suggestion + * (Stub 
for MORPH-317) + */ +export interface RelatedCapability { + /** Capability ID */ + id: string; + + /** Capability name */ + name: string; + + /** Type of relationship */ + relationshipType: 'shares-service' | 'shares-entity' | 'prerequisite' | 'suggested-next'; + + /** Similarity score (0.0-1.0) */ + similarity: number; + + /** Human-readable explanation */ + reason?: string; +} + +/** + * Tool annotation for knowledge graph building + * (Used in MORPH-313, MORPH-314, MORPH-315) + */ +export interface ToolAnnotation { + /** Exact tool identifier used in OD steps */ + toolId: string; + + /** Service providing this tool (wms, erp, tms, edi) */ + service: string; + + /** Entities created by this tool (new records) */ + produces: string[]; + + /** Entities retrieved/fetched by this tool (read-only, existing records) */ + fetches?: string[]; + + /** Entities required as input to this tool */ + requires: string[]; + + /** Entities modified/mutated by this tool */ + modifies: string[]; + + /** Human-readable description of what this tool does */ + description?: string; +} diff --git a/packages/controlmart/src/types/models/index.ts b/packages/controlmart/src/types/models/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..659c9e1f605d94836b9ee6e7178735a45273d01e --- /dev/null +++ b/packages/controlmart/src/types/models/index.ts @@ -0,0 +1,2 @@ +export * from "./world.model.type"; +export * from "./od.model.type"; diff --git a/packages/controlmart/src/types/models/od.model.type.ts b/packages/controlmart/src/types/models/od.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..27325bb6947b802ee421e004d9084a0796dd30fd --- /dev/null +++ b/packages/controlmart/src/types/models/od.model.type.ts @@ -0,0 +1,8 @@ +import type { Document } from "mongoose"; + +export type TOperationalDescriptor = Document & { + odId: string; + name: string; + description?: string; + data: Record; +}; diff --git a/packages/controlmart/src/types/models/world.model.type.ts b/packages/controlmart/src/types/models/world.model.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..d47d144eb34e407a04de470f61246be9c46a5e57 --- /dev/null +++ b/packages/controlmart/src/types/models/world.model.type.ts @@ -0,0 +1,47 @@ +import { Document } from "mongoose"; + +export type TWorld = Document & { + name: string; + url: string; + apiKey: string; + apiSecret: string; + is_default: boolean; + description?: string; + createdAt: Date; + updatedAt: Date; + companies: string[]; +}; + +export enum EWorldDataType { + Company = "Company", + People = "People", + Warehouse = "Warehouse", + Customer = "Customer", + Product = "Product", + CustomerLoyalty = "CustomerLoyalty", +} + +export type TWorldData = Document & { + worldId: string; + name: string; + companyId: string; + type: EWorldDataType; + data: Record; + createdAt: Date; + updatedAt: Date; +}; + +// Repository input/output types +export type CreateWorldInput = Omit; +export type UpdateWorldInput = Partial>; +export type CreateWorldDataInput = Omit; +export type UpdateWorldDataInput = Partial< + Omit +>; + +export type WorldDataFilter = { + worldId?: string; + companyId?: string; + type?: EWorldDataType; + name?: string; +}; diff --git a/packages/controlmart/src/types/od.type.ts b/packages/controlmart/src/types/od.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..64e5bc23273a3a11b6bfa4c2bae3f47ae2c3c19e --- /dev/null +++ b/packages/controlmart/src/types/od.type.ts @@ -0,0 +1,299 @@ +import 
type { Logger } from "pino"; +import type Ajv from "ajv"; + +import type { LogQueueStorage } from "../operational-descriptor/log-queue.storage"; +import type { TWorldModel } from "../models/world.model"; + +export interface OperationalDescriptor { + id: string; + name: string; + type?: "standard" | "background_job"; + persona?: string; + version: string; + description?: string; + namespace?: string; + inputSchema?: Record; + runPolicy?: RunPolicy; + steps: Step[]; + assertions?: Assertion[]; // Add this line + chaos?: ChaosPolicy; // Global chaos policy for entire workflow +} + +export interface Binding { + type: "literal" | "template" | "jmesPath"; + value: any; + template?: any; +} + +export interface OutputBinding { + storeAs: string; + extract?: string; +} + +export interface RetryPolicy { + maxRetries?: number; + backoff?: "fixed" | "exponential" | "linear"; + baseMs?: number; + maxBackoffMs?: number; + jitter?: boolean; +} + +export interface Condition { + language?: "jmespath" | "jsonata" | "cel" | "javascript"; + expression: string; +} + +export interface ChaosPolicy { + enabled: boolean; + probability: number; // 0.0 to 1.0 - overall chance chaos occurs + scenarios: ChaosScenario[]; + seed?: string; // For reproducible chaos + persistCorruptedData?: boolean; // When true, persist corrupted data instead of throwing errors + preprocessInput?: Function; + postprocessOutput?: Function; +} + +export interface ChaosScenario { + type: + | "data_corruption" + | "missing_data" + | "stale_data" + | "format_change" + | "permission_denied" + | "rate_limit" + | "partial_data" + | "duplicate_data" + | "invalid_state" + | "dependency_failure" + | "timing_issue"; + weight: number; // Relative weight for scenario selection + description: string; + config: ChaosConfig; +} + +export interface ChaosConfig { + // Data corruption chaos + corruptFields?: string[]; // Which fields to corrupt + corruptionType?: "null" | "wrong_type" | "invalid_format" | "random_value"; + + // Missing data chaos + missingFields?: string[]; // Fields to remove + missingRecords?: boolean; // Return empty results + throwError?: boolean; // Whether to throw error for missing records (default: true) + + // Stale data chaos + staleDataAge?: number; // How old the data should appear (in minutes) + + // Format change chaos + schemaChanges?: SchemaChange[]; + + // Permission/access chaos + permissionError?: string; + + // Rate limiting chaos + rateLimitDelay?: number; // ms to delay + rateLimitMessage?: string; + + // Partial data chaos + partialResults?: { + percentage: number; // 0-100, how much data to return + randomize: boolean; // random subset vs first N items + }; + + // State chaos + invalidStates?: string[]; // Return records in invalid states + + // Dependency chaos + dependencyService?: string; // Which service is "down" + cascadeFailure?: boolean; // Should failure cascade +} + +export interface SchemaChange { + field: string; + change: "rename" | "remove" | "change_type" | "add_nested" | "flatten"; + newName?: string; // for rename + newType?: "string" | "number" | "boolean" | "object" | "array"; + newStructure?: any; // for complex changes +} + +export interface RunPolicy { + idempotencyKeyExpr?: string; + deduplicationWindowMs?: number; + storeRuns?: boolean; + failureMode?: "fail_fast" | "continue" | "compensate"; +} + +export type Step = McpStep | NoopStep | MapStep | ScriptStep | BranchStep | ExitEarlyStep; + +interface BaseStep { + id: string; + name: string; + description?: string; + condition?: Condition; + 
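For reference, here is a ChaosPolicy literal that satisfies the ChaosPolicy, ChaosScenario, and ChaosConfig shapes defined above. It is a sketch only — the probabilities, field names, and messages are invented, and the import path assumes a caller under src/:

import type { ChaosPolicy } from "../types/od.type";

// 20% of executions inject one of two failure modes, picked by relative weight,
// with a fixed seed so runs are reproducible.
const demoChaos: ChaosPolicy = {
  enabled: true,
  probability: 0.2,
  seed: "demo-seed",
  scenarios: [
    {
      type: "data_corruption",
      weight: 70,
      description: "Null out the ship-to address on the generated order",
      config: { corruptFields: ["shipTo.address"], corruptionType: "null" },
    },
    {
      type: "rate_limit",
      weight: 30,
      description: "Simulate a throttled downstream service",
      config: { rateLimitDelay: 2000, rateLimitMessage: "429 Too Many Requests" },
    },
  ],
};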
input?: Binding; + output?: OutputBinding; + retry?: RetryPolicy; + timeoutMs?: number; + continueOnError?: boolean; + compensationStepId?: string | null; + children?: Step[]; + assertions?: Assertion[]; // Add this line too for step-level assertions + chaos?: ChaosPolicy; // Step-level chaos injection +} + +export interface McpStep extends BaseStep { + type: "mcp"; + service: string; + tool: string; + input: Binding; +} + +export interface NoopStep extends BaseStep { + type: "noop"; + input: Binding; +} + +export interface MapStep extends BaseStep { + type: "map"; + mapSpec: { + iterable: Binding; + itemName: string; + concurrency?: number; + }; + children: Step[]; +} + +export interface Assertion { + id: string; + description?: string; + expression: string; + language?: "jmespath" | "jsonata" | "cel" | "javascript"; + continueOnFailure?: boolean; +} + +export interface AssertionResult { + assertionId: string; + passed: boolean; + actualValue?: any; + error?: string; + evaluatedAt: string; +} + +export interface StepExecutionResult { + stepId: string; + status: "success" | "failed" | "skipped" | "timeout" | "exited"; + startTime: string; + endTime: string; + durationMs: number; + output?: any; + error?: string; + assertionResults?: AssertionResult[]; + chaosInjected?: ChaosInjectionResult; // Track what chaos was applied +} + +export interface ChaosInjectionResult { + triggered: boolean; + scenario: ChaosScenario; + injectedAt: string; + originalOutput?: any; // What the output would have been without chaos + modifications: string[]; // List of modifications made +} + +/** + * Enhanced telemetry for chaos injection tracking (MORPH-413) + */ +export interface ChaosInjectionMetadata { + stepId: string; + stepName: string; + scenarioType: string; + scenarioDescription: string; + configSource: 'step' | 'od' | 'capability' | 'world' | 'global' | 'env'; + probability: number; + seed?: string; + timestamp: string; + modifications: string[]; + config?: any; +} + +export interface ChaosCascadeResolution { + worldPolicy?: string; // Preset ID if from world + capabilityOverride?: string; // Preset ID if from capability + odPolicy?: string; // Preset ID if from OD + stepPolicy?: string; // Preset ID if from step + finalSource: 'env' | 'step' | 'od' | 'capability' | 'world' | 'global'; +} + +export interface ChaosTelemetry { + enabled: boolean; + injectionCount: number; + injections: ChaosInjectionMetadata[]; + cascadeResolution: ChaosCascadeResolution; + probability: number; + seed?: string; +} + +export interface RunResult { + runId: string; + worldId: string; + descriptorId: string; + descriptorVersion: string; + status: "success" | "failed" | "partial"; + startTime: string; + endTime: string; + durationMs: number; + stepResults: StepExecutionResult[]; + totalSteps: number; + successfulSteps: number; + failedSteps: number; + skippedSteps: number; + chaosMetadata?: ChaosTelemetry; // Enhanced chaos telemetry (MORPH-413) +} + +export interface WorldContext { + worldId: string; + runId: string; + traceId: string; + config: Record; + createdAt: Date; +} + +export interface ExecutionOptions { + world: TWorldModel; + tools: Record Promise>>; + logger: Logger; + validator?: Ajv; + input?: Record; + logQueueStorage?: LogQueueStorage; + auditLogger?: Logger; + odMetadata?: { + id: string; + name: string; + persona?: string; + }; + capabilityId?: string; // For chaos resolution context + chaosTelemetry?: any; // ChaosTelemetryCollector (MORPH-413) - using any to avoid circular import +} + +export type 
Context = Record; + +export interface ScriptStep extends BaseStep { + type: "script"; + script: string; + language?: "javascript" | "typescript"; + input?: Binding; +} + +export interface BranchStep extends BaseStep { + type: "branch"; + branchSpec: { + condition: Condition; + then: Step[]; + else?: Step[]; + }; +} + +export interface ExitEarlyStep extends BaseStep { + type: "exit_early"; + exitCondition: Condition; + message?: string; // Optional message to log when exiting early +} diff --git a/packages/controlmart/src/types/persona.type.ts b/packages/controlmart/src/types/persona.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..a4e37d48feb2238dcd745fbcdade76096d3919fc --- /dev/null +++ b/packages/controlmart/src/types/persona.type.ts @@ -0,0 +1,88 @@ +/** + * Persona System Type Definitions + * + * Personas represent supply chain roles (Store Manager, Warehouse Worker, etc.) + * and their associated capabilities. + */ + +/** + * Primary role categories for personas + */ +export type PersonaRole = 'operational' | 'management' | 'specialist' | 'system'; + +/** + * Department or functional area + */ +export type PersonaDepartment = + | 'warehouse' + | 'transportation' + | 'customer-service' + | 'inventory' + | 'edi' + | 'returns' + | 'store-operations'; + +/** + * Access level for capability restrictions + */ +export type PersonaAccessLevel = 'basic' | 'advanced' | 'admin'; + +/** + * Additional metadata for personas + */ +export interface PersonaMetadata { + /** Permission strings */ + permissions?: string[]; + + /** Access level for capability restrictions */ + accessLevel?: PersonaAccessLevel; + + /** Tags for filtering and grouping */ + tags?: string[]; + + /** Display order priority (lower = higher priority) */ + priority?: number; +} + +/** + * Main persona interface representing a supply chain role + */ +export interface Persona { + /** Unique identifier (kebab-case) */ + id: string; + + /** Display name */ + name: string; + + /** Description of the role and responsibilities */ + description: string; + + /** Primary role category */ + role: PersonaRole; + + /** Department or functional area */ + department?: PersonaDepartment; + + /** Capabilities this persona can perform */ + capabilityIds: string[]; + + /** Additional metadata */ + metadata?: PersonaMetadata; +} + +/** + * Filter criteria for querying personas + */ +export interface PersonaFilter { + /** Filter by role category */ + role?: PersonaRole; + + /** Filter by department */ + department?: PersonaDepartment; + + /** Filter by access level */ + accessLevel?: PersonaAccessLevel; + + /** Filter by tags (any match) */ + tags?: string[]; +} diff --git a/packages/controlmart/src/types/product.type.ts b/packages/controlmart/src/types/product.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..4fe4b3f86a1b6098040fafaec8ae3af667428c0c --- /dev/null +++ b/packages/controlmart/src/types/product.type.ts @@ -0,0 +1,13 @@ +export type TProduct = { + product_id: string; + product: string; + category: string; + sub_category: string; + brand: string; + sale_price: number; + market_price: number; + type: string; + rating: number; + description: string; + image: string; +}; diff --git a/packages/controlmart/src/types/repository.type.ts b/packages/controlmart/src/types/repository.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..148d1708e1d63519922f93ebbc7bf3ef51370992 --- /dev/null +++ b/packages/controlmart/src/types/repository.type.ts @@ -0,0 +1,7 @@ +export 
type TRepositoryPaginatedResult = { + items: T[]; + nextCursor?: string; + totalCount: number; + hasMore: boolean; + limit: number; +}; diff --git a/packages/controlmart/src/types/servicenow.type.ts b/packages/controlmart/src/types/servicenow.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..a1eab6f876e8c65eb72bde3033a332f781d690f8 --- /dev/null +++ b/packages/controlmart/src/types/servicenow.type.ts @@ -0,0 +1,11 @@ +export type TServiceNowTicketInput = { + short_description: string; + description: string; + impact: string; + urgency: string; + priority: string; + category: string; + caller_id: string; + assignment_group: string; + state: string; +}; \ No newline at end of file diff --git a/packages/controlmart/src/types/store.type.ts b/packages/controlmart/src/types/store.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..2e08e410a06101803a1f599f2822f4946c5a6135 --- /dev/null +++ b/packages/controlmart/src/types/store.type.ts @@ -0,0 +1,21 @@ +export type TStore = { + store_id: string; + code: string; + name: string; + type: "Flagship" | "Outlet" | "Warehouse" | "Mall Kiosk"; + address: { + street: string; + city: string; + state: string; + zip: string; + country: string; + }; + contact: { + phone: string; + email: string; + }; + manager: string; + capacity: number; + opening_hours: string; + established_date: string; +}; diff --git a/packages/controlmart/src/types/warehouse.type.ts b/packages/controlmart/src/types/warehouse.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..e4aff71534d24c6b89ee40a58fe87e823c8135e0 --- /dev/null +++ b/packages/controlmart/src/types/warehouse.type.ts @@ -0,0 +1,7 @@ +export type TWarehouse = { + warehouse_name: string; + company: string; + is_group: 0; + phone_no: string; + email: string; +}; diff --git a/packages/controlmart/src/types/world.type.ts b/packages/controlmart/src/types/world.type.ts new file mode 100644 index 0000000000000000000000000000000000000000..3d0b0a3c81e6b212d0075459bc838ea028f4074f --- /dev/null +++ b/packages/controlmart/src/types/world.type.ts @@ -0,0 +1,12 @@ +import type { OperationalDescriptor } from "./od.type"; + +export interface TWorldConfig { + id: string; + layoutName: string; + shortDescription: string; + description: string; + docs: () => any; + dataSeeder: Function; + odSeeder: Function; + ods?: OperationalDescriptor[]; +} diff --git a/packages/controlmart/src/utils/ai.util.ts b/packages/controlmart/src/utils/ai.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..43fe09c5eb19e438a8cdee701e63a2e9c3217ab8 --- /dev/null +++ b/packages/controlmart/src/utils/ai.util.ts @@ -0,0 +1,71 @@ +import type { TMessage } from "../types/ai.type"; +import type { ChatCompletionMessageParam } from "openai/resources"; +import { z } from "zod"; +import { validateSchemaSafe } from "./validators/validate-schema"; + +export const formatMessagesFormat = (messages: TMessage[]): ChatCompletionMessageParam[] => { + return messages.map((message) => { + return { + role: message.role, + content: message.content, + }; + }); +}; + +export const formatSystemPrompt = ( + prompt: string, + inputs: object, + inputSchema: z.ZodType, +): TMessage[] => { + return [ + { + role: "system", + content: agentPromptFormatter({ + prompt, + inputs, + inputSchema, + }), + }, + ]; +}; + +export const buildFinalPrompt = ( + prompt: string, + messages: TMessage[], + inputs: object = {}, + inputSchema: z.ZodType = z.object({}), +): TMessage[] => { + return 
[...formatSystemPrompt(prompt, inputs, inputSchema), ...messages]; +}; + +export const agentPromptFormatter = ({ + prompt, + inputs, + inputSchema, +}: { + prompt: string; + inputs: object; + inputSchema: z.ZodType; +}) => { + const validateInput = validateSchemaSafe(inputSchema, inputs); + if (!validateInput.success) { + throw new Error(validateInput.error.message); + } + + const formattedPrompt = prompt.replace(/\{\{\s*([\w.]+)\s*\}\}/g, (match, key) => { + const keys = key.split("."); + let value: any = inputs; + + for (const k of keys) { + if (value && typeof value === "object" && k in value) { + value = value[k as keyof typeof value]; + } else { + return match; + } + } + + return String(value); + }); + + return formattedPrompt; +}; diff --git a/packages/controlmart/src/utils/edi/index.ts b/packages/controlmart/src/utils/edi/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..f891cd8c9fee37fc8da950433869d2be2d487e4d --- /dev/null +++ b/packages/controlmart/src/utils/edi/index.ts @@ -0,0 +1,4 @@ +export * from "./parsers.edi.util"; +export * from "./service-tools.edi.util"; +export * from "./validation.edi.util"; +export * from "./od-builders.edi.util"; diff --git a/packages/controlmart/src/utils/edi/od-builders-refactored.edi.util.ts b/packages/controlmart/src/utils/edi/od-builders-refactored.edi.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..e7c61494a7772d9e68a5257246e68b649188e249 --- /dev/null +++ b/packages/controlmart/src/utils/edi/od-builders-refactored.edi.util.ts @@ -0,0 +1,353 @@ +import type { Logger } from "pino"; + +import type { OperationalDescriptor } from "../../types/od.type"; +import type { TCompanyGenerate } from "../../models/erp/company.erp.model"; +import type { IOperationalDescriptorContext } from "../../operational-descriptor/init.od"; +import { + GenericODBuilder, + GenericODBuilderFactory, +} from "../../operational-descriptor/generic-builder.od"; + +export interface EdiODBuilderConfig { + sender: TCompanyGenerate; + receiver: TCompanyGenerate; + logger: Logger; + context: IOperationalDescriptorContext; + chaosProbability?: number; + customSteps?: any[]; +} + +export class RefactoredPurchaseOrderEdiODBuilder { + private builder: GenericODBuilder; + private config: EdiODBuilderConfig; + + constructor(config: EdiODBuilderConfig) { + this.config = config; + this.builder = GenericODBuilderFactory.createEdiBuilder({ + name: `Purchase Order EDI Processing`, + description: `Process 850 (Purchase Order) EDI from ${config.sender.name} to ${config.receiver.name}`, + chaosProbability: config.chaosProbability || 0.0, + }); + } + + private generateId(prefix: string): string { + const timestamp = new Date().toISOString(); + const { sender, receiver } = this.config; + return `${prefix}-${sender.companyId}-to-${receiver.companyId}-${timestamp}`; + } + + build(): OperationalDescriptor { + const { sender, receiver, context } = this.config; + const id = this.generateId("po-edi"); + + // Set the ID and add custom chaos scenarios + this.builder + .setId(id) + .addChaosScenario( + this.builder.createDataCorruptionScenario( + 35, + "Corrupt EDI data", + ["items", "orderDate"], + "wrong_type", + ), + ) + .addChaosScenario( + this.builder.createMissingDataScenario(30, "Missing required EDI fields", ["items"]), + ) + .addChaosScenario( + this.builder.createFormatChangeScenario(20, "Invalid EDI format structure", [ + { + field: "ediSegments.ST", + change: "remove", + }, + ]), + ); + + // Generate Purchase Order Data + 
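As a usage sketch for the agentPromptFormatter helper defined in ai.util.ts above: it validates the inputs against the supplied Zod schema, then substitutes {{dot.path}} placeholders from those inputs, leaving unresolvable keys untouched. The schema and values here are invented for illustration:

import { z } from "zod";

import { agentPromptFormatter } from "./ai.util";

const inputSchema = z.object({
  user: z.object({ name: z.string() }),
  company: z.string(),
});

const rendered = agentPromptFormatter({
  prompt: "You assist {{user.name}} with {{company}} orders. {{unknown.key}} stays as-is.",
  inputs: { user: { name: "Dana" }, company: "ControlMart" },
  inputSchema,
});
// rendered === "You assist Dana with ControlMart orders. {{unknown.key}} stays as-is."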
this.builder.addMcpStep({ + id: "generatePurchaseOrder", + name: "Generate Purchase Order Data", + service: "edi", + tool: "generatePurchaseOrder", + input: { + sender, + receiver, + poNumber: `PO-${Math.random().toString(36).substr(2, 9)}`, + items: [ + { + sku: `SKU-${Math.random().toString(36).substr(2, 6)}`, + quantity: Math.floor(Math.random() * 100) + 1, + unitPrice: Math.floor(Math.random() * 100) + 10, + }, + ], + department: "PROCUREMENT", + }, + inputType: "literal", + outputStoreAs: "purchaseOrderData", + }); + + // Generate EDI 850 Document with custom chaos + this.builder.addEdiStep( + "generateEDI850", + "Generate EDI 850 Document", + "generateEDI850", + "{{purchaseOrderData}}", + "ediDocument", + { + enabled: true, + probability: 0.2, + scenarios: [ + this.builder.createFormatChangeScenario( + 50, + "CTT01 does not equal count of PO1 segments", + [{ field: "CTT01", change: "change_type", newType: "string" }], + ), + this.builder.createFormatChangeScenario( + 50, + "GS08 version differs from partner requirement", + [{ field: "GS08", change: "change_type", newType: "string" }], + ), + this.builder.createFormatChangeScenario(50, "N101 value violates partner spec", [ + { field: "N101", change: "change_type", newType: "string" }, + ]), + ], + postprocessOutput: async (output: any, scenarioType: string) => { + let edi = String(output); + if (scenarioType === "format_change") { + const pick = Math.random(); + if (pick < 1 / 3) { + if (edi.includes("CTT*")) { + edi = edi.replace(/CTT\*(\d+)/, (_m, g1) => `CTT*${Number(g1) + 1}`); + } + } else if (pick < 2 / 3) { + edi = edi.replace( + /(GS\*[^~]*\*)(004010)(~)/, + (_m, p1, _ver, p3) => `${p1}006020${p3}`, + ); + } else { + edi = edi.replace(/N1\*ST\*/g, "N1*ZZ*"); + } + } + await context.ediTransactionRepo.updateEdiTransaction(id + "po", { + status: "ERRORED", + rawEdi: edi, + }); + return edi; + }, + }, + ); + + // Exchange Sender and Receiver + this.builder.addEdiStep( + "exchange_sender_receiver", + "Exchange Sender and Receiver", + "exchange_sender_receiver", + { erpOrder: "{{purchaseOrderData}}" }, + "purchaseOrderData", + ); + + // Send Receipt Acknowledgment + this.builder.addEdiStep( + "sendReceiptAcknowledgment", + "Send Acknowledgment", + "send_acknowledgment", + { erpOrder: "{{purchaseOrderData}}" }, + "acknowledgment", + ); + + // Create Purchase Order in ERP + this.builder.addErpStep( + "createPurchaseOrderInERP", + "Create Purchase Order in ERP", + "create_purchase_order", + { + supplier_id: 1, + order_lines: "{{purchaseOrderData.items}}", + notes: "Generated from EDI 850", + reference: "{{purchaseOrderData.poNumber}}", + parsedOrder: "{{parsedOrder}}", + erpOrder: "{{purchaseOrderData}}", + }, + "erpOrder", + this.builder.createRetryPolicy({ + maxRetries: 1, + backoff: "fixed", + baseMs: 500, + }), + ); + + // Send PO Acknowledgment + this.builder.addEdiStep( + "sendPOAcknowledgment", + "Send Acknowledgment", + "send_acknowledgment", + { erpOrder: "{{purchaseOrderData}}" }, + "acknowledgment", + ); + + // Generate Invoice with chaos + this.builder.addEdiStep( + "generateInvoice", + "Generate Invoice", + "generate_invoice", + { + erpOrder: "{{purchaseOrderData}}", + parsedOrder: "{{parsedOrder}}", + }, + "invoice", + { + enabled: true, + probability: 0.1, + scenarios: [ + this.builder.createDataCorruptionScenario( + 100, + "Corrupt advanced ship notice data", + ["receiverDUNS"], + "random_value", + ), + this.builder.createDataCorruptionScenario( + 100, + "Invoice price differs from PO price for same item", + [], + 
"random_value", + ), + this.builder.createDataCorruptionScenario( + 100, + "Sum of SN1 quantities ≠ ERP/shipment total", + [], + "random_value", + ), + ], + preprocessInput: (input: string) => { + const { EdiParserFactory } = require("./parsers.edi.util"); + const { senderDUNS, receiverDUNS, items } = EdiParserFactory.parse("810", input); + return { senderDUNS, receiverDUNS, items }; + }, + postprocessOutput: async (output: any, scenarioType: string) => { + const { EdiGenerators } = require("../../helpers/edi/generators.edi.helper"); + const { senderDUNS, receiverDUNS, items } = output; + let invoiceStr = EdiGenerators.generateInvoice(senderDUNS, receiverDUNS, items); + if (scenarioType === "data_corruption") { + invoiceStr = invoiceStr + .split("~\n") + .map((seg: string) => { + if (seg.startsWith("IT1*")) { + const parts = seg.split("*"); + const price = parseFloat(parts[4] || "0"); + parts[4] = (price + 1).toFixed(2); + return parts.join("*"); + } + return seg; + }) + .join("~\n"); + } + await context.ediTransactionRepo.updateEdiTransaction(id + "invoice", { + status: "ERRORED", + rawEdi: invoiceStr, + }); + return invoiceStr; + }, + }, + ); + + // Send Advanced Ship Notice + this.builder.addEdiStep( + "sendAdvancedShipNotice", + "Send Advanced Ship Notice", + "send_advanced_ship_notice", + { + invoice: "{{invoice}}", + parsedOrder: "{{parsedOrder}}", + erpOrder: "{{purchaseOrderData}}", + }, + "advancedShipNotice", + { + enabled: true, + probability: 0.1, + scenarios: [ + this.builder.createDataCorruptionScenario( + 100, + "Corrupt advanced ship notice data", + ["receiverDUNS"], + "random_value", + ), + this.builder.createMissingDataScenario(100, "Missing required EDI fields", []), + this.builder.createDataCorruptionScenario( + 100, + "Missing required EDI fields", + ["senderDUNS"], + "random_value", + ), + ], + preprocessInput: (input: string) => { + const { EdiParserFactory } = require("./parsers.edi.util"); + const { senderDUNS, receiverDUNS, items } = EdiParserFactory.parse("856", input); + return { senderDUNS, receiverDUNS, items }; + }, + postprocessOutput: async (output: any, scenarioType: string) => { + const { EdiGenerators } = require("../../helpers/edi/generators.edi.helper"); + const { senderDUNS, receiverDUNS, items } = output; + const advancedShipNotice = EdiGenerators.generateAdvancedShipNotice( + senderDUNS, + receiverDUNS, + items, + scenarioType === "missing_data", + ); + await context.ediTransactionRepo.updateEdiTransaction(id + "asn", { + status: "ERRORED", + rawEdi: advancedShipNotice, + }); + return advancedShipNotice; + }, + }, + ); + + // Send Invoice + this.builder.addEdiStep( + "SendInvoice", + "Send Invoice", + "send_invoice", + { invoice: "{{invoice}}" }, + "invoiceSent", + ); + + // Validate EDI + this.builder.addEdiStep( + "validateEDI", + "Validate EDI", + "validate_edi", + { + edi: "{{ediDocument}}", + erpOrder: "{{purchaseOrderData}}", + }, + "validationResult", + ); + + // Add custom steps if provided + if (this.config.customSteps) { + this.builder.addSteps(this.config.customSteps); + } + + return this.builder.build(); + } +} + +export class RefactoredEdiODBuilderFactory { + static createPurchaseOrderBuilder( + config: EdiODBuilderConfig, + ): RefactoredPurchaseOrderEdiODBuilder { + return new RefactoredPurchaseOrderEdiODBuilder(config); + } + + static createBuilder( + type: string, + config: EdiODBuilderConfig, + ): RefactoredPurchaseOrderEdiODBuilder { + switch (type) { + case "purchase-order": + return new 
RefactoredPurchaseOrderEdiODBuilder(config); + default: + throw new Error(`Unsupported EDI OD builder type: ${type}`); + } + } +} diff --git a/packages/controlmart/src/utils/edi/od-builders.edi.util.ts b/packages/controlmart/src/utils/edi/od-builders.edi.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..c986fca8bae127996760c4adb7b3f11309db85f4 --- /dev/null +++ b/packages/controlmart/src/utils/edi/od-builders.edi.util.ts @@ -0,0 +1,454 @@ +import type { Logger } from "pino"; + +import type { OperationalDescriptor } from "../../types/od.type"; +import type { TCompanyGenerate } from "../../models/erp/company.erp.model"; +import type { IOperationalDescriptorContext } from "../../operational-descriptor/init.od"; + +export interface EdiODBuilderConfig { + sender: TCompanyGenerate; + receiver: TCompanyGenerate; + logger: Logger; + context: IOperationalDescriptorContext; + chaosProbability?: number; + customSteps?: any[]; +} + +export interface ChaosScenario { + type: string; + weight: number; + description: string; + config: any; +} + +export abstract class BaseEdiODBuilder { + protected config: EdiODBuilderConfig; + + constructor(config: EdiODBuilderConfig) { + this.config = config; + } + + protected generateId(prefix: string): string { + const timestamp = new Date().getTime().toString(); + const { sender, receiver } = this.config; + return `${prefix}-${sender.companyId}-to-${receiver.companyId}-${timestamp}`; + } + + protected createBaseChaosConfig(): any { + return { + probability: this.config.chaosProbability || 0.0, + enabled: true, + scenarios: this.getDefaultChaosScenarios(), + }; + } + + protected getDefaultChaosScenarios(): ChaosScenario[] { + return [ + { + type: "data_corruption", + weight: 35, + description: "Corrupt EDI data", + config: { + corruptFields: ["items", "orderDate"], + corruptionType: "wrong_type", + }, + }, + { + type: "missing_data", + weight: 30, + description: "Missing required EDI fields", + config: { + missingFields: ["items"], + }, + }, + { + type: "format_change", + weight: 20, + description: "Invalid EDI format structure", + config: { + schemaChanges: [ + { + field: "ediSegments.ST", + change: "remove", + }, + ], + }, + }, + ]; + } + + protected createEdiGenerationStep( + stepId: string, + stepName: string, + tool: string, + inputValue: any, + outputStoreAs: string, + chaosConfig?: any, + ) { + const step: any = { + id: stepId, + name: stepName, + type: "mcp" as const, + service: "edi", + tool, + input: { + type: "template" as const, + value: inputValue, + }, + output: { storeAs: outputStoreAs }, + }; + + if (chaosConfig) { + step.chaos = chaosConfig; + } + + return step; + } + + protected createErpStep( + stepId: string, + stepName: string, + tool: string, + inputValue: any, + outputStoreAs: string, + retryConfig?: any, + ) { + const step: any = { + id: stepId, + name: stepName, + type: "mcp" as const, + service: "erp", + tool, + input: { + type: "template" as const, + value: inputValue, + }, + output: { storeAs: outputStoreAs }, + }; + + if (retryConfig) { + step.retry = retryConfig; + } + + return step; + } + + abstract build(): OperationalDescriptor; +} + +export class PurchaseOrderEdiODBuilder extends BaseEdiODBuilder { + build(): OperationalDescriptor { + const { sender, receiver, logger, context } = this.config; + const timestamp = new Date().toISOString(); + const id = this.generateId("po-edi"); + + const steps = [ + { + id: "generatePurchaseOrder", + name: "Generate Purchase Order Data", + type: "mcp" as const, + service: 
"edi", + tool: "generatePurchaseOrder", + input: { + type: "literal" as const, + value: { + sender, + receiver, + poNumber: `PO-${Math.random().toString(36).substr(2, 9)}`, + items: [ + { + sku: `SKU-${Math.random().toString(36).substr(2, 6)}`, + quantity: Math.floor(Math.random() * 100) + 1, + unitPrice: Math.floor(Math.random() * 100) + 10, + }, + ], + department: "PROCUREMENT", + }, + }, + output: { storeAs: "purchaseOrderData" }, + }, + + this.createEdiGenerationStep( + "generateEDI850", + "Generate EDI 850 Document", + "generateEDI850", + "{{purchaseOrderData}}", + "ediDocument", + { + probability: 0.0, + enabled: true, + scenarios: [ + { + type: "format_change", + weight: 50, + description: "CTT01 does not equal count of PO1 segments", + config: { + schemaChanges: [{ field: "CTT01", change: "change_type", newType: "string" }], + }, + }, + { + type: "format_change", + weight: 50, + description: "GS08 version differs from partner requirement", + config: { + schemaChanges: [{ field: "GS08", change: "change_type", newType: "string" }], + }, + }, + { + type: "format_change", + weight: 50, + description: "N101 value violates partner spec", + config: { + schemaChanges: [{ field: "N101", change: "change_type", newType: "string" }], + }, + }, + ], + postprocessOutput: async (output: any, scenarioType: string) => { + let edi = String(output); + if (scenarioType === "format_change") { + const pick = Math.random(); + if (pick < 1 / 3) { + if (edi.includes("CTT*")) { + edi = edi.replace(/CTT\*(\d+)/, (_m, g1) => `CTT*${Number(g1) + 1}`); + } + } else if (pick < 2 / 3) { + edi = edi.replace( + /(GS\*[^~]*\*)(004010)(~)/, + (_m, p1, _ver, p3) => `${p1}006020${p3}`, + ); + } else { + edi = edi.replace(/N1\*ST\*/g, "N1*ZZ*"); + } + } + await context.ediTransactionRepo.updateEdiTransaction(id + "po", { + status: "ERRORED", + rawEdi: edi, + }); + return edi; + }, + }, + ), + + this.createEdiGenerationStep( + "exchange_sender_receiver", + "Exchange Sender and Receiver", + "exchange_sender_receiver", + { erpOrder: "{{purchaseOrderData}}" }, + "purchaseOrderData", + ), + + this.createEdiGenerationStep( + "sendReceiptAcknowledgment", + "Send Acknowledgment", + "send_acknowledgment", + { erpOrder: "{{purchaseOrderData}}" }, + "acknowledgment", + ), + + this.createErpStep( + "createPurchaseOrderInERP", + "Create Purchase Order in ERP", + "create_purchase_order", + { + supplier_id: 1, + order_lines: "{{purchaseOrderData.items}}", + notes: "Generated from EDI 850", + reference: "{{purchaseOrderData.poNumber}}", + parsedOrder: "{{parsedOrder}}", + erpOrder: "{{purchaseOrderData}}", + }, + "erpOrder", + { + maxRetries: 1, + backoff: "fixed", + baseMs: 500, + }, + ), + + this.createEdiGenerationStep( + "sendPOAcknowledgment", + "Send Acknowledgment", + "send_acknowledgment", + { erpOrder: "{{purchaseOrderData}}" }, + "acknowledgment", + ), + + this.createEdiGenerationStep( + "generateInvoice", + "Generate Invoice", + "generate_invoice", + { + erpOrder: "{{purchaseOrderData}}", + parsedOrder: "{{parsedOrder}}", + }, + "invoice", + { + probability: 0.05, + enabled: true, + scenarios: [ + { + type: "data_corruption", + weight: 100, + description: "Corrupt advanced ship notice data", + config: { + corruptFields: ["receiverDUNS"], + corruptionType: "random_value", + }, + }, + { + type: "data_corruption", + weight: 0, + description: "Invoice price differs from PO price for same item", + config: { corruptionType: "random_value" }, + }, + { + type: "data_corruption", + weight: 0, + description: "Sum of SN1 quantities ≠ 
ERP/shipment total", + config: { corruptionType: "random_value" }, + }, + ], + preprocessInput: (input: string) => { + const { EdiParserFactory } = require("./parsers.edi.util"); + const { senderDUNS, receiverDUNS, items } = EdiParserFactory.parse("810", input); + return { senderDUNS, receiverDUNS, items }; + }, + postprocessOutput: async (output: any, scenarioType: string) => { + const { EdiGenerators } = require("../../helpers/edi/generators.edi.helper"); + const { senderDUNS, receiverDUNS, items } = output; + let invoiceStr = EdiGenerators.generateInvoice(senderDUNS, receiverDUNS, items); + if (scenarioType === "data_corruption") { + invoiceStr = invoiceStr + .split("~\n") + .map((seg: string) => { + if (seg.startsWith("IT1*")) { + const parts = seg.split("*"); + const price = parseFloat(parts[4] || "0"); + parts[4] = (price + 1).toFixed(2); + return parts.join("*"); + } + return seg; + }) + .join("~\n"); + } + await context.ediTransactionRepo.updateEdiTransaction(id + "invoice", { + status: "ERRORED", + rawEdi: invoiceStr, + }); + return invoiceStr; + }, + }, + ), + + this.createEdiGenerationStep( + "sendAdvancedShipNotice", + "Send Advanced Ship Notice", + "send_advanced_ship_notice", + { + invoice: "{{invoice}}", + parsedOrder: "{{parsedOrder}}", + erpOrder: "{{purchaseOrderData}}", + }, + "advancedShipNotice", + { + probability: 0.1, + enabled: true, + scenarios: [ + { + type: "data_corruption", + weight: 50, + description: "Corrupt advanced ship notice data", + config: { + corruptFields: ["receiverDUNS"], + corruptionType: "random_value", + }, + }, + { + type: "missing_data", + weight: 100, + description: "Missing required EDI fields", + config: { + missingFields: [], + }, + }, + { + type: "data_corruption", + weight: 50, + description: "Corrupt sender DUNS", + config: { + corruptFields: ["senderDUNS"], + corruptionType: "random_value", + }, + }, + ], + preprocessInput: (input: string) => { + const { EdiParserFactory } = require("./parsers.edi.util"); + const { senderDUNS, receiverDUNS, items } = EdiParserFactory.parse("856", input); + return { senderDUNS, receiverDUNS, items }; + }, + postprocessOutput: async (output: any, scenarioType: string) => { + const { EdiGenerators } = require("../../helpers/edi/generators.edi.helper"); + const { senderDUNS, receiverDUNS, items } = output; + const advancedShipNotice = EdiGenerators.generateAdvancedShipNotice( + senderDUNS, + receiverDUNS, + items, + scenarioType === "missing_data", + ); + await context.ediTransactionRepo.updateEdiTransaction(id + "asn", { + status: "ERRORED", + rawEdi: advancedShipNotice, + }); + return advancedShipNotice; + }, + }, + ), + + this.createEdiGenerationStep( + "SendInvoice", + "Send Invoice", + "send_invoice", + { invoice: "{{invoice}}" }, + "invoiceSent", + ), + + this.createEdiGenerationStep( + "validateEDI", + "Validate EDI", + "validate_edi", + { + edi: "{{ediDocument}}", + erpOrder: "{{purchaseOrderData}}", + }, + "validationResult", + ), + ]; + + if (this.config.customSteps) { + steps.push(...this.config.customSteps); + } + + return { + id, + name: `Purchase Order EDI Processing`, + version: "1.0.0", + description: `Process 850 (Purchase Order) EDI from ${sender.name} to ${receiver.name}`, + chaos: this.createBaseChaosConfig(), + steps, + }; + } +} + +export class EdiODBuilderFactory { + static createPurchaseOrderBuilder(config: EdiODBuilderConfig): PurchaseOrderEdiODBuilder { + return new PurchaseOrderEdiODBuilder(config); + } + + static createBuilder(type: string, config: EdiODBuilderConfig): 
BaseEdiODBuilder { + switch (type) { + case "purchase-order": + return new PurchaseOrderEdiODBuilder(config); + default: + throw new Error(`Unsupported EDI OD builder type: ${type}`); + } + } +} diff --git a/packages/controlmart/src/utils/edi/parsers.edi.util.ts b/packages/controlmart/src/utils/edi/parsers.edi.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..6e2bec79f52f7dacdab07b39a21061d02594aba4 --- /dev/null +++ b/packages/controlmart/src/utils/edi/parsers.edi.util.ts @@ -0,0 +1,200 @@ +import type { TItem } from "../../helpers/edi/generators.edi.helper"; + +export class EdiSegmentParser { + static parseSegments(edi: string): string[] { + return edi + .split("~") + .map((s) => s.trim()) + .filter((s) => s.length > 0); + } + + static findSegment(segments: string[], segmentType: string): string[] { + const segment = segments.find((s) => s.startsWith(`${segmentType}*`)); + return segment ? segment.split("*") : []; + } + + static findSegments(segments: string[], segmentType: string): string[][] { + return segments.filter((s) => s.startsWith(`${segmentType}*`)).map((s) => s.split("*")); + } +} + +export class Edi850Parser { + static parse(edi850: string) { + const segments = EdiSegmentParser.parseSegments(edi850); + + const ISA = EdiSegmentParser.findSegment(segments, "ISA"); + const GS = EdiSegmentParser.findSegment(segments, "GS"); + const BEG = EdiSegmentParser.findSegment(segments, "BEG"); + const N1_ST = EdiSegmentParser.findSegment(segments, "N1*ST"); + const N1_BT = EdiSegmentParser.findSegment(segments, "N1*BT"); + + const poNumber = BEG?.[3] || ""; + const orderDate = BEG?.[5] || ""; + const senderDUNS = ISA?.[6] || GS?.[2] || ""; + const receiverDUNS = ISA?.[8] || GS?.[3] || ""; + + const items = EdiSegmentParser.findSegments(segments, "PO1").map((parts) => ({ + lineNumber: parts[1], + quantity: parseFloat(parts[2] || "0"), + unitPrice: parseFloat(parts[4] || "0"), + sku: parts[7], + vendorPart: parts[9] || "", + })); + const totalAmount = items.reduce((sum, item) => sum + item.unitPrice * item.quantity, 0); + + return { + totalAmount, + poNumber, + orderDate, + senderDUNS, + receiverDUNS, + shipToName: N1_ST?.[2] || "", + billToName: N1_BT?.[2] || "", + items, + }; + } + + static extractItems(edi: string): TItem[] { + const segments = EdiSegmentParser.parseSegments(edi); + return EdiSegmentParser.findSegments(segments, "PO1").map((parts) => ({ + quantity: parseFloat(parts[2] || "0"), + unitPrice: parseFloat(parts[4] || "0"), + sku: parts[7] || "", + product_id: parseFloat(parts[9] || "0"), + })); + } +} + +export class Edi810Parser { + static parse(edi810: string) { + const segments = EdiSegmentParser.parseSegments(edi810); + + const ISA = EdiSegmentParser.findSegment(segments, "ISA"); + const GS = EdiSegmentParser.findSegment(segments, "GS"); + const BIG = EdiSegmentParser.findSegment(segments, "BIG"); + const N1_ST = EdiSegmentParser.findSegment(segments, "N1*ST"); + const N1_BT = EdiSegmentParser.findSegment(segments, "N1*BT"); + const TDS = EdiSegmentParser.findSegment(segments, "TDS"); + + const invoiceNumber = BIG?.[2] || ""; + const invoiceDate = BIG?.[1] || ""; + const poNumber = BIG?.[3] || ""; + const senderDUNS = ISA?.[6] || GS?.[2] || ""; + const receiverDUNS = ISA?.[8] || GS?.[3] || ""; + const totalCents = parseFloat(TDS?.[1] || "0"); + const totalAmount = totalCents / 100; + + const items = EdiSegmentParser.findSegments(segments, "IT1").map((parts) => ({ + quantity: parseFloat(parts[2] || "0"), + unitPrice: parseFloat(parts[4] || "0"), + 
sku: parts[7], + product_id: parts[9] || "", + })); + + return { + invoiceNumber, + invoiceDate, + poNumber, + senderDUNS, + receiverDUNS, + shipToName: N1_ST?.[2] || "", + billToName: N1_BT?.[2] || "", + totalAmount, + items, + }; + } + + static extractItems(edi: string): TItem[] { + const segments = EdiSegmentParser.parseSegments(edi); + return EdiSegmentParser.findSegments(segments, "IT1").map((parts) => ({ + quantity: parseFloat(parts[2] || "0"), + unitPrice: parseFloat(parts[4] || "0"), + sku: parts[7] || "", + product_id: parseFloat(parts[9] || "0"), + })); + } +} + +export class Edi856Parser { + static parse(edi: string) { + const segments = EdiSegmentParser.parseSegments(edi); + console.log("segments", segments); + const AS = EdiSegmentParser.findSegment(segments, "GS"); + console.log("AS" + AS); + const senderDUNS = AS?.[2] || ""; + const receiverDUNS = AS?.[6] || ""; + console.log("senderDUNS", senderDUNS); + console.log("receiverDUNS", receiverDUNS); + const items: TItem[] = []; + for (let i = 0; i < segments.length; i++) { + if (segments[i]?.startsWith("LIN*")) { + const linParts = segments[i]?.split("*"); + const sn1Parts = segments[i + 1]?.startsWith("SN1*") ? segments[i + 1]?.split("*") : []; + const pidParts = segments[i + 2]?.startsWith("PID*") ? segments[i + 2]?.split("*") : []; + items.push({ + sku: linParts?.[3] || "", + quantity: parseFloat(sn1Parts?.[3] || "0"), + product_id: parseFloat(pidParts?.[5] || "0"), + unitPrice: parseFloat(pidParts?.[7] || "0"), + }); + } + } + + const totalAmount = items.reduce((sum, item) => sum + item.unitPrice * item.quantity, 0); + + return { senderDUNS, receiverDUNS, items, totalAmount }; + } + + static extractItems(edi: string): TItem[] { + const segments = EdiSegmentParser.parseSegments(edi); + const items: TItem[] = []; + + for (let i = 0; i < segments.length; i++) { + if (segments[i]?.startsWith("LIN*")) { + const linParts = segments[i]?.split("*"); + const sn1Parts = segments[i + 1]?.startsWith("SN1*") ? segments[i + 1]?.split("*") : []; + const pidParts = segments[i + 2]?.startsWith("PID*") ? 
segments[i + 2]?.split("*") : []; + items.push({ + sku: linParts?.[3] || "", + quantity: parseFloat(sn1Parts?.[3] || "0"), + product_id: parseFloat(pidParts?.[5] || "0"), + unitPrice: parseFloat(pidParts?.[7] || "0"), + }); + } + } + + return items; + } +} + +export class EdiParserFactory { + static getParser(docType: string) { + switch (docType) { + case "850": + return Edi850Parser; + case "810": + return Edi810Parser; + case "856": + return Edi856Parser; + default: + throw new Error(`Unsupported EDI document type: ${docType}`); + } + } + + static parse(docType: string, edi: string) { + const Parser = this.getParser(docType); + return Parser.parse(edi); + } + + static extractItems(docType: string, edi: string): TItem[] { + const Parser = this.getParser(docType); + return Parser.extractItems(edi); + } +} + +export const getParamsFromInvoice = Edi810Parser.parse; +export const getParamsFromPayOrder = Edi850Parser.parse; +export const getParamsFromAdvancedShipNotice = Edi856Parser.parse; +export const extractItemsFrom850 = Edi850Parser.extractItems; +export const extractItemsFrom810 = Edi810Parser.extractItems; +export const extractItemsFrom856 = Edi856Parser.extractItems; diff --git a/packages/controlmart/src/utils/edi/service-tools.edi.util.ts b/packages/controlmart/src/utils/edi/service-tools.edi.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..b1e9e0b99ba67048e72597bde6e4941804ff89c9 --- /dev/null +++ b/packages/controlmart/src/utils/edi/service-tools.edi.util.ts @@ -0,0 +1,237 @@ +import fs from "fs"; + +import type { Logger } from "pino"; + +import type { IOperationalDescriptorContext } from "../../operational-descriptor/init.od"; +import { EdiGenerators } from "../../helpers/edi/generators.edi.helper"; +import { httpRequest } from "../http.util"; + +export interface EdiServiceToolsConfig { + context: IOperationalDescriptorContext; + flowId: string; + logger: Logger; +} + +export class EdiServiceTools { + private config: EdiServiceToolsConfig; + + constructor(config: EdiServiceToolsConfig) { + this.config = config; + } + + async exchange_sender_receiver(input: { erpOrder: any }) { + const { sender, receiver } = input.erpOrder; + return { + ...input.erpOrder, + sender: receiver, + receiver: sender, + }; + } + + async generatePurchaseOrder(input: any) { + const { sender, receiver, poNumber, items, department } = input; + return { + sender, + receiver, + poNumber, + items: items.map((item: any) => ({ + ...item, + product_id: Math.floor(Math.random() * 1000) + 1, + })), + department, + orderDate: new Date().toISOString().slice(0, 10), + }; + } + + async generateEDI850(orderData: any) { + const { logger, flowId } = this.config; + logger.info({ orderData, purchaseOrderId: flowId }, "Generating EDI 850"); + + if (!orderData.items || orderData.items.length === 0) { + throw new Error("Cannot generate EDI 850: No items in purchase order"); + } + + const edi850 = EdiGenerators.generateEDI850(orderData); + await this.config.context.ediTransactionRepo.createEdiTransaction({ + transactionId: flowId + "po", + partnerId: orderData.sender.duns, + customerId: orderData.receiver.duns, + docType: "850", + direction: "INBOUND", + timestamp: new Date(), + status: "RECEIVED", + rawEdi: edi850, + flowId, + }); + return edi850; + } + + async send_acknowledgment(input: { erpOrder: any }) { + const { flowId } = this.config; + const acknowledgmentEDIStr = EdiGenerators.generateReceiptAcknowledgment( + input.erpOrder.sender.duns, + input.erpOrder.receiver.duns, + input.erpOrder.items, 
+ ); + + await this.config.context.ediTransactionRepo.createEdiTransaction({ + transactionId: flowId + "acknowledgment" + Math.random().toString(36).substring(2, 5), + partnerId: input.erpOrder.sender.duns, + customerId: input.erpOrder.receiver.duns, + docType: "997", + direction: "OUTBOUND", + timestamp: new Date(), + status: "DELIVERED", + rawEdi: acknowledgmentEDIStr, + flowId, + }); + return acknowledgmentEDIStr; + } + + async generate_invoice(input: any) { + const { flowId } = this.config; + const invoiceEDIStr = EdiGenerators.generateInvoice( + input.erpOrder.sender.duns, + input.erpOrder.receiver.duns, + input.erpOrder.items, + ); + + const invoiceObj = await this.config.context.ediTransactionRepo.createEdiTransaction({ + transactionId: flowId + "invoice", + partnerId: input.erpOrder.sender.duns, + customerId: input.erpOrder.receiver.duns, + docType: "810", + direction: "OUTBOUND", + timestamp: new Date(), + status: "DELIVERED", + rawEdi: invoiceEDIStr, + flowId, + }); + return invoiceEDIStr; + } + + async send_invoice(input: any) { + return ""; + } + + async send_advanced_ship_notice(input: any) { + const { flowId } = this.config; + let advancedShipNoticeEDIStr = EdiGenerators.generateAdvancedShipNotice( + input.erpOrder.sender.duns, + input.erpOrder.receiver.duns, + input.erpOrder.items, + ); + // if (Math.random() < 0.15) { + // advancedShipNoticeEDIStr = advancedShipNoticeEDIStr + // .split("~\n") + // .map((seg) => { + // if (seg.startsWith("SN1*")) { + // const parts = seg.split("*"); + // const qty = parseFloat(parts[2] || "0"); + // parts[2] = (qty + 1).toString(); + // return parts.join("*"); + // } + // return seg; + // }) + // .join("~\n"); + // } + + const asnObj = await this.config.context.ediTransactionRepo.createEdiTransaction({ + transactionId: flowId + "asn", + partnerId: input.erpOrder.sender.duns, + customerId: input.erpOrder.receiver.duns, + docType: "856", + direction: "OUTBOUND", + timestamp: new Date(), + status: "DELIVERED", + rawEdi: advancedShipNoticeEDIStr, + flowId, + }); + return advancedShipNoticeEDIStr; + } + + async convert_edi_to_json(input: any) { + try { + const response = (await httpRequest("http://localhost:8003/tools/convert_edi_to_json", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: input, + })) as any; + return JSON.parse(response.data); + } catch (error) { + console.log(" Using mock EDI to JSON conversion (service unavailable)"); + return { transactionType: "850", segments: [], parsed: true }; + } + } + + getTools() { + return { + exchange_sender_receiver: this.exchange_sender_receiver.bind(this), + generatePurchaseOrder: this.generatePurchaseOrder.bind(this), + generateEDI850: this.generateEDI850.bind(this), + send_acknowledgment: this.send_acknowledgment.bind(this), + generate_invoice: this.generate_invoice.bind(this), + send_invoice: this.send_invoice.bind(this), + send_advanced_ship_notice: this.send_advanced_ship_notice.bind(this), + convert_edi_to_json: this.convert_edi_to_json.bind(this), + }; + } +} + +export class ErpServiceTools { + private config: EdiServiceToolsConfig; + + constructor(config: EdiServiceToolsConfig) { + this.config = config; + } + + async create_purchase_order(input: any) { + try { + console.log(" Creating PO in ERP for:", input.reference); + const response = { + order_id: Math.floor(Math.random() * 10000), + order_number: `PO-${Date.now()}`, + status: "confirmed", + total_amount: input.order_lines.reduce( + (sum: number, line: any) => sum + (line.quantity || 0) * (line.unitPrice || 
0), + 0, + ), + created_at: new Date().toISOString(), + supplier_id: input.supplier_id, + reference: input.reference, + }; + return response; + } catch (error) { + console.log("error", error); + console.log("Using mock ERP purchase order creation (service unavailable)"); + throw new Error("ERP service integration failed"); + } + } + + async get_purchase_order(poOrderNumber: string) { + try { + const response = fs.readFileSync(`${poOrderNumber}.json`, "utf8"); + return JSON.parse(response); + } catch (error) { + console.log("Using mock ERP purchase order retrieval (service unavailable)"); + throw new Error("ERP service integration failed"); + } + } + + getTools() { + return { + create_purchase_order: this.create_purchase_order.bind(this), + get_purchase_order: this.get_purchase_order.bind(this), + }; + } +} + +export function createServiceTools(config: EdiServiceToolsConfig) { + const ediTools = new EdiServiceTools(config); + const erpTools = new ErpServiceTools(config); + + return { + edi: ediTools.getTools(), + erp: erpTools.getTools(), + }; +} diff --git a/packages/controlmart/src/utils/edi/tool-annotations.edi.ts b/packages/controlmart/src/utils/edi/tool-annotations.edi.ts new file mode 100644 index 0000000000000000000000000000000000000000..6cce35a5daaa57d28ba10c9519793d0540485cb0 --- /dev/null +++ b/packages/controlmart/src/utils/edi/tool-annotations.edi.ts @@ -0,0 +1,101 @@ +/** + * EDI Tool Annotations for Knowledge Graph + * + * Annotations for EDI (Electronic Data Interchange) tools + * used in operational descriptors. + */ + +import type { ToolAnnotation } from '../../types'; + +/** + * Annotated EDI tools for knowledge graph building + * + * Section 1: Research OD tools (dotted naming convention) + * Section 2: Legacy tools (camelCase naming convention) + */ +export const EDI_TOOL_ANNOTATIONS: ToolAnnotation[] = [ + // ============================================================ + // Research OD Tools (used in perishables-food-manufacturer ODs) + // ============================================================ + + // --- EDI Generation Tools --- + { + toolId: 'edi.generate.850', + service: 'edi', + produces: ['EDI850Document', 'EDITransaction'], + requires: ['Order', 'Company'], + modifies: [], + description: 'Generate EDI 850 Purchase Order document', + }, + { + toolId: 'edi.generate.855', + service: 'edi', + produces: ['EDI855Document', 'EDITransaction'], + requires: ['Order', 'Company'], + modifies: [], + description: 'Generate EDI 855 Purchase Order Acknowledgment', + }, + { + toolId: 'edi.generate.856', + service: 'edi', + produces: ['EDI856Document', 'EDITransaction'], + requires: ['Shipment', 'Order', 'Company'], + modifies: [], + description: 'Generate EDI 856 Advanced Shipping Notice (ASN)', + }, + { + toolId: 'edi.generate.810', + service: 'edi', + produces: ['EDI810Document', 'EDITransaction'], + requires: ['Invoice', 'Order', 'Company'], + modifies: [], + description: 'Generate EDI 810 Invoice document', + }, + + // --- EDI Validation & Storage Tools --- + { + toolId: 'edi.validation.check', + service: 'edi', + produces: ['ValidationResult'], + requires: ['EDIDocument'], + modifies: [], + description: 'Validate an EDI document against X12 standards', + }, + { + toolId: 'edi.transaction.create', + service: 'edi', + produces: ['EDITransaction'], + requires: ['EDIDocument', 'Company'], + modifies: [], + description: 'Store an EDI transaction record', + }, + + // ============================================================ + // Legacy Tools (kept for backward 
compatibility) + // ============================================================ + + { + toolId: 'edi.generateEDI850', + service: 'edi', + produces: ['EDI850Document', 'EDITransaction'], + requires: ['Order'], + modifies: [], + description: 'Generates EDI 850 Purchase Order document from ERP order data in X12 format', + }, + { + toolId: 'edi.send_advanced_ship_notice', + service: 'edi', + produces: ['EDI856Document', 'EDITransaction'], + requires: ['Order', 'Shipment'], + modifies: [], + description: 'Generates EDI 856 Advanced Ship Notice from order and shipment tracking information', + }, + { + toolId: 'edi.generate_invoice', + service: 'edi', + produces: ['EDI810Document', 'EDITransaction'], + requires: ['Order', 'Invoice'], + modifies: [], + description: 'Generates EDI 810 Invoice document from ERP invoice in X12 format', + }, +]; diff --git a/packages/controlmart/src/utils/edi/validation.edi.util.ts b/packages/controlmart/src/utils/edi/validation.edi.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..44516aa0d3922968173cf1523d64f334b469a505 --- /dev/null +++ b/packages/controlmart/src/utils/edi/validation.edi.util.ts @@ -0,0 +1,221 @@ +import type { Logger } from "pino"; + +export interface EdiValidationResult { + isValid: boolean; + errors: Array<{ code: string; message: string }>; + warnings: string[]; +} + +export interface EdiValidationConfig { + flowId: string; + logger: Logger; + context: any; +} + +// Internal error map +const DEFAULT_EDI_ERROR_MAP = { + MISSING_BIG: { code: "AK304=3", msg: "BIG segment missing at position 020" }, + INVALID_DATE_BIG01: { code: "AK403=8", msg: "BIG01 date invalid; expected CCYYMMDD" }, + INVALID_CODE_N104: { code: "AK403=7", msg: "N1/BT DUNS not found in Partner Master" }, + INVALID_CHARS_REF: { code: "AK403=6", msg: "Employer ID must be 9 numeric" }, + VALUE_MISMATCH_TDS01: { code: "AK403=7", msg: "TDS total ≠ sum of line extensions" }, + INVALID_PRECISION_IT104: { code: "AK403=7", msg: "Invalid unit price format or decimal precision" }, + COUNT_MISMATCH_CTT01_IT1: { code: "AK403=7", msg: "CTT01 line count ≠ # of IT1" }, + INVALID_CURRENCY: { code: "AK403=7", msg: "Currency not permitted (expects USD)" }, + DUPLICATE_INVOICE: { code: "824", msg: "Duplicate invoice number in 12-mo window" }, + CONTROL_MISMATCH_ST_SE: { code: "AK502=3", msg: "ST02 ≠ SE02" }, + INVALID_DATE_FORMAT_DTM: { code: "AK403=8", msg: "Invalid date format; expected CCYYMMDD" }, + INVALID_ENTITY_CODE: { code: "AK403=7", msg: "Invalid entity identifier code" }, + INVALID_POSTAL_SHIPTO: { code: "AK403=7", msg: "Invalid/missing Ship-To postal code" }, + INVALID_CHANGE_TYPE: { code: "AK403=7", msg: "BCH01 change type not allowed" }, + VERSION_MISMATCH: { code: "AK403=7", msg: "GS08 version not as per partner profile" }, + COUNT_MISMATCH_CTT01_POC: { code: "AK403=7", msg: "CTT01 ≠ # of POC segments" }, + MISSING_BCA: { code: "IK304=3", msg: "BCA missing at pos 020" }, + INVALID_ACK_CODE: { code: "AK403=7", msg: "ACK status code invalid" }, + MISSING_ACK_UOM: { code: "AK403=2", msg: "ACK02 present but ACK03 missing" }, + MISSING_BSN: { code: "IK304=3", msg: "BSN segment missing" }, + RELATIONAL_ERROR_HL: { code: "IK304=4", msg: "HL parent reference invalid" }, + MISSING_SSCC: { code: "IK304=3", msg: "Missing SSCC at pallet/case" }, + INVALID_SCAC: { code: "AK403=7", msg: "Invalid SCAC for method/partner" }, + QTY_EXCEEDS_ASN: { code: "824", msg: "ASN qty exceeds accepted/shipped qty" }, + SLA_BREACH_BSN: { code: "SLA", msg: "ASN received after gate-in by 
>30m" }, + FUNCTIONAL_REJECT: { code: "AK5/IK5", msg: "Functional/implementation reject" }, + ELEMENT_ERROR: { code: "AK3/IK3", msg: "Segment has data element errors" }, + INVALID_TA105: { code: "TA105", msg: "Invalid date/time or duplicate control #" }, + PARSER_ERROR: { code: "PARSER", msg: "Invalid delimiter set" }, + DELAYED_MDN: { code: "SLA", msg: "Delayed 997/MDN beyond 60m" }, + DUPLICATE_ISA: { code: "TA105", msg: "Duplicate interchange control number" } +}; + +// Helper for parsing EDI segments +const parseEdiSegments = (edi: string) => edi.split("~").map(s => s.trim()).filter(s => s.length > 0); + +// --- Context-Aware Validation Helpers --- + +const validateQuantityConsistency = (segments: string[], context: any): Array<{ code: string, message: string }> => { + const errors: Array<{ code: string, message: string }> = []; + if (!context) return errors; + + // 856 ASN checks: Sum of SN1 quantities vs ERP/Shipment total + const sn1Segments = segments.filter(s => s.startsWith("SN1*")); + let shipmentEqSum = 0; + sn1Segments.forEach(s => { + const parts = s.split("*"); + shipmentEqSum += parseFloat(parts[2] || "0") || 0; + }); + + const sourceItems = context.items || context.lines || []; + const erpQtySum = sourceItems.reduce((sum: number, item: any) => sum + (Number(item.quantity) || Number(item.quantityOrdered) || 0), 0); + + // Tolerance check + if (Math.abs(shipmentEqSum - erpQtySum) > 0.01 && erpQtySum > 0) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.QTY_EXCEEDS_ASN.code, message: DEFAULT_EDI_ERROR_MAP.QTY_EXCEEDS_ASN.msg }); + } + + return errors; +}; + +export const validateEdiTransaction = (rawEdi: string, docType: string, context?: any): EdiValidationResult => { + const segments = parseEdiSegments(rawEdi); + const errors: Array<{ code: string, message: string }> = []; + + // --- Common Structural Checks --- + if (segments.length <= 1 && rawEdi.length > 50) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.PARSER_ERROR.code, message: DEFAULT_EDI_ERROR_MAP.PARSER_ERROR.msg }); + return { isValid: false, errors, warnings: [] }; + } + + const st = segments.find(s => s.startsWith("ST*")); + const se = segments.find(s => s.startsWith("SE*")); + if (st && se) { + const stParts = st.split("*"); + const seParts = se.split("*"); + if (stParts[2] !== seParts[2]) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.CONTROL_MISMATCH_ST_SE.code, message: DEFAULT_EDI_ERROR_MAP.CONTROL_MISMATCH_ST_SE.msg }); + } + } + + // --- Specific 810 (Invoice) Checks --- + if (docType === "810") { + const big = segments.find(s => s.startsWith("BIG*")); + if (!big) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.MISSING_BIG.code, message: DEFAULT_EDI_ERROR_MAP.MISSING_BIG.msg }); + } else { + const bigParts = big.split("*"); + const date = bigParts[1] || ""; + if (!/^\d{8}$/.test(date)) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.INVALID_DATE_BIG01.code, message: DEFAULT_EDI_ERROR_MAP.INVALID_DATE_BIG01.msg }); + } + if (bigParts[2] && bigParts[2].endsWith("999")) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.DUPLICATE_INVOICE.code, message: DEFAULT_EDI_ERROR_MAP.DUPLICATE_INVOICE.msg }); + } + } + + const tds = segments.find(s => s.startsWith("TDS*")); + if (tds) { + const tdsParts = tds.split("*"); + if (tdsParts[1] === "INVALID") { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.VALUE_MISMATCH_TDS01.code, message: DEFAULT_EDI_ERROR_MAP.VALUE_MISMATCH_TDS01.msg }); + } + } + + segments.filter(s => s.startsWith("IT1*")).forEach(s => { + const parts = s.split("*"); + const price = parts[4]; + if (price && 
price.includes(".") && (price.split(".")[1]?.length || 0) > 4) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.INVALID_PRECISION_IT104.code, message: DEFAULT_EDI_ERROR_MAP.INVALID_PRECISION_IT104.msg }); + } + }); + + const cur = segments.find(s => s.startsWith("CUR*")); + if (cur) { + const parts = cur.split("*"); + if (parts[2] !== "USD") { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.INVALID_CURRENCY.code, message: DEFAULT_EDI_ERROR_MAP.INVALID_CURRENCY.msg }); + } + } + } + + // --- Specific 856 (ASN) Checks --- + if (docType === "856") { + const bsn = segments.find(s => s.startsWith("BSN*")); + if (!bsn) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.MISSING_BSN.code, message: DEFAULT_EDI_ERROR_MAP.MISSING_BSN.msg }); + } else { + if (bsn.includes("LATE")) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.SLA_BREACH_BSN.code, message: DEFAULT_EDI_ERROR_MAP.SLA_BREACH_BSN.msg }); + } + } + + const hls = segments.filter(s => s.startsWith("HL*")); + const ids = new Set(hls.map(s => s.split("*")[1])); + hls.forEach(hl => { + const parts = hl.split("*"); + const parentId = parts[2]; + if (parentId && parentId !== "" && !ids.has(parentId)) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.RELATIONAL_ERROR_HL.code, message: DEFAULT_EDI_ERROR_MAP.RELATIONAL_ERROR_HL.msg }); + } + }); + + if (rawEdi.includes("HL*") && !rawEdi.includes("MAN*")) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.MISSING_SSCC.code, message: DEFAULT_EDI_ERROR_MAP.MISSING_SSCC.msg }); + } + + const td5 = segments.find(s => s.startsWith("TD5*")); + if (td5) { + const parts = td5.split("*"); + if (parts[3] === "BAD") { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.INVALID_SCAC.code, message: DEFAULT_EDI_ERROR_MAP.INVALID_SCAC.msg }); + } + } + + // Context check for ASN (Quantity Mismatch vs PO) + if (context) { + errors.push(...validateQuantityConsistency(segments, context)); + } + } + + // --- General Checks --- + segments.filter(s => s.startsWith("N1*")).forEach(s => { + const parts = s.split("*"); + if (parts[1] === "XX") { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.INVALID_ENTITY_CODE.code, message: DEFAULT_EDI_ERROR_MAP.INVALID_ENTITY_CODE.msg }); + } + if (parts[4] === "INVALID") { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.INVALID_CODE_N104.code, message: DEFAULT_EDI_ERROR_MAP.INVALID_CODE_N104.msg }); + } + }); + + segments.filter(s => s.startsWith("REF*")).forEach(s => { + const parts = s.split("*"); + if (parts[2] && /[^a-zA-Z0-9]/.test(parts[2])) { + errors.push({ code: DEFAULT_EDI_ERROR_MAP.INVALID_CHARS_REF.code, message: DEFAULT_EDI_ERROR_MAP.INVALID_CHARS_REF.msg }); + } + }); + + return { + isValid: errors.length === 0, + errors, + warnings: [] + }; +}; + +export class EdiValidator { + private config: EdiValidationConfig; + + constructor(config: EdiValidationConfig) { + this.config = config; + } + + async validate_edi(input: { edi: string; erpOrder: any }): Promise { + return { isValid: true, errors: [], warnings: [] }; + } + + validateEdiFormat(edi: string, docType: string): EdiValidationResult { + return validateEdiTransaction(edi, docType, this.config.context); + } +} + +export function createEdiValidator(config: EdiValidationConfig): EdiValidator { + return new EdiValidator(config); +} diff --git a/packages/controlmart/src/utils/env.util.ts b/packages/controlmart/src/utils/env.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..df0df6eb9e582d625b5eb38c83a33d919ccbd955 --- /dev/null +++ b/packages/controlmart/src/utils/env.util.ts @@ -0,0 +1,66 @@ +import dotenv from "dotenv"; + +import 
path from "path"; +import { existsSync } from "fs"; + +// Determine path to .env +// Prioritize local .env next to executable (for binary distribution) +const execDir = path.dirname(process.execPath); +const localEnvPath = path.join(execDir, ".env"); +// Fallback to default (process.cwd()) if local .env doesn't exist +const envPath = existsSync(localEnvPath) ? localEnvPath : undefined; + +dotenv.config({ path: envPath, override: true }); + +type EnvShape = { + NODE_ENV: "development" | "production"; + PORT: number; + MONGO_URI: string; + DB_NAME: string; + JWT_SECRET?: string; + SERVICE_NAME?: string; + LOG_LEVEL?: string; + OPENAI_API_KEY?: string; + MAX_TICKET_RETRIES?: number; + ENABLE_CHAOS?: boolean; + SEND_TICKETS_TO_SERVICENOW?: boolean; + IIRIS_URL?: string; +}; + +const requiredKeys = ["MONGO_URI", "DB_NAME", "PORT", "OPENAI_API_KEY"] as const; + +export const loadEnv = (): EnvShape => { + const missing = requiredKeys.filter((key) => !process.env[key]); + if (missing.length > 0) { + console.error(`[env] Missing environment variables: ${missing.join(", ")}`); + process.exit(1); + } + + const NODE_ENV = process.env.NODE_ENV as "development" | "production"; + const PORT = Number(process.env.PORT) || 8282; + const MONGO_URI = process.env.MONGO_URI!; + const DB_NAME = process.env.DB_NAME!; + const JWT_SECRET = process.env.JWT_SECRET; + const SERVICE_NAME = process.env.SERVICE_NAME || "controlmart"; + const LOG_LEVEL = process.env.LOG_LEVEL || "info"; + const OPENAI_API_KEY = process.env.OPENAI_API_KEY; + const MAX_TICKET_RETRIES = Number(process.env.MAX_TICKET_RETRIES) || 3; + const ENABLE_CHAOS = process.env.ENABLE_CHAOS === "true"; + const SEND_TICKETS_TO_SERVICENOW = process.env.SEND_TICKETS_TO_SERVICENOW === "true"; + const IIRIS_URL = process.env.IIRIS_URL; + + return { + NODE_ENV, + PORT, + MONGO_URI, + DB_NAME, + JWT_SECRET, + SERVICE_NAME, + LOG_LEVEL, + OPENAI_API_KEY, + MAX_TICKET_RETRIES, + ENABLE_CHAOS, + SEND_TICKETS_TO_SERVICENOW, + IIRIS_URL, + }; +}; diff --git a/packages/controlmart/src/utils/erp/tool-annotations.erp.ts b/packages/controlmart/src/utils/erp/tool-annotations.erp.ts new file mode 100644 index 0000000000000000000000000000000000000000..d64eae35802e1793be93bbbee641889feb96e540 --- /dev/null +++ b/packages/controlmart/src/utils/erp/tool-annotations.erp.ts @@ -0,0 +1,182 @@ +/** + * ERP Tool Annotations for Knowledge Graph + * + * Annotations for ERP tools used in operational descriptors. + * Each annotation describes what entities a tool produces, requires, and modifies. 
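 * (Entries may additionally declare `fetches` for read-only lookups, alongside
 * produces/requires/modifies.)
 *
 * The @example below is an illustrative sketch only, not the project's actual
 * graph builder: the helper name `annotationToEdges` and the edge shape are
 * assumptions made for illustration.
 *
 * @example
 * const annotationToEdges = (a: ToolAnnotation) => [
 *   ...a.produces.map((e) => ({ from: a.toolId, to: e, kind: 'produces' as const })),
 *   ...(a.fetches ?? []).map((e) => ({ from: a.toolId, to: e, kind: 'fetches' as const })),
 *   ...a.requires.map((e) => ({ from: e, to: a.toolId, kind: 'requires' as const })),
 *   ...a.modifies.map((e) => ({ from: a.toolId, to: e, kind: 'modifies' as const })),
 * ];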
+ */ + +import type { ToolAnnotation } from '../../types'; + +/** + * Annotated ERP tools for knowledge graph building + * + * Section 1: Research OD tools (dotted naming convention) + * Section 2: Legacy tools (snake_case naming convention) + */ +export const ERP_TOOL_ANNOTATIONS: ToolAnnotation[] = [ + // ============================================================ + // Research OD Tools (used in perishables-food-manufacturer ODs) + // ============================================================ + + // --- Order Tools --- + { + toolId: 'erp.order.get_by_id', + service: 'erp', + produces: [], + fetches: ['Order'], + requires: [], + modifies: [], + description: 'Get an order by its ID', + }, + { + toolId: 'erp.order.get_all', + service: 'erp', + produces: [], + fetches: ['Order'], + requires: [], + modifies: [], + description: 'Get all orders with optional filters', + }, + { + toolId: 'erp.order.create', + service: 'erp', + produces: ['Order'], + requires: ['Company', 'Product'], + modifies: [], + description: 'Create a new order (purchase or sales)', + }, + { + toolId: 'erp.order.update_status', + service: 'erp', + produces: [], + requires: ['Order'], + modifies: ['Order'], + description: 'Update the status of an order', + }, + + // --- Company Tools --- + { + toolId: 'erp.company.get_mpc', + service: 'erp', + produces: [], + fetches: ['Company'], + requires: [], + modifies: [], + description: 'Get the main production company (MPC)', + }, + { + toolId: 'erp.company.get_all', + service: 'erp', + produces: [], + fetches: ['Company'], + requires: [], + modifies: [], + description: 'Get all companies with optional filters', + }, + { + toolId: 'erp.company.get_by_id', + service: 'erp', + produces: [], + fetches: ['Company'], + requires: [], + modifies: [], + description: 'Get a company by its ID', + }, + { + toolId: 'erp.company.get_random_customer', + service: 'erp', + produces: [], + fetches: ['Company'], + requires: [], + modifies: [], + description: 'Get a random customer company for testing/simulation', + }, + + // --- Product Tools --- + { + toolId: 'erp.product.get_by_id', + service: 'erp', + produces: [], + fetches: ['Product'], + requires: [], + modifies: [], + description: 'Get a product by its ID', + }, + { + toolId: 'erp.product.get_random', + service: 'erp', + produces: [], + fetches: ['Product'], + requires: [], + modifies: [], + description: 'Get a random product for testing/simulation', + }, + { + toolId: 'erp.product.get_all', + service: 'erp', + produces: [], + fetches: ['Product'], + requires: [], + modifies: [], + description: 'Get all products with optional filters', + }, + + // --- Invoice Tools --- + { + toolId: 'erp.invoice.create', + service: 'erp', + produces: ['Invoice'], + requires: ['Order', 'Company'], + modifies: [], + description: 'Create an invoice for an order', + }, + { + toolId: 'erp.invoice.update', + service: 'erp', + produces: [], + requires: ['Invoice'], + modifies: ['Invoice'], + description: 'Update an invoice (status, payment info)', + }, + { + toolId: 'erp.invoice.get_by_order', + service: 'erp', + produces: [], + fetches: ['Invoice'], + requires: ['Order'], + modifies: [], + description: 'Get an invoice by its associated order ID', + }, + + // --- Payment Tools --- + { + toolId: 'erp.payment.create', + service: 'erp', + produces: ['Payment'], + requires: ['Invoice', 'Company'], + modifies: ['Invoice'], + description: 'Create a payment record and associate it with an invoice', + }, + + // ============================================================ + 
// Legacy Tools (kept for backward compatibility) + // ============================================================ + + { + toolId: 'erp.create_purchase_order', + service: 'erp', + produces: ['Order'], + requires: ['Customer', 'Product'], + modifies: [], + description: 'Creates a purchase order in ERP system with supplier, product lines, and pricing', + }, + { + toolId: 'erp.get_purchase_order', + service: 'erp', + produces: [], + fetches: ['OrderDetails'], + requires: ['Order'], + modifies: [], + description: 'Retrieves complete purchase order details by order number', + }, +]; diff --git a/packages/controlmart/src/utils/error.util.ts b/packages/controlmart/src/utils/error.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..45e9f3aacca19c8f12bb261d6fc002c11ca959ed --- /dev/null +++ b/packages/controlmart/src/utils/error.util.ts @@ -0,0 +1,82 @@ +import type { NextFunction, Request, Response } from "express"; + +import { sendResponse } from "./http.util"; + +type RepositoryErrorCode = + | "VALIDATION_ERROR" + | "DATABASE_ERROR" + | "NOT_FOUND_ERROR" + | "DUPLICATE_ERROR" + | "CONSTRAINT_ERROR"; + +export class RepositoryError extends Error { + constructor( + message: string, + public readonly code?: RepositoryErrorCode, + ) { + super(message); + this.name = "RepositoryError"; + } + + toJSON() { + return { + name: this.name, + message: this.message, + code: this.code, + stack: this.stack + }; + } +} + +export const getErrorMessage = (error: unknown): string => { + if (error instanceof Error) { + return error.message; + } + return String(error); +}; + +// Lazy-load logger to avoid circular dependency +let _logger: any = null; +const getLogger = () => { + if (!_logger) { + try { + const indexModule = require("../.."); + _logger = indexModule.logger; + } catch (err) { + // Fallback to console if logger not available + _logger = console; + } + } + return _logger; +}; + +export const globalErrorHandler = ( + err: unknown, + req: Request, + res: Response, + _next: NextFunction, +) => { + const logger = getLogger(); + logger.error({ err, req }, `${getErrorMessage(err) ?? "Unknown error"}`); + if (err instanceof RepositoryError) { + sendResponse({ + res, + status: + err.code === "VALIDATION_ERROR" + ? 400 + : err.code === "NOT_FOUND_ERROR" + ? 404 + : err.code === "DUPLICATE_ERROR" + ? 409 + : 500, + error: err.message, + }); + return; + } + + sendResponse({ + res, + status: 500, + error: getErrorMessage(err), + }); +}; diff --git a/packages/controlmart/src/utils/finance/tool-annotations.finance.ts b/packages/controlmart/src/utils/finance/tool-annotations.finance.ts new file mode 100644 index 0000000000000000000000000000000000000000..a87f86eb44ea5c3f98344049dbe87fb5a8f71401 --- /dev/null +++ b/packages/controlmart/src/utils/finance/tool-annotations.finance.ts @@ -0,0 +1,119 @@ +/** + * Finance Tool Annotations for Knowledge Graph + * + * Annotations for Finance/Ledger tools used in operational descriptors. + * Each annotation describes what entities a tool produces, requires, and modifies. 
+ */ + +import type { ToolAnnotation } from '../../types'; + +/** + * Annotated Finance tools for knowledge graph building + * + * Common Finance Entities: + * - Ledger: Financial ledger with cash, receivables, payables + * - Transaction: Financial transaction record + * - Payment: Payment record + * - FinancialSummary: Aggregated financial summary + */ +export const FINANCE_TOOL_ANNOTATIONS: ToolAnnotation[] = [ + // ============================================================ + // Ledger Tools + // ============================================================ + + { + toolId: 'finance.ledger.get', + service: 'finance', + produces: [], + fetches: ['Ledger'], + requires: [], + modifies: [], + description: 'Get the current financial ledger state', + }, + { + toolId: 'finance.ledger.increment', + service: 'finance', + produces: [], + requires: ['Ledger'], + modifies: ['Ledger'], + description: 'Increment a ledger account balance', + }, + { + toolId: 'finance.ledger.ensure', + service: 'finance', + produces: ['Ledger'], + requires: [], + modifies: [], + description: 'Ensure ledger exists, create if not present', + }, + + // ============================================================ + // Summary Tools + // ============================================================ + + { + toolId: 'finance.summary.get', + service: 'finance', + produces: [], + fetches: ['FinancialSummary'], + requires: [], + modifies: [], + description: 'Get financial summary with totals and metrics', + }, + + // ============================================================ + // Transaction Tools + // ============================================================ + + { + toolId: 'finance.transaction.get_all', + service: 'finance', + produces: [], + fetches: ['Transaction'], + requires: [], + modifies: [], + description: 'Get all financial transactions with optional filters', + }, + { + toolId: 'finance.transaction.create', + service: 'finance', + produces: ['Transaction'], + requires: [], + modifies: ['Ledger'], + description: 'Create a financial transaction and update ledger', + }, + + // ============================================================ + // Aggregation Tools + // ============================================================ + + { + toolId: 'finance.aggregate.by_type', + service: 'finance', + produces: ['AggregateReport'], + requires: ['Transaction'], + modifies: [], + description: 'Aggregate transactions by type (income, expense, etc.)', + }, + { + toolId: 'finance.aggregate.by_partner', + service: 'finance', + produces: ['AggregateReport'], + requires: ['Transaction'], + modifies: [], + description: 'Aggregate transactions by trading partner', + }, + + // ============================================================ + // Payment Tools + // ============================================================ + + { + toolId: 'finance.payment.create', + service: 'finance', + produces: ['Payment'], + requires: ['Invoice'], + modifies: ['Ledger', 'Invoice'], + description: 'Create a payment record and update ledger', + }, +]; diff --git a/packages/controlmart/src/utils/http.util.ts b/packages/controlmart/src/utils/http.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..62e6760fcf174afc50471bf765bfe17e0e76fd75 --- /dev/null +++ b/packages/controlmart/src/utils/http.util.ts @@ -0,0 +1,169 @@ +import { setTimeout as sleep } from "node:timers/promises"; + +import type { Response, Request } from "express"; + +import { getErrorMessage } from "./error.util"; +import type { HttpRequestOptions } from 
"../types/http.utils.type"; + +export const GLOBAL_PAGE_LIMIT = 20; + +type SendResponseArgs = { + res: Response; + status?: number; + data?: any; + error?: string | Error; + stream?: boolean; + event?: string; + enableEvents?: boolean; // New parameter to explicitly enable SSE + pagination?: { + limit: number; + previousCursor: string | null; + nextCursor: string | null; + totalCount: number; + hasMore: boolean; + }; + meta?: Record; +}; + +export const sendResponse = ({ + res, + status = 200, + data, + error, + stream, + event = "message", + enableEvents, + pagination, + meta = {}, +}: SendResponseArgs) => { + try { + const isStream = + stream ?? + enableEvents ?? + (res.getHeader("Content-Type") === "text/event-stream" || + res.getHeader("X-Stream-Mode") === "true" || + (event && event !== "message")); // Enable streaming for custom events + + if (res.writableEnded || res.headersSent) return; + + const timestamp = new Date().toISOString(); + const success = !error; + const basePayload = { + success, + status, + ...(success ? { data } : { error: getErrorMessage(error) }), + meta: { event, timestamp, ...meta }, + ...(pagination ? { pagination } : {}), + }; + + if (isStream) { + console.log( + `[SSE] Sending event: ${event}, data:`, + data ? JSON.stringify(data).substring(0, 100) : "null", + ); + // Ensure headers are flushed for SSE + if (!res.headersSent) { + res.flushHeaders(); + } + res.write(`event: ${event}\n`); + res.write(`data: ${JSON.stringify(basePayload)}\n\n`); + // Force flush the response buffer + if (res.flush) { + res.flush(); + } + } else { + res.status(status).json(basePayload); + } + } catch (err) { + console.error(`[sendResponse:error] ${getErrorMessage(err)}`); + } +}; + +export const asyncHandler = + (fn: any) => + (req: Request, res: Response, next: any): Promise => + Promise.resolve(fn(req, res, next)).catch(next); + +async function withTimeout(promise: Promise, ms: number): Promise { + let timeoutId: NodeJS.Timeout; + const timeout = new Promise((_, reject) => { + timeoutId = setTimeout(() => reject(new Error(`Timeout after ${ms}ms`)), ms); + }); + + try { + return await Promise.race([promise, timeout]); + } finally { + clearTimeout(timeoutId!); + } +} + +export async function httpRequest(url: string, options: HttpRequestOptions = {}): Promise { + const { + method = "GET", + headers = {}, + body, + retries = 3, + retryDelayMs = 500, + queryParams, + retryOn = [408, 429, 500, 502, 503, 504], + timeoutMs = 10_000, + backoffFactor = 2, + jitter = true, + } = options; + + let attempt = 0; + let lastError: any; + + while (attempt <= retries) { + try { + if (queryParams && Object.keys(queryParams).length > 0) { + const urlObj = new URL(url); + Object.entries(queryParams).forEach(([key, value]) => { + urlObj.searchParams.append(key, value); + }); + url = urlObj.toString(); + } + const res = await withTimeout( + fetch(url, { + method, + headers, + body: body ? JSON.stringify(body) : undefined, + }), + timeoutMs, + ); + + if (res.ok) { + const text = await res.text(); + try { + return JSON.parse(text) as T; + } catch { + return text as unknown as T; + } + } + + if (retryOn.includes(res.status)) { + throw new Error(`Retryable HTTP error: ${res.status}`); + } + + throw new Error(`HTTP error: ${res.status} ${res.statusText}`); + } catch (err: any) { + lastError = err; + console.warn( + `HTTP request failed (attempt ${attempt + 1}): ${ + err.message + }. 
Payload: ${JSON.stringify(body)}`, + ); + if (attempt >= retries) break; + + let delay = retryDelayMs * Math.pow(backoffFactor, attempt); + if (jitter) { + delay = delay * (0.5 + Math.random() * 0.5); + } + + await sleep(delay); + attempt++; + } + } + + throw lastError; +} diff --git a/packages/controlmart/src/utils/log-transport.util.ts b/packages/controlmart/src/utils/log-transport.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..fbede61ff6a2c0b0fd0b2f6eae058201a752da38 --- /dev/null +++ b/packages/controlmart/src/utils/log-transport.util.ts @@ -0,0 +1,79 @@ +import crypto from "crypto"; + +import { WorldLog, type TWorldLogInput } from "../models/logs.model"; + + +export class MongoBufferedWorldLogStream { + private buffer: any[] = []; + private readonly flushSize: number; + private readonly flushInterval: number; + private flushTimer: NodeJS.Timeout | null = null; + + constructor(options?: { flushSize?: number; flushInterval?: number }) { + this.flushSize = options?.flushSize || 25; + this.flushInterval = options?.flushInterval || 2000; + this.scheduleFlush(); + } + + private scheduleFlush() { + if (this.flushTimer) clearTimeout(this.flushTimer); + this.flushTimer = setTimeout(() => this.flush(), this.flushInterval); + } + + async write(logLine: string) { + try { + const log = JSON.parse(logLine); + const logDoc: TWorldLogInput = { + worldRef: log.worldRef || (log.worldId ? { worldId: log.worldId } : undefined), + logId: crypto.randomUUID(), + timestamp: new Date(log.time || Date.now()), + serviceType: log.serviceType || "other", + level: mapLevel(log.level) as TWorldLogInput["level"], + msg: log.msg || log.message || "", + metadata: log.metadata || log, + }; + + this.buffer.push(logDoc); + if (this.buffer.length >= this.flushSize) await this.flush(); + } catch (err) { + console.error("Failed to parse or buffer log:", err); + } + } + + private async flush() { + if (this.buffer.length === 0) return; + const docs = this.buffer.splice(0, this.buffer.length); + try { + await WorldLog.insertMany(docs, { ordered: false }); + } catch (err) { + console.error("WorldLog batch insert failed:", err); + } finally { + this.scheduleFlush(); + } + } + + async close() { + if (this.flushTimer) clearTimeout(this.flushTimer); + await this.flush(); + } +} + +const mapLevel = (level: number | string): TWorldLogInput["level"] => { + if (typeof level === "string") { + return level as TWorldLogInput["level"]; + } + const map: Record = { + 10: "trace", + 20: "debug", + 30: "info", + 40: "warn", + 50: "error", + 60: "fatal", + }; + const mappedLevel = map[level]; + if (!mappedLevel) { + console.warn(`Unknown log level: ${level}`); + return "info" as TWorldLogInput["level"]; + } + return mappedLevel as TWorldLogInput["level"]; +}; diff --git a/packages/controlmart/src/utils/logger.util.ts b/packages/controlmart/src/utils/logger.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..fa810dab435a7ed11572467b166020c0363a10e6 --- /dev/null +++ b/packages/controlmart/src/utils/logger.util.ts @@ -0,0 +1,122 @@ +import type { Logger } from "pino"; +import pino from "pino"; +import { pinoHttp } from "pino-http"; +import { faker } from "@faker-js/faker"; + +import { loadEnv } from "./env.util"; +import { checkMongoConnection, connectMongo } from "../services/mongo.service"; +import { MongoBufferedWorldLogStream } from "./log-transport.util"; +import type { IOperationalDescriptorContext } from "../operational-descriptor/init.od"; + +export type TLoggerConfig = { + service?: 
string; + env?: string; + level?: string; + base?: Record; +}; + +export const createAppLogger = (config: TLoggerConfig = {}): Logger => { + const { service, env, level, base } = config; + + return pino({ + level: level || loadEnv().LOG_LEVEL || "info", + base: { + service: service || loadEnv().SERVICE_NAME || "app", + env: env || loadEnv().NODE_ENV || "production", + ...base, + }, + timestamp: pino.stdTimeFunctions.isoTime, + formatters: { + level(label) { + return { level: label }; + }, + }, + redact: { + paths: [ + "req.headers.authorization", + "req.body.password", + "req.body.token", + "req.body.secret", + ], + remove: true, + }, + }); +}; + +export const createChildLogger = (parent: Logger, bindings: Record) => + parent.child(bindings); + +export const createHttpLogger = (logger: Logger) => + pinoHttp({ + logger, + autoLogging: true, + serializers: { + req(req) { + return { + method: req.method, + url: req.url, + id: req.id, + remoteAddress: req.remoteAddress, + remotePort: req.remotePort, + }; + }, + res(res) { + return { statusCode: res.statusCode }; + }, + }, + }); + +export const createMongoTransportLogger = async (opts: { + flushSize?: number; + flushInterval?: number; +}): Promise => { + const envs = loadEnv(); + if (!checkMongoConnection().isConnected) { + await connectMongo({ uri: envs.MONGO_URI, dbName: envs.DB_NAME }); + } + + const stream = new MongoBufferedWorldLogStream({ + flushSize: opts.flushSize, + flushInterval: opts.flushInterval, + }); + + const multi = pino.multistream([{ stream: process.stdout }, { stream: stream as any }]); + + const mongoLogger = pino( + { + level: loadEnv().LOG_LEVEL || "info", + timestamp: pino.stdTimeFunctions.isoTime, + base: {}, + formatters: { + level(label) { + return { level: label }; + }, + }, + }, + multi, + ); + + process.on("beforeExit", async () => await stream.close()); + process.on("SIGINT", async () => { + await stream.close(); + process.exit(0); + }); + + return mongoLogger; +}; + +export async function generateLog( + level: "error" | "info", + data: any, + message: string, + context: IOperationalDescriptorContext, +) { + await context.worldLogRepo.createLog({ + logId: faker.string.uuid(), + level, + timestamp: new Date(), + serviceType: context.serviceType, + msg: message, + metadata: data, + }); +} diff --git a/packages/controlmart/src/utils/manufacturing/tool-annotations.manufacturing.ts b/packages/controlmart/src/utils/manufacturing/tool-annotations.manufacturing.ts new file mode 100644 index 0000000000000000000000000000000000000000..ff0e9f24b61f3b1420208006ed34dd97fed92c23 --- /dev/null +++ b/packages/controlmart/src/utils/manufacturing/tool-annotations.manufacturing.ts @@ -0,0 +1,104 @@ +import type { ToolAnnotation } from '../../types'; +export const MANUFACTURING_TOOL_ANNOTATIONS: ToolAnnotation[] = [ + { + toolId: 'manufacturing.production.run.create', + service: 'manufacturing', + produces: ['ProductionRun'], + requires: ['Order'], + modifies: [], + description: 'Create a new production run for a production order', + }, + { + toolId: 'manufacturing.production.run.update_status', + service: 'manufacturing', + produces: [], + requires: ['ProductionRun'], + modifies: ['ProductionRun'], + description: 'Update the status of a production run (CREATED, IN_PROGRESS, COMPLETED)', + }, + { + toolId: 'manufacturing.production.run.get_by_order', + service: 'manufacturing', + produces: [], + fetches: ['ProductionRun'], + requires: ['Order'], + modifies: [], + description: 'Get all production runs for a specific production order', 
+ }, + { + toolId: 'manufacturing.production.run.get_by_id', + service: 'manufacturing', + produces: [], + fetches: ['ProductionRun'], + requires: [], + modifies: [], + description: 'Get a production run by its ID', + }, + { + toolId: 'manufacturing.production.run.get_all', + service: 'manufacturing', + produces: [], + fetches: ['ProductionRun'], + requires: [], + modifies: [], + description: 'Get all production runs with optional filters', + }, + { + toolId: 'manufacturing.bom.get_by_product', + service: 'manufacturing', + produces: [], + fetches: ['BillOfMaterials'], + requires: ['Product'], + modifies: [], + description: 'Get the bill of materials for a product', + }, + { + toolId: 'manufacturing.bom.create', + service: 'manufacturing', + produces: ['BillOfMaterials'], + requires: ['Product'], + modifies: [], + description: 'Create a bill of materials for a product', + }, + { + toolId: 'manufacturing.bom.update', + service: 'manufacturing', + produces: [], + requires: ['BillOfMaterials'], + modifies: ['BillOfMaterials'], + description: 'Update a bill of materials', + }, + { + toolId: 'manufacturing.schedule.create', + service: 'manufacturing', + produces: ['ProductionSchedule'], + requires: ['Order'], + modifies: [], + description: 'Create a production schedule for manufacturing orders', + }, + { + toolId: 'manufacturing.schedule.get_by_date_range', + service: 'manufacturing', + produces: [], + fetches: ['ProductionSchedule'], + requires: [], + modifies: [], + description: 'Get production schedules within a date range', + }, + { + toolId: 'manufacturing.quality.inspection.create', + service: 'manufacturing', + produces: ['QualityInspection'], + requires: ['ProductionRun'], + modifies: [], + description: 'Create a quality inspection for a production run', + }, + { + toolId: 'manufacturing.quality.inspection.update', + service: 'manufacturing', + produces: [], + requires: ['QualityInspection'], + modifies: ['QualityInspection', 'ProductionRun'], + description: 'Update quality inspection results (pass/fail)', + }, +]; diff --git a/packages/controlmart/src/utils/mongo.util.ts b/packages/controlmart/src/utils/mongo.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..af467ab8b059e76c558c81aaa9e5072f3161a746 --- /dev/null +++ b/packages/controlmart/src/utils/mongo.util.ts @@ -0,0 +1,34 @@ +import mongoose from "mongoose"; +import { ServiceMesh } from "./service-mesh.util"; + +export const getIdFromMongoObject = (obj: any): string => { + return obj?._id?.toString() || ""; +}; + +export const getMongoObjectId = (id: string): mongoose.Types.ObjectId => { + return new mongoose.Types.ObjectId(id); +}; + +export const isValidMongoId = (id: string): boolean => { + return mongoose.Types.ObjectId.isValid(id); +}; + +export const generateIdByService = (servicePrefix: string, subService: string): string => { + const objectId = new mongoose.Types.ObjectId().toHexString(); + const check = ServiceMesh.exists(servicePrefix) && ServiceMesh.can(servicePrefix, subService); + if (!check) { + throw new Error(`Invalid service or sub-service: ${servicePrefix}/${subService}`); + } + return `${servicePrefix}:${subService}:${objectId.toString()}`; +}; + +export const isValidObjectId = (id: string): boolean => { + return mongoose.Types.ObjectId.isValid(id); +}; + +export const getMongoObjectById = (id: string): mongoose.Types.ObjectId => { + if (!isValidObjectId(id)) { + throw new Error(`Invalid object ID: ${id}`); + } + return new mongoose.Types.ObjectId(id); +}; \ No newline at end of file 
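A minimal usage sketch for the ID helper above (illustrative only: the "erp"/"order" pair comes from the SUB_SERVICES registry defined later in this diff, the consumer import path is assumed, and the example ID merely mirrors the template string in generateIdByService):

import { generateIdByService } from "./mongo.util";

// Yields something like "erp:order:665f1c2ab8d4e12f3a9c0d77" (service, sub-service, 24-char hex ObjectId).
const orderId = generateIdByService("erp", "order");

// Unknown pairs throw, because ServiceMesh.exists/can reject them:
// generateIdByService("erp", "nonexistent"); // Error: Invalid service or sub-service: erp/nonexistent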
diff --git a/packages/controlmart/src/utils/pagination.util.ts b/packages/controlmart/src/utils/pagination.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..6ed539eaf7f712dcb8635a908c543fae769ea635 --- /dev/null +++ b/packages/controlmart/src/utils/pagination.util.ts @@ -0,0 +1,187 @@ +/** + * Pagination Utilities + * + * Provides utilities for cursor-based and offset-based pagination. + */ + +/** + * Cursor-based pagination options + * Best for large datasets and time-series data + */ +export interface CursorPaginationOptions { + limit?: number; + cursor?: string | null; + direction?: 'forward' | 'backward'; +} + +/** + * Offset-based pagination options + * Best for small datasets with page number requirements + */ +export interface OffsetPaginationOptions { + page?: number; + limit?: number; +} + +/** + * Cursor pagination metadata + */ +export interface CursorPaginationMeta { + limit: number; + cursor: string | null; + previousCursor: string | null; + hasMore: boolean; +} + +/** + * Offset pagination metadata + */ +export interface OffsetPaginationMeta { + page: number; + limit: number; + total: number; + totalPages: number; + hasNext: boolean; + hasPrevious: boolean; +} + +/** + * Default pagination limits + */ +export const DEFAULT_LIMITS = { + capabilities: 50, + personas: 20, + worlds: 25, + logs: 200, + audit: 100, + default: 50, +} as const; + +/** + * Apply cursor-based pagination to a MongoDB query + * + * @param query - MongoDB query object + * @param options - Cursor pagination options + * @param cursorField - Field to use for cursor (default: '_id') + * @returns Modified query and limit + */ +export function applyCursorPagination( + query: Record, + options: CursorPaginationOptions, + cursorField: string = '_id' +): { query: Record; limit: number } { + const limit = options.limit || DEFAULT_LIMITS.default; + const modifiedQuery = { ...query }; + + if (options.cursor) { + const direction = options.direction || 'forward'; + modifiedQuery[cursorField] = direction === 'forward' + ? { $gt: options.cursor } + : { $lt: options.cursor }; + } + + return { query: modifiedQuery, limit }; +} + +/** + * Build cursor pagination metadata from results + * + * @param results - Array of results + * @param options - Cursor pagination options + * @param getId - Function to extract cursor value from item + * @returns Cursor pagination metadata + */ +export function buildCursorMeta( + results: T[], + options: CursorPaginationOptions, + getId: (item: T) => string +): CursorPaginationMeta { + const limit = options.limit || DEFAULT_LIMITS.default; + const hasMore = results.length === limit; + const cursor = results.length > 0 ? 
getId(results[results.length - 1]) : null; + + return { + limit, + cursor, + previousCursor: options.cursor || null, + hasMore, + }; +} + +/** + * Apply offset-based pagination + * + * @param page - Page number (1-indexed) + * @param limit - Items per page + * @returns Skip and limit values for MongoDB query + */ +export function applyOffsetPagination( + page: number = 1, + limit: number = DEFAULT_LIMITS.default +): { skip: number; limit: number } { + // Ensure page is at least 1 + const validPage = Math.max(1, page); + const skip = (validPage - 1) * limit; + + return { skip, limit }; +} + +/** + * Build offset pagination metadata + * + * @param total - Total count of items + * @param page - Current page number + * @param limit - Items per page + * @returns Offset pagination metadata + */ +export function buildOffsetMeta( + total: number, + page: number = 1, + limit: number = DEFAULT_LIMITS.default +): OffsetPaginationMeta { + const validPage = Math.max(1, page); + const totalPages = Math.ceil(total / limit); + + return { + page: validPage, + limit, + total, + totalPages, + hasNext: validPage < totalPages, + hasPrevious: validPage > 1, + }; +} + +/** + * Parse pagination query parameters from request + * + * @param query - Express request query object + * @returns Parsed offset pagination options + */ +export function parseOffsetParams(query: any): OffsetPaginationOptions { + const page = query.page ? parseInt(query.page, 10) : 1; + const limit = query.limit ? parseInt(query.limit, 10) : DEFAULT_LIMITS.default; + + return { + page: isNaN(page) ? 1 : page, + limit: isNaN(limit) ? DEFAULT_LIMITS.default : Math.min(limit, 200), // Max 200 + }; +} + +/** + * Parse cursor pagination query parameters from request + * + * @param query - Express request query object + * @returns Parsed cursor pagination options + */ +export function parseCursorParams(query: any): CursorPaginationOptions { + const limit = query.limit ? parseInt(query.limit, 10) : DEFAULT_LIMITS.default; + const cursor = query.cursor || null; + const direction = query.direction === 'backward' ? 'backward' : 'forward'; + + return { + limit: isNaN(limit) ? 
DEFAULT_LIMITS.default : Math.min(limit, 200), // Max 200 + cursor, + direction, + }; +} diff --git a/packages/controlmart/src/utils/patch.util.ts b/packages/controlmart/src/utils/patch.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..9893cf966cc1cc3b84ca3cbfba9faff20bc4a82a --- /dev/null +++ b/packages/controlmart/src/utils/patch.util.ts @@ -0,0 +1,120 @@ +/** + * Generic patch utility for WMS entities + * Allows partial updates with field validation and auto-timestamps + */ + +export interface PatchResult { + $set: Record; + fieldsUpdated: string[]; +} + +/** + * Build a MongoDB $set operation from partial updates + * Only allows fields in the allowedFields list + * + * @param updates - Partial object with fields to update + * @param allowedFields - List of allowed field names (supports dot notation for nested fields) + * @returns Object with $set operation and list of fields that will be updated + */ +export function buildPatchUpdate( + updates: Record, + allowedFields: string[], +): PatchResult { + const $set: Record = {}; + const fieldsUpdated: string[] = []; + + for (const [key, value] of Object.entries(updates)) { + // Skip null/undefined values unless explicitly allowed + if (value === undefined) continue; + + // Check if field is allowed (exact match or nested field prefix) + const isAllowed = + allowedFields.includes(key) || + allowedFields.some((f) => key.startsWith(f + ".")) || + allowedFields.some((f) => f.startsWith(key + ".")); + + if (isAllowed) { + // Handle nested objects - flatten to dot notation for MongoDB + if (typeof value === "object" && value !== null && !Array.isArray(value) && !(value instanceof Date)) { + for (const [nestedKey, nestedValue] of Object.entries(value)) { + const fullKey = `${key}.${nestedKey}`; + if (nestedValue !== undefined) { + $set[fullKey] = nestedValue; + fieldsUpdated.push(fullKey); + } + } + } else { + $set[key] = value; + fieldsUpdated.push(key); + } + } + } + + // Always update the updatedAt timestamp + $set.updatedAt = new Date(); + + return { $set, fieldsUpdated }; +} + +/** + * Status-to-timestamp mapping for auto-populating timing fields + */ +export const STATUS_TIMESTAMP_MAP: Record> = { + // Inventory statuses don't have timing fields + inventory: {}, + + // Inbound order statuses + inboundOrder: { + SCHEDULED: "timing.scheduledAt", + IN_TRANSIT: "timing.inTransitAt", + ARRIVED: "timing.arrivedAt", + RECEIVING: "timing.receivingStartedAt", + RECEIVED: "timing.receivedAt", + CLOSED: "timing.closedAt", + CANCELLED: "timing.cancelledAt", + }, + + // Outbound order statuses + outboundOrder: { + RELEASED: "timing.releasedAt", + ALLOCATED: "timing.allocatedAt", + PICKING: "timing.pickingStartedAt", + PICKED: "timing.pickedAt", + PACKING: "timing.packingStartedAt", + PACKED: "timing.packedAt", + SHIPPED: "dates.actualShipDate", + CANCELLED: "timing.cancelledAt", + }, + + // Task statuses + task: { + RELEASED: "timing.releasedAt", + ASSIGNED: "timing.assignedAt", + IN_PROGRESS: "timing.startedAt", + COMPLETED: "timing.completedAt", + CANCELLED: "timing.cancelledAt", + }, +}; + +/** + * Apply auto-timestamps based on status changes + * + * @param $set - The $set object to modify + * @param entityType - Type of entity (inventory, inboundOrder, outboundOrder, task) + * @param statusField - Name of the status field in the update + */ +export function applyStatusTimestamp( + $set: Record, + entityType: keyof typeof STATUS_TIMESTAMP_MAP, + statusField: string = "status", +): void { + const statusValue = 
$set[statusField]; + if (!statusValue) return; + + const timestampMap = STATUS_TIMESTAMP_MAP[entityType]; + const timestampField = timestampMap[statusValue]; + + if (timestampField) { + $set[timestampField] = new Date(); + } +} diff --git a/packages/controlmart/src/utils/repository.utils.ts b/packages/controlmart/src/utils/repository.utils.ts new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/packages/controlmart/src/utils/service-mesh.util.ts b/packages/controlmart/src/utils/service-mesh.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..662c195a9c881127b8dfc7caf4c08293d7162e6f --- /dev/null +++ b/packages/controlmart/src/utils/service-mesh.util.ts @@ -0,0 +1,278 @@ +import { openApiSpec } from "../docs"; + +export enum EService { + ERP = "erp", + WMS = "wms", + FINANCE = "finance", + EDI = "edi", + TMS = "tms", + MANUFACTURING = "manufacturing", + OD = "od", + CAPABILITY = "capability", + KNOWLEDGE_GRAPH = "knowledge-graph", + PERSONA = "persona", + TICKETS = "tickets", + WORLD = "world", + AUDIT = "audit", + LOGS = "logs", + OTHER = "other", + BACKGROUND_JOB = "background-job", +} + +export const EServices = Object.values(EService); +export const ServicesSet = new Set(EServices); +export type TService = (typeof EServices)[number]; + +// Type assertion to ensure SUB_SERVICES keys match TService +export const SUB_SERVICES: Record = { + erp: ["company", "order", "product", "shipment", "payment", "invoice"], + wms: [ + "appointment", + "inventory", + "inventory-transaction", + "inbound", + "inbound-order", + "outbound-order", + "outbound-shipment", + "receiving-transaction", + "labor", + "task", + "shipment", + "rotacart", + "equipment", + "bin", + "zone", + "cycle-count", + "daily-metrics", + "dc", + "dock-door", + "replenishment", + "warehouse", + ], + finance: ["finance", "ledger", "transaction"], + edi: ["transaction", "generate", "statistics"], + tms: ["shipment", "inbound_trailer", "carrier", "lane", "shipment_status_event"], + manufacturing: ["production-run", "work-order"], + od: ["od"], + capability: ["capability"], + "knowledge-graph": ["node", "edge"], + persona: ["persona"], + tickets: ["ticket"], + world: ["world", "layout"], + audit: ["audit"], + logs: ["log"], + other: [], + "background-job": ["execute", "schedule", "cancel"], +}; + +export const serviceExists = (service: string): service is TService => + EServices.includes(service as TService); + +/** + * Service Mesh Entry Point + */ +export const ServiceMesh = { + /** + * Check if a service exists in the mesh. + */ + exists(service: string): service is TService { + return serviceExists(service); + }, + + /** + * Get valid actions (sub-services) for a service. + */ + getActions(service: string): readonly string[] | null { + if (!serviceExists(service)) return null; + return SUB_SERVICES[service]; + }, + + /** + * Check if a service supports a specific action. + */ + can(service: string, action: string): boolean { + if (!serviceExists(service)) return false; + const actions = SUB_SERVICES[service]; + if (!actions) return false; + // Check for exact match or singular version of plural action (heuristic) + return actions.includes(action) || actions.includes(action.replace(/s$/, "")); + }, + + /** + * Get formatted usage documentation for a specific action (sub-service). 
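 * Matching is a path heuristic: both arguments are lowercased, then endpoints whose
 * path contains "/<action>" or "/<action>s" are returned.
 *
 * @example
 * // Illustrative only: the "wms"/"inventory" pair is taken from SUB_SERVICES above, and
 * // the endpoints actually returned depend on the OpenAPI spec parsed at module load.
 * const docs = ServiceMesh.getSubServiceDocs("wms", "inventory");
 * docs.forEach((ep) => console.log(`${ep.method.toUpperCase()} ${ep.path}: ${ep.summary}`));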
+ */ + getSubServiceDocs(service: string, action: string): IEndpointDocs[] { + const serviceNode = SERVICE_DOCS_REGISTRY[service.toLowerCase()]; + if (!serviceNode) return []; + + const normalizedAction = action.toLowerCase(); + + // Heuristic: Filter endpoints that contain the action name in their path + return serviceNode.endpoints.filter(ep => { + const hasActionInPath = ep.path.includes(`/${normalizedAction}`) || + ep.path.includes(`/${normalizedAction}s`); + return hasActionInPath; + }); + }, + + /** + * Get the full documentation registry. + */ + getRegistry() { + return SERVICE_DOCS_REGISTRY; + }, + + /** + * Find endpoints with optional filtering. + */ + findEndpoints(service: string, filters: { action?: string; method?: THttpMethod } = {}): IEndpointDocs[] { + const serviceNode = SERVICE_DOCS_REGISTRY[service.toLowerCase()]; + if (!serviceNode) return []; + + let endpoints = serviceNode.endpoints; + + // Filter by Action (SubService) + if (filters.action) { + const normalizedAction = filters.action.toLowerCase(); + endpoints = endpoints.filter(ep => { + return ep.path.includes(`/${normalizedAction}`) || ep.path.includes(`/${normalizedAction}s`); + }); + } + + // Filter by HTTP Method + if (filters.method) { + endpoints = endpoints.filter(ep => ep.method === filters.method); + } + + return endpoints; + }, + + /** + * Get formatted usage documentation for a specific endpoint. + */ + getFormattedEndpointDocs(ep: IEndpointDocs, options: { includeExamples?: boolean } = { includeExamples: true }): string { + const lines: string[] = []; + lines.push(`Endpoint: [${ep.method.toUpperCase()}] ${ep.path}`); + lines.push(`Summary: ${ep.summary}`); + if (ep.description) lines.push(`Description:\n${ep.description.trim().split('\n').map(l => ' ' + l).join('\n')}`); + + if (ep.parameters && ep.parameters.length > 0) { + lines.push(`\nParameters:`); + ep.parameters.forEach((p: any) => { + const required = p.required ? ' (required)' : ''; + const desc = p.description ? ` - ${p.description}` : ''; + const example = p.example && options.includeExamples ? ` [e.g. ${p.example}]` : ''; + lines.push(` • ${p.name} (${p.in})${required}${desc}${example}`); + }); + } + + if (ep.requestBody) { + lines.push(`\nRequest Body:`); + const content = ep.requestBody.content?.['application/json']; + + // Add full example if present and requested + if (options.includeExamples) { + if (content?.example) { + lines.push(`\n Example Payload:\n${JSON.stringify(content.example, null, 2).split('\n').map(l => ' ' + l).join('\n')}\n`); + } else if (content?.examples) { + // Handle multiple examples structure if needed + const firstExampleKey = Object.keys(content.examples)[0]; + if (firstExampleKey && content.examples[firstExampleKey].value) { + lines.push(`\n Example Payload:\n${JSON.stringify(content.examples[firstExampleKey].value, null, 2).split('\n').map(l => ' ' + l).join('\n')}\n`); + } + } else if (content?.schema?.example) { + lines.push(`\n Example Payload:\n${JSON.stringify(content.schema.example, null, 2).split('\n').map(l => ' ' + l).join('\n')}\n`); + } + } + + if (content?.schema) { + const schema = content.schema; + if (schema.properties) { + lines.push(` Schema Properties:`); + Object.entries(schema.properties).forEach(([key, prop]: [string, any]) => { + const isReq = schema.required?.includes(key) ? ' (required)' : ''; + const type = prop.type; + const desc = prop.description ? ` - ${prop.description}` : ''; + const example = prop.example && options.includeExamples ? ` [e.g. ${Array.isArray(prop.example) ? 
prop.example.join(', ') : prop.example}]` : ''; + + lines.push(` • ${key}${isReq}: ${type}${desc}${example}`); + + // Nested object properties + if (prop.type === 'object' && prop.properties) { + Object.entries(prop.properties).forEach(([subKey, subProp]: [string, any]) => { + lines.push(` - ${subKey}: ${subProp.type}`); + }); + } + // Array items + if (prop.type === 'array' && prop.items) { + if (prop.items.type === 'object' && prop.items.properties) { + const keys = Object.keys(prop.items.properties).join(', '); + lines.push(` - [items]: Object { ${keys} }`); + } else { + lines.push(` - [items]: ${prop.items.type}`); + } + } + }); + } + } + } + return lines.join('\n'); + } +}; + +// --- Documentation Parsing Logic --- + +export type THttpMethod = 'get' | 'post' | 'put' | 'delete' | 'patch'; + +export interface IEndpointDocs { + path: string; + method: THttpMethod; + summary: string; + description?: string; + parameters?: any[]; + requestBody?: any; + responses?: any; +} + +export interface IServiceDocs { + name: string; + endpoints: IEndpointDocs[]; +} + +const populateDocsRegistry = (spec: any): Record => { + const registry: Record = {}; + const paths = spec.paths || {}; + + Object.entries(paths).forEach(([pathKey, pathItem]: [string, any]) => { + const methods: THttpMethod[] = ['get', 'post', 'put', 'delete', 'patch']; + methods.forEach(method => { + if (pathItem && pathItem[method]) { + const op = pathItem[method]; + let serviceName = "other"; + + if (op.tags && Array.isArray(op.tags) && op.tags.length > 0) { + serviceName = op.tags[0].toLowerCase(); + } + + if (!registry[serviceName]) { + registry[serviceName] = { name: serviceName, endpoints: [] }; + } + + registry[serviceName]?.endpoints?.push({ + path: pathKey, + method: method, + summary: op.summary || '', + description: op.description, + parameters: op.parameters, + requestBody: op.requestBody, + responses: op.responses + }); + } + }); + }); + + return registry; +}; + +// Execute parsing once at module load time +export const SERVICE_DOCS_REGISTRY = populateDocsRegistry(openApiSpec); diff --git a/packages/controlmart/src/utils/swagger.util.ts b/packages/controlmart/src/utils/swagger.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..cda4b8954159ee138c53b8c13d048415a8a16279 --- /dev/null +++ b/packages/controlmart/src/utils/swagger.util.ts @@ -0,0 +1,19 @@ +import swaggerJsdoc from "swagger-jsdoc"; + +import { openApiSpec } from "../docs"; + +export const swaggerOptions = { + definition: { + openapi: "3.1.0", + info: { + title: "Morpheus Controlmart API", + version: "1.0.0", + description: "API documentation for the Morpheus Controlmart system", + }, + }, + apis: ["../docs/**/*.ts"], +}; + +export const swaggerSpec = openApiSpec; + +export const legacySwaggerSpec = swaggerJsdoc(swaggerOptions); diff --git a/packages/controlmart/src/utils/tms/service-tools.tms.util.ts b/packages/controlmart/src/utils/tms/service-tools.tms.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..b507fc0170e60c010a9f3bd39fff0e562c6f1530 --- /dev/null +++ b/packages/controlmart/src/utils/tms/service-tools.tms.util.ts @@ -0,0 +1,77 @@ +import type { Logger } from "pino"; + +import type { IOperationalDescriptorContext } from "../../operational-descriptor/init.od"; +import { + TmsCarrierRepository, + TmsInboundTrailerRepository, + TmsShipmentRepository, +} from "../../repository/tms"; +import type { + TTmsCarrierRepository, + TTmsInboundTrailerRepository, + TTmsShipmentRepository, +} from 
"../../repository/tms"; +import type { TCarrierInput } from "../../models/tms/carrier.tms.model"; +import type { TShipmentInput } from "../../models/tms/shipment.tms.model"; + +export interface TmsServiceToolsConfig { + context: IOperationalDescriptorContext; + worldId: string; + logger: Logger; +} + +export class TmsServiceTools { + private config: TmsServiceToolsConfig; + private carrierRepo: TTmsCarrierRepository; + private shipmentRepo: TTmsShipmentRepository; + private inboundTrailerRepo: TTmsInboundTrailerRepository; + + constructor(config: TmsServiceToolsConfig) { + this.config = config; + this.carrierRepo = TmsCarrierRepository(config.worldId); + this.shipmentRepo = TmsShipmentRepository(config.worldId); + this.inboundTrailerRepo = TmsInboundTrailerRepository(config.worldId); + } + + async createCarrier(input: TCarrierInput) { + return this.carrierRepo.createCarrier(input); + } + + async createShipment(input: TShipmentInput) { + console.log("TMS: Create Shipment", input); + const result = await this.shipmentRepo.createShipment(input); + console.log("TMS: Create Shipment Result", result); + return result; + } + + async tenderShipment(input: { + shipmentId: string; + carrierInfo: { + carrierId: string; + carrierName: string; + carrierCode: string; + scacCode: string; + }; + }) { + console.log("TMS: Tender Shipment", input); + const result = await this.shipmentRepo.tenderShipment(input.shipmentId, input.carrierInfo); + console.log("TMS: Tender Shipment Result", result); + return result; + } + + getTools() { + return { + tenderShipment: this.tenderShipment.bind(this), + createCarrier: this.createCarrier.bind(this), + createShipment: this.createShipment.bind(this), + }; + } +} + +export function createTmsServiceTools(config: TmsServiceToolsConfig) { + const tmsTools = new TmsServiceTools(config); + + return { + tms: tmsTools.getTools(), + }; +} diff --git a/packages/controlmart/src/utils/tms/tool-annotations.tms.ts b/packages/controlmart/src/utils/tms/tool-annotations.tms.ts new file mode 100644 index 0000000000000000000000000000000000000000..fd4c4a315267e73015cdd7c55541a7f389c38019 --- /dev/null +++ b/packages/controlmart/src/utils/tms/tool-annotations.tms.ts @@ -0,0 +1,67 @@ +/** + * TMS Tool Annotations for Knowledge Graph + * + * Annotations for TMS (Transportation Management System) tools + * used in operational descriptors. 
+ */ + +import type { ToolAnnotation } from '../../types'; + +/** + * Annotated TMS tools for knowledge graph building + * + * Section 1: Research OD tools (dotted naming convention) + * Section 2: Legacy tools (camelCase naming convention) + */ +export const TMS_TOOL_ANNOTATIONS: ToolAnnotation[] = [ + // ============================================================ + // Research OD Tools (used in perishables-food-manufacturer ODs) + // ============================================================ + + { + toolId: 'tms.carrier.get_active', + service: 'tms', + produces: [], + fetches: ['Carrier'], + requires: [], + modifies: [], + description: 'Get active carriers available for shipping', + }, + { + toolId: 'tms.shipment.create', + service: 'tms', + produces: ['Shipment'], + requires: ['Order', 'Carrier'], + modifies: [], + description: 'Create a shipment with carrier assignment', + }, + + // ============================================================ + // Legacy Tools (kept for backward compatibility) + // ============================================================ + + { + toolId: 'tms.createCarrier', + service: 'tms', + produces: ['Carrier'], + requires: [], + modifies: [], + description: 'Creates a new carrier entity in TMS with carrier ID, code, name, and type information', + }, + { + toolId: 'tms.createShipment', + service: 'tms', + produces: ['Shipment'], + requires: ['Order'], + modifies: [], + description: 'Creates a shipment from an order with origin, destination, carrier info, and status tracking', + }, + { + toolId: 'tms.tenderShipment', + service: 'tms', + produces: ['TenderResponse'], + requires: ['Shipment', 'Carrier'], + modifies: ['Shipment'], + description: 'Tenders shipment to a carrier for acceptance and updates shipment status', + }, +]; diff --git a/packages/controlmart/src/utils/validators/business-rules.validator.ts b/packages/controlmart/src/utils/validators/business-rules.validator.ts new file mode 100644 index 0000000000000000000000000000000000000000..2c4cc839a3253f219c5d6cf6d9e1c627c52aba8e --- /dev/null +++ b/packages/controlmart/src/utils/validators/business-rules.validator.ts @@ -0,0 +1,133 @@ +import { z } from "zod"; + +/** + * Business Rules API Validation Schemas + * Phase 4: REST API for Business Rule Management + */ + +// Condition schema +const ConditionSchema = z.object({ + language: z.enum(["jmespath", "javascript"], { + errorMap: () => ({ message: "Language must be either jmespath or javascript" }), + }), + expression: z.string().trim().min(1, "Condition expression is required"), +}); + +// Action schema +const ActionSchema = z.object({ + type: z.enum(["validate", "transform", "createRecord", "triggerOD", "log"], { + errorMap: () => ({ + message: "Action type must be one of: validate, transform, createRecord, triggerOD, log", + }), + }), + config: z.record(z.any()).describe("Action-specific configuration object"), +}); + +// Create Rule Schema - POST /rules +export const CreateRuleSchema = z.object({ + ruleId: z + .string() + .trim() + .min(1, "Rule ID is required") + .regex(/^[a-z0-9-]+$/, "Rule ID must contain only lowercase letters, numbers, and hyphens"), + name: z.string().trim().min(1, "Rule name is required"), + description: z.string().trim().optional(), + domain: z.enum(["ERP", "WMS", "TMS", "EDI"], { + errorMap: () => ({ message: "Domain must be one of: ERP, WMS, TMS, EDI" }), + }), + targetCollection: z.string().trim().min(1, "Target collection is required"), + trigger: z + .array(z.string().trim().min(1)) + .min(1, "At least one trigger is 
required") + .describe("Array of trigger points (e.g., before_insert, after_update)"), + executionMode: z.enum(["sync", "async"], { + errorMap: () => ({ message: "Execution mode must be either sync or async" }), + }), + priority: z + .number() + .int("Priority must be an integer") + .min(0, "Priority must be >= 0") + .max(1000, "Priority must be <= 1000") + .default(100), + condition: ConditionSchema.optional().describe("Optional condition for rule applicability"), + actions: z + .array(ActionSchema) + .min(1, "At least one action is required") + .describe("Array of actions to execute"), + enabled: z.boolean().default(true).describe("Whether the rule is active"), + worldId: z + .string() + .trim() + .nullable() + .optional() + .default(null) + .describe("World ID for world-specific rules, null for global"), + version: z.number().int("Version must be an integer").min(1, "Version must be >= 1").default(1), + metadata: z.record(z.any()).optional().describe("Additional metadata (author, tags, etc.)"), +}); + +// Update Rule Schema - PUT /rules/:ruleId +// All fields are optional for updates +export const UpdateRuleSchema = CreateRuleSchema.partial().omit({ ruleId: true }); + +// List Rules Query Schema - GET /rules +export const ListRulesQuerySchema = z.object({ + domain: z.enum(["ERP", "WMS", "TMS", "EDI"]).optional(), + targetCollection: z.string().trim().optional(), + worldId: z.string().trim().optional(), + enabled: z + .enum(["true", "false"]) + .optional() + .transform((val) => (val === "true" ? true : val === "false" ? false : undefined)), + trigger: z.string().trim().optional(), + limit: z + .string() + .optional() + .transform((val) => (val ? parseInt(val, 10) : undefined)) + .refine((val) => val === undefined || (val > 0 && val <= 1000), { + message: "Limit must be between 1 and 1000", + }), + skip: z + .string() + .optional() + .transform((val) => (val ? 
parseInt(val, 10) : undefined)) + .refine((val) => val === undefined || val >= 0, { + message: "Skip must be >= 0", + }), +}); + +// Test Rule Schema - POST /rules/test +export const TestRuleSchema = z.object({ + rule: CreateRuleSchema.describe("Full rule definition to test"), + sampleData: z.record(z.any()).describe("Sample data to test the rule against"), + context: z.record(z.any()).optional().describe("Optional execution context"), +}); + +// Bulk Create Schema - POST /rules/bulk +export const BulkCreateSchema = z.object({ + rules: z + .array(CreateRuleSchema) + .min(1, "At least one rule is required") + .max(100, "Maximum 100 rules per bulk operation"), +}); + +// Bulk Delete Schema - DELETE /rules/bulk +export const BulkDeleteSchema = z.object({ + ruleIds: z + .array(z.string().trim().min(1)) + .min(1, "At least one rule ID is required") + .max(100, "Maximum 100 rule IDs per bulk operation"), +}); + +// Rule ID Parameter Schema (for path params) +export const RuleIdParamSchema = z.object({ + ruleId: z.string().trim().min(1, "Rule ID is required"), +}); + +// Export type inference for TypeScript +export type TCreateRuleInput = z.infer; +export type TUpdateRuleInput = z.infer; +export type TListRulesQuery = z.infer; +export type TTestRuleInput = z.infer; +export type TBulkCreateInput = z.infer; +export type TBulkDeleteInput = z.infer; diff --git a/packages/controlmart/src/utils/validators/capability.validator.ts b/packages/controlmart/src/utils/validators/capability.validator.ts new file mode 100644 index 0000000000000000000000000000000000000000..a0d60fcf2159b5f8396efb9bc77ac055c42d7c37 --- /dev/null +++ b/packages/controlmart/src/utils/validators/capability.validator.ts @@ -0,0 +1,81 @@ +import { z } from "zod"; + +/** + * Capability Tags Schema + */ +const CapabilityTagsSchema = z.object({ + domain: z.array(z.string()).min(1, "At least one domain is required"), + complexity: z.enum(['simple', 'medium', 'complex']), + services: z.array(z.string()).min(1, "At least one service is required"), + personas: z.array(z.string()).optional(), + patterns: z.array(z.string()).optional(), +}); + +/** + * Capability Metadata Schema + */ +const CapabilityMetadataSchema = z.object({ + author: z.string().optional(), + createdAt: z.date().optional(), + estimatedDuration: z.number().int().positive().optional(), +}).catchall(z.any()); // Allow additional properties + +/** + * Chaos Policy Schema (simplified for validation) + */ +const ChaosPolicySchema = z.object({ + enabled: z.boolean(), + modes: z.array(z.string()).optional(), + probability: z.number().min(0).max(1).optional(), + config: z.record(z.any()).optional(), +}).passthrough(); + +/** + * Schema for creating a new capability + */ +export const CapabilityCreateSchema = z.object({ + id: z.string().trim().min(1, "Capability ID is required") + .regex(/^[a-z0-9-]+$/, "ID must be lowercase letters, numbers, and hyphens only"), + name: z.string().trim().min(1, "Capability name is required"), + description: z.string().trim().min(1, "Capability description is required"), + tags: CapabilityTagsSchema, + personas: z.array(z.string()).default([]), + odId: z.string().trim().min(1, "OD ID is required"), + version: z.string().trim().min(1, "Version is required") + .regex(/^\d+\.\d+\.\d+$/, "Version must follow semantic versioning (e.g., 1.0.0)"), + metadata: CapabilityMetadataSchema.optional(), + chaos: ChaosPolicySchema.optional(), +}); + +/** + * Schema for updating an existing capability + * All fields are optional except those that should never change + 
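 *
 * A minimal usage sketch (illustrative values only, assuming standard Zod .partial()/.strict()
 * semantics): any known subset of fields parses, while unknown keys are rejected, e.g.
 *
 *   CapabilityUpdateSchema.parse({ version: "1.1.0" });           // ok: matches the semver regex
 *   CapabilityUpdateSchema.safeParse({ foo: "bar" }).success;     // false: unknown key under .strict()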
*/ +export const CapabilityUpdateSchema = z.object({ + // id cannot be updated + name: z.string().trim().min(1, "Capability name is required").optional(), + description: z.string().trim().min(1, "Capability description is required").optional(), + tags: CapabilityTagsSchema.optional(), + personas: z.array(z.string()).optional(), + odId: z.string().trim().min(1, "OD ID is required").optional(), + version: z.string().trim().min(1, "Version is required") + .regex(/^\d+\.\d+\.\d+$/, "Version must follow semantic versioning (e.g., 1.0.0)") + .optional(), + metadata: CapabilityMetadataSchema.optional(), + chaos: ChaosPolicySchema.optional(), +}).strict(); // Don't allow unknown fields + +/** + * Schema for capability query filters + */ +export const CapabilityFilterSchema = z.object({ + domain: z.array(z.string()).optional(), + complexity: z.enum(['simple', 'medium', 'complex']).optional(), + services: z.array(z.string()).optional(), + personas: z.array(z.string()).optional(), + patterns: z.array(z.string()).optional(), +}); + +export type TCapabilityCreate = z.infer; +export type TCapabilityUpdate = z.infer; +export type TCapabilityFilter = z.infer; diff --git a/packages/controlmart/src/utils/validators/index.ts b/packages/controlmart/src/utils/validators/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..70675681d6ace3d5f4ccfd06cb3148b1d71d2dd8 --- /dev/null +++ b/packages/controlmart/src/utils/validators/index.ts @@ -0,0 +1,3 @@ +export * from "./validate-schema"; +export * from "./world.validator"; +export * from "./world-data.validator"; diff --git a/packages/controlmart/src/utils/validators/persona.validator.ts b/packages/controlmart/src/utils/validators/persona.validator.ts new file mode 100644 index 0000000000000000000000000000000000000000..95cc3c9f642bd6931a93d91f8c16f840f478a09d --- /dev/null +++ b/packages/controlmart/src/utils/validators/persona.validator.ts @@ -0,0 +1,76 @@ +import { z } from "zod"; + +/** + * Persona Role Enum + */ +const PersonaRoleSchema = z.enum(['operational', 'management', 'specialist', 'system']); + +/** + * Persona Department Enum + */ +const PersonaDepartmentSchema = z.enum([ + 'warehouse', + 'transportation', + 'customer-service', + 'inventory', + 'edi', + 'returns', + 'store-operations', +]); + +/** + * Persona Access Level Enum + */ +const PersonaAccessLevelSchema = z.enum(['basic', 'advanced', 'admin']); + +/** + * Persona Metadata Schema + */ +const PersonaMetadataSchema = z.object({ + permissions: z.array(z.string()).optional(), + accessLevel: PersonaAccessLevelSchema.optional(), + tags: z.array(z.string()).optional(), + priority: z.number().int().optional(), +}).catchall(z.any()); // Allow additional properties + +/** + * Schema for creating a new persona + */ +export const PersonaCreateSchema = z.object({ + id: z.string().trim().min(1, "Persona ID is required") + .regex(/^[a-z0-9-]+$/, "ID must be lowercase letters, numbers, and hyphens only"), + name: z.string().trim().min(1, "Persona name is required"), + description: z.string().trim().min(1, "Persona description is required"), + role: PersonaRoleSchema, + department: PersonaDepartmentSchema.optional(), + capabilityIds: z.array(z.string()).default([]), + metadata: PersonaMetadataSchema.optional(), +}); + +/** + * Schema for updating an existing persona + * All fields are optional except those that should never change + */ +export const PersonaUpdateSchema = z.object({ + // id cannot be updated + name: z.string().trim().min(1, "Persona name is required").optional(), + 
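  // Illustrative sketch (sample values are hypothetical, not from this diff): a minimal
  // payload accepted by PersonaCreateSchema above, given the id regex and the role/department enums:
  //
  //   PersonaCreateSchema.parse({
  //     id: "inbound-supervisor",
  //     name: "Inbound Supervisor",
  //     description: "Oversees receiving and putaway",
  //     role: "management",
  //     department: "warehouse",
  //   });
  //   // capabilityIds defaults to [] and metadata stays undefined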
description: z.string().trim().min(1, "Persona description is required").optional(),
+  role: PersonaRoleSchema.optional(),
+  department: PersonaDepartmentSchema.optional(),
+  capabilityIds: z.array(z.string()).optional(),
+  metadata: PersonaMetadataSchema.optional(),
+}).strict(); // Don't allow unknown fields
+
+/**
+ * Schema for persona query filters
+ */
+export const PersonaFilterSchema = z.object({
+  role: PersonaRoleSchema.optional(),
+  department: PersonaDepartmentSchema.optional(),
+  accessLevel: PersonaAccessLevelSchema.optional(),
+  tags: z.array(z.string()).optional(),
+});
+
+export type TPersonaCreate = z.infer<typeof PersonaCreateSchema>;
+export type TPersonaUpdate = z.infer<typeof PersonaUpdateSchema>;
+export type TPersonaFilter = z.infer<typeof PersonaFilterSchema>;
diff --git a/packages/controlmart/src/utils/validators/validate-schema.ts b/packages/controlmart/src/utils/validators/validate-schema.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5d1d200ab17bbd57057291556581c3b49d015d82
--- /dev/null
+++ b/packages/controlmart/src/utils/validators/validate-schema.ts
@@ -0,0 +1,63 @@
+import { z } from "zod";
+
+export class ValidationError extends Error {
+  constructor(
+    message: string,
+    public readonly errors: z.ZodIssue[],
+    public readonly code: string = "VALIDATION_ERROR",
+  ) {
+    super(message);
+    this.name = "ValidationError";
+  }
+}
+
+/**
+ * Generic schema validation function
+ * @param schema - Zod schema to validate against
+ * @param data - Data to validate
+ * @param errorMessage - Optional custom error message
+ * @returns Validated and parsed data
+ * @throws ValidationError if validation fails
+ */
+export const validateSchema = <T>(
+  schema: z.ZodSchema<T>,
+  data: unknown,
+  errorMessage?: string,
+): T => {
+  try {
+    return schema.parse(data);
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      const message = errorMessage || "Validation failed";
+      throw new ValidationError(message, error.issues);
+    }
+    throw error;
+  }
+};
+
+/**
+ * Safe schema validation that returns a result object instead of throwing
+ * @param schema - Zod schema to validate against
+ * @param data - Data to validate
+ * @returns Result object with success/error
+ */
+export const validateSchemaSafe = <T>(
+  schema: z.ZodSchema<T>,
+  data: unknown,
+): { success: true; data: T } | { success: false; error: ValidationError } => {
+  try {
+    const validatedData = schema.parse(data);
+    return { success: true, data: validatedData };
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return {
+        success: false,
+        error: new ValidationError("Validation failed", error.issues),
+      };
+    }
+    return {
+      success: false,
+      error: new ValidationError("Unexpected validation error", []),
+    };
+  }
+};
diff --git a/packages/controlmart/src/utils/validators/world-data.validator.ts b/packages/controlmart/src/utils/validators/world-data.validator.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5ebfe5840c01e2818f6316fd66742e18f29ed19b
--- /dev/null
+++ b/packages/controlmart/src/utils/validators/world-data.validator.ts
@@ -0,0 +1,33 @@
+import { z } from "zod";
+
+import { EWorldDataType } from "../../types/models";
+
+// WorldData validation schemas
+export const CreateWorldDataSchema = z.object({
+  worldId: z.string().trim().min(1, "World ID is required"),
+  name: z.string().trim().min(1, "World data name is required"),
+  companyId: z.string().trim().min(1, "Company ID is required"),
+  type: z.enum(EWorldDataType),
+  data: z.record(z.string(), z.any()).refine((obj) => Object.keys(obj).length > 0, {
+    message: "Data object cannot be
empty", + }), +}); + +export const UpdateWorldDataSchema = CreateWorldDataSchema.partial().omit({ + worldId: true, +}); + +export const WorldDataFilterSchema = z + .object({ + worldId: z.string().trim().min(1).optional(), + companyId: z.string().trim().min(1).optional(), + type: z.enum(EWorldDataType).optional(), + name: z.string().trim().min(1).optional(), + }) + .refine((data) => Object.values(data).some((value) => value !== undefined), { + message: "At least one filter field must be provided", + }); + +export const BulkCreateWorldDataSchema = z + .array(CreateWorldDataSchema) + .min(1, "Data array cannot be empty"); diff --git a/packages/controlmart/src/utils/validators/world.validator.ts b/packages/controlmart/src/utils/validators/world.validator.ts new file mode 100644 index 0000000000000000000000000000000000000000..d6653029d7d49e8f65d9150a24cfc0796671a2bc --- /dev/null +++ b/packages/controlmart/src/utils/validators/world.validator.ts @@ -0,0 +1,81 @@ +import { z } from "zod"; +import { personaRegistry } from '../../services/persona-registry.service'; + +const CapabilityFilterSchema = z.object({ + domain: z.array(z.string()).optional(), + complexity: z.enum(['simple', 'medium', 'complex']).optional(), + services: z.array(z.string()).optional(), + personas: z.array(z.string()).optional(), + patterns: z.array(z.string()).optional(), +}); + +const SamplingStrategySchema = z.discriminatedUnion('type', [ + z.object({ type: z.literal('all') }), + z.object({ + type: z.literal('filter'), + filter: CapabilityFilterSchema, + }), + z.object({ + type: z.literal('random'), + count: z.number().int().positive(), + seed: z.number().int().optional(), + }).refine( + (data) => data.count > 0, + { message: "Count must be greater than 0" } + ), + z.object({ + type: z.literal('seeded'), + count: z.number().int().positive(), + seed: z.number().int(), + }).refine( + (data) => data.count > 0, + { message: "Count must be greater than 0" } + ), +]); + +// Persona validation schemas +const PersonaIdSchema = z + .string() + .trim() + .min(1, "Persona ID is required") + .refine( + (id) => personaRegistry.exists(id), + (id) => ({ message: `Persona '${id}' not found in registry` }) + ); + +export const PersonaFilterSchema = z.object({ + role: z.enum(['operational', 'management', 'specialist', 'system']).optional(), + department: z.string().optional(), + accessLevel: z.enum(['basic', 'advanced', 'admin']).optional(), + tags: z.array(z.string()).optional(), +}); + +const WorldPersonaConfigSchema = z.object({ + allowedPersonas: z.array(PersonaIdSchema).optional(), + personaOverrides: z + .record( + z.string(), + z.object({ + capabilityIds: z.array(z.string()), + }) + ) + .optional(), +}).optional(); + +// World validation schemas +export const CreateWorldSchema = z.object({ + name: z.string().trim().min(1, "World name is required"), + url: z.string().trim().min(1, "World URL is required").url("Invalid URL format"), + apiKey: z.string().trim().min(1, "API key is required"), + apiSecret: z.string().trim().min(1, "API secret is required"), + description: z.string().optional(), + companies: z.array(z.string().trim().min(1)).optional().default([]), + is_default: z.boolean().optional().default(false), + samplingStrategy: SamplingStrategySchema.optional(), + capabilityIds: z.array(z.string()).optional(), + personas: WorldPersonaConfigSchema, +}); + +export const UpdateWorldSchema = CreateWorldSchema.partial(); + +export const WorldIdSchema = z.string().trim().min(1, "World ID is required"); diff --git 
a/packages/controlmart/src/utils/wms/od-builder.wms.util.ts b/packages/controlmart/src/utils/wms/od-builder.wms.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..a9ab15c9362ba228e2f93ea9af9d3bb98811173c --- /dev/null +++ b/packages/controlmart/src/utils/wms/od-builder.wms.util.ts @@ -0,0 +1,1179 @@ +import type { Logger } from "pino"; + +import type { OperationalDescriptor } from "../../types/od.type"; +import type { IOperationalDescriptorContext } from "../../operational-descriptor/init.od"; +import { + GenericODBuilder, + GenericODBuilderFactory, +} from "../../operational-descriptor/generic-builder.od"; + +export interface WmsODBuilderConfig { + worldId: string; + dcId: string; + logger: Logger; + context: IOperationalDescriptorContext; + workflowType: "inbound" | "outbound" | "cycle_count" | "replenishment"; + customSteps?: any[]; +} + +export class InboundReceivingWmsODBuilder { + private builder: GenericODBuilder; + private config: WmsODBuilderConfig; + + constructor(config: WmsODBuilderConfig) { + this.config = config; + this.builder = GenericODBuilderFactory.createWmsBuilder({ + name: `Inbound Receiving Workflow - DC ${config.dcId}`, + description: `Complete inbound receiving process from appointment scheduling to putaway completion`, + chaosProbability: 0.0, // No chaos for normal workflow + }); + } + + private generateId(prefix: string): string { + const timestamp = new Date().toISOString().replace(/[:.]/g, "-"); + const { dcId } = this.config; + return `${prefix}-${dcId}-${timestamp}`; + } + + build(): OperationalDescriptor { + const { worldId, dcId, context } = this.config; + const id = this.generateId("inbound-receiving"); + + // Set the ID + this.builder.setId(id); + + this.builder.addMcpStep({ + id: "createShipment", + name: "Create Shipment", + service: "tms", + tool: "createShipment", + input: { + shipmentId: `SHIP-${Math.random().toString(36).substr(2, 9)}`, + shipmentNumber: `SHIP-${Math.random().toString(36).substr(2, 9)}`, + }, + inputType: "literal", + outputStoreAs: "shipment", + }); + + // STEP 1: SHIPMENT TENDER (TMS) + this.builder.addMcpStep({ + id: "tenderShipment", + name: "Tender Shipment to Carrier", + service: "tms", + tool: "tenderShipment", + input: { + shipmentId: "{{shipment.shipmentId}}", + carrierInfo: { + carrierId: "CARRIER-001", + carrierName: "ABC Freight Lines", + carrierCode: "ABC", + scacCode: "ABC", + }, + }, + inputType: "template", + outputStoreAs: "shipmentTender", + }); + + // STEP 2: IN-TRANSIT STATUS (TMS/EDI - No Database) + this.builder.addMcpStep({ + id: "trackInTransit", + name: "Track Shipment In-Transit with EDI 214 Updates", + service: "wms", + tool: "trackShipment", + input: { + shipmentId: "{{shipmentTender.shipmentId}}", + trackingNumber: `TRK-${Math.random().toString(36).substr(2, 12)}`, + status: "IN_TRANSIT", + currentLocation: "Chicago, IL", + estimatedArrival: new Date(Date.now() + 2 * 24 * 60 * 60 * 1000).toISOString(), + edi214Updates: [ + { + statusCode: "AF", // Shipment Picked Up + timestamp: new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(), + location: "Supplier Facility", + }, + { + statusCode: "X1", // In Transit + timestamp: new Date().toISOString(), + location: "Chicago, IL", + }, + ], + }, + inputType: "template", + outputStoreAs: "inTransitStatus", + }); + + // STEP 2b: Process EDI 214 Status Updates (EDI - No Database) + this.builder.addMcpStep({ + id: "processEDI214", + name: "Process EDI 214 Shipment Status Updates", + service: "wms", + tool: "processEDI214", + input: { + 
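        // Illustrative sketch (hypothetical helper, not part of this diff): steps with
        // inputType "template" presumably substitute "{{path}}" placeholders from the
        // outputs previously saved via outputStoreAs; a minimal resolver along these lines:
        //
        //   const resolveTemplate = (value: string, ctx: Record<string, any>): string =>
        //     value.replace(/\{\{([^}]+)\}\}/g, (_, path: string) =>
        //       String(path.split(".").reduce((acc: any, key) => acc?.[key], ctx) ?? ""));
        //
        //   // resolveTemplate("{{shipment.shipmentId}}", { shipment: { shipmentId: "SHIP-1" } })
        //   // -> "SHIP-1"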
edi214Data: { + transactionSetId: `EDI214-${Math.random().toString(36).substr(2, 9)}`, + shipmentId: "{{shipmentTender.shipmentId}}", + statusCode: "X1", + timestamp: new Date().toISOString(), + location: "{{inTransitStatus.currentLocation}}", + equipmentNumber: "TRL-123456", + }, + }, + inputType: "template", + outputStoreAs: "edi214Response", + }); + + // STEP 3: DOCK APPOINTMENT SCHEDULING (TMS/WMS Integration) + + // Step 3a: Create Dock Door (WMS - Required for scheduling) + this.builder.addWmsStep( + "createDockDoor", + "Create Dock Door for Receiving", + "createDockDoor", + { + dockDoorId: "DOCK-001", + dockDoorName: "Receiving Dock 1", + dockDoorType: "INBOUND", + status: "ACTIVE", + zoneId: "ZONE-RECEIVING-001", + capacity: { + maxPallets: 20, + maxWeight: 40000, + }, + }, + "dockDoor", + ); + + // Step 3b: Create Sample Appointments (WMS - To populate schedule) + this.builder.addWmsStep( + "createSampleAppointment1", + "Create Sample Appointment 1", + "createAppointment", + { + dockDoor: "{{dockDoor}}", + appointmentId: `SAMP-APT-1-${Math.random().toString(36).substr(2, 6)}`, + appointmentType: "INBOUND", + status: "SCHEDULED", + priority: "NORMAL", + carrier: { + carrierName: "Sample Carrier 1", + carrierId: "CARR-001", + }, + appointmentWindow: { + scheduledDate: new Date(Date.now() + 1 * 24 * 60 * 60 * 1000).toISOString(), + startTime: "06:00", + endTime: "08:00", + }, + references: { + inboundOrderIds: ["SAMPLE-ORDER-1"], + }, + documents: [], + notifications: {}, + }, + "sampleAppointment1", + ); + + this.builder.addWmsStep( + "createSampleAppointment2", + "Create Sample Appointment 2", + "createAppointment", + { + appointmentId: `SAMP-APT-2-${Math.random().toString(36).substr(2, 6)}`, + appointmentType: "INBOUND", + status: "SCHEDULED", + priority: "NORMAL", + carrier: { + carrierName: "Sample Carrier 2", + carrierId: "CARR-002", + }, + appointmentWindow: { + scheduledDate: new Date(Date.now() + 2 * 24 * 60 * 60 * 1000).toISOString(), + startTime: "10:00", + endTime: "12:00", + }, + references: { + inboundOrderIds: ["SAMPLE-ORDER-2"], + }, + documents: [], + notifications: {}, + }, + "sampleAppointment2", + ); + + // Step 3c: Create Inbound Order from ASN (WMS) + this.builder.addWmsStep( + "createInboundOrder", + "Create Inbound Order from ASN", + "createInboundOrder", + { + inboundOrderId: `IB-${Math.random().toString(36).substr(2, 9)}`, + vendor: "{{shipmentTender.carrierInfo.carrierId}}", + orderType: "PO", + orderStatus: "EXPECTED", + dates: { + expectedArrival: new Date(Date.now() + 1 * 24 * 60 * 60 * 1000).toISOString(), + }, + totals: { + pallets: 10, + cases: 250, + units: 250, + expectedLines: 2, + }, + lines: [ + { + lineNumber: 1, + productId: "PROD-001", + sku: "SKU-ABC-001", + productName: "Widget A", + expectedQuantity: 100, + receivedQuantity: 0, + uom: "EACH", + lineStatus: "EXPECTED", + }, + { + lineNumber: 2, + productId: "PROD-002", + sku: "SKU-ABC-002", + productName: "Widget B", + expectedQuantity: 150, + receivedQuantity: 0, + uom: "EACH", + lineStatus: "EXPECTED", + }, + ], + }, + "inboundOrder", + ); + + // Step 3d: Get Available Dock Schedule (WMS - Now should find data) + this.builder.addWmsStep( + "getDockSchedule", + "Get Available Dock Schedule", + "getDockSchedule", + { + startDate: new Date().toISOString().split("T")[0], + endDate: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000).toISOString().split("T")[0], + appointmentType: "INBOUND", + }, + "availableSlots", + ); + + // Step 3e: Create Dock Appointment (WMS) + this.builder.addWmsStep( + 
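      // Illustrative note: the positional addWmsStep(id, name, tool, input, storeAs) calls used
      // throughout this builder presumably expand to the same shape as addMcpStep with service
      // fixed to "wms"; a rough equivalence, assuming the GenericODBuilder API shown above
      // (the default inputType is not visible in this diff):
      //
      //   this.builder.addMcpStep({
      //     id: "getDockSchedule",
      //     name: "Get Available Dock Schedule",
      //     service: "wms",
      //     tool: "getDockSchedule",
      //     input: { appointmentType: "INBOUND" },
      //     outputStoreAs: "availableSlots",
      //   });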
"createAppointment", + "Schedule Dock Appointment", + "createAppointment", + { + appointmentId: `APT-${Math.random().toString(36).substr(2, 9)}`, + appointmentType: "INBOUND", + status: "SCHEDULED", + priority: "NORMAL", + carrier: { + carrierName: "{{shipmentTender.carrierInfo.carrierName}}", + carrierId: "{{shipmentTender.carrierInfo.carrierId}}", + }, + appointmentWindow: { + scheduledDate: new Date(Date.now() + 2 * 24 * 60 * 60 * 1000).toISOString(), + startTime: "08:00", + endTime: "12:00", + }, + references: { + inboundOrderIds: ["{{inboundOrder.inboundOrderId}}"], + }, + documents: [], + notifications: {}, + }, + "appointment", + ); + + // STEP 4: RECEIVING & PUTAWAY (WMS - Full Implementation) + + // Step 4a: Truck Arrival and Check-In (WMS) + this.builder.addWmsStep( + "checkInTrailer", + "Check In Truck at Dock Door", + "updateAppointmentStatus", + { + appointmentId: "{{appointment.appointmentId}}", + status: "CHECKED_IN", + actualTimes: { + checkInTime: new Date().toISOString(), + }, + }, + "checkedInAppointment", + ); + + // Step 4b: Get Available Equipment (WMS) + this.builder.addWmsStep( + "getAvailableEquipment", + "Get Available Receiving Equipment", + "getAvailableEquipment", + { + equipmentType: "FORKLIFT", + zoneId: "ZONE-RECEIVING-001", + }, + "availableEquipment", + ); + + // Step 4c: Create Equipment if None Available (WMS) + this.builder.addWmsStep( + "createEquipment", + "Create Forklift Equipment", + "createEquipment", + { + equipmentId: `EQ-FORK-001-${Math.random().toString(36).substr(2, 9)}`, + equipmentName: "Forklift 001", + equipmentType: "FORKLIFT", + status: "AVAILABLE", + zoneId: "ZONE-RECEIVING-001", + specifications: { + maxLiftHeight: 15, + maxWeight: 5000, + }, + }, + "forklift", + ); + + // Step 4d: Get Available Users (WMS) + this.builder.addWmsStep( + "getReceivingUsers", + "Get Available Receiving Users", + "getActiveUsers", + { + shift: "DAY", + zoneId: "ZONE-RECEIVING-001", + userType: "WAREHOUSE_ASSOCIATE", + }, + "receivingUsers", + ); + + // Step 4e: Create Users if None Available (WMS) + this.builder.addWmsStep( + "createReceivingUser", + "Create Receiving User", + "createUser", + { + userId: `USER-RCV-001-${Math.random().toString(36).substr(2, 9)}`, + userName: `John Receiver-${Math.random().toString(36).substr(2, 9)}`, + userType: "WAREHOUSE_ASSOCIATE", + status: "ACTIVE", + shift: "DAY", + zoneId: "ZONE-RECEIVING-001", + permissions: ["RECEIVE", "INVENTORY"], + }, + "receivingUser", + ); + + // Step 4f: Assign Equipment (WMS) + this.builder.addWmsStep( + "assignEquipment", + "Assign Equipment to Receiving User", + "updateEquipmentStatus", + { + equipmentId: "{{forklift.equipmentId}}", + status: "IN_USE", + location: "ZONE-RECEIVING-001", + assignedTo: "{{receivingUser.userId}}", + }, + "assignedEquipment", + ); + + // Step 4g: Create Receiving Task (WMS) + this.builder.addWmsStep( + "createReceivingTask", + "Create Receiving Task", + "createTask", + { + taskId: `TASK-RCV-${Math.random().toString(36).substr(2, 9)}`, + taskType: "UNLOAD", + taskStatus: "CREATED", + assignment: { + userId: "{{receivingUser.userId}}", + userName: "{{receivingUser.userName}}", + assignedAt: new Date().toISOString(), + }, + inboundOrderId: "{{inboundOrder.inboundOrderId}}", + appointmentId: "{{appointment.appointmentId}}", + equipmentId: "{{forklift.equipmentId}}", + priority: 2, + }, + "receivingTask", + ); + + // Step 4h: Start Receiving Process (WMS) + this.builder.addWmsStep( + "startReceiving", + "Start Receiving Process", + "updateTaskStatus", + { + taskId: 
"{{receivingTask.taskId}}", + status: "IN_PROGRESS", + timestamp: new Date().toISOString(), + }, + "startedReceiving", + ); + + // Step 4i: Create Inventory and Receive Product 1 (WMS) + this.builder.addWmsStep( + "receiveProduct1", + "Create Inventory and Receive Product SKU-ABC-001", + "createInventory", + { + inventoryId: `INV-${Math.random().toString(36).substr(2, 9)}`, + binId: "BIN-001", + productId: "PROD-001", + sku: "SKU-ABC-001", + productName: "Widget A", + quantityOnHand: 100, + inventoryStatus: "AVAILABLE", + uom: "EACH", + receivedDate: new Date().toISOString(), + }, + "receivedInventory1", + ); + + // Step 4j: Create Inventory and Receive Product 2 (WMS) + this.builder.addWmsStep( + "receiveProduct2", + "Create Inventory and Receive Product SKU-ABC-002", + "createInventory", + { + inventoryId: `INV-${Math.random().toString(36).substr(2, 9)}`, + binId: "BIN-002", + productId: "PROD-002", + sku: "SKU-ABC-002", + productName: "Widget B", + quantityOnHand: 150, + inventoryStatus: "AVAILABLE", + uom: "EACH", + receivedDate: new Date().toISOString(), + }, + "receivedInventory2", + ); + + // Step 4k: Get Available Storage Locations (WMS) + this.builder.addWmsStep( + "getAvailableStorage", + "Get Available Storage Locations", + "getInventoryByLocation", + { + zoneId: "ZONE-STORAGE-001", + status: ["AVAILABLE"], + }, + "availableLocations", + ); + + // Step 4l: Create Putaway User (WMS) + this.builder.addWmsStep( + "createPutawayUser", + "Create Putaway User", + "createUser", + { + userId: `USER-PUT-001-${Math.random().toString(36).substr(2, 9)}`, + userName: `Jane Putaway-${Math.random().toString(36).substr(2, 9)}`, + userType: "WAREHOUSE_ASSOCIATE", + status: "ACTIVE", + shift: "DAY", + zoneId: "ZONE-STORAGE-001", + permissions: ["PUTAWAY", "INVENTORY"], + }, + "putawayUser", + ); + + // Step 4m: Create Putaway Task for Product 1 (WMS) + this.builder.addWmsStep( + "createPutawayTask1", + "Create Putaway Task for Product 1", + "createTask", + { + taskId: `TASK-PUT-${Math.random().toString(36).substr(2, 9)}`, + taskType: "PUTAWAY", + taskStatus: "CREATED", + assignment: { + userId: "{{putawayUser.userId}}", + userName: "{{putawayUser.userName}}", + assignedAt: new Date().toISOString(), + }, + product: { + productId: "PROD-001", + sku: "SKU-ABC-001", + productName: "Widget A", + }, + quantity: { + requested: 100, + actual: 0, + uom: "EACH", + }, + from: { + binId: "RECEIVING-DOCK", + }, + to: { + binId: "BIN-001", + }, + priority: 2, + }, + "putawayTask1", + ); + + // Step 4n: Execute Putaway for Product 1 (WMS) + this.builder.addWmsStep( + "executePutaway1", + "Execute Putaway for Product 1", + "updateTaskStatus", + { + taskId: "{{putawayTask1.taskId}}", + status: "IN_PROGRESS", + timestamp: new Date().toISOString(), + }, + "inProgressPutaway1", + ); + + this.builder.addWmsStep( + "completePutaway1", + "Complete Putaway for Product 1", + "updateTaskStatus", + { + taskId: "{{putawayTask1.taskId}}", + status: "COMPLETED", + timestamp: new Date().toISOString(), + }, + "completedPutaway1", + ); + + // Step 4o: Create Putaway Task for Product 2 (WMS) + this.builder.addWmsStep( + "createPutawayTask2", + "Create Putaway Task for Product 2", + "createTask", + { + taskId: `TASK-PUT-${Math.random().toString(36).substr(2, 9)}`, + taskType: "PUTAWAY", + taskStatus: "CREATED", + assignment: { + userId: "{{putawayUser.userId}}", + userName: "{{putawayUser.userName}}", + assignedAt: new Date().toISOString(), + }, + product: { + productId: "PROD-002", + sku: "SKU-ABC-002", + productName: "Widget B", 
+ }, + quantity: { + requested: 150, + actual: 0, + uom: "EACH", + }, + from: { + binId: "RECEIVING-DOCK", + }, + to: { + binId: "BIN-002", + }, + priority: 2, + }, + "putawayTask2", + ); + + // Step 4p: Execute Putaway for Product 2 (WMS) + this.builder.addWmsStep( + "executePutaway2", + "Execute Putaway for Product 2", + "updateTaskStatus", + { + taskId: "{{putawayTask2.taskId}}", + status: "IN_PROGRESS", + timestamp: new Date().toISOString(), + }, + "inProgressPutaway2", + ); + + this.builder.addWmsStep( + "completePutaway2", + "Complete Putaway for Product 2", + "updateTaskStatus", + { + taskId: "{{putawayTask2.taskId}}", + status: "COMPLETED", + timestamp: new Date().toISOString(), + }, + "completedPutaway2", + ); + + // Step 4q: Complete Receiving Process (WMS) + this.builder.addWmsStep( + "completeReceiving", + "Complete Receiving Process", + "updateTaskStatus", + { + taskId: "{{receivingTask.taskId}}", + status: "COMPLETED", + timestamp: new Date().toISOString(), + }, + "completedReceiving", + ); + + // Step 4r: Update Inbound Order Status (WMS) + this.builder.addWmsStep( + "updateInboundOrderStatus", + "Update Inbound Order to Completed", + "updateInboundOrderStatus", + { + inboundOrderId: "{{inboundOrder.inboundOrderId}}", + status: "COMPLETED", + actualDates: { + actualArrival: new Date().toISOString(), + receivingCompleted: new Date().toISOString(), + }, + }, + "completedInboundOrder", + ); + + // Step 4s: Release Equipment (WMS) + this.builder.addWmsStep( + "releaseEquipment", + "Release Equipment", + "updateEquipmentStatus", + { + equipmentId: "{{forklift.equipmentId}}", + status: "AVAILABLE", + location: "ZONE-RECEIVING-001", + assignedTo: "", + }, + "releasedEquipment", + ); + + // Step 4t: Complete Appointment (WMS) + this.builder.addWmsStep( + "completeAppointment", + "Complete Dock Appointment", + "updateAppointmentStatus", + { + appointmentId: "{{appointment.appointmentId}}", + status: "COMPLETED", + actualTimes: { + endTime: new Date().toISOString(), + departureTime: new Date(Date.now() + 30 * 60 * 1000).toISOString(), + }, + }, + "completedAppointment", + ); + + // FINAL REPORTING AND METRICS (WMS) + + // Step 5a: Get Final Inventory Summary (WMS) + this.builder.addWmsStep( + "getFinalInventory", + "Get Final Inventory Summary", + "getInventorySummary", + { + zoneId: "ZONE-STORAGE-001", + }, + "finalInventory", + ); + + // Step 5b: Generate Performance Report (WMS) + this.builder.addWmsStep( + "generateReport", + "Generate Receiving Performance Report", + "getTaskPerformanceMetrics", + { + taskTypes: ["UNLOAD", "PUTAWAY"], + userIds: ["{{receivingUser.userId}}", "{{putawayUser.userId}}"], + dateStart: new Date().toISOString().split("T")[0], + dateEnd: new Date().toISOString().split("T")[0], + }, + "performanceReport", + ); + + // Add custom steps if provided + if (this.config.customSteps) { + this.builder.addSteps(this.config.customSteps); + } + + return this.builder.build(); + } +} + +export class OutboundFulfillmentWmsODBuilder { + private builder: GenericODBuilder; + private config: WmsODBuilderConfig; + + constructor(config: WmsODBuilderConfig) { + this.config = config; + this.builder = GenericODBuilderFactory.createWmsBuilder({ + name: `Outbound Fulfillment Workflow - DC ${config.dcId}`, + description: `Complete outbound fulfillment process from order creation to shipment`, + chaosProbability: 0.0, + }); + } + + private generateId(prefix: string): string { + const timestamp = new Date().toISOString().replace(/[:.]/g, "-"); + const { dcId } = this.config; + return 
`${prefix}-${dcId}-${timestamp}`; + } + + build(): OperationalDescriptor { + const { worldId, dcId } = this.config; + const id = this.generateId("outbound-fulfillment"); + + this.builder.setId(id); + + // INITIALIZATION PHASE - Create necessary master data first + + // Step 1: Create Picking User + this.builder.addWmsStep( + "createPickingUser", + "Create Picking User", + "createUser", + { + userId: "USER-PICK-001", + userName: "Jane Picker", + userType: "WAREHOUSE_ASSOCIATE", + status: "ACTIVE", + shift: "DAY", + zoneId: "ZONE-STORAGE-001", + permissions: ["PICK", "INVENTORY"], + }, + "pickingUser", + ); + + // Step 2: Create Packing User + this.builder.addWmsStep( + "createPackingUser", + "Create Packing User", + "createUser", + { + userId: "USER-PACK-001", + userName: "Bob Packer", + userType: "WAREHOUSE_ASSOCIATE", + status: "ACTIVE", + shift: "DAY", + zoneId: "ZONE-PACKING-001", + permissions: ["PACK", "SHIP"], + }, + "packingUser", + ); + + // Step 3: Create RotaCart for Picking + this.builder.addWmsStep( + "createPickingCart", + "Create RotaCart for Picking", + "createRotaCart", + { + cartId: "CART-PICK-001", + cartName: "Picking Cart 001", + cartType: "PICKING", + status: "AVAILABLE", + zoneId: "ZONE-STORAGE-001", + capacity: { + maxUnits: 500, + maxWeight: 1000, + }, + }, + "pickingCart", + ); + + // Step 4: Create Packing Station + this.builder.addWmsStep( + "createPackingStation", + "Create Packing Station", + "createPackingStation", + { + stationId: "STATION-PACK-001", + stationName: "Packing Station 1", + status: "ACTIVE", + zoneId: "ZONE-PACKING-001", + equipment: ["SCALE", "PRINTER", "LABELER"], + }, + "packingStation", + ); + + // WORKFLOW PHASE - Now execute the actual business process + + // Step 5: Create Outbound Order + this.builder.addWmsStep( + "createOutboundOrder", + "Create Outbound Order", + "createOutboundOrder", + { + orderId: `ORD-${Math.random().toString(36).substr(2, 9)}`, + orderNumber: `SO-${Math.random().toString(36).substr(2, 9)}`, + orderType: "SALES", + customer: { + customerId: "CUST-001", + customerName: "ABC Customer Corp", + accountNumber: "ACC-001", + }, + destinationType: "CUSTOMER", + orderPriority: "NORMAL", + shipping: { + carrierCode: "UPS", + carrierName: "United Parcel Service", + serviceLevel: "GROUND", + }, + orderStatus: "CREATED", + dates: { + orderDate: new Date().toISOString(), + requiredShipDate: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(), + }, + lines: [ + { + lineNumber: 1, + productId: "PROD-001", + sku: "SKU-ABC-001", + productName: "Widget A", + orderedQuantity: 50, + uom: "EACH", + lineStatus: "CREATED", + }, + { + lineNumber: 2, + productId: "PROD-002", + sku: "SKU-ABC-002", + productName: "Widget B", + orderedQuantity: 75, + uom: "EACH", + lineStatus: "CREATED", + }, + ], + }, + "outboundOrder", + ); + + // Step 6: Check Inventory Availability for Product 1 + this.builder.addWmsStep( + "checkInventoryAvailability1", + "Check Inventory Availability for Product 1", + "getInventoryByProduct", + { + productId: "PROD-001", + availableOnly: true, + sortBy: "FIFO", + }, + "availableInventory1", + ); + + // Step 7: Check Inventory Availability for Product 2 + this.builder.addWmsStep( + "checkInventoryAvailability2", + "Check Inventory Availability for Product 2", + "getInventoryByProduct", + { + productId: "PROD-002", + availableOnly: true, + sortBy: "FIFO", + }, + "availableInventory2", + ); + + // Step 8: Allocate Inventory for Product 1 + this.builder.addWmsStep( + "allocateInventory1", + "Allocate Inventory for 
Product 1", + "allocateInventory", + { + inventoryId: "{{availableInventory1[0].inventoryId}}", + allocation: { + orderId: "{{outboundOrder.orderId}}", + orderLineId: "{{outboundOrder.lines[0].orderLineId}}", + allocatedQuantity: 50, + priority: "NORMAL", + }, + }, + "allocatedInventory1", + ); + + // Step 9: Allocate Inventory for Product 2 + this.builder.addWmsStep( + "allocateInventory2", + "Allocate Inventory for Product 2", + "allocateInventory", + { + inventoryId: "{{availableInventory2[0].inventoryId}}", + allocation: { + orderId: "{{outboundOrder.orderId}}", + orderLineId: "{{outboundOrder.lines[1].orderLineId}}", + allocatedQuantity: 75, + priority: "NORMAL", + }, + }, + "allocatedInventory2", + ); + + // Step 10: Create Pick Task + this.builder.addWmsStep( + "createPickTask", + "Create Pick Task", + "createTask", + { + taskId: `TASK-PICK-${Math.random().toString(36).substr(2, 9)}`, + taskType: "PICK", + taskStatus: "CREATED", + assignment: { + userId: "{{pickingUser.userId}}", + userName: "{{pickingUser.userName}}", + assignedAt: new Date().toISOString(), + }, + orderId: "{{outboundOrder.orderId}}", + cartId: "{{pickingCart.cartId}}", + quantity: { + requested: 125, + actual: 0, + uom: "EACH", + }, + priority: 2, + }, + "pickTask", + ); + + // Step 11: Assign RotaCart to User + this.builder.addWmsStep( + "assignPickCart", + "Assign RotaCart for Picking", + "assignRotaCartToUser", + { + cartId: "{{pickingCart.cartId}}", + userId: "{{pickingUser.userId}}", + userName: "{{pickingUser.userName}}", + }, + "assignedPickCart", + ); + + // Step 12: Start Picking + this.builder.addWmsStep( + "startPicking", + "Start Picking Process", + "updateTaskStatus", + { + taskId: "{{pickTask.taskId}}", + status: "IN_PROGRESS", + timestamp: new Date().toISOString(), + }, + "startedPicking", + ); + + // Step 13: Pick Product 1 + this.builder.addWmsStep( + "pickProduct1", + "Pick Product 1", + "updateInventoryQuantity", + { + inventoryId: "{{allocatedInventory1.inventoryId}}", + quantityChange: -50, + transactionType: "PICK", + referenceId: "{{outboundOrder.orderId}}", + taskId: "{{pickTask.taskId}}", + cartId: "{{pickingCart.cartId}}", + }, + "pickedProduct1", + ); + + // Step 14: Pick Product 2 + this.builder.addWmsStep( + "pickProduct2", + "Pick Product 2", + "updateInventoryQuantity", + { + inventoryId: "{{allocatedInventory2.inventoryId}}", + quantityChange: -75, + transactionType: "PICK", + referenceId: "{{outboundOrder.orderId}}", + taskId: "{{pickTask.taskId}}", + cartId: "{{pickingCart.cartId}}", + }, + "pickedProduct2", + ); + + // Step 15: Complete Picking + this.builder.addWmsStep( + "completePicking", + "Complete Picking Task", + "updateTaskStatus", + { + taskId: "{{pickTask.taskId}}", + status: "COMPLETED", + timestamp: new Date().toISOString(), + }, + "completedPicking", + ); + + // Step 16: Create Packing Task + this.builder.addWmsStep( + "createPackTask", + "Create Packing Task", + "createTask", + { + taskId: `TASK-PACK-${Math.random().toString(36).substr(2, 9)}`, + taskType: "PACK", + taskStatus: "CREATED", + assignment: { + userId: "{{packingUser.userId}}", + userName: "{{packingUser.userName}}", + assignedAt: new Date().toISOString(), + }, + orderId: "{{outboundOrder.orderId}}", + stationId: "{{packingStation.stationId}}", + quantity: { + requested: 125, + actual: 0, + uom: "EACH", + }, + priority: 2, + }, + "packTask", + ); + + // Step 17: Start Packing + this.builder.addWmsStep( + "startPacking", + "Start Packing Process", + "updateTaskStatus", + { + taskId: "{{packTask.taskId}}", 
+ status: "IN_PROGRESS", + timestamp: new Date().toISOString(), + }, + "startedPacking", + ); + + // Step 18: Complete Packing + this.builder.addWmsStep( + "completePacking", + "Complete Packing Task", + "updateTaskStatus", + { + taskId: "{{packTask.taskId}}", + status: "COMPLETED", + timestamp: new Date().toISOString(), + }, + "completedPacking", + ); + + // Step 19: Create Shipment + this.builder.addWmsStep( + "createShipment", + "Create Shipment", + "createShipment", + { + shipmentId: `SHIP-${Math.random().toString(36).substr(2, 9)}`, + orderId: "{{outboundOrder.orderId}}", + carrierCode: "UPS", + serviceLevel: "GROUND", + trackingNumber: `1Z${Math.random().toString(36).substr(2, 16).toUpperCase()}`, + eventData: { + timestamp: new Date().toISOString(), + location: dcId, + note: "Shipment created", + source: "WMS", + }, + }, + "shipment", + ); + + // Step 20: Load Shipment + this.builder.addWmsStep( + "loadShipment", + "Load Shipment", + "updateShipmentStatus", + { + shipmentId: "{{shipment.shipmentId}}", + status: "LOADING", + eventData: { + timestamp: new Date().toISOString(), + location: dcId, + note: "Loading started", + source: "WMS", + }, + }, + "loadingShipment", + ); + + // Step 21: Ship Order + this.builder.addWmsStep( + "shipOrder", + "Ship Order", + "updateShipmentStatus", + { + shipmentId: "{{shipment.shipmentId}}", + status: "SHIPPED", + eventData: { + timestamp: new Date().toISOString(), + location: dcId, + note: "Shipment departed", + source: "WMS", + }, + }, + "shippedOrder", + ); + + // Step 22: Release RotaCart + this.builder.addWmsStep( + "releasePickCart", + "Release Picking RotaCart", + "assignRotaCartToUser", + { + cartId: "{{pickingCart.cartId}}", + userId: null, + userName: null, + }, + "releasedPickCart", + ); + + // Step 23: Get Performance Metrics + this.builder.addWmsStep( + "getOutboundMetrics", + "Get Outbound Performance Metrics", + "getTaskPerformanceMetrics", + { + taskTypes: ["PICK", "PACK"], + userIds: ["{{pickingUser.userId}}", "{{packingUser.userId}}"], + dateStart: new Date().toISOString().split("T")[0], + dateEnd: new Date().toISOString().split("T")[0], + }, + "outboundMetrics", + ); + + // Step 24: Create Manifest + this.builder.addWmsStep( + "createShipmentManifest", + "Create Shipment Manifest", + "createManifest", + { + shipmentId: "{{shipment.shipmentId}}", + cartIds: ["{{pickingCart.cartId}}"], + status: ["COMPLETED"], + }, + "manifest", + ); + + return this.builder.build(); + } +} + +export class WmsODBuilderFactory { + static createInboundBuilder(config: WmsODBuilderConfig): InboundReceivingWmsODBuilder { + return new InboundReceivingWmsODBuilder(config); + } + + static createOutboundBuilder(config: WmsODBuilderConfig): OutboundFulfillmentWmsODBuilder { + return new OutboundFulfillmentWmsODBuilder(config); + } + + static createBuilder( + type: string, + config: WmsODBuilderConfig, + ): InboundReceivingWmsODBuilder | OutboundFulfillmentWmsODBuilder { + switch (type) { + case "inbound": + case "inbound-receiving": + return new InboundReceivingWmsODBuilder(config); + case "outbound": + case "outbound-fulfillment": + return new OutboundFulfillmentWmsODBuilder(config); + default: + throw new Error(`Unsupported WMS OD builder type: ${type}`); + } + } +} diff --git a/packages/controlmart/src/utils/wms/tool-annotations.wms.ts b/packages/controlmart/src/utils/wms/tool-annotations.wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..4c5647808dedc1a7cf83909e3b1e9a06709155f6 --- /dev/null +++ 
b/packages/controlmart/src/utils/wms/tool-annotations.wms.ts @@ -0,0 +1,270 @@ +/** + * WMS Tool Annotations for Knowledge Graph + * + * Annotations for WMS tools used in operational descriptors. + * Each annotation describes what entities a tool produces, requires, and modifies. + */ + +import type { ToolAnnotation } from '../../types'; + +/** + * Annotated WMS tools for knowledge graph building + * + * Section 1: Research OD tools (dotted naming convention) + * Section 2: Legacy od-arch tools (camelCase naming convention) + */ +export const WMS_TOOL_ANNOTATIONS: ToolAnnotation[] = [ + // ============================================================ + // Research OD Tools (used in perishables-food-manufacturer ODs) + // ============================================================ + + // --- Outbound Order Tools --- + { + toolId: 'wms.outbound.order.get_by_status', + service: 'wms', + produces: [], + fetches: ['OutboundOrder'], + requires: [], + modifies: [], + description: 'Get outbound orders filtered by status', + }, + { + toolId: 'wms.outbound.order.update_status', + service: 'wms', + produces: [], + requires: ['OutboundOrder'], + modifies: ['OutboundOrder'], + description: 'Update the status of an outbound order', + }, + { + toolId: 'wms.outbound.order.create', + service: 'wms', + produces: ['OutboundOrder'], + requires: ['Order'], + modifies: [], + description: 'Create an outbound order from a sales order', + }, + { + toolId: 'wms.outbound.order.allocate_line', + service: 'wms', + produces: [], + requires: ['OutboundOrder'], + modifies: ['OutboundOrder'], + description: 'Allocate inventory to an outbound order line', + }, + { + toolId: 'wms.outbound_shipment.create', + service: 'wms', + produces: ['Shipment'], + requires: ['OutboundOrder'], + modifies: [], + description: 'Create a shipment for an outbound order', + }, + { + toolId: 'wms.outbound_shipment.update_status', + service: 'wms', + produces: [], + requires: ['Shipment'], + modifies: ['Shipment'], + description: 'Update the status of a shipment', + }, + + // --- Task Tools --- + { + toolId: 'wms.task.create', + service: 'wms', + produces: ['Task'], + requires: ['OutboundOrder'], + modifies: [], + description: 'Create a warehouse task (pick, pack, putaway)', + }, + { + toolId: 'wms.task.update_status', + service: 'wms', + produces: [], + requires: ['Task'], + modifies: ['Task'], + description: 'Update the status of a warehouse task', + }, + { + toolId: 'wms.task.get_active', + service: 'wms', + produces: [], + fetches: ['Task'], + requires: [], + modifies: [], + description: 'Get active warehouse tasks', + }, + + // --- Inventory Transaction Tools --- + { + toolId: 'wms.inventory.transaction.create', + service: 'wms', + produces: ['InventoryTransaction'], + requires: ['Product'], + modifies: ['Inventory'], + description: 'Create an inventory transaction (receive, ship, adjust)', + }, + { + toolId: 'wms.inventory.transaction.get_history', + service: 'wms', + produces: [], + fetches: ['InventoryTransaction'], + requires: [], + modifies: [], + description: 'Get inventory transaction history', + }, + + // --- Receiving Tools --- + { + toolId: 'wms.receiving_transaction.create', + service: 'wms', + produces: ['ReceivingTransaction'], + requires: ['InboundOrder'], + modifies: ['Inventory'], + description: 'Create a receiving transaction for inbound goods', + }, + + // --- Inbound Order Tools --- + { + toolId: 'wms.inbound.order.get_by_po_number', + service: 'wms', + produces: [], + fetches: ['InboundOrder'], + requires: [], + 
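    // Illustrative sketch (helper and edge labels are hypothetical): annotations like these can be
    // folded into knowledge-graph edges, e.g. produces -> tool-creates-entity, fetches -> tool-reads-entity,
    // requires -> entity-feeds-tool, modifies -> tool-updates-entity:
    //
    //   const toEdges = (a: ToolAnnotation) => [
    //     ...a.produces.map((e) => ({ from: a.toolId, rel: "CREATES", to: e })),
    //     ...(a.fetches ?? []).map((e) => ({ from: a.toolId, rel: "READS", to: e })),
    //     ...a.requires.map((e) => ({ from: e, rel: "FEEDS", to: a.toolId })),
    //     ...a.modifies.map((e) => ({ from: a.toolId, rel: "UPDATES", to: e })),
    //   ];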
modifies: [], + description: 'Get inbound order by purchase order number', + }, + { + toolId: 'wms.inbound.order.create', + service: 'wms', + produces: ['InboundOrder'], + requires: ['PurchaseOrder'], + modifies: [], + description: 'Create an inbound order from a purchase order', + }, + { + toolId: 'wms.inbound.order.update_status', + service: 'wms', + produces: [], + requires: ['InboundOrder'], + modifies: ['InboundOrder'], + description: 'Update the status of an inbound order', + }, + + // --- Bin/Location Tools --- + { + toolId: 'wms.bin.get_by_warehouse', + service: 'wms', + produces: [], + fetches: ['Bin'], + requires: [], + modifies: [], + description: 'Get bins in a warehouse', + }, + { + toolId: 'wms.bin.get_available', + service: 'wms', + produces: [], + fetches: ['Bin'], + requires: [], + modifies: [], + description: 'Get available bins for putaway', + }, + // --- Warehouse/Zone Tools --- + { + toolId: 'wms.warehouse.get_by_code', + service: 'wms', + produces: [], + fetches: ['Warehouse'], + requires: [], + modifies: [], + description: 'Get a warehouse by its code', + }, + { + toolId: 'wms.zone.get_by_warehouse', + service: 'wms', + produces: [], + fetches: ['Zone'], + requires: [], + modifies: [], + description: 'Get zones in a warehouse', + }, + + // --- Cycle Count & Metrics Tools --- + { + toolId: 'wms.cycle_count.create', + service: 'wms', + produces: ['CycleCount'], + requires: ['Inventory'], + modifies: [], + description: 'Create a cycle count record', + }, + { + toolId: 'wms.daily_metrics.create', + service: 'wms', + produces: ['DailyMetrics'], + requires: ['Inventory'], + modifies: [], + description: 'Create daily warehouse metrics record', + }, + + // ============================================================ + // Legacy od-arch Tools (kept for backward compatibility) + // ============================================================ + + { + toolId: 'wms.getInventoryByProduct', + service: 'wms', + produces: [], + fetches: ['InventoryRecord'], + requires: ['Product'], + modifies: [], + description: 'Queries current inventory levels for a product across all warehouse locations', + }, + { + toolId: 'wms.getDockSchedule', + service: 'wms', + produces: [], + fetches: ['DockSchedule'], + requires: [], + modifies: [], + description: 'Retrieves dock appointment schedule for a specified date range', + }, + { + toolId: 'wms.getAvailableTimeSlots', + service: 'wms', + produces: [], + fetches: ['TimeSlot'], + requires: ['DockSchedule'], + modifies: [], + description: 'Retrieves available dock appointment time slots for a specific date', + }, + { + toolId: 'wms.getAvailableEquipment', + service: 'wms', + produces: [], + fetches: ['Equipment'], + requires: [], + modifies: [], + description: 'Queries available warehouse equipment by type and zone', + }, + { + toolId: 'wms.getShipmentStatus', + service: 'wms', + produces: [], + fetches: ['ShipmentStatus'], + requires: ['Shipment'], + modifies: [], + description: 'Retrieves current shipment status, location, and tracking information', + }, + { + toolId: 'wms.inventory.ensure_available', + service: 'wms', + produces: [], + fetches: ['InventoryTransaction'], + requires: ['Product'], + modifies: [], + description: 'Ensure inventory availability for a product', + }, +]; diff --git a/packages/controlmart/src/verification/__tests__/verification.test.ts b/packages/controlmart/src/verification/__tests__/verification.test.ts new file mode 100644 index 0000000000000000000000000000000000000000..696421c3fe465cc0e9a495e09fb4fac255697cfc --- 
/dev/null
+++ b/packages/controlmart/src/verification/__tests__/verification.test.ts
@@ -0,0 +1,132 @@
+import { describe, it, expect, mock } from "bun:test";
+import { verifyTicket, type WorldVerifierMap } from "../engine";
+import type { Ticket, VerificationResult } from "../types/verification.types";
+
+describe("Verification Engine (verifyTicket)", () => {
+  const mockLogger = {
+    info: mock(() => { }),
+    error: mock(() => { }),
+    debug: mock(() => { }),
+  };
+
+  const mockVerifier = mock(async (ticket: Ticket, logger: any): Promise<VerificationResult> => {
+    return {
+      passed: true,
+      ticketId: ticket.id,
+      worldId: ticket.worldId,
+      timestamp: new Date().toISOString(),
+      totalChecks: 1,
+      passedChecks: 1,
+      failedChecks: 0,
+      verificationDurationMs: 10,
+    };
+  });
+
+  const registry: Record<string, WorldVerifierMap> = {
+    "world-1": {
+      "od-1": mockVerifier,
+      "*": mock(async () => ({ passed: false } as any)),
+    },
+    "*": {
+      "global-od": mock(async () => ({ passed: true, worldId: "global" } as any)),
+    }
+  };
+
+  it("should dispatch to specific world and OD verifier", async () => {
+    const ticket: Ticket = {
+      id: "t-1",
+      worldId: "world-1",
+      odId: "od-1",
+      odRunId: "run-1",
+      failedStepId: "step-1",
+      failureType: "TEST",
+      affectedEntities: [],
+      createdAt: new Date().toISOString(),
+      status: "new",
+    };
+
+    const result = await verifyTicket(ticket, "world-1", registry, mockLogger);
+
+    expect(result.passed).toBe(true);
+    expect(result.ticketId).toBe("t-1");
+    expect(mockVerifier).toHaveBeenCalled();
+  });
+
+  it("should fall back to world-specific generic verifier (*)", async () => {
+    const ticket: Ticket = {
+      id: "t-2",
+      worldId: "world-1",
+      odId: "unknown-od",
+      odRunId: "run-1",
+      failedStepId: "step-1",
+      failureType: "TEST",
+      affectedEntities: [],
+      createdAt: new Date().toISOString(),
+      status: "new",
+    };
+
+    const result = await verifyTicket(ticket, "world-1", registry, mockLogger);
+
+    expect(result.passed).toBe(false);
+  });
+
+  it("should fall back to global registry (*) if world is not found", async () => {
+    const ticket: Ticket = {
+      id: "t-3",
+      worldId: "unknown-world",
+      odId: "global-od",
+      odRunId: "run-1",
+      failedStepId: "step-1",
+      failureType: "TEST",
+      affectedEntities: [],
+      createdAt: new Date().toISOString(),
+      status: "new",
+      metadata: {},
+    };
+
+    const result = await verifyTicket(ticket, "unknown-world", registry, mockLogger);
+
+    expect(result.passed).toBe(true);
+    expect(result.worldId).toBe("global");
+  });
+
+  it("should throw error if world is not found and no global fallback exists", async () => {
+    const ticket: Ticket = {
+      id: "t-4",
+      worldId: "unknown-world",
+      odId: "unknown-od",
+      odRunId: "run-1",
+      failedStepId: "step-1",
+      failureType: "TEST",
+      affectedEntities: [],
+      createdAt: new Date().toISOString(),
+      status: "new",
+    };
+
+    const emptyRegistry: Record<string, WorldVerifierMap> = {};
+
+    await expect(verifyTicket(ticket, "unknown-world", emptyRegistry, mockLogger)).rejects.toThrow("No verifiers found for world layout: unknown-world");
+  });
+
+  it("should throw error if OD is not found in world and no local fallback exists", async () => {
+    const ticket: Ticket = {
+      id: "t-5",
+      worldId: "world-1",
+      odId: "unknown-od",
+      odRunId: "run-1",
+      failedStepId: "step-1",
+      failureType: "TEST",
+      affectedEntities: [],
+      createdAt: new Date().toISOString(),
+      status: "new",
+    };
+
+    const limitedRegistry: Record<string, WorldVerifierMap> = {
+      "world-1": {
+        "specific-od": mockVerifier,
+      }
+    };
+
+    await expect(verifyTicket(ticket, "world-1", limitedRegistry, mockLogger)).rejects.toThrow("No verifier found for OD: unknown-od in world: world-1");
+  });
+});
diff --git a/packages/controlmart/src/verification/engine.ts b/packages/controlmart/src/verification/engine.ts
new file mode 100644
index 0000000000000000000000000000000000000000..7db100048cffa308220baead2e7e6f75635a896c
--- /dev/null
+++ b/packages/controlmart/src/verification/engine.ts
@@ -0,0 +1,34 @@
+import type { Ticket, VerificationResult } from "./types/verification.types";
+
+/**
+ * Registry of world-specific verifiers
+ */
+export type WorldVerifierMap = Record<string, (ticket: Ticket, logger: any) => Promise<VerificationResult>>;
+
+/**
+ * Functional verification engine
+ */
+export const verifyTicket = async (
+  ticket: Ticket,
+  worldLayout: string,
+  registry: Record<string, WorldVerifierMap>,
+  logger: any
+): Promise<VerificationResult> => {
+  const worldId = ticket.worldId;
+  const odId = ticket.odId;
+
+  // 1. Dispatch to world-specific registry
+  const worldVerifiers = registry[worldLayout] || registry["*"]; // Fallback to generic if needed
+  if (!worldVerifiers) {
+    throw new Error(`No verifiers found for world layout: ${worldLayout}`);
+  }
+
+  // 2. Dispatch to OD-specific verifier
+  const verifier = worldVerifiers[odId] || worldVerifiers["*"];
+  if (!verifier) {
+    throw new Error(`No verifier found for OD: ${odId} in world: ${worldId}`);
+  }
+
+  // 3. Execute and return result
+  return await verifier(ticket, logger) as VerificationResult;
+};
diff --git a/packages/controlmart/src/verification/index.ts b/packages/controlmart/src/verification/index.ts
new file mode 100644
index 0000000000000000000000000000000000000000..63147538b8c6ec37d0bbcaaaf5a2378263530b59
--- /dev/null
+++ b/packages/controlmart/src/verification/index.ts
@@ -0,0 +1,4 @@
+export * from "./types/verification.types";
+export * from "./engine";
+export * from "./registry";
+export * from "./utils/ticket-mapper.util";
diff --git a/packages/controlmart/src/verification/primitives/verification-primitives.ts b/packages/controlmart/src/verification/primitives/verification-primitives.ts
new file mode 100644
index 0000000000000000000000000000000000000000..ae63e769851d6c7853795302790e504fc647f365
--- /dev/null
+++ b/packages/controlmart/src/verification/primitives/verification-primitives.ts
@@ -0,0 +1,101 @@
+import { evaluateInvariant } from "../utils/generic-checks";
+import type { InvariantCheckResult } from "../types/verification.types";
+
+/**
+ * Checks if 'earlier' event occurred before 'later' event.
+ */
+export const checkCausalOrder = (
+  earlierTime: Date | string | undefined,
+  laterTime: Date | string | undefined,
+  code: string,
+  description: string,
+  report: {
+    earlierLabel?: string,
+    laterLabel?: string
+  } = {}
+): InvariantCheckResult | null => {
+  if (!earlierTime && !laterTime) return null; // Can't check if both missing
+
+  // If one is missing, we might assume failure if strict, or skip.
+  // Here we skip if data is incomplete to avoid false positives on missing optional data,
+  // but the caller normally ensures critical timestamps exist.
+  if (!earlierTime || !laterTime) return null;
+
+  const t1 = new Date(earlierTime).getTime();
+  const t2 = new Date(laterTime).getTime();
+  const passed = t1 <= t2;
+
+  return evaluateInvariant(
+    code,
+    description,
+    passed,
+    {
+      [report.earlierLabel || "earlier"]: earlierTime,
+      [report.laterLabel || "later"]: laterTime
+    },
+    passed ? "Chronological" : "Causality Violation"
+  );
+};
+
+/**
+ * Checks conservation of quantity between two numbers.
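 *
 * A minimal usage sketch (values are illustrative): comparing an ordered quantity
 * against the total picked quantity for an order line,
 *
 *   checkQuantityConserved(50, 45, "QTY_PICKED_VS_ORDERED", "order line 1")
 *
 * would evaluate 50 === 45 as false and report "Diff: -5" alongside the
 * { expected, actual } details.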
+ */ +export const checkQuantityConserved = ( + expected: number, + actual: number, + code: string = "QTY_CONSERVED", + context: string = "" +): InvariantCheckResult => { + return evaluateInvariant( + code, + `Quantity conserved: ${context}`, + expected === actual, + { expected, actual }, + expected === actual ? "Conserved" : `Diff: ${actual - expected}` + ); +}; + +/** + * Checks that a list of entities refers to a single parent reference. + */ +export const checkEntityReference = ( + entities: any[], + referenceId: string, + code: string = "REF_INTEGRITY", + refField: string = "reference.id" +): InvariantCheckResult => { + // Basic deep access helper if needed, but assuming structure for now + const getRef = (e: any) => { + const parts = refField.split('.'); + let val = e; + for (const p of parts) val = val?.[p]; + return val; + }; + + const invalid = entities.filter(e => getRef(e) !== referenceId); + + return evaluateInvariant( + code, + `All entities reference ${referenceId}`, + invalid.length === 0, + invalid.length, + "0 Invalid Refs" + ); +}; + +/** + * Checks that exactly one entity of a type exists. + */ +export const checkSingleEntity = ( + entities: any[], + typeLabel: string, + code: string = "SINGLE_ENTITY" +): InvariantCheckResult => { + return evaluateInvariant( + code, + `Exactly one ${typeLabel} exists`, + entities.length === 1, + entities.length, + "1" + ); +}; diff --git a/packages/controlmart/src/verification/registry.ts b/packages/controlmart/src/verification/registry.ts new file mode 100644 index 0000000000000000000000000000000000000000..be99de18a4485fe3ea0cb1cd1b2171cfb5fb0968 --- /dev/null +++ b/packages/controlmart/src/verification/registry.ts @@ -0,0 +1,6 @@ +import { processOutboundVerifiers } from "../worlds/process-outbound/verifications"; +import type { WorldVerifierMap } from "./engine"; + +export const verifierRegistry: Record<string, WorldVerifierMap> = { + "process-outbound": processOutboundVerifiers, +}; diff --git a/packages/controlmart/src/verification/systems/wms/wms-projection.builder.ts b/packages/controlmart/src/verification/systems/wms/wms-projection.builder.ts new file mode 100644 index 0000000000000000000000000000000000000000..0a33549e14bd1fe9aa0775c8d6d2c7f044c0ee44 --- /dev/null +++ b/packages/controlmart/src/verification/systems/wms/wms-projection.builder.ts @@ -0,0 +1,119 @@ +import { WMSOutboundOrderRepository } from "../../../repository/wms/outbound_order.wms.repository"; +import { WMSTaskRepository } from "../../../repository/wms/task.wms.repository"; +import { WMSOutboundShipmentRepository } from "../../../repository/wms/outbound_shipment.wms.repository"; +import { WMSInventoryTransactionRepository } from "../../../repository/wms/inventory_transaction.wms.repository"; +import type { Ticket } from "../../types/verification.types"; + +// Extended type to include runtime-only properties/mongoose timestamps +export type VerifiableOrder = { + _id: any; + orderId: string; + orderStatus: string; + lines: any[]; + customFields?: any; + createdAt: Date; + updatedAt: Date; + timing?: { + releasedAt?: Date; + allocatedAt?: Date; + pickingStartedAt?: Date; + pickedAt?: Date; + packingStartedAt?: Date; + packedAt?: Date; + }; + dates?: { + actualShipDate?: Date; + }; + [key: string]: any; +}; + +export interface WMSProjection { + order: VerifiableOrder | null; + tasks: any[]; + shipments: any[]; + transactions: any[]; + orderId?: string; +} + +export const buildWMSProjection = async (ticket: Ticket): Promise<WMSProjection> => { + const worldId = ticket.worldId; + + const orderRepo = 
WMSOutboundOrderRepository(worldId); + const taskRepo = WMSTaskRepository(worldId); + const shipmentRepo = WMSOutboundShipmentRepository(worldId); + const inventoryRepo = WMSInventoryTransactionRepository(worldId); + + // 1. Resolve Order ID + let orderId = ticket.affectedEntities.find((e: any) => e.type === "ORDER")?.id; + + // Fallback: Check context snapshots if explicit entity is missing + if (!orderId && ticket.metadata?.contextSnapshots) { + for (const snapshot of ticket.metadata.contextSnapshots) { + // Check for direct orderId in data + if (snapshot.data?.orderId) { + orderId = snapshot.data.orderId; + break; + } + // Check for referenceId if type is ORDER + if (snapshot.data?.referenceType === "ORDER" && snapshot.data?.referenceId) { + orderId = snapshot.data.referenceId; + break; + } + // Check for order object + if (snapshot.data?.order?.orderId) { + orderId = snapshot.data.order.orderId; + break; + } + } + } + + // NOTE: We do NOT strip prefixes here because the DB stores the full URN (e.g. wms:outbound-order:...) + // if (orderId && orderId.includes(":")) ... + + if (!orderId) { + const shipmentEntity = ticket.affectedEntities.find((e: any) => e.type === "SHIPMENT"); + if (shipmentEntity) { + const shipment = await shipmentRepo.getShipmentById(shipmentEntity.id); + if (shipment) { + orderId = (shipment.lines?.find((l: any) => l.orderId)?.orderId || + shipment.orders?.find((o: any) => o.orderId)?.orderId) || undefined; + } + } + } + + if (!orderId) { + return { + order: null, + tasks: [], + shipments: [], + transactions: [], + orderId: undefined + }; + } + + // 2. Parallel Fetch + const [rawOrder, tasks, shipments, transactions] = await Promise.all([ + orderRepo.getOutboundOrderById(orderId), + taskRepo.getTaskLogs({ status: [] }), + shipmentRepo.getShipmentsByStatus(["SHIPPED", "LOADED", "PACKED", "STAGED", "HU_CREATED"]), + inventoryRepo.getTransactionsByReference("ORDER", orderId) + ]); + + // 3. 
Filter & Normalize + // Cast to verifiable type to access runtime properties safely + const order = rawOrder as unknown as VerifiableOrder | null; + + const orderTasks = tasks.filter((t: any) => t.reference?.id === orderId); + const orderShipments = shipments.filter((s: any) => + s.lines?.some((l: any) => l.orderId === orderId) || + s.orders?.some((o: any) => o.orderId === orderId) + ); + + return { + order, + tasks: orderTasks, + shipments: orderShipments, + transactions, + orderId + }; +}; diff --git a/packages/controlmart/src/verification/types/verification.types.ts b/packages/controlmart/src/verification/types/verification.types.ts new file mode 100644 index 0000000000000000000000000000000000000000..cfebecbe451963fbb0bdfaa7c4f33c5e0fe14631 --- /dev/null +++ b/packages/controlmart/src/verification/types/verification.types.ts @@ -0,0 +1,120 @@ +import type { Logger } from "pino"; + +/** + * Represents a ticket generated from an OD failure + */ +export interface Ticket<TFailure = string, TEntity = string> { + id: string; + worldId: string; + odId: string; + odRunId: string; + failedStepId: string; + failureType: TFailure; + affectedEntities: AffectedEntity<TEntity>[]; + createdAt: string; + status: "new" | "open" | "in_progress" | "on_hold" | "resolved" | "closed"; + type?: string; + metadata?: Record<string, any>; +} + +/** + * Entity affected by the failure + */ +export interface AffectedEntity<TEntity = string> { + type: TEntity; + id: string; + status?: string; + metadata?: Record<string, any>; +} + +/** + * Result of verification check + */ +export interface VerificationResult<TEntity = string> { + passed: boolean; + ticketId: string; + worldId: string; + timestamp: string; + + // Detailed breakdown + invariantChecks?: InvariantCheckResult[]; + entityStateChecks?: EntityStateCheckResult<TEntity>[]; + constraintChecks?: ConstraintCheckResult[]; + + // Summary + totalChecks: number; + passedChecks: number; + failedChecks: number; + + // Failure info + failureReason?: string; + failureDetails?: any; + + // Metadata + verificationDurationMs: number; +} + +export interface InvariantCheckResult { + invariantId: string; + description: string; + passed: boolean; + actual?: any; + expected?: any; + error?: string; +} + +export interface EntityStateCheckResult<TEntity = string> { + entityType: TEntity; + entityId: string; + expectedStates: string[]; + actualState: string; + passed: boolean; +} + +export interface ConstraintCheckResult { + constraintId: string; + description: string; + passed: boolean; + details?: any; +} + +/** + * Defines what must be true for a ticket to be considered resolved + */ +export interface VerificationStrategy<TFailure = string, TEntity = string> { + failureType: TFailure; + invariants: InvariantCheck[]; + entityStateRequirements: EntityStateRequirement<TEntity>[]; + domainConstraints: DomainConstraint[]; +} + +export interface InvariantCheck { + id: string; + description: string; + evaluate: (ticket: Ticket, worldState: any, logger: Logger) => Promise<InvariantCheckResult>; +} + +export interface EntityStateRequirement<TEntity = string> { + entityType: TEntity; + allowedStates: string[]; + extractEntityId: (ticket: Ticket) => string | null; +} + +export interface DomainConstraint { + id: string; + description: string; + evaluate: (ticket: Ticket, worldState: any, logger: Logger) => Promise<ConstraintCheckResult>; +} + +/** + * Configuration for the verification system + */ +export interface VerificationConfig { + enabledLayers: { + invariants: boolean; + entityStates: boolean; + domainConstraints: boolean; + }; + timeout?: number; // milliseconds + strictMode?: boolean; // fail on first check failure +} diff --git a/packages/controlmart/src/verification/utils/generic-checks.ts 
b/packages/controlmart/src/verification/utils/generic-checks.ts new file mode 100644 index 0000000000000000000000000000000000000000..c6875ca121c6c947436fb6c677ce9d0639dfaa84 --- /dev/null +++ b/packages/controlmart/src/verification/utils/generic-checks.ts @@ -0,0 +1,53 @@ +import type { InvariantCheckResult, EntityStateCheckResult } from "../types/verification.types"; + +/** + * Generic check for entity status + */ +export const checkEntityStatus = <T extends string>( + entity: any, + entityType: T, + entityId: string, + allowedStates: string[] +): EntityStateCheckResult<T> => { + if (!entity) { + return { + entityType, + entityId, + expectedStates: allowedStates, + actualState: "NOT_FOUND", + passed: false, + }; + } + + const actualState = entity.status || entity.state; + const passed = allowedStates.includes(actualState); + + return { + entityType, + entityId, + expectedStates: allowedStates, + actualState: actualState || "UNKNOWN", + passed, + }; +}; + +/** + * Generic invariant evaluator + */ +export const evaluateInvariant = ( + id: string, + description: string, + condition: boolean, + actual?: any, + expected?: any, + error?: string +): InvariantCheckResult => { + return { + invariantId: id, + description, + passed: condition, + actual, + expected, + error, + }; +}; diff --git a/packages/controlmart/src/verification/utils/ticket-mapper.util.ts b/packages/controlmart/src/verification/utils/ticket-mapper.util.ts new file mode 100644 index 0000000000000000000000000000000000000000..650018ac744942a4dffe47a3c353d577c7c29127 --- /dev/null +++ b/packages/controlmart/src/verification/utils/ticket-mapper.util.ts @@ -0,0 +1,94 @@ +import type { TWorldItsmTicketModel } from "../../models/tickets.model"; +import type { Ticket, AffectedEntity } from "../types/verification.types"; + +/** + * Maps a WorldItsmTicket DB model to a Verification Ticket + */ +export const mapToVerificationTicket = (dbTicket: TWorldItsmTicketModel): Ticket => { + const metadata = dbTicket.metadata || {}; + const contextSnapshots = metadata.contextSnapshots || []; + + // Extract affected entities from context snapshots if available + const affectedEntities: AffectedEntity[] = []; + + // 1. Try to get entities from metadata if pre-populated + if (metadata.affectedEntities && Array.isArray(metadata.affectedEntities)) { + affectedEntities.push(...metadata.affectedEntities); + } + + // 2. 
Fallback: Search in metadata and context snapshots + + // Helper to find ID with multiple possible keys + const findId = (obj: any, keys: string[]) => { + if (!obj) return null; + for (const key of keys) { + if (obj[key]) return obj[key]; + } + return null; + }; + + // Check top-level metadata first + if (affectedEntities.length === 0) { + const orderId = findId(metadata, ["orderId", "order_id", "outboundOrderId", "outbound_order_id"]); + if (orderId) affectedEntities.push({ type: "ORDER", id: orderId }); + + const shipmentId = findId(metadata, ["shipmentId", "shipment_id", "outboundShipmentId"]); + if (shipmentId) affectedEntities.push({ type: "SHIPMENT", id: shipmentId }); + } + + // Check context snapshots in reverse order + if (affectedEntities.length === 0 && contextSnapshots.length > 0) { + // Look through all snapshots from newest to oldest until we find entities + for (let i = contextSnapshots.length - 1; i >= 0; i--) { + const context = contextSnapshots[i].context || {}; + + // Search for ORDER + if (!affectedEntities.some(e => e.type === "ORDER")) { + const orderId = findId(context, ["orderId", "order_id", "outboundOrderId", "outbound_order_id"]) || + findId(context.order, ["id", "orderId", "_id", "order_id"]); + if (orderId) affectedEntities.push({ type: "ORDER", id: orderId }); + } + + // Search for SHIPMENT + if (!affectedEntities.some(e => e.type === "SHIPMENT")) { + const shipmentId = findId(context, ["shipmentId", "shipment_id", "outboundShipmentId"]) || + findId(context.shipment, ["id", "shipmentId", "_id"]); + if (shipmentId) affectedEntities.push({ type: "SHIPMENT", id: shipmentId }); + } + + // Search for TASK + if (!affectedEntities.some(e => e.type === "TASK")) { + const taskId = findId(context, ["taskId", "task_id"]) || + findId(context.task, ["id", "taskId", "_id"]); + if (taskId) affectedEntities.push({ type: "TASK", id: taskId }); + } + + // Search for INVENTORY + if (!affectedEntities.some(e => e.type === "INVENTORY")) { + const invId = findId(context, ["inventoryId", "inventory_id"]) || + findId(context.inventory, ["id", "inventoryId", "_id"]); + if (invId) affectedEntities.push({ type: "INVENTORY", id: invId }); + } + + // If we found something, likely this is the relevant snapshot context, but we can keep looking if incomplete? 
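+ // (Mixing entities from different snapshots could pair an order from one run with a task from another, which would make verification misleading.)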
+ // For now, let's stop if we found at least one entity to avoid mixing context potentially (though usually they accumulate) + if (affectedEntities.length > 0) break; + } + } + + // Construct the Verification Ticket + const ticketAny = dbTicket as any; + return { + id: ticketAny._id?.toString() || ticketAny.id?.toString(), + worldId: dbTicket.worldRef?.worldId || "UNKNOWN", + odId: metadata.odId || "UNKNOWN_OD", + odRunId: metadata.runId || "UNKNOWN_RUN", + failedStepId: metadata.failedStepId || "UNKNOWN_STEP", + failureType: metadata.failureType || "UNKNOWN_FAILURE", + affectedEntities, + createdAt: dbTicket.createdAt.toISOString(), + status: dbTicket.status, + type: dbTicket.type, + metadata: metadata + }; +}; diff --git a/packages/controlmart/src/worlds/common/wms-verifiers.ts b/packages/controlmart/src/worlds/common/wms-verifiers.ts new file mode 100644 index 0000000000000000000000000000000000000000..6c323a6af81e562ee6c0646f6ea3b9175f028fc9 --- /dev/null +++ b/packages/controlmart/src/worlds/common/wms-verifiers.ts @@ -0,0 +1,36 @@ +import { checkEntityStatus } from "../../verification/utils/generic-checks"; +import type { EntityStateCheckResult } from "../../verification/types/verification.types"; + +/** + * WMS-specific entity types + */ +export type WMSEntityType = "ORDER" | "TASK" | "SHIPMENT" | "INVENTORY" | "BIN"; + +/** + * Common WMS domain checks + */ +export const wmsVerifiers = { + /** + * Check if an order is in a terminal or progressable state + */ + checkOrderStatus: (order: any, allowedStates: string[] = ["CLOSED", "SHIPPED", "CANCELLED"]): EntityStateCheckResult => { + const actualState = order?.orderStatus || order?.status || order?.state; + return checkEntityStatus({ ...order, status: actualState }, "ORDER", order?.orderId || "UNKNOWN", allowedStates); + }, + + /** + * Check if a task is completed/progressed + */ + checkTaskStatus: (task: any, allowedStates: string[] = ["PICK_CONFIRMED", "COMPLETED"]): EntityStateCheckResult => { + const actualState = task?.taskStatus || task?.status || task?.state; + return checkEntityStatus({ ...task, status: actualState }, "TASK", task?.taskId || "UNKNOWN", allowedStates); + }, + + /** + * Check if a shipment is processed + */ + checkShipmentStatus: (shipment: any, allowedStates: string[] = ["SHIPPED", "LOADED"]): EntityStateCheckResult => { + const actualState = shipment?.shipmentStatus || shipment?.status || shipment?.state; + return checkEntityStatus({ ...shipment, status: actualState }, "SHIPMENT", shipment?.shipmentId || "UNKNOWN", allowedStates); + } +}; diff --git a/packages/controlmart/src/worlds/index.ts b/packages/controlmart/src/worlds/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..e4e85b32832b7f5b9a6349983479d1b94c95c9ab --- /dev/null +++ b/packages/controlmart/src/worlds/index.ts @@ -0,0 +1,62 @@ +import type { TWorldConfig } from "../types/world.type"; +// import { perishableWorldDocs } from "./perishables-food-manufacturer"; +// import { +// seedDataPerishablesManufacturerWorld, +// seedODsPerishablesManufacturerWorld, +// } from "./perishables-food-manufacturer"; +// import { ODs as perishablesODs } from "./perishables-food-manufacturer/ods"; +// import { manufacturingUnitWorldDocs } from "./manufacturing-unit"; +// import { +// seedDataManufacturingUnitWorld, +// seedODsManufacturingUnitWorld, +// } from "./manufacturing-unit"; +// import { ODs as manufacturingODs } from "./manufacturing-unit/ods"; +import { processOutboundODs, processOutboundWorldDocs, seedDataProcessOutboundWorld, 
seedODsProcessOutboundWorld } from "./process-outbound"; +import { processInboundODs, processInboundWorldDocs, seedDataProcessInboundWorld, seedODsProcessInboundWorld } from "./process-inbound"; + +export const worlds: Record<string, TWorldConfig> = { + // "perishables-food-manufacturer": { + // id: "perishables-food-manufacturer", + // layoutName: "Perishables Manufacturing", + // shortDescription: + // "SkyFoods International is a high-volume manufacturer of premium snack foods and frozen desserts. The company operates on a make-to-stock model with strict quality controls due to the highly perishable nature of its raw ingredients (milk, cream, potatoes).", + // description: "Simulates the operations of SkyFoods International, a manufacturer of perishable goods. The world models the complete 'Procure to Pay' and 'Order to Cash' cycles, including: Raw Material Procurement and Safety Stock management; Inbound Logistics with ASN (EDI 856) receiving; Internal Manufacturing with BOM consumption (Potato Chips, Ice Cream); B2B Sales Orders with credit checks; and Outbound Fulfillment involving Pick-Pack-Ship and Carrier selection. It features realistic system interactions across ERP, WMS, TMS, and Finance modules.", + // docs: perishableWorldDocs, + // dataSeeder: seedDataPerishablesManufacturerWorld, + // odSeeder: seedODsPerishablesManufacturerWorld, + // ods: perishablesODs, + // }, + // "manufacturing-unit": { + // id: "manufacturing-unit", + // layoutName: "Manufacturing Unit - Production Operations", + // shortDescription: + // "A focused manufacturing simulation world demonstrating the complete production lifecycle from raw material procurement through finished goods receipt, emphasizing MES integration.", + // description: "Simulates a production-focused manufacturing unit where raw materials flow from suppliers through warehouse storage, are transformed in the manufacturing facility, and finished goods are received back into inventory. This world showcases the integration of ERP, WMS, and Manufacturing systems working together to execute production orders efficiently.", + // docs: manufacturingUnitWorldDocs, + // dataSeeder: seedDataManufacturingUnitWorld, + // odSeeder: seedODsManufacturingUnitWorld, + // ods: manufacturingODs, + // }, + "process-outbound": { + id: "process-outbound", + layoutName: "Process Outbound - Outbound Logistics", + shortDescription: + "A focused outbound simulation world demonstrating the complete order lifecycle from order creation through payment, emphasizing outbound logistics.", + description: "Simulates an outbound-focused process where customer orders flow from order creation through allocation, picking, packing, and shipping in the warehouse, and are then invoiced and paid. This world showcases the integration of ERP, WMS, and Finance systems working together to execute outbound orders efficiently.", + docs: processOutboundWorldDocs, + dataSeeder: seedDataProcessOutboundWorld, + odSeeder: seedODsProcessOutboundWorld, + ods: [processOutboundODs.outbound_order], + }, + "process-inbound": { + id: "process-inbound", + layoutName: "Process Inbound - Receiving & Putaway", + shortDescription: + "A focused inbound simulation world demonstrating the complete receiving lifecycle from supplier arrival through quality control, putaway, and inventory management.", + description: "Simulates an inbound-focused process where shipments arrive from suppliers, undergo quality control inspection, and are received into inventory. 
The world showcases the integration of ERP, WMS, and TMS systems working together to execute receiving and putaway operations efficiently. Includes chaos engineering scenarios for real-world disruptions.", + docs: processInboundWorldDocs, + dataSeeder: seedDataProcessInboundWorld, + odSeeder: seedODsProcessInboundWorld, + ods: [processInboundODs.inbound_process], + }, +}; diff --git a/packages/controlmart/src/worlds/manufacturing-unit/index.ts b/packages/controlmart/src/worlds/manufacturing-unit/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..937866021ce04c8ebfe548b07e99ab91e4a0073b --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/index.ts @@ -0,0 +1,22 @@ +import { seedManufacturingWorld } from "./seeder"; +import { scheduleManufacturingWorld } from "./schedule-manufacturing-world"; +import { manufacturingUnitWorldDoc } from "./world-doc"; + +export const seedDataManufacturingUnitWorld = (initialCash: number, worldId?: string) => { + if (initialCash < 0 || initialCash < 200) { + initialCash = 1_000_000; + } + return seedManufacturingWorld({ + initialCash, + worldId, + }); +}; + +export const seedODsManufacturingUnitWorld = async (worldId: string, realHoursPerSimDay: number) => { + return await scheduleManufacturingWorld(worldId, realHoursPerSimDay); +}; + +export const manufacturingUnitWorldDocs = () => { + return manufacturingUnitWorldDoc; +}; + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/aging-inventory-check.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/aging-inventory-check.od.json new file mode 100644 index 0000000000000000000000000000000000000000..62092e82164892fa0ae5ca986cb079e9d29906e9 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/aging-inventory-check.od.json @@ -0,0 +1,170 @@ +{ + "id": "aging-inventory-check", + "name": "Aging Inventory Check", + "version": "1.0.0", + "description": "Check for expired inventory and write off spoilage", + "namespace": "manufacturingUnit.background", + "persona": "Quality Control Specialist", + "type": "background_job", + "steps": [ + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" } + }, + { + "id": "get_inventory", + "name": "Get All Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { + "type": "literal", + "value": { "limit": 200 } + }, + "output": { "storeAs": "inventory" } + }, + { + "id": "identify_expired", + "name": "Identify Expired Items", + "type": "script", + "script": "const items = ctx.inventory?.items || ctx.inventory || []; const now = new Date(); const expired = items.filter(i => i.expirationDate && new Date(i.expirationDate) < now); return { expiredItems: expired, count: expired.length };", + "output": { "storeAs": "spoilageReport" } + }, + { + "id": "write_off_spoilage", + "name": "Write Off Spoilage", + "type": "script", + "script": "const expired = ctx.spoilageReport?.expiredItems || []; if (expired.length === 0) return { writtenOff: 0, totalValue: 0 }; let totalValue = 0; totalValue = expired.reduce((sum, item) => sum + (item.quantity * item.unitPrice), 0); console.log('[SPOILAGE] Writing off', expired.length, 'items. 
Total Value:', totalValue); return { writtenOff: expired.length, totalValue: totalValue };", + "output": { "storeAs": "writeOffResult" } + }, + { + "id": "record_loss", + "name": "Record Financial Loss", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "condition": { "expression": "writeOffResult.writtenOff > `0`" }, + "input": { + "type": "template", + "template": { + "cashDelta": 0, + "inventoryValueDelta": "-{{writeOffResult.totalValue}}" + } + }, + "output": { "storeAs": "updatedLedger" } + }, + { + "id": "get_all_companies", + "name": "Get All Companies", + "type": "mcp", + "service": "erp", + "tool": "company.get_all", + "input": { "type": "literal", "value": { "limit": 10 } }, + "output": { "storeAs": "allCompanies" }, + "retry": { "maxRetries": 2, "backoff": "fixed", "baseMs": 300 } + }, + { + "id": "select_supplier", + "name": "Select a Supplier", + "type": "script", + "script": "const companies = ctx.allCompanies?.items || ctx.allCompanies || []; const suppliers = companies.filter(c => c.companyId !== ctx.mpc?.companyId && c.name !== ctx.mpc?.name); if (suppliers.length === 0) throw new Error(`No suppliers found. Total companies: ${companies.length}. Names: ${companies.map(c => c.name).join(', ')}. MPC: ${ctx.mpc?.name}`); const supplier = suppliers[Math.floor(Math.random() * suppliers.length)]; return supplier;", + "output": { "storeAs": "supplier" } + }, + { + "id": "create_production_order", + "name": "Create Production Order", + "type": "mcp", + "service": "erp", + "tool": "order.create", + "input": { + "type": "template", + "template": { + "customerId": "{{mpc.companyId}}", + "partnerId": "{{supplier.companyId}}", + "poType": "STANDARD", + "status": "RECEIVED", + "currency": "USD", + "paymentTerms": "{{supplier.paymentTerms}}", + "lines": "{{spoilageReport.expiredItems}}", + "totalAmount": "{{writeOffResult.totalValue}}" + } + }, + "output": { "storeAs": "productionOrder" } + }, + { + "id": "reserve_payable", + "name": "Reserve Payable Amount", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { "payablesDelta": "{{writeOffResult.totalValue}}" } + }, + "output": { "storeAs": "updatedLedger" } + }, + { + "id": "generate_edi_850", + "name": "Generate EDI 850", + "type": "mcp", + "service": "edi", + "tool": "generate.850", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.companyId}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{supplier.companyId}}", "name": "{{supplier.name}}" }, + "poNumber": "{{productionOrder.orderId}}", + "items": "{{spoilageReport.expiredItems}}" + } + }, + "output": { "storeAs": "edi850" } + }, + { + "id": "validate_edi_850", + "name": "Validate EDI 850 (PO)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi850}}", + "docType": "850", + "worldId": "{{mpc.companyId}}", + "context": { "items": "{{spoilageReport.expiredItems}}" } + } + }, + "output": { "storeAs": "val850" }, + "continueOnError": true + }, + { + "id": "store_edi", + "name": "Store EDI Transaction", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{supplier.companyId}}", + "docType": "850", + "direction": "OUTBOUND", + "status": "SENT", + "payload": { + "orderId": "{{productionOrder.orderId}}", + "totalAmount": "{{writeOffResult.totalValue}}" + }, + "rawEdi": "{{edi850}}" + } + }, + 
"output": { "storeAs": "ediTransaction" } + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/daily-inventory-check.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/daily-inventory-check.od.json new file mode 100644 index 0000000000000000000000000000000000000000..0d5bf09585bade1c0b0543c1f9259b3cd8e6c6a9 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/daily-inventory-check.od.json @@ -0,0 +1,95 @@ +{ + "id": "daily-inventory-check", + "name": "Daily Inventory Check", + "version": "1.0.0", + "description": "Daily cycle count, inventory verification, and metrics recording", + "namespace": "manufacturingUnit.background", + "persona": "Inventory Manager", + "type": "background_job", + "steps": [ + { + "id": "get_ledger", + "name": "Get Current Financial Position", + "type": "mcp", + "service": "finance", + "tool": "ledger.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "ledger" } + }, + { + "id": "get_financial_summary", + "name": "Get Financial Summary", + "type": "mcp", + "service": "finance", + "tool": "summary.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "financialSummary" } + }, + { + "id": "get_bins", + "name": "Get All Warehouse Bins", + "type": "mcp", + "service": "wms", + "tool": "bin.get_by_warehouse", + "input": { "type": "literal", "value": { "warehouseId": "MAIN_WAREHOUSE", "limit": 100 } }, + "output": { "storeAs": "bins" } + }, + { + "id": "get_inventory", + "name": "Get Current Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { "type": "literal", "value": { "limit": 200 } }, + "output": { "storeAs": "inventory" } + }, + { + "id": "calculate_metrics", + "name": "Calculate Daily Metrics", + "type": "script", + "script": "const inventory = ctx.inventory || {}; const ledger = ctx.ledger || {}; const invItems = inventory.items || []; const totalItems = invItems.length; const totalValue = invItems.reduce((sum, i) => sum + (i.quantity || 0) * (i.unitPrice || 1), 0); return { date: new Date().toISOString().split('T')[0], totalSkus: totalItems, estimatedValue: Math.round(totalValue * 100) / 100, cashPosition: ledger?.cash || 0, receivables: ledger?.totalReceivables || 0, payables: ledger?.totalPayables || 0, netPosition: ledger?.netPosition || 0 };", + "output": { "storeAs": "dailyMetrics" } + }, + { + "id": "create_cycle_count", + "name": "Create Cycle Count", + "type": "mcp", + "service": "wms", + "tool": "cycle_count.create", + "input": { + "type": "template", + "template": { + "countDate": "{{dailyMetrics.date}}", + "countType": "DAILY", + "status": "COMPLETED", + "itemsCounted": "{{dailyMetrics.totalSkus}}", + "discrepancies": 0 + } + }, + "output": { "storeAs": "cycleCount" } + }, + { + "id": "record_metrics", + "name": "Record Daily Metrics", + "type": "mcp", + "service": "wms", + "tool": "daily_metrics.create", + "input": { + "type": "template", + "template": { + "date": "{{dailyMetrics.date}}", + "metrics": "{{dailyMetrics}}" + } + }, + "output": { "storeAs": "recordedMetrics" } + }, + { + "id": "log_summary", + "name": "Log Daily Summary", + "type": "script", + "script": "const dailyMetrics = ctx.dailyMetrics || {}; console.log('[DAILY CHECK]', dailyMetrics.date, '| Cash:', dailyMetrics.cashPosition, '| Receivables:', dailyMetrics.receivables, '| Payables:', dailyMetrics.payables, '| Net:', dailyMetrics.netPosition); return 
dailyMetrics;", + "output": { "storeAs": "loggedSummary" } + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/finished-goods-receipt.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/finished-goods-receipt.od.json new file mode 100644 index 0000000000000000000000000000000000000000..6c9dfda7727a2e35a80580e9cf90d0288f4a01b8 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/finished-goods-receipt.od.json @@ -0,0 +1,156 @@ +{ + "id": "finished-goods-receipt", + "name": "Finished Goods Receipt", + "version": "1.0.0", + "description": "Receive finished goods into WMS after manufacturing completion", + "namespace": "manufacturingUnit.manufacturing", + "persona": "Forklift Operator", + "type": "standard", + "steps": [ + { + "id": "get_production_orders", + "name": "Get Production Orders Ready for Receipt", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { + "poType": "PRODUCTION", + "status": "MANUFACTURING_COMPLETE", + "limit": 10 + } + }, + "output": { "storeAs": "productionOrders" } + }, + { + "id": "select_order", + "name": "Select Order to Receive", + "type": "script", + "script": "const orders = ctx.productionOrders?.items || ctx.productionOrders || []; if (orders.length === 0) throw new Error('No production orders ready for finished goods receipt'); const order = orders[0]; if (order.lines && order.lines[0]) { const sku = order.lines[0].sku; if (typeof sku === 'object' && sku !== null && sku.productId) { order.lines[0].sku = sku.productId; } } return order;", + "output": { "storeAs": "prodOrder" } + }, + { + "id": "get_production_run", + "name": "Get Production Run for Order", + "type": "mcp", + "service": "manufacturing", + "tool": "production.run.get_by_order", + "input": { + "type": "template", + "template": { + "productionOrderId": "{{prodOrder.orderId}}" + } + }, + "output": { "storeAs": "productionRuns" } + }, + { + "id": "get_completed_run", + "name": "Get Completed Production Run", + "type": "script", + "script": "const runs = ctx.productionRuns || []; const completedRun = runs.find(r => r.status === 'COMPLETED'); if (!completedRun) throw new Error('No completed production run found'); return completedRun;", + "output": { "storeAs": "productionRun" } + }, + { + "id": "prepare_product_lookup", + "name": "Prepare Product Lookup", + "type": "script", + "script": "const sku = ctx.prodOrder?.lines?.[0]?.sku; if (!sku) throw new Error('No SKU found in production order'); if (typeof sku === 'string') return sku; if (sku && sku.productId) return String(sku.productId); return String(sku);", + "output": { "storeAs": "productIdToLookup" } + }, + { + "id": "get_product", + "name": "Get Product Details", + "type": "mcp", + "service": "erp", + "tool": "product.get_by_id", + "input": { + "type": "template", + "template": { "productId": "{{productIdToLookup}}" } + }, + "output": { "storeAs": "product" } + }, + { + "id": "receive_finished_goods", + "name": "Receive Finished Goods into WMS", + "type": "map", + "mapSpec": { + "iterable": { "type": "jmesPath", "value": "productionRun.finishedGoodsProduced" }, + "itemName": "finishedGood", + "concurrency": 1 + }, + "children": [ + { + "id": "create_receive_transaction", + "name": "Create WMS Receive Transaction", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": 
"WH001", + "transactionType": "RECEIVE", + "productId": "{{finishedGood.productId}}", + "sku": "{{finishedGood.sku}}", + "quantity": "{{finishedGood.quantity}}", + "toBinId": "END_OF_LINE", + "lotNumber": "{{finishedGood.lotNumber}}", + "referenceType": "ORDER", + "referenceId": "{{prodOrder.orderId}}" + } + }, + "output": { "storeAs": "receiveTxn" } + } + ], + "output": { "storeAs": "receiveTransactions" } + }, + { + "id": "calculate_cogs", + "name": "Calculate Cost of Goods Manufactured", + "type": "script", + "script": "const prodOrder = ctx.prodOrder || {}; const bom = prodOrder.customFields?.billOfMaterials || []; const cost = bom.reduce((sum, item) => sum + (item.requiredQty * (item.pricePerUnit || 1)), 0); const quantityProduced = ctx.productionRun?.finishedGoodsProduced?.[0]?.quantity || prodOrder.lines?.[0]?.quantityOrdered || 1; return { totalCost: Math.round(cost * 100) / 100, unitsProduced: quantityProduced };", + "output": { "storeAs": "cogsCalc" } + }, + { + "id": "record_cogs", + "name": "Record Manufacturing Cost", + "type": "mcp", + "service": "finance", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "type": "payment_out", + "amount": "{{cogsCalc.totalCost}}", + "sourceType": "manual", + "sourceId": "{{prodOrder.orderId}}", + "metadata": { + "description": "COGS for production order {{prodOrder.orderId}}", + "unitsProduced": "{{cogsCalc.unitsProduced}}", + "product": "{{product.name}}", + "productionRunId": "{{productionRun.productionRunId}}" + } + } + }, + "output": { "storeAs": "cogsTxn" } + }, + { + "id": "complete_order", + "name": "Complete Production Order", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{prodOrder.orderId}}", + "status": "COMPLETED" + } + }, + "output": { "storeAs": "completedOrder" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/inbound-asn-process.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/inbound-asn-process.od.json new file mode 100644 index 0000000000000000000000000000000000000000..1ce3a2ef46535899c33fdaf013512295c36ce382 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/inbound-asn-process.od.json @@ -0,0 +1,113 @@ +{ + "id": "inbound-asn-process", + "name": "Inbound ASN Process", + "version": "1.0.0", + "description": "Receive Advanced Shipping Notice (ASN) from supplier and create Inbound Order", + "namespace": "manufacturingUnit.logistics", + "persona": "Logistics Coordinator", + "type": "standard", + "steps": [ + { + "id": "get_pending_pos", + "name": "Get Pending Purchase Orders", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { "poType": "STANDARD", "limit": 10 } + }, + "output": { "storeAs": "pendingPos" } + }, + { + "id": "select_po", + "name": "Select PO for ASN", + "type": "script", + "script": "const orders = ctx.pendingPos?.items || ctx.pendingPos || []; if (orders.length === 0) throw new Error('No pending POs found'); return orders[0];", + "output": { "storeAs": "selectedPo" } + }, + { + "id": "get_supplier", + "name": "Get Supplier Details", + "type": "mcp", + "service": "erp", + "tool": "company.get_by_id", + "input": { + "type": "template", + "template": { "companyId": "{{selectedPo.partnerId}}" } + }, + "output": { "storeAs": "supplier" } + }, + { + "id": "generate_asn_data", + "name": "Generate 
ASN Data", + "type": "script", + "script": "const po = ctx.selectedPo; const lines = po.lines || []; return { shipmentId: 'ASN-' + po.orderId, trackingNumber: 'TRK-' + Math.floor(Math.random() * 1000000), lines: lines.map(l => ({ ...l, productId: l.productId || l.sku, shippedQuantity: l.quantityOrdered || l.qty })) };", + "output": { "storeAs": "asnData" } + }, + { + "id": "check_existing_inbound", + "name": "Check if Inbound Order Exists", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.get_by_po_number", + "input": { + "type": "template", + "template": "{{selectedPo.orderId}}" + }, + "output": { "storeAs": "existingInbound" }, + "continueOnError": true + }, + { + "id": "create_inbound_order", + "name": "Create Inbound Order from ASN", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.create", + "condition": { "expression": "!existingInbound" }, + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "poNumber": "{{selectedPo.orderId}}", + "orderType": "PO", + "orderStatus": "IN_TRANSIT", + "vendor": { + "vendorId": "{{supplier.companyId}}", + "vendorName": "{{supplier.name}}" + }, + "dates": { + "expectedArrival": "{{now}}", + "shippedDate": "{{now}}" + }, + "lines": "{{asnData.lines}}", + "referenceNumbers": { + "asnNumber": "{{asnData.shipmentId}}", + "trackingNumber": "{{asnData.trackingNumber}}" + } + } + }, + "output": { "storeAs": "newInboundOrder" } + }, + { + "id": "select_inbound_order", + "name": "Select Inbound Order", + "type": "script", + "script": "return ctx.existingInbound || ctx.newInboundOrder;", + "output": { "storeAs": "inboundOrder" } + }, + { + "id": "update_po_status", + "name": "Update PO Status", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { "orderId": "{{selectedPo.orderId}}", "status": "ACKED" } + }, + "output": { "storeAs": "updatedPo" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/index.ts b/packages/controlmart/src/worlds/manufacturing-unit/ods/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..753ca2595d7fc49f5688c36f7b4c2f2de815118a --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/index.ts @@ -0,0 +1,44 @@ +import agingInventoryCheck from "./aging-inventory-check.od.json"; +import dailyInventoryCheck from "./daily-inventory-check.od.json"; +import finishedGoodsReceipt from "./finished-goods-receipt.od.json"; +import inboundAsnProcess from "./inbound-asn-process.od.json"; +import manufacturingExecution from "./manufacturing-execution.od.json"; +import manufacturingExecutionDetailed from "./manufacturing-execution-detailed.od.json"; +import materialPickForProduction from "./material-pick-for-production.od.json"; +import productionOrder from "./production-order.od.json"; +import putawayProcess from "./putaway-process.od.json"; +import rawMaterialProcurement from "./raw-material-procurement.od.json"; +import receiveSupplierShipment from "./receive-supplier-shipment.od.json"; +import supplierReorderTrigger from "./supplier-reorder-trigger.od.json"; +import type { OperationalDescriptor } from "../../../types/od.type"; + +export const ODs: OperationalDescriptor[] = [ + agingInventoryCheck as unknown as OperationalDescriptor, + dailyInventoryCheck as unknown as OperationalDescriptor, + finishedGoodsReceipt as unknown as OperationalDescriptor, + inboundAsnProcess as unknown as 
OperationalDescriptor, + manufacturingExecution as unknown as OperationalDescriptor, + manufacturingExecutionDetailed as unknown as OperationalDescriptor, + materialPickForProduction as unknown as OperationalDescriptor, + productionOrder as unknown as OperationalDescriptor, + putawayProcess as unknown as OperationalDescriptor, + rawMaterialProcurement as unknown as OperationalDescriptor, + receiveSupplierShipment as unknown as OperationalDescriptor, + supplierReorderTrigger as unknown as OperationalDescriptor, +]; + +export { + agingInventoryCheck, + dailyInventoryCheck, + finishedGoodsReceipt, + inboundAsnProcess, + manufacturingExecution, + manufacturingExecutionDetailed, + materialPickForProduction, + productionOrder, + putawayProcess, + rawMaterialProcurement, + receiveSupplierShipment, + supplierReorderTrigger, +}; + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/manufacturing-execution-detailed.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/manufacturing-execution-detailed.od.json new file mode 100644 index 0000000000000000000000000000000000000000..d6ac7349aa26dfbf7847b4be99b56e661701a872 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/manufacturing-execution-detailed.od.json @@ -0,0 +1,293 @@ +{ + "id": "manufacturing-execution-detailed", + "name": "Manufacturing Execution (Detailed Machine-Level)", + "version": "1.0.0", + "description": "Detailed manufacturing execution with granular machine-level steps. Routes through Ice Cream line (Pasteurizer→Fermenter→Mixer→CupPacker→ChillRoom) or Chips line (Slicer→Fryer→SeasoningDrum→PouchPacker→MetalDetector) based on product type.", + "namespace": "manufacturingUnit.manufacturing", + "persona": "Production Operator", + "type": "standard", + "steps": [ + { + "id": "get_production_orders", + "name": "Get Production Orders with Materials Picked", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { + "poType": "PRODUCTION", + "status": "MATERIALS_PICKED", + "limit": 10 + } + }, + "output": { "storeAs": "productionOrders" } + }, + { + "id": "select_order", + "name": "Select Order to Manufacture", + "type": "script", + "script": "return ctx.productionOrders?.items || ctx.productionOrders || [];", + "output": { "storeAs": "prodOrders" } + }, + { + "id": "exit-if-no-orders", + "name": "Exit Early if No Orders", + "type": "exit_early", + "exitCondition": { + "expression": "length(prodOrders) == `0`" + }, + "message": "No production orders with materials picked found" + }, + { + "id": "select_first_order", + "name": "Select First Order to Process", + "type": "script", + "script": "const order = ctx.prodOrders?.[0]; if (order.lines && order.lines[0]) { const sku = order.lines[0].sku; if (typeof sku === 'object' && sku !== null && sku.productId) { order.lines[0].sku = sku.productId; } } return order;", + "output": { "storeAs": "prodOrder" } + }, + { + "id": "prepare_product_lookup", + "name": "Prepare Product Lookup", + "type": "script", + "script": "const sku = ctx.prodOrder?.lines?.[0]?.sku; if (!sku) throw new Error('No SKU found in production order'); if (typeof sku === 'string') return sku; if (sku && sku.productId) return String(sku.productId); return String(sku);", + "output": { "storeAs": "productIdToLookup" } + }, + { + "id": "get_product", + "name": "Get Product Details", + "type": "mcp", + "service": "erp", + "tool": "product.get_by_id", + "input": { + "type": "template", + "template": { "productId": 
"{{productIdToLookup}}" } + }, + "output": { "storeAs": "product" } + }, + { + "id": "determine_production_line", + "name": "Determine Production Line Type", + "description": "Determines whether to use Ice Cream or Chips production line based on SKU prefix", + "type": "script", + "script": "console.log(ctx.product); const sku = ctx.product?.customFields?.productType || ''; const isIceCream = sku == 'ice_cream'; const isChips = sku == 'chips'; if (!isIceCream && !isChips) throw new Error(`Unknown product type for SKU: ${sku}. Expected ICE-* or CHP-*`); return { productionLineType: isIceCream ? 'ICE_CREAM' : 'CHIPS', machines: isIceCream ? ['MCH_PASTEUR_01', 'MCH_FERMENT_01', 'MCH_MIXER_01', 'MCH_PACKER_01', 'MCH_CHILL_01'] : ['MCH_SLICER_01', 'MCH_FRYER_01', 'MCH_SEASON_01', 'MCH_PACKER_02', 'MCH_MD_01'] };", + "output": { "storeAs": "productionLine" } + }, + { + "id": "initialize_chaos_config", + "name": "Initialize Chaos Configuration", + "description": "Set up chaos injection probability and seed for reproducible testing", + "type": "script", + "script": "const chaosEnabled = true; const chaosProbability = 0.15; const seed = Date.now(); const shouldTrigger = (prob) => Math.random() < (prob * chaosProbability); return { chaosEnabled, chaosProbability, seed, shouldTrigger: shouldTrigger.toString(), chaosEvents: [] };", + "output": { "storeAs": "chaosConfig" } + }, + { + "id": "prepare_materials_consumed", + "name": "Prepare Raw Materials Consumed Data", + "type": "script", + "script": "const bom = ctx.prodOrder?.customFields?.billOfMaterials || []; return bom.map(item => ({ materialId: item.sku, sku: item.sku, quantity: item.requiredQty || item.qty, unit: item.unit || 'EA' }));", + "output": { "storeAs": "rawMaterialsConsumed" } + }, + { + "id": "create_production_run", + "name": "Create Production Run", + "type": "mcp", + "service": "manufacturing", + "tool": "production.run.create", + "input": { + "type": "template", + "template": { + "productionOrderId": "{{prodOrder.orderId}}", + "status": "CREATED", + "rawMaterialsConsumed": "{{rawMaterialsConsumed}}", + "customFields": { + "productionLineType": "{{productionLine.productionLineType}}", + "machines": "{{productionLine.machines}}", + "machineSteps": [] + } + } + }, + "output": { "storeAs": "productionRun" } + }, + { + "id": "start_production_run", + "name": "Start Production Run", + "type": "mcp", + "service": "manufacturing", + "tool": "production.run.update_status", + "input": { + "type": "template", + "template": { + "productionRunId": "{{productionRun.productionRunId}}", + "status": "IN_PROGRESS" + } + }, + "output": { "storeAs": "startedRun" } + }, + + { + "id": "branch_production_line", + "name": "Route to Appropriate Production Line", + "type": "branch", + "branchSpec": { + "condition": { + "expression": "productionLine.productionLineType == 'ICE_CREAM'" + }, + "then": [ + { + "id": "ice_cream_step_1_pasteurizer", + "name": "Step 1: Pasteurizer (MCH_PASTEUR_01)", + "description": "Heat milk and cream to 85°C for 15 seconds to eliminate pathogens while preserving nutritional value", + "type": "script", + "script": "const bom = ctx.prodOrder?.customFields?.billOfMaterials || []; const milkInput = bom.find(i => i.sku?.includes('MILK')); const creamInput = bom.find(i => i.sku?.includes('CREAM')); const inputMaterials = { milk: milkInput?.requiredQty || 100, cream: creamInput?.requiredQty || 50, unit: 'L' }; const processTime = Math.floor(Math.random() * 300) + 600; const outputTemp = 4 + Math.random() * 2; return { machineId: 
'MCH_PASTEUR_01', machineName: 'Pasteurizer', operation: 'PASTEURIZATION', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputMaterials, outputProduct: { type: 'PASTEURIZED_BASE', quantity: inputMaterials.milk + inputMaterials.cream, unit: 'L', temperature: outputTemp, batchId: `PAST-${Date.now()}` }, parameters: { targetTemp: 85, holdTime: 15, coolingTemp: 4 }, qualityChecks: [{ checkType: 'TEMPERATURE', status: 'PASS', value: 85.2 }, { checkType: 'PATHOGEN_TEST', status: 'PASS', value: 'NEGATIVE' }] };", + "output": { "storeAs": "machineStep1" } + }, + { + "id": "ice_cream_step_2_fermenter", + "name": "Step 2: Fermenter (MCH_FERMENT_01)", + "description": "Optional fermentation step for cultured ice cream varieties. Adds beneficial cultures and develops flavor complexity. Subcomponents: Vessel, Agitator, Culture/Media, Sensors, Valves/Thermal", + "type": "script", + "script": "const input = ctx.machineStep1?.outputProduct || {}; const needsFermentation = ctx.product?.name?.toLowerCase().includes('yogurt') || ctx.product?.name?.toLowerCase().includes('cultured'); const processTime = needsFermentation ? Math.floor(Math.random() * 1800) + 3600 : 60; const chaosProb = ctx.chaosConfig?.chaosProbability || 0.15; const chaosEvents = []; const subcomponentStates = []; const qualityChecks = []; /* Vessel Subcomponent */ const vesselState = { id: 'fermenter-vessel', name: 'Vessel', attributes: { material: 'SS316', capacity: 5000, working_volume: 4000, pressure_rating: 2 + Math.random(), jacket_type: 'Dimple' }, status: 'normal' }; if (Math.random() < 0.02 * chaosProb) { vesselState.status = 'warning'; chaosEvents.push({ id: `chaos-vessel-${Date.now()}`, subcomponentId: 'fermenter-vessel', scenarioId: 'vessel-leak', scenarioName: 'Vessel Seal Leak', severity: 'high', description: 'Micro-leak in vessel seal', qualityImpact: 'CONTAMINATION_TEST' }); } subcomponentStates.push(vesselState); /* Agitator Subcomponent */ const agitatorState = { id: 'fermenter-agitator', name: 'Agitator', attributes: { type: 'Impeller', rpm: 140 + Math.floor(Math.random() * 20), motor_power: 15, seal_type: 'Mechanical' }, status: 'normal' }; if (Math.random() < 0.05 * chaosProb) { agitatorState.status = 'degraded'; agitatorState.attributes.rpm = 100 + Math.floor(Math.random() * 30); chaosEvents.push({ id: `chaos-agitator-${Date.now()}`, subcomponentId: 'fermenter-agitator', scenarioId: 'agitator-speed-drift', scenarioName: 'Agitator Speed Drift', severity: 'medium', description: 'RPM deviating from setpoint', qualityImpact: 'MIXING_UNIFORMITY', degradation: 15 }); } subcomponentStates.push(agitatorState); /* Sensors Subcomponent */ const sensorsState = { id: 'fermenter-sensors', name: 'Sensors', attributes: { temperature: 41 + Math.random() * 2, pH: needsFermentation ? 
4.3 + Math.random() * 0.4 : 6.5 + Math.random() * 0.4, dissolved_oxygen: 25 + Math.random() * 10, pressure: 0.4 + Math.random() * 0.2 }, status: 'normal' }; if (Math.random() < 0.03 * chaosProb) { sensorsState.status = 'warning'; sensorsState.attributes.temperature = 45 + Math.random() * 5; chaosEvents.push({ id: `chaos-sensors-${Date.now()}`, subcomponentId: 'fermenter-sensors', scenarioId: 'temperature-spike', scenarioName: 'Temperature Spike', severity: 'high', description: 'Temperature exceeded safe limits', qualityImpact: 'TEMPERATURE' }); } subcomponentStates.push(sensorsState); /* Culture Subcomponent */ const cultureState = { id: 'fermenter-culture', name: 'Culture/Media', attributes: { strain: 'Lactobacillus bulgaricus', volume: input.quantity, culture_id: `CUL-${Date.now()}` }, status: 'normal' }; if (Math.random() < 0.02 * chaosProb) { cultureState.status = 'failed'; chaosEvents.push({ id: `chaos-culture-${Date.now()}`, subcomponentId: 'fermenter-culture', scenarioId: 'culture-contamination', scenarioName: 'Culture Contamination', severity: 'critical', description: 'Foreign microorganism detected', qualityImpact: 'PATHOGEN_TEST' }); } subcomponentStates.push(cultureState); /* Generate Quality Checks based on subcomponent states */ const hasCriticalChaos = chaosEvents.some(e => e.severity === 'critical'); const hasHighChaos = chaosEvents.some(e => e.severity === 'high'); const phCheck = { checkType: 'PH_LEVEL', status: hasCriticalChaos ? 'FAIL' : 'PASS', value: sensorsState.attributes.pH.toFixed(1), linkedChaosEvent: chaosEvents.find(e => e.qualityImpact === 'PH_LEVEL')?.id }; const tempCheck = { checkType: 'TEMPERATURE', status: hasHighChaos && chaosEvents.some(e => e.qualityImpact === 'TEMPERATURE') ? 'WARNING' : 'PASS', value: sensorsState.attributes.temperature.toFixed(1), linkedChaosEvent: chaosEvents.find(e => e.qualityImpact === 'TEMPERATURE')?.id }; const pathogenCheck = { checkType: 'PATHOGEN_TEST', status: chaosEvents.some(e => e.qualityImpact === 'PATHOGEN_TEST') ? 'FAIL' : 'PASS', value: chaosEvents.some(e => e.qualityImpact === 'PATHOGEN_TEST') ? 'CONTAMINATION_DETECTED' : 'NEGATIVE', linkedChaosEvent: chaosEvents.find(e => e.qualityImpact === 'PATHOGEN_TEST')?.id }; qualityChecks.push(phCheck, tempCheck, pathogenCheck); return { machineId: 'MCH_FERMENT_01', machineName: 'Fermenter', operation: needsFermentation ? 'FERMENTATION' : 'PASS_THROUGH', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, subcomponentStates, outputProduct: { type: needsFermentation ? 'FERMENTED_BASE' : 'PASTEURIZED_BASE', quantity: hasCriticalChaos ? 0 : input.quantity, unit: input.unit, temperature: sensorsState.attributes.temperature, batchId: `FERM-${Date.now()}`, acidityPH: sensorsState.attributes.pH, qualityStatus: hasCriticalChaos ? 'REJECTED' : hasHighChaos ? 
'HOLD' : 'RELEASED' }, parameters: { fermentationTemp: 42, targetPH: 4.5, cultureType: 'LACTOBACILLUS', agitatorRpm: agitatorState.attributes.rpm }, qualityChecks, chaosEvents, skipped: !needsFermentation };", + "output": { "storeAs": "machineStep2" } + }, + { + "id": "ice_cream_step_3_mixer", + "name": "Step 3: Mixer (MCH_MIXER_01)", + "description": "Combine base with sugar, flavorings, and incorporate air (overrun) to achieve desired texture and taste", + "type": "script", + "script": "const input = ctx.machineStep2?.outputProduct || {}; const bom = ctx.prodOrder?.customFields?.billOfMaterials || []; const sugar = bom.find(i => i.sku?.includes('SUGAR')); const vanilla = bom.find(i => i.sku?.includes('VANILLA')); const chocolate = bom.find(i => i.sku?.includes('CHOCO')); const processTime = Math.floor(Math.random() * 600) + 900; const overrun = 80 + Math.floor(Math.random() * 40); return { machineId: 'MCH_MIXER_01', machineName: 'Mixer', operation: 'MIXING_AND_AERATION', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, additives: { sugar: sugar?.requiredQty || 20, flavoring: vanilla?.requiredQty || chocolate?.requiredQty || 5, flavorType: vanilla ? 'VANILLA' : chocolate ? 'CHOCOLATE' : 'PLAIN' }, outputProduct: { type: 'ICE_CREAM_MIX', quantity: input.quantity * (1 + overrun/100), unit: 'L', temperature: -6, batchId: `MIX-${Date.now()}`, overrunPercent: overrun, viscosity: 'MEDIUM' }, parameters: { mixingSpeed: 120, mixingTime: processTime, aerationRate: overrun, targetTemp: -6 }, qualityChecks: [{ checkType: 'OVERRUN', status: 'PASS', value: overrun }, { checkType: 'VISCOSITY', status: 'PASS', value: 'WITHIN_SPEC' }, { checkType: 'FLAVOR_PROFILE', status: 'PASS', value: 'APPROVED' }] };", + "output": { "storeAs": "machineStep3" } + }, + { + "id": "ice_cream_step_4_packer", + "name": "Step 4: Cup Packer (MCH_PACKER_01)", + "description": "Fill ice cream mix into cups/containers with precise portioning and seal for freshness", + "type": "script", + "script": "const input = ctx.machineStep3?.outputProduct || {}; const quantityOrdered = ctx.prodOrder?.lines?.[0]?.quantityOrdered || 100; const containerSize = 0.5; const unitsProduced = Math.floor(input.quantity / containerSize); const processTime = unitsProduced * 2; return { machineId: 'MCH_PACKER_01', machineName: 'Cup Packer', operation: 'FILLING_AND_SEALING', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, packagingUsed: { containerType: '500ml_CUP', containersUsed: unitsProduced, lidsUsed: unitsProduced, labelsApplied: unitsProduced }, outputProduct: { type: 'PACKED_ICE_CREAM', quantity: unitsProduced, unit: 'CUPS', temperature: -6, batchId: `PACK-${Date.now()}`, containerSize: '500ml', sealed: true }, parameters: { fillVolume: 500, fillTolerance: 5, sealTemp: 180, lineSpeed: 30 }, qualityChecks: [{ checkType: 'FILL_WEIGHT', status: 'PASS', value: '498-502ml' }, { checkType: 'SEAL_INTEGRITY', status: 'PASS', value: 'HERMETIC' }, { checkType: 'LABEL_ALIGNMENT', status: 'PASS', value: 'CENTERED' }] };", + "output": { "storeAs": "machineStep4" } + }, + { + "id": "ice_cream_step_5_chill_room", + "name": "Step 5: Chill Room (MCH_CHILL_01)", + "description": "Blast freeze packed ice cream to -18°C for storage stability and optimal texture development", + "type": "script", + "script": "const input = ctx.machineStep4?.outputProduct 
|| {}; const processTime = Math.floor(Math.random() * 1800) + 3600; const finalTemp = -18 - Math.random() * 2; return { machineId: 'MCH_CHILL_01', machineName: 'Chill Room', operation: 'BLAST_FREEZING', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, outputProduct: { type: 'FINISHED_ICE_CREAM', quantity: input.quantity, unit: input.unit, temperature: finalTemp, batchId: `FIN-${Date.now()}`, storageClass: 'FROZEN', shelfLifeDays: 365 }, parameters: { targetTemp: -18, coolingRate: 'RAPID', airCirculation: 'HIGH' }, qualityChecks: [{ checkType: 'CORE_TEMPERATURE', status: 'PASS', value: finalTemp }, { checkType: 'TEXTURE', status: 'PASS', value: 'SMOOTH' }, { checkType: 'CRYSTAL_FORMATION', status: 'PASS', value: 'MINIMAL' }] };", + "output": { "storeAs": "machineStep5" } + }, + { + "id": "ice_cream_aggregate_steps", + "name": "Aggregate Ice Cream Machine Steps", + "type": "script", + "script": "const steps = [ctx.machineStep1, ctx.machineStep2, ctx.machineStep3, ctx.machineStep4, ctx.machineStep5]; const allChaosEvents = steps.flatMap(s => s?.chaosEvents || []); const allSubcomponentStates = steps.flatMap(s => s?.subcomponentStates || []); const hasCriticalFailure = allChaosEvents.some(e => e.severity === 'critical'); const hasHighSeverity = allChaosEvents.some(e => e.severity === 'high'); return { allMachineSteps: steps, totalProcessTime: steps.reduce((sum, s) => sum + (s?.processTimeSeconds || 0), 0), finalOutput: ctx.machineStep5?.outputProduct, productionLineType: 'ICE_CREAM', chaosAggregation: { totalChaosEvents: allChaosEvents.length, criticalEvents: allChaosEvents.filter(e => e.severity === 'critical'), highEvents: allChaosEvents.filter(e => e.severity === 'high'), mediumEvents: allChaosEvents.filter(e => e.severity === 'medium'), affectedSubcomponents: [...new Set(allChaosEvents.map(e => e.subcomponentId))] }, allChaosEvents, allSubcomponentStates, overallStatus: hasCriticalFailure ? 'FAILED' : hasHighSeverity ? 
'WARNING' : 'PASS' };", + "output": { "storeAs": "machineStepsAggregate" } + } + ], + "else": [ + { + "id": "chips_step_1_slicer", + "name": "Step 1: Slicer (MCH_SLICER_01)", + "description": "Slice washed potatoes into uniform thin chips (1.5mm thickness) for consistent frying", + "type": "script", + "script": "const bom = ctx.prodOrder?.customFields?.billOfMaterials || []; const potatoInput = bom.find(i => i.sku?.includes('POTATO')); const inputWeight = potatoInput?.requiredQty || 500; const processTime = Math.floor(inputWeight / 10) + Math.floor(Math.random() * 60); const yieldPercent = 85 + Math.random() * 5; return { machineId: 'MCH_SLICER_01', machineName: 'Slicer', operation: 'SLICING', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputMaterials: { potatoes: inputWeight, unit: 'KG', grade: 'PREMIUM' }, outputProduct: { type: 'SLICED_POTATOES', quantity: inputWeight * (yieldPercent/100), unit: 'KG', thickness: 1.5, batchId: `SLICE-${Date.now()}` }, waste: { peels: inputWeight * ((100-yieldPercent)/100), unit: 'KG' }, parameters: { bladeSpeed: 3000, sliceThickness: 1.5, waterRinse: true }, qualityChecks: [{ checkType: 'THICKNESS_UNIFORMITY', status: 'PASS', value: '1.4-1.6mm' }, { checkType: 'DEFECT_RATE', status: 'PASS', value: '< 2%' }] };", + "output": { "storeAs": "machineStep1" } + }, + { + "id": "chips_step_2_fryer", + "name": "Step 2: Fryer (MCH_FRYER_01)", + "description": "Deep fry sliced potatoes at 180°C until golden and crispy with optimal moisture content", + "type": "script", + "script": "const input = ctx.machineStep1?.outputProduct || {}; const processTime = Math.floor(Math.random() * 120) + 180; const moistureLoss = 75 + Math.random() * 5; const oilAbsorption = 30 + Math.random() * 5; return { machineId: 'MCH_FRYER_01', machineName: 'Fryer', operation: 'DEEP_FRYING', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, oilUsed: { type: 'SUNFLOWER_OIL', quantity: input.quantity * 0.1, unit: 'L' }, outputProduct: { type: 'FRIED_CHIPS', quantity: input.quantity * (1 - moistureLoss/100) * (1 + oilAbsorption/100), unit: 'KG', temperature: 85, batchId: `FRY-${Date.now()}`, color: 'GOLDEN', crispness: 'HIGH' }, parameters: { oilTemp: 180, fryTime: processTime, oilType: 'SUNFLOWER' }, qualityChecks: [{ checkType: 'OIL_TEMPERATURE', status: 'PASS', value: 180 }, { checkType: 'COLOR_INDEX', status: 'PASS', value: 'L*55-65' }, { checkType: 'MOISTURE_CONTENT', status: 'PASS', value: '< 2%' }, { checkType: 'ACRYLAMIDE_LEVEL', status: 'PASS', value: '< 750 ppb' }] };", + "output": { "storeAs": "machineStep2" } + }, + { + "id": "chips_step_3_seasoner", + "name": "Step 3: Seasoning Drum (MCH_SEASON_01)", + "description": "Tumble fried chips with seasoning blend for even flavor distribution", + "type": "script", + "script": "const input = ctx.machineStep2?.outputProduct || {}; const bom = ctx.prodOrder?.customFields?.billOfMaterials || []; const salt = bom.find(i => i.sku?.includes('SALT')); const paprika = bom.find(i => i.sku?.includes('PAPRIKA')); const garlic = bom.find(i => i.sku?.includes('GARLIC')); const isBBQ = ctx.product?.name?.toLowerCase().includes('bbq'); const processTime = Math.floor(Math.random() * 60) + 120; const seasoningWeight = input.quantity * 0.06; return { machineId: 'MCH_SEASON_01', machineName: 'Seasoning Drum', operation: 'SEASONING', startTime: new 
Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, seasoningApplied: { salt: salt?.requiredQty || seasoningWeight * 0.7, paprika: isBBQ ? (paprika?.requiredQty || seasoningWeight * 0.2) : 0, garlic: isBBQ ? (garlic?.requiredQty || seasoningWeight * 0.1) : 0, flavorProfile: isBBQ ? 'BBQ_BLAZE' : 'CLASSIC_SALTED', totalWeight: seasoningWeight, unit: 'KG' }, outputProduct: { type: 'SEASONED_CHIPS', quantity: input.quantity + seasoningWeight, unit: 'KG', temperature: 45, batchId: `SEAS-${Date.now()}`, flavor: isBBQ ? 'BBQ' : 'SALTED' }, parameters: { drumSpeed: 15, tumbleTime: processTime, seasoningRate: 6 }, qualityChecks: [{ checkType: 'SEASONING_COVERAGE', status: 'PASS', value: '> 95%' }, { checkType: 'FLAVOR_INTENSITY', status: 'PASS', value: 'TARGET' }] };", + "output": { "storeAs": "machineStep3" } + }, + { + "id": "chips_step_4_packer", + "name": "Step 4: Pouch Packer (MCH_PACKER_02)", + "description": "Weigh and pack seasoned chips into nitrogen-flushed pouches for freshness", + "type": "script", + "script": "const input = ctx.machineStep3?.outputProduct || {}; const bagSize = 0.150; const unitsProduced = Math.floor(input.quantity / bagSize); const processTime = unitsProduced * 1.5; return { machineId: 'MCH_PACKER_02', machineName: 'Pouch Packer', operation: 'WEIGHING_AND_BAGGING', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, packagingUsed: { pouchType: 'FOIL_LAMINATE_150G', pouchesUsed: unitsProduced, nitrogenFlushed: true }, outputProduct: { type: 'PACKED_CHIPS', quantity: unitsProduced, unit: 'BAGS', temperature: 25, batchId: `PACK-${Date.now()}`, bagSize: '150g', sealed: true, atmosphere: 'NITROGEN' }, parameters: { targetWeight: 150, weightTolerance: 3, sealTemp: 165, nitrogenPurity: 99.5 }, qualityChecks: [{ checkType: 'BAG_WEIGHT', status: 'PASS', value: '147-153g' }, { checkType: 'SEAL_STRENGTH', status: 'PASS', value: '> 15N' }, { checkType: 'NITROGEN_LEVEL', status: 'PASS', value: '> 98%' }] };", + "output": { "storeAs": "machineStep4" } + }, + { + "id": "chips_step_5_metal_detector", + "name": "Step 5: Metal Detector (MCH_MD_01)", + "description": "Final quality gate - detect any metal contaminants before shipping", + "type": "script", + "script": "const input = ctx.machineStep4?.outputProduct || {}; const processTime = input.quantity * 0.5; const rejectRate = Math.random() * 0.005; const rejected = Math.floor(input.quantity * rejectRate); const passed = input.quantity - rejected; return { machineId: 'MCH_MD_01', machineName: 'Metal Detector', operation: 'CONTAMINANT_SCREENING', startTime: new Date().toISOString(), endTime: new Date(Date.now() + processTime * 1000).toISOString(), processTimeSeconds: processTime, inputProduct: input, screening: { sensitivity: { ferrous: 1.5, nonFerrous: 2.0, stainless: 2.5, unit: 'mm' }, bagsScanned: input.quantity, bagsRejected: rejected, rejectionReason: rejected > 0 ? 
'SUSPECTED_CONTAMINATION' : null }, outputProduct: { type: 'FINISHED_CHIPS', quantity: passed, unit: input.unit, temperature: input.temperature, batchId: `FIN-${Date.now()}`, storageClass: 'AMBIENT', shelfLifeDays: 180, qualityCleared: true }, parameters: { conveyorSpeed: 60, detectorType: 'MULTI_FREQUENCY', autoReject: true }, qualityChecks: [{ checkType: 'METAL_DETECTION', status: 'PASS', value: 'NO_CONTAMINANTS' }, { checkType: 'REJECT_RATE', status: 'PASS', value: `${(rejectRate*100).toFixed(3)}%` }] };", + "output": { "storeAs": "machineStep5" } + }, + { + "id": "chips_aggregate_steps", + "name": "Aggregate Chips Machine Steps", + "type": "script", + "script": "const steps = [ctx.machineStep1, ctx.machineStep2, ctx.machineStep3, ctx.machineStep4, ctx.machineStep5]; const allChaosEvents = steps.flatMap(s => s?.chaosEvents || []); const allSubcomponentStates = steps.flatMap(s => s?.subcomponentStates || []); const hasCriticalFailure = allChaosEvents.some(e => e.severity === 'critical'); const hasHighSeverity = allChaosEvents.some(e => e.severity === 'high'); return { allMachineSteps: steps, totalProcessTime: steps.reduce((sum, s) => sum + (s?.processTimeSeconds || 0), 0), finalOutput: ctx.machineStep5?.outputProduct, productionLineType: 'CHIPS', chaosAggregation: { totalChaosEvents: allChaosEvents.length, criticalEvents: allChaosEvents.filter(e => e.severity === 'critical'), highEvents: allChaosEvents.filter(e => e.severity === 'high'), mediumEvents: allChaosEvents.filter(e => e.severity === 'medium'), affectedSubcomponents: [...new Set(allChaosEvents.map(e => e.subcomponentId))] }, allChaosEvents, allSubcomponentStates, overallStatus: hasCriticalFailure ? 'FAILED' : hasHighSeverity ? 'WARNING' : 'PASS' };", + "output": { "storeAs": "machineStepsAggregate" } + } + ] + } + }, + + { + "id": "prepare_finished_goods", + "name": "Prepare Finished Goods Data", + "type": "script", + "script": "const agg = ctx.machineStepsAggregate || {}; const product = ctx.product || {}; const finalOutput = agg.finalOutput || {}; const lotNumber = finalOutput.batchId || `LOT-${Date.now()}`; return [{ productId: product.productId || ctx.productIdToLookup, sku: product.productId || ctx.productIdToLookup, quantity: finalOutput.quantity || ctx.prodOrder?.lines?.[0]?.quantityOrdered || 1, unit: finalOutput.unit || 'EA', lotNumber }];", + "output": { "storeAs": "finishedGoodsProduced" } + }, + { + "id": "prepare_quality_checks", + "name": "Aggregate All Quality Checks", + "type": "script", + "script": "const steps = ctx.machineStepsAggregate?.allMachineSteps || []; const allChecks = steps.flatMap(step => (step?.qualityChecks || []).map(qc => ({ ...qc, machineId: step?.machineId, machineName: step?.machineName, checkedAt: new Date().toISOString() }))); return allChecks;", + "output": { "storeAs": "aggregatedQualityChecks" } + }, + { + "id": "complete_production_run", + "name": "Complete Production Run with Machine Data", + "type": "mcp", + "service": "manufacturing", + "tool": "production.run.update_status", + "input": { + "type": "template", + "template": { + "productionRunId": "{{productionRun.productionRunId}}", + "status": "COMPLETED", + "finishedGoodsProduced": "{{finishedGoodsProduced}}", + "notes": "Production completed via {{machineStepsAggregate.productionLineType}} line. Total process time: {{machineStepsAggregate.totalProcessTime}} seconds." 
+ } + }, + "output": { "storeAs": "completedRun" } + }, + { + "id": "update_order_status", + "name": "Update Production Order Status to MANUFACTURING_COMPLETE", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{prodOrder.orderId}}", + "status": "MANUFACTURING_COMPLETE" + } + }, + "output": { "storeAs": "updatedOrder" } + }, + { + "id": "log_production_summary", + "name": "Log Production Summary", + "type": "script", + "script": "const agg = ctx.machineStepsAggregate || {}; const steps = agg.allMachineSteps || []; const chaosAgg = agg.chaosAggregation || {}; const qcFailed = ctx.aggregatedQualityChecks?.filter(qc => qc.status === 'FAIL') || []; const qcWarning = ctx.aggregatedQualityChecks?.filter(qc => qc.status === 'WARNING') || []; return { summary: { productionRunId: ctx.productionRun?.productionRunId, productionOrderId: ctx.prodOrder?.orderId, productSku: ctx.productIdToLookup, productName: ctx.product?.name, productionLineType: agg.productionLineType, overallStatus: agg.overallStatus, totalMachineSteps: steps.length, machineSequence: steps.map(s => ({ id: s?.machineId, name: s?.machineName, operation: s?.operation, durationSeconds: s?.processTimeSeconds, chaosEventsCount: s?.chaosEvents?.length || 0, subcomponentStatus: s?.subcomponentStates?.map(sc => ({ id: sc.id, status: sc.status })) || [] })), totalProcessTimeSeconds: agg.totalProcessTime, totalProcessTimeFormatted: `${Math.floor(agg.totalProcessTime/3600)}h ${Math.floor((agg.totalProcessTime%3600)/60)}m ${agg.totalProcessTime%60}s`, qualityChecksPerformed: ctx.aggregatedQualityChecks?.length || 0, qualityChecksPassed: ctx.aggregatedQualityChecks?.filter(qc => qc.status === 'PASS').length || 0, qualityChecksFailed: qcFailed.length, qualityChecksWarning: qcWarning.length, failedChecks: qcFailed.map(qc => ({ checkType: qc.checkType, machineId: qc.machineId, reason: qc.failureReason })), chaosReport: { chaosEnabled: ctx.chaosConfig?.chaosEnabled, chaosProbability: ctx.chaosConfig?.chaosProbability, totalChaosEvents: chaosAgg.totalChaosEvents || 0, criticalEvents: (chaosAgg.criticalEvents || []).length, highEvents: (chaosAgg.highEvents || []).length, affectedSubcomponents: chaosAgg.affectedSubcomponents || [], eventDetails: (agg.allChaosEvents || []).map(e => ({ id: e.id, scenario: e.scenarioName, severity: e.severity, subcomponent: e.subcomponentId, impact: e.qualityImpact })) }, finishedGoodsQuantity: ctx.finishedGoodsProduced?.[0]?.quantity, finishedGoodsUnit: ctx.finishedGoodsProduced?.[0]?.unit, lotNumber: ctx.finishedGoodsProduced?.[0]?.lotNumber, qualityStatus: agg.overallStatus === 'FAILED' ? 'REJECTED' : agg.overallStatus === 'WARNING' ? 
'HOLD_FOR_REVIEW' : 'RELEASED' } };", + "output": { "storeAs": "productionSummary" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/manufacturing-execution.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/manufacturing-execution.od.json new file mode 100644 index 0000000000000000000000000000000000000000..99346b90bdb2cc3b1655981be45703781cc97bf1 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/manufacturing-execution.od.json @@ -0,0 +1,153 @@ +{ + "id": "manufacturing-execution", + "name": "Manufacturing Execution", + "version": "1.0.0", + "description": "Execute manufacturing transformation from raw materials to finished goods", + "namespace": "manufacturingUnit.manufacturing", + "persona": "Production Operator", + "type": "standard", + "steps": [ + { + "id": "get_production_orders", + "name": "Get Production Orders with Materials Picked", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { + "poType": "PRODUCTION", + "status": "MATERIALS_PICKED", + "limit": 10 + } + }, + "output": { "storeAs": "productionOrders" } + }, + { + "id": "select_order", + "name": "Select Order to Manufacture", + "type": "script", + "script": "return ctx.productionOrders?.items || ctx.productionOrders || [];", + "output": { "storeAs": "prodOrders" } + }, + { + "id": "exit-if-no-orders", + "name": "Exit Early if No Orders", + "type": "exit_early", + "exitCondition": { + "expression": "length(prodOrders) == `0`" + }, + "message": "No production orders with materials picked found" + }, + { + "id": "select_first_order", + "name": "Select First Order to Manufacture", + "type": "script", + "script": "const order = ctx.prodOrders?.[0]; if (order.lines && order.lines[0]) { const sku = order.lines[0].sku; if (typeof sku === 'object' && sku !== null && sku.productId) { order.lines[0].sku = sku.productId; } } return order;", + "output": { "storeAs": "prodOrder" } + }, + { + "id": "prepare_product_lookup", + "name": "Prepare Product Lookup", + "type": "script", + "script": "const sku = ctx.prodOrder?.lines?.[0]?.sku; if (!sku) throw new Error('No SKU found in production order'); if (typeof sku === 'string') return sku; if (sku && sku.productId) return String(sku.productId); return String(sku);", + "output": { "storeAs": "productIdToLookup" } + }, + { + "id": "get_product", + "name": "Get Product Details", + "type": "mcp", + "service": "erp", + "tool": "product.get_by_id", + "input": { + "type": "template", + "template": { "productId": "{{productIdToLookup}}" } + }, + "output": { "storeAs": "product" } + }, + { + "id": "prepare_materials_consumed", + "name": "Prepare Raw Materials Consumed Data", + "type": "script", + "script": "const bom = ctx.prodOrder?.customFields?.billOfMaterials || []; return bom.map(item => ({ materialId: item.sku, sku: item.sku, quantity: item.requiredQty || item.qty, unit: item.unit || 'EA' }));", + "output": { "storeAs": "rawMaterialsConsumed" } + }, + { + "id": "create_production_run", + "name": "Create Production Run", + "type": "mcp", + "service": "manufacturing", + "tool": "production.run.create", + "input": { + "type": "template", + "template": { + "productionOrderId": "{{prodOrder.orderId}}", + "status": "CREATED", + "rawMaterialsConsumed": "{{rawMaterialsConsumed}}" + } + }, + "output": { "storeAs": "productionRun" } + }, + { + "id": "start_production_run", + "name": "Start Production Run", + 
"type": "mcp", + "service": "manufacturing", + "tool": "production.run.update_status", + "input": { + "type": "template", + "template": { + "productionRunId": "{{productionRun.productionRunId}}", + "status": "IN_PROGRESS" + } + }, + "output": { "storeAs": "startedRun" } + }, + { + "id": "simulate_manufacturing", + "name": "Simulate Manufacturing Process", + "type": "script", + "script": "const prodOrder = ctx.prodOrder || {}; const quantityProduced = prodOrder.lines?.[0]?.quantityOrdered || 1; const product = ctx.product || {}; return { quantityProduced, lotNumber: `LOT-${Date.now()}` };", + "output": { "storeAs": "manufacturingResult" } + }, + { + "id": "prepare_finished_goods", + "name": "Prepare Finished Goods Data", + "type": "script", + "script": "const product = ctx.product || {}; const result = ctx.manufacturingResult || {}; return [{ productId: product.productId || ctx.productIdToLookup, sku: product.productId || ctx.productIdToLookup, quantity: result.quantityProduced, unit: 'EA', lotNumber: result.lotNumber }];", + "output": { "storeAs": "finishedGoodsProduced" } + }, + { + "id": "complete_production_run", + "name": "Complete Production Run", + "type": "mcp", + "service": "manufacturing", + "tool": "production.run.update_status", + "input": { + "type": "template", + "template": { + "productionRunId": "{{productionRun.productionRunId}}", + "status": "COMPLETED", + "finishedGoodsProduced": "{{finishedGoodsProduced}}" + } + }, + "output": { "storeAs": "completedRun" } + }, + { + "id": "update_order_status", + "name": "Update Production Order Status to MANUFACTURING_COMPLETE", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{prodOrder.orderId}}", + "status": "MANUFACTURING_COMPLETE" + } + }, + "output": { "storeAs": "updatedOrder" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/material-pick-for-production.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/material-pick-for-production.od.json new file mode 100644 index 0000000000000000000000000000000000000000..d7e630f3768ffb56650291eadd8c87d202d99277 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/material-pick-for-production.od.json @@ -0,0 +1,101 @@ +{ + "id": "material-pick-for-production", + "name": "Material Pick for Production", + "version": "1.0.0", + "description": "Pick raw materials from WMS bins for production orders", + "namespace": "manufacturingUnit.manufacturing", + "persona": "Forklift Operator", + "type": "standard", + "steps": [ + { + "id": "get_production_orders", + "name": "Get Production Orders Ready for Material Pick", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { + "poType": "PRODUCTION", + "status": "IN_PROGRESS", + "limit": 10 + } + }, + "output": { "storeAs": "productionOrders" } + }, + { + "id": "select_orders", + "name": "Select Orders to Process", + "type": "script", + "script": "return ctx.productionOrders?.items || ctx.productionOrders || [];", + "output": { "storeAs": "prodOrders" } + }, + { + "id": "exit-if-no-orders", + "name": "Exit Early if No Orders", + "type": "exit_early", + "exitCondition": { + "expression": "length(prodOrders) == `0`" + }, + "message": "No production orders ready for material pick found" + }, + { + "id": "select_first_order", + "name": "Select First Order to Process", + "type": "script", + 
"script": "const order = ctx.prodOrders?.[0]; if (order.lines && order.lines[0]) { const sku = order.lines[0].sku; if (typeof sku === 'object' && sku !== null && sku.productId) { order.lines[0].sku = sku.productId; } } return order;", + "output": { "storeAs": "prodOrder" } + }, + { + "id": "pick_raw_materials", + "name": "Pick Raw Materials from WMS", + "type": "map", + "mapSpec": { + "iterable": { "type": "jmesPath", "value": "prodOrder.customFields.billOfMaterials" }, + "itemName": "bomItem", + "concurrency": 3 + }, + "children": [ + { + "id": "create_pick_transaction", + "name": "Create WMS Pick Transaction for BOM Item", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "WH001", + "transactionType": "PICK", + "productId": "{{bomItem.sku}}", + "sku": "{{bomItem.sku}}", + "quantity": "{{bomItem.requiredQty}}", + "toBinId": "DEFAULT_BIN", + "lotNumber": "{{bomItem.lotNumber}}", + "referenceType": "ORDER", + "referenceId": "{{prodOrder.orderId}}" + } + }, + "output": { "storeAs": "pickTxn" } + } + ], + "output": { "storeAs": "pickTransactions" } + }, + { + "id": "update_order_status", + "name": "Update Production Order Status to MATERIALS_PICKED", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{prodOrder.orderId}}", + "status": "MATERIALS_PICKED" + } + }, + "output": { "storeAs": "updatedOrder" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/production-order.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/production-order.od.json new file mode 100644 index 0000000000000000000000000000000000000000..92dbeece8e56d8747716885a4ddfac1bf1bcebab --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/production-order.od.json @@ -0,0 +1,125 @@ +{ + "id": "production-order", + "name": "Production Order", + "version": "1.0.0", + "description": "Create production order for chips or ice cream based on product BOM", + "namespace": "manufacturingUnit.manufacturing", + "persona": "Production Planner", + "type": "standard", + "steps": [ + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" } + }, + { + "id": "get_product", + "name": "Get Random MPC Product", + "type": "mcp", + "service": "erp", + "tool": "product.get_random", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "product" } + }, + { + "id": "extract_bom", + "name": "Extract Bill of Materials", + "type": "script", + "script": "const product = ctx.product || {}; let bom = product.customFields?.billOfMaterials || []; if (bom.length === 0) { console.log('Product has no BOM, creating default BOM for manufacturing'); const productType = product.customFields?.productType || 'chips'; if (productType === 'ice_cream') { bom = [{ sku: 'DAIRY-MILK-001', qty: 2.5, unit: 'L' }, { sku: 'AGRO-SUGAR-001', qty: 0.8, unit: 'KG' }, { sku: 'DAIRY-CREAM-001', qty: 1.2, unit: 'L' }]; } else { bom = [{ sku: 'AGRO-POTATO-001', qty: 3.0, unit: 'KG' }, { sku: 'CHEM-OIL-001', qty: 0.5, unit: 'L' }, { sku: 'AGRO-SALT-001', qty: 0.1, unit: 'KG' }]; } } return bom;", + "output": { "storeAs": "billOfMaterials" } + }, + { + "id": "calculate_production_qty", + "name": "Calculate Production Quantity", 
+ "type": "script", + "script": "const billOfMaterials = ctx.billOfMaterials || []; const baseQty = Math.floor(Math.random() * 100) + 50; return { productionQty: baseQty, bomMultiplied: billOfMaterials.map(item => ({ ...item, requiredQty: Math.round(item.qty * baseQty * 100) / 100 })) };", + "output": { "storeAs": "productionCalc" } + }, + { + "id": "check_inventory", + "name": "Check Raw Material Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { "type": "literal", "value": { "limit": 100 } }, + "output": { "storeAs": "currentInventory" } + }, + { + "id": "create_production_order", + "name": "Create Production Order", + "type": "mcp", + "service": "erp", + "tool": "order.create", + "input": { + "type": "template", + "template": { + "customerId": "{{mpc.companyId}}", + "partnerId": "{{mpc.companyId}}", + "poType": "STANDARD", + "status": "IN_PROGRESS", + "currency": "USD", + "lines": [ + { + "lineNumber": 1, + "sku": "{{product.productId}}", + "name": "{{product.name}}", + "quantityOrdered": "{{productionCalc.productionQty}}", + "unitOfMeasure": "EA" + } + ], + "customFields": { + "orderType": "PRODUCTION", + "billOfMaterials": "{{productionCalc.bomMultiplied}}", + "productType": "{{product.customFields.productType}}" + } + } + }, + "output": { "storeAs": "productionOrder" } + }, + { + "id": "create_pick_tasks", + "name": "Create Material Pick Tasks", + "type": "map", + "mapSpec": { + "iterable": { "type": "jmesPath", "value": "productionCalc.bomMultiplied" }, + "itemName": "bomItem", + "concurrency": 3 + }, + "children": [ + { + "id": "create_material_pick", + "name": "Create Pick for BOM Item", + "type": "mcp", + "service": "wms", + "tool": "task.create", + "input": { + "type": "template", + "template": { + "taskId": "PICK-{{productionOrder.orderId}}-{{bomItem.sku}}-{{bomItem.requiredQty}}", + "warehouseId": "WH001", + "taskType": "PICK", + "reference": { + "type": "ORDER", + "id": "{{productionOrder.orderId}}" + }, + "product": { + "sku": "{{bomItem.sku}}", + "productName": "{{bomItem.name}}" + }, + "quantity": "{{bomItem.requiredQty}}", + "uom": "{{bomItem.unit}}", + "taskStatus": "CREATED" + } + }, + "output": { "storeAs": "pickTask" } + } + ], + "output": { "storeAs": "allPickTasks" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/putaway-process.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/putaway-process.od.json new file mode 100644 index 0000000000000000000000000000000000000000..f11f3fc434b60c2fa382b48a35300ee971bd80be --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/putaway-process.od.json @@ -0,0 +1,108 @@ +{ + "id": "putaway-process", + "name": "Putaway Process", + "version": "1.0.0", + "description": "Move received inventory from Dock to Storage locations", + "namespace": "manufacturingUnit.logistics", + "persona": "Forklift Operator", + "type": "standard", + "steps": [ + { + "id": "get_received_inventory", + "name": "Get Inventory at Dock", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { + "type": "literal", + "value": { "transactionType": ["RECEIVE"], "limit": 10 } + }, + "output": { "storeAs": "receivedItems" } + }, + { + "id": "select_item_to_putaway", + "name": "Select Item to Putaway", + "type": "script", + "script": "const items = ctx.receivedItems?.items || ctx.receivedItems || []; if (items.length === 0) throw new 
Error('No items to putaway'); const item = { ...items[0] }; if (!item.toBinId) item.toBinId = 'DOCK'; return item;", + "output": { "storeAs": "itemToPutaway" } + }, + { + "id": "find_storage_bin", + "name": "Find Storage Bin", + "type": "mcp", + "service": "wms", + "tool": "bin.get_available", + "input": { + "type": "literal", + "value": { "zoneIds": ["STORAGE"], "limit": 1 } + }, + "output": { "storeAs": "storageBins" } + }, + { + "id": "select_bin", + "name": "Select Target Bin", + "type": "script", + "script": "const bins = ctx.storageBins?.items || ctx.storageBins || []; if (bins.length === 0) return { binId: 'DEFAULT_STORAGE' }; return bins[0];", + "output": { "storeAs": "targetBin" } + }, + { + "id": "create_putaway_task", + "name": "Create Putaway Task", + "type": "mcp", + "service": "wms", + "tool": "task.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "taskType": "PUTAWAY", + "priority": 10, + "taskStatus": "CREATED", + "productId": "{{itemToPutaway.productId}}", + "quantity": "{{itemToPutaway.quantity}}", + "from": { "binId": "{{itemToPutaway.toBinId}}" }, + "to": { "binId": "{{targetBin.binId}}" } + } + }, + "output": { "storeAs": "putawayTask" } + }, + { + "id": "complete_putaway", + "name": "Complete Putaway Task", + "type": "mcp", + "service": "wms", + "tool": "task.update_status", + "input": { + "type": "template", + "template": { + "taskId": "{{putawayTask.taskId}}", + "taskStatus": "COMPLETED" + } + }, + "output": { "storeAs": "completedTask" } + }, + { + "id": "move_inventory", + "name": "Move Inventory to Storage", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "transactionType": "PUTAWAY", + "productId": "{{itemToPutaway.productId}}", + "sku": "{{itemToPutaway.sku}}", + "quantity": "{{itemToPutaway.quantity}}", + "fromBinId": "{{itemToPutaway.toBinId}}", + "toBinId": "{{targetBin.binId}}", + "referenceType": "TASK", + "referenceId": "{{putawayTask.taskId}}" + } + }, + "output": { "storeAs": "transferTxn" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/raw-material-procurement.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/raw-material-procurement.od.json new file mode 100644 index 0000000000000000000000000000000000000000..8da2de5e595e75ba082f037e146954c2144487b1 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/raw-material-procurement.od.json @@ -0,0 +1,180 @@ +{ + "id": "raw-material-procurement", + "name": "Raw Material Procurement", + "version": "1.0.0", + "description": "MPC orders raw materials from suppliers when inventory is low", + "namespace": "manufacturingUnit.procurement", + "type": "standard", + "steps": [ + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" }, + "retry": { "maxRetries": 3, "backoff": "exponential", "baseMs": 200 } + }, + { + "id": "get_all_companies", + "name": "Get All Companies", + "type": "mcp", + "service": "erp", + "tool": "company.get_all", + "input": { "type": "literal", "value": { "limit": 10 } }, + "output": { "storeAs": "allCompanies" }, + "retry": { "maxRetries": 2, "backoff": "fixed", "baseMs": 300 } + }, + { + "id": "select_supplier", + "name": "Select a Supplier", + "type": 
"script", + "script": "const companies = ctx.allCompanies?.items || ctx.allCompanies || []; const suppliers = companies.filter(c => c.companyId !== ctx.mpc?.companyId && c.name !== ctx.mpc?.name && c.customFields?.catalog?.length > 0); if (suppliers.length === 0) throw new Error(`No suitable suppliers found (with catalog). Total companies: ${companies.length}.`); const supplier = suppliers[Math.floor(Math.random() * suppliers.length)]; return supplier;", + "output": { "storeAs": "supplier" } + }, + { + "id": "extract_catalog", + "name": "Extract Supplier Catalog", + "type": "script", + "script": "const catalog = ctx.supplier?.customFields?.catalog || []; return catalog;", + "output": { "storeAs": "catalog" } + }, + { + "id": "select_items", + "name": "Select Items to Order", + "type": "script", + "script": "const catalog = ctx.catalog || []; const items = catalog.slice(0, 3).map((item, index) => ({ lineNumber: index + 1, sku: item.sku, name: item.name, quantityOrdered: Math.floor(Math.random() * 100) + 50, unitPrice: item.pricePerUnit || 10, unitOfMeasure: 'EA' })); return items;", + "output": { "storeAs": "orderItems" } + }, + { + "id": "calculate_total", + "name": "Calculate Order Total", + "type": "script", + "script": "const items = ctx.orderItems || []; const total = items.reduce((sum, item) => sum + (item.quantityOrdered * item.unitPrice), 0); return { total: Math.round(total * 100) / 100, itemCount: items.length };", + "output": { "storeAs": "orderCalc" } + }, + { + "id": "ensure_ledger", + "name": "Ensure Ledger Exists", + "type": "mcp", + "service": "finance", + "tool": "ledger.ensure", + "input": { + "type": "literal", + "value": { "cash": 1000000, "totalReceivables": 0, "totalPayables": 0 } + }, + "output": { "storeAs": "ledgerEnsured" } + }, + { + "id": "check_ledger", + "name": "Check Available Funds", + "type": "mcp", + "service": "finance", + "tool": "ledger.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "ledger" }, + "assertions": [ + { + "id": "sufficient_funds", + "expression": "ledger && ledger.cash >= orderCalc.total", + "language": "jmespath", + "description": "Verify sufficient cash for order", + "continueOnFailure": false + } + ] + }, + { + "id": "create_purchase_order", + "name": "Create Purchase Order", + "type": "mcp", + "service": "erp", + "tool": "order.create", + "input": { + "type": "template", + "template": { + "customerId": "{{mpc.companyId}}", + "partnerId": "{{supplier.companyId}}", + "poType": "STANDARD", + "status": "ACKED", + "currency": "USD", + "paymentTerms": "{{supplier.paymentTerms}}", + "lines": "{{orderItems}}", + "totalAmount": "{{orderCalc.total}}" + } + }, + "output": { "storeAs": "purchaseOrder" }, + "retry": { "maxRetries": 3, "backoff": "exponential", "baseMs": 300 } + }, + { + "id": "reserve_payable", + "name": "Reserve Payable Amount", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { "payablesDelta": "{{orderCalc.total}}" } + }, + "output": { "storeAs": "updatedLedger" } + }, + { + "id": "generate_edi_850", + "name": "Generate EDI 850", + "type": "mcp", + "service": "edi", + "tool": "generate.850", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.companyId}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{supplier.companyId}}", "name": "{{supplier.name}}" }, + "poNumber": "{{purchaseOrder.orderId}}", + "items": "{{orderItems}}" + } + }, + "output": { "storeAs": "edi850" } + }, + { + "id": 
"validate_edi_850", + "name": "Validate EDI 850 (PO)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi850}}", + "docType": "850", + "worldId": "{{mpc.companyId}}", + "context": { "items": "{{orderItems}}" } + } + }, + "output": { "storeAs": "val850" }, + "continueOnError": true + }, + { + "id": "store_edi", + "name": "Store EDI Transaction", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{supplier.companyId}}", + "docType": "850", + "direction": "OUTBOUND", + "status": "SENT", + "payload": { + "orderId": "{{purchaseOrder.orderId}}", + "totalAmount": "{{orderCalc.total}}" + }, + "rawEdi": "{{edi850}}" + } + }, + "output": { "storeAs": "ediTransaction" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/receive-supplier-shipment.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/receive-supplier-shipment.od.json new file mode 100644 index 0000000000000000000000000000000000000000..917a32ec8c61d798c62abeec4c5a35a83926967e --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/receive-supplier-shipment.od.json @@ -0,0 +1,227 @@ +{ + "id": "receive-supplier-shipment", + "name": "Receive Supplier Shipment", + "version": "1.0.0", + "description": "Receive raw materials at warehouse and process supplier payment", + "namespace": "manufacturingUnit.procurement", + "persona": "Dock Receiver", + "type": "standard", + "steps": [ + { + "id": "get_pending_orders", + "name": "Get Pending Purchase Orders", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { "status": "ACKED", "limit": 10 } + }, + "output": { "storeAs": "pendingOrders" } + }, + { + "id": "select_order", + "name": "Select Order to Receive", + "type": "script", + "script": "const orders = ctx.pendingOrders?.items || ctx.pendingOrders || []; if (orders.length === 0) throw new Error('No pending orders'); return orders[0];", + "output": { "storeAs": "selectedOrder" } + }, + { + "id": "get_all_suppliers", + "name": "Get All Suppliers", + "type": "mcp", + "service": "erp", + "tool": "company.get_all", + "input": { "type": "literal", "value": { "limit": 10 } }, + "output": { "storeAs": "allSuppliers" } + }, + { + "id": "select_supplier", + "name": "Select Random Supplier", + "type": "script", + "script": "const companies = ctx.allSuppliers?.items || ctx.allSuppliers || []; const suppliers = companies.filter(c => c.companyId !== ctx.selectedOrder?.customerId); if (suppliers.length === 0) throw new Error('No suppliers found'); const supplier = suppliers[Math.floor(Math.random() * suppliers.length)]; return supplier;", + "output": { "storeAs": "supplier" } + }, + { + "id": "transform_order_lines", + "name": "Transform Order Lines for WMS", + "type": "script", + "script": "const erpLines = ctx.selectedOrder?.lines || []; const wmsLines = erpLines.map((line, index) => ({ lineNumber: index + 1, productId: line.sku || line.productId || 'UNKNOWN_SKU', sku: line.sku || line.productId, productName: line.name || line.description || 'Unknown Product', expectedQuantity: line.quantityOrdered || line.qty || line.quantity || 1, uom: line.unitOfMeasure || 'EA' })); return wmsLines;", + "output": { "storeAs": "wmsOrderLines" } + }, + { + "id": "calculate_order_total", + "name": "Calculate Order Total 
Amount", + "type": "script", + "script": "const order = ctx.selectedOrder || {}; let total = order.totalAmount; if (!total || isNaN(Number(total))) { const lines = order.lines || []; total = lines.reduce((sum, line) => { const qty = Number(line.quantityOrdered || line.qty || line.quantity || 1); const price = Number(line.unitPrice || line.pricePerUnit || 10); return sum + (qty * price); }, 0); } return { total: Number(total) || 100 };", + "output": { "storeAs": "orderCalc" } + }, + { + "id": "check_existing_inbound", + "name": "Check if Inbound Order Exists", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.get_by_po_number", + "input": { + "type": "template", + "template": "{{selectedOrder.orderId}}" + }, + "output": { "storeAs": "existingInboundOrder" }, + "retry": { "maxRetries": 1, "backoff": "fixed", "baseMs": 100 }, + "continueOnError": true + }, + { + "id": "check_asn_inbound", + "name": "Check for ASN Inbound Order", + "type": "script", + "script": "const existing = ctx.existingInboundOrder; if (existing && existing.orderType === 'ASN') return existing; return null;", + "output": { "storeAs": "asnInboundOrder" } + }, + { + "id": "create_inbound_order", + "name": "Create WMS Inbound Order", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.create", + "condition": { "expression": "!existingInboundOrder" }, + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "poNumber": "{{selectedOrder.orderId}}", + "orderType": "PO", + "orderStatus": "EXPECTED", + "vendor": { + "vendorId": "{{supplier.companyId}}", + "vendorName": "{{supplier.name}}", + "contactEmail": "{{supplier.contactEmail}}", + "contactPhone": "{{supplier.contactPhone}}" + }, + "dates": { + "expectedArrival": "{{selectedOrder.createdAt}}" + }, + "lines": "{{wmsOrderLines}}" + } + }, + "output": { "storeAs": "newInboundOrder" } + }, + { + "id": "select_inbound_order", + "name": "Select Inbound Order to Use", + "type": "script", + "script": "return ctx.existingInboundOrder || ctx.newInboundOrder;", + "output": { "storeAs": "inboundOrder" } + }, + { + "id": "create_receiving", + "name": "Create Receiving Transaction", + "type": "mcp", + "service": "wms", + "tool": "receiving_transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "inboundOrderId": "{{inboundOrder.inboundOrderId}}", + "productId": "{{wmsOrderLines[0].productId}}", + "sku": "{{wmsOrderLines[0].sku}}", + "productName": "{{wmsOrderLines[0].productName}}", + "receivedQuantity": "{{wmsOrderLines[0].expectedQuantity}}", + "receivedBy": "SYSTEM" + } + }, + "output": { "storeAs": "receivingTxn" } + }, + { + "id": "update_inventory", + "name": "Add Items to Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "transactionType": "RECEIVE", + "productId": "{{wmsOrderLines[0].productId}}", + "sku": "{{wmsOrderLines[0].sku}}", + "quantity": "{{wmsOrderLines[0].expectedQuantity}}", + "toBinId": "DOCK", + "referenceType": "PO", + "referenceId": "{{selectedOrder.orderId}}", + "notes": "Received from supplier shipment" + } + }, + "output": { "storeAs": "inventoryTxn" } + }, + { + "id": "complete_inbound", + "name": "Complete Inbound Order", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.update_status", + "input": { + "type": "template", + "template": { "inboundOrderId": "{{inboundOrder.inboundOrderId}}", "status": "RECEIVED" } + }, 
+ "output": { "storeAs": "completedInbound" } + }, + { + "id": "update_po_status", + "name": "Update PO to Received", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { "orderId": "{{selectedOrder.orderId}}", "status": "COMPLETED" } + }, + "output": { "storeAs": "updatedOrder" } + }, + { + "id": "record_payment_out", + "name": "Record Supplier Payment", + "type": "mcp", + "service": "finance", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "type": "payment_out", + "amount": "{{orderCalc.total}}", + "sourceType": "bill", + "sourceId": "{{selectedOrder.orderId}}", + "partnerId": "{{supplier.companyId}}", + "metadata": { + "description": "Payment for PO {{selectedOrder.orderId}}", + "paymentTerms": "{{supplier.paymentTerms}}" + } + } + }, + "output": { "storeAs": "paymentTxn" } + }, + { + "id": "update_ledger", + "name": "Update Ledger - Pay Supplier", + "type": "script", + "script": "const amount = Number(ctx.orderCalc?.total || 100); return { cashDelta: -amount, payablesDelta: -amount };", + "output": { "storeAs": "ledgerDeltas" } + }, + { + "id": "apply_ledger_changes", + "name": "Apply Ledger Changes", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { + "cashDelta": "{{ledgerDeltas.cashDelta}}", + "payablesDelta": "{{ledgerDeltas.payablesDelta}}" + } + }, + "output": { "storeAs": "updatedLedger" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/ods/supplier-reorder-trigger.od.json b/packages/controlmart/src/worlds/manufacturing-unit/ods/supplier-reorder-trigger.od.json new file mode 100644 index 0000000000000000000000000000000000000000..ef7c761ad09ab81f2ba9445774a76be65ec5904f --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/ods/supplier-reorder-trigger.od.json @@ -0,0 +1,58 @@ +{ + "id": "supplier-reorder-trigger", + "name": "Supplier Reorder Trigger", + "version": "1.0.0", + "description": "Check inventory levels and trigger reorder when stock is low", + "namespace": "manufacturingUnit.background", + "persona": "System / Procurement Bot", + "type": "background_job", + "steps": [ + { + "id": "get_inventory", + "name": "Get Current Inventory Levels", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { "type": "literal", "value": { "limit": 200 } }, + "output": { "storeAs": "inventory" } + }, + { + "id": "get_ledger", + "name": "Check Available Funds", + "type": "mcp", + "service": "finance", + "tool": "ledger.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "ledger" } + }, + { + "id": "identify_low_stock", + "name": "Identify Low Stock Items", + "type": "script", + "script": "const inv = ctx.inventory?.items || []; const lowThreshold = 20; const lowStock = inv.filter(item => (item.quantity || 0) < lowThreshold); const groupedBySupplier = {}; lowStock.forEach(item => { const supplierId = item.supplierId || 'UNKNOWN'; if (!groupedBySupplier[supplierId]) groupedBySupplier[supplierId] = []; groupedBySupplier[supplierId].push(item); }); return { lowStockCount: lowStock.length, bySupplier: groupedBySupplier, needsReorder: lowStock.length > 0 };", + "output": { "storeAs": "lowStockAnalysis" } + }, + { + "id": "check_reorder_needed", + "name": "Check If Reorder Needed", + "type": "script", + "script": "const ledger = ctx.ledger || {}; 
const lowStockAnalysis = ctx.lowStockAnalysis || {}; const minCashBuffer = 50000; const canAfford = ledger.cash > minCashBuffer; const needsReorder = lowStockAnalysis.needsReorder && canAfford; console.log('[REORDER CHECK] Low stock items:', lowStockAnalysis.lowStockCount, '| Cash:', ledger.cash, '| Can afford:', canAfford); return { shouldReorder: needsReorder, reason: needsReorder ? 'Low stock detected' : (lowStockAnalysis.needsReorder ? 'Insufficient funds' : 'Stock levels OK') };", + "output": { "storeAs": "reorderDecision" } + }, + { + "id": "log_decision", + "name": "Log Reorder Decision", + "type": "script", + "script": "const reorderDecision = ctx.reorderDecision || {}; console.log('[REORDER]', reorderDecision.reason, '| Action:', reorderDecision.shouldReorder ? 'REORDER_TRIGGERED' : 'NO_ACTION'); return reorderDecision;", + "output": { "storeAs": "loggedDecision" } + }, + { + "id": "trigger_procurement", + "name": "Trigger Procurement", + "type": "script", + "script": "const reorderDecision = ctx.reorderDecision || {}; if (reorderDecision.shouldReorder) { console.log('[REORDER] Triggering raw-material-procurement OD...'); } return { triggered: reorderDecision.shouldReorder };", + "output": { "storeAs": "procurementResult" } + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/manufacturing-unit/schedule-manufacturing-world.ts b/packages/controlmart/src/worlds/manufacturing-unit/schedule-manufacturing-world.ts new file mode 100644 index 0000000000000000000000000000000000000000..396ec85ac78303646a3c36e71bb06cdb6f178410 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/schedule-manufacturing-world.ts @@ -0,0 +1,182 @@ +import { ODRepository } from "../../repository/od.repository"; +import { WorldRepository } from "../../repository/world.repository"; +import { + scheduleRecurringOD, +} from "../../operational-descriptor/schedule.od"; +import { createAppLogger } from "../../utils/logger.util"; +import { ODs } from "./ods"; + + +const logger = createAppLogger({ service: "od-scheduler" }); + + + +type OdSchedule = { + type: "recurring" | "once"; + simInterval?: number; // example 24 + simTime?: string; // Human readable sim time + simOffsetHours: number; // Hours from Sim Midnight (0-24) +}; + +const SIM_SCHEDULES: Record<string, OdSchedule> = { + "raw-material-procurement": { + type: "recurring", + simInterval: 24, + simTime: "08:00 AM", + simOffsetHours: 8, + }, + "inbound-asn-process": { + type: "recurring", + simInterval: 2, + simTime: "Every 2h", + simOffsetHours: 1, + }, + "receive-supplier-shipment": { + type: "recurring", + simInterval: 24, + simTime: "11:00 AM", + simOffsetHours: 11, + }, + "putaway-process": { type: "recurring", simInterval: 1, simTime: "Every 1h", simOffsetHours: 0 }, + "production-order": { + type: "recurring", + simInterval: 24, + simTime: "01:00 PM", + simOffsetHours: 13, + }, + "material-pick-for-production": { + type: "recurring", + simInterval: 1, + simTime: "Every 1h after production", + simOffsetHours: 13.5, + }, + "manufacturing-execution": { + type: "recurring", + simInterval: 2, + simTime: "Every 2h", + simOffsetHours: 14, + }, + "finished-goods-receipt": { + type: "recurring", + simInterval: 1, + simTime: "Every 1h", + simOffsetHours: 15, + }, + "daily-inventory-check": { + type: "recurring", + simInterval: 12, + simTime: "Every 12h", + simOffsetHours: 10, + }, + "aging-inventory-check": { + type: "recurring", + simInterval: 24, + simTime: "Midnight", + simOffsetHours: 0, + }, + 
"supplier-reorder-trigger": { + type: "recurring", + simInterval: 1, + simTime: "Background", + simOffsetHours: 0, + }, +}; + + +const calculateSimToRealRatio = (realHours: number) => { + return 24 / realHours; +}; + + + +const convertSimIntervalToReal = (simInterval: number, realHoursPerSimDay: number): string => { + const SIM_TO_REAL_RATIO = calculateSimToRealRatio(realHoursPerSimDay); + const hours = simInterval; + const realMinutes = (hours * 60) / SIM_TO_REAL_RATIO; + if (realMinutes >= 60) { + return `${realMinutes / 60} hours`; + } + return `${realMinutes} minutes`; +}; + + +const onboardODs = async (worldId: string, realHoursPerSimDay: number) => { + const SIM_TO_REAL_RATIO = calculateSimToRealRatio(realHoursPerSimDay); + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World ${worldId} not found`); + } + + for (const odData of ODs) { + try { + logger.info(`Processing OD: ${odData.name} (${odData.id})`); + + // 1. Create or Update OD in Repository + if (!odData.id) { + logger.warn(`Skipping OD data without ID`); + continue; + } + + const repoData = { + odId: odData.id, + data: odData, + name: odData.name, + description: odData.description, + odType: odData.type || "standard", + persona: odData.persona, // Ensure persona maps correctly + }; + + let odRecord = await ODRepository.getODById(odData.id, worldId); + if (odRecord) { + // Update doesn't need odId in the data usually if it's the specific updateOne, but we use updateODById + await ODRepository.updateODById(odData.id, worldId, repoData); + logger.info(`Updated existing OD: ${odData.id}`); + odRecord = await ODRepository.getODById(odData.id, worldId); + } else { + odRecord = await ODRepository.createOD({ worldId }, repoData); + logger.info(`Created new OD: ${odData.id}`); + } + + // 2. 
Schedule OD if configuration exists + const simSchedule = SIM_SCHEDULES[odData.id]; + if (simSchedule && odRecord) { + if (simSchedule.type === "recurring" && simSchedule.simInterval) { + const realInterval = convertSimIntervalToReal(simSchedule.simInterval, realHoursPerSimDay); + + const realMinutesOffset = simSchedule.simOffsetHours * (60 / SIM_TO_REAL_RATIO); + + logger.info(`Scheduling ${odData.id}:`); + logger.info(`Sim Interval: ${simSchedule.simInterval} -> Real Interval: ${realInterval}`); + logger.info( + `Sim Offset: ${simSchedule.simOffsetHours}h -> Real Offset: ${realMinutesOffset.toFixed(1)} mins`, + ); + + await scheduleRecurringOD(realInterval, odRecord, world, { + scheduledBy: "manufacturing-unit-scheduler-v1", + simTimeRatio: `${SIM_TO_REAL_RATIO}x`, + simInterval: simSchedule.simInterval, + }); + } + } else { + logger.info(`No schedule config found for ${odData.id}, skipping schedule.`); + } + } catch (err) { + logger.error({ error: err }, `Failed to process OD ${odData.id}`); + } + } +}; + +export const scheduleManufacturingWorld = async (worldId: string, realHoursPerSimDay: number) => { + if (!worldId) { + throw new Error("worldId is required"); + } + + try { + await onboardODs(worldId, realHoursPerSimDay); + logger.info("OD Onboarding Complete"); + } catch (error) { + logger.error({ error }, "Onboarding failed"); + throw error; + } +}; + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/seeder/companies.generator.ts b/packages/controlmart/src/worlds/manufacturing-unit/seeder/companies.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..f61008f5b4377ca576664dda1d36282d9e2fe4dd --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/seeder/companies.generator.ts @@ -0,0 +1,274 @@ +import { faker } from "@faker-js/faker"; +import type { TWorldRefModel } from "../../../models/shared.model"; +import type { TCompanyGenerate } from "../../../models/erp/company.erp.model"; + +export const MANUFACTURING_COMPANY_NAMES = { + MPC: "SkyFoods International", + DAIRY_SUPPLIER: "Dairyland Essentials Ltd", + PACKAGING_SUPPLIER: "PackChem Solutions Inc", + AGRO_SUPPLIER: "Harvest Fields Agro Co", +} as const; + +export const MANUFACTURING_COMPANY_IDS = { + MPC: "MPC-SKYFOODS-001", + DAIRY_SUPPLIER: "SUP-DAIRY-001", + PACKAGING_SUPPLIER: "SUP-PACK-001", + AGRO_SUPPLIER: "SUP-AGRO-001", +} as const; + +export const PARTNER_COMPANY_NAMES = { + FRESHMART: "FreshMart Retailers", + URBAN_GROCERS: "Urban Grocers Alliance", + SNACKWAVE: "SnackWave Distributors", + MEGAFOODS: "MegaFoods Corporation", + QUICKSTOP: "QuickStop Convenience", +} as const; + +export const PARTNER_COMPANY_IDS = { + FRESHMART: "CUSTOMER-001", + URBAN_GROCERS: "CUSTOMER-002", + SNACKWAVE: "CUSTOMER-003", + MEGAFOODS: "CUSTOMER-004", + QUICKSTOP: "CUSTOMER-005", +} as const; + +export const DAIRY_CATALOG = [ + { sku: "DAIRY-MILK-001", name: "Fresh Whole Milk", unit: "L", pricePerUnit: 4.5 }, + { sku: "DAIRY-CREAM-001", name: "Heavy Cream 35%", unit: "L", pricePerUnit: 8.99 }, + { sku: "DAIRY-SUGAR-001", name: "Refined White Sugar", unit: "KG", pricePerUnit: 2.99 }, + { sku: "DAIRY-COND-001", name: "Sweetened Condensed Milk", unit: "KG", pricePerUnit: 6.49 }, + { sku: "DAIRY-BUTTER-001", name: "Unsalted Butter Blocks", unit: "KG", pricePerUnit: 12.99 }, + { sku: "DAIRY-POWDER-001", name: "Skim Milk Powder", unit: "KG", pricePerUnit: 15.99 }, + { sku: "DAIRY-VANILLA-001", name: "Vanilla Extract Pure", unit: "L", pricePerUnit: 45.0 }, +]; + +export const 
PACKAGING_CATALOG = [ + { sku: "PACK-BAG-150", name: "Chip Bag Foil Laminate 150g", unit: "EA", pricePerUnit: 0.15 }, + { sku: "PACK-BAG-300", name: "Chip Bag Foil Laminate 300g", unit: "EA", pricePerUnit: 0.22 }, + { sku: "PACK-ICE-500", name: "Ice Cream Container 500ml", unit: "EA", pricePerUnit: 0.35 }, + { sku: "PACK-ICE-1000", name: "Ice Cream Container 1L", unit: "EA", pricePerUnit: 0.48 }, + { sku: "PACK-CARTON-001", name: "Cardboard Shipping Carton", unit: "EA", pricePerUnit: 1.25 }, + { sku: "PACK-WRAP-001", name: "Pallet Shrink Wrap Roll", unit: "ROLL", pricePerUnit: 45.0 }, + { sku: "CHEM-PRES-001", name: "Sodium Benzoate Preservative", unit: "KG", pricePerUnit: 28.99 }, + { sku: "CHEM-EMUL-001", name: "Lecithin Emulsifier", unit: "KG", pricePerUnit: 35.5 }, + { sku: "CHEM-STAB-001", name: "Guar Gum Stabilizer", unit: "KG", pricePerUnit: 42.0 }, + { sku: "CHEM-ACID-001", name: "Citric Acid Food Grade", unit: "KG", pricePerUnit: 18.75 }, + {sku: "CHEM-OIL-001", name: "Palm Oil", unit: "L", pricePerUnit: 10.0 }, +]; + +export const AGRO_CATALOG = [ + { sku: "AGRO-POTATO-001", name: "Russet Potatoes Premium", unit: "KG", pricePerUnit: 0.85 }, + { sku: "AGRO-ONION-001", name: "Yellow Onions", unit: "KG", pricePerUnit: 1.2 }, + { sku: "AGRO-SALT-001", name: "Sea Salt Fine", unit: "KG", pricePerUnit: 0.95 }, + { sku: "AGRO-PAPRIKA-001", name: "Smoked Paprika Powder", unit: "KG", pricePerUnit: 18.5 }, + { sku: "AGRO-PEPPER-001", name: "Black Pepper Ground", unit: "KG", pricePerUnit: 25.0 }, + { sku: "AGRO-CUMIN-001", name: "Cumin Powder", unit: "KG", pricePerUnit: 22.0 }, + { sku: "AGRO-GARLIC-001", name: "Garlic Powder", unit: "KG", pricePerUnit: 12.5 }, + { sku: "AGRO-CHILI-001", name: "Dried Chili Flakes", unit: "KG", pricePerUnit: 28.0 }, +]; + +const generateAddress = (type: "BILL_TO" | "SHIP_TO" | "REM_TO") => ({ + type, + country: "United States", + attention: faker.person.fullName(), + street1: faker.location.streetAddress(), + street2: faker.datatype.boolean() ? 
faker.location.secondaryAddress() : undefined, + city: faker.location.city(), + state: faker.location.state(), + postalCode: faker.location.zipCode(), + contactEmail: faker.internet.email(), + contactPhone: faker.phone.number(), +}); + +const generateContact = () => ({ + name: faker.person.fullName(), + email: faker.internet.email(), + phone: faker.phone.number(), +}); + +export const generateMpcCompany = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: true, + companyId: MANUFACTURING_COMPANY_IDS.MPC, + externalReference: "EXT-MPC-SKYFOODS-2024", + name: MANUFACTURING_COMPANY_NAMES.MPC, + legalName: "SkyFoods International LLC", + duns: "123456789", + taxId: "TAX-CFI-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-CFI-2024" }, + currency: "USD", + paymentTerms: "Net 30", + creditLimit: 500000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "SKYMART-SALES", + priceList: "Enterprise", + glAccount: faker.finance.accountNumber(), + customerClass: "VIP", + status: "ACTIVE", + companyType: "INTERNAL", + customFields: { + erpSource: "SAP", + regionCode: "US", + industryType: "Food Manufacturing", + productLines: ["Potato Chips", "Ice Cream"], + suppliers: [ + MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + ], + }, +}); + +export const generateDairySupplier = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + externalReference: "EXT-SUP-DAIRY-2024", + name: MANUFACTURING_COMPANY_NAMES.DAIRY_SUPPLIER, + legalName: "Dairyland Essentials Limited", + duns: "987654321", + taxId: "TAX-DEL-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-DEL-2024" }, + currency: "USD", + paymentTerms: "Net 15", + creditLimit: 100000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "DAIRY-DIST", + priceList: "Standard", + glAccount: faker.finance.accountNumber(), + customerClass: "A", + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + erpSource: "NetSuite", + regionCode: "US", + industryType: "Dairy & Ingredients", + certifications: ["FDA Approved", "HACCP Certified"], + catalog: DAIRY_CATALOG, + }, +}); + +export const generatePackagingSupplier = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + externalReference: "EXT-SUP-PACK-2024", + name: MANUFACTURING_COMPANY_NAMES.PACKAGING_SUPPLIER, + legalName: "PackChem Solutions Incorporated", + duns: "456789123", + taxId: "TAX-PCS-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-PCS-2024" }, + currency: "USD", + paymentTerms: "Net 45", + creditLimit: 150000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "PACKTECH", + priceList: "Partner", + glAccount: faker.finance.accountNumber(), + customerClass: "A", + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + erpSource: "Oracle", + regionCode: "US", + industryType: 
"Packaging & Food Chemicals", + certifications: ["ISO 9001", "FDA Food-Grade"], + catalog: PACKAGING_CATALOG, + }, +}); + +export const generateAgroSupplier = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + externalReference: "EXT-SUP-AGRO-2024", + name: MANUFACTURING_COMPANY_NAMES.AGRO_SUPPLIER, + legalName: "Harvest Fields Agro Company", + duns: "789123456", + taxId: "TAX-HFA-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-HFA-2024" }, + currency: "USD", + paymentTerms: "Due on Receipt", + creditLimit: 75000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "AGRO-HARVEST", + priceList: "Standard", + glAccount: faker.finance.accountNumber(), + customerClass: "B", + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + erpSource: "Odoo", + regionCode: "US", + industryType: "Agriculture & Spices", + certifications: ["USDA Organic", "Non-GMO Verified"], + catalog: AGRO_CATALOG, + }, +}); + +const generatePartnerCompany = ( + worldRef: TWorldRefModel, + id: string, + name: string, + duns: string, + custClass: string +): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: id, + externalReference: `EXT-${id}`, + name: name, + legalName: `${name} Inc.`, + duns: duns, + taxId: `TAX-${id}`, + taxRegistrationNumbers: { country: "United States", number: `REG-${id}` }, + currency: "USD", + paymentTerms: "Net 30", + creditLimit: 250000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "SKYMART-SALES", + priceList: "Wholesale", + glAccount: faker.finance.accountNumber(), + customerClass: custClass, + status: "ACTIVE", + companyType: "CUSTOMER", + customFields: { + erpSource: "SAP", + regionCode: "US", + industryType: "Retail", + segment: "Grocery", + }, +}); + +export const generateAllManufacturingCompanies = (worldRef: TWorldRefModel): TCompanyGenerate[] => [ + generateMpcCompany(worldRef), + generateDairySupplier(worldRef), + generatePackagingSupplier(worldRef), + generateAgroSupplier(worldRef), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.FRESHMART, PARTNER_COMPANY_NAMES.FRESHMART, "111222333", "A"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.URBAN_GROCERS, PARTNER_COMPANY_NAMES.URBAN_GROCERS, "222333444", "A"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.SNACKWAVE, PARTNER_COMPANY_NAMES.SNACKWAVE, "333444555", "B"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.MEGAFOODS, PARTNER_COMPANY_NAMES.MEGAFOODS, "444555666", "VIP"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.QUICKSTOP, PARTNER_COMPANY_NAMES.QUICKSTOP, "555666777", "C"), +]; + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/seeder/index.ts b/packages/controlmart/src/worlds/manufacturing-unit/seeder/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..d6f46422e41eddcf14d8a4bedde7b42223717b71 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/seeder/index.ts @@ -0,0 +1,4 @@ +export * from "./world.generator"; +export * from "./companies.generator"; +export * from "./products.generator"; + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/seeder/products.generator.ts 
b/packages/controlmart/src/worlds/manufacturing-unit/seeder/products.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..18d80119be0d957361d439e76405379f44a14446 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/seeder/products.generator.ts @@ -0,0 +1,493 @@ +import { faker } from "@faker-js/faker"; +import type { TProductGenerate } from "../../../models/erp/product.erp.model"; +import type { TWorldRefModel } from "../../../models/shared.model"; +import { generateIdByService } from "../../../utils/mongo.util"; +import { AGRO_CATALOG, DAIRY_CATALOG, PACKAGING_CATALOG, MANUFACTURING_COMPANY_IDS } from "./companies.generator"; + +export type TBomItem = { + supplierCompanyId: string; + sku: string; + name: string; + qty: number; + unit: string; +}; + +const generateWeight = (min: number, max: number, unit: "KG" | "G" | "LB" = "KG") => ({ + value: faker.number.float({ min, max, fractionDigits: 2 }), + unit, +}); + +const generateDimensions = ( + length: number, + width: number, + height: number, + unit: "CM" | "IN" = "CM", +) => ({ + length, + width, + height, + unit, +}); + +const MPC_CHIP_PRODUCTS: Array<{ name: string; sku: string; price: number; bom: TBomItem[] }> = [ + { + name: "Skymart Classic Salted Chips", + sku: "CHP-CLASSIC-001", + price: 3.99, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-SALT-001", + name: "Sea Salt", + qty: 0.01, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart BBQ Blaze Chips", + sku: "CHP-BBQ-001", + price: 4.29, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-PAPRIKA-001", + name: "Smoked Paprika", + qty: 0.005, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-GARLIC-001", + name: "Garlic Powder", + qty: 0.003, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Sour Cream & Onion Chips", + sku: "CHP-SCREAM-001", + price: 4.29, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-ONION-001", + name: "Onion Powder", + qty: 0.005, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Cream Powder", + qty: 0.01, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Spicy Jalapeño Chips", + sku: "CHP-SPICY-001", + price: 4.49, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-CHILI-001", + name: "Chili Flakes", + qty: 0.008, + 
unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-PEPPER-001", + name: "Black Pepper", + qty: 0.003, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Family Size Classic Chips", + sku: "CHP-CLASSIC-FAM", + price: 7.99, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.6, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-SALT-001", + name: "Sea Salt", + qty: 0.02, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-300", + name: "Chip Bag 300g", + qty: 1, + unit: "EA", + }, + ], + }, +]; + +const MPC_ICECREAM_PRODUCTS: Array<{ name: string; sku: string; price: number; bom: TBomItem[] }> = + [ + { + name: "Skymart Vanilla Dream Ice Cream", + sku: "ICE-VANILLA-001", + price: 5.99, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.15, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.08, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-VANILLA-001", + name: "Vanilla Extract", + qty: 0.005, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "CHEM-STAB-001", + name: "Stabilizer", + qty: 0.002, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Belgian Chocolate Ice Cream", + sku: "ICE-CHOCO-001", + price: 6.49, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.15, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.1, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "CHEM-EMUL-001", + name: "Emulsifier", + qty: 0.003, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Strawberry Swirl Ice Cream", + sku: "ICE-STRAW-001", + price: 5.99, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.15, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.08, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "CHEM-PRES-001", + name: "Preservative", + qty: 0.001, + unit: "KG", + }, + { + supplierCompanyId: 
MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Mango Paradise Ice Cream", + sku: "ICE-MANGO-001", + price: 6.49, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.12, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.1, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Premium Vanilla 1L", + sku: "ICE-VANILLA-1L", + price: 9.99, + bom: [ + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.6, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.15, + unit: "KG", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-VANILLA-001", + name: "Vanilla Extract", + qty: 0.01, + unit: "L", + }, + { + supplierCompanyId: MANUFACTURING_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-1000", + name: "Container 1L", + qty: 1, + unit: "EA", + }, + ], + }, + ]; + +export const generateMpcProducts = (worldRef: TWorldRefModel): TProductGenerate[] => { + const chipProducts = MPC_CHIP_PRODUCTS.map((chip) => ({ + worldRef, + productId: generateIdByService("erp", "product"), + sku: chip.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: chip.name, + description: `Premium potato chips - ${chip.name}. 
Made with farm-fresh potatoes.`, + commodityCode: "19059000", + taxClassification: "STANDARD", + unitOfMeasure: "EA", + weight: generateWeight(0.15, 0.5, "KG"), + dimensions: generateDimensions(25, 15, 8), + inventoryTracking: true, + price: { currency: "USD", amount: chip.price }, + cost: { currency: "USD", amount: chip.price * 0.4 }, + leadTimeDays: 3, + status: "ACTIVE" as const, + customFields: { + category: "Snacks", + productType: "chips", + shelfLifeDays: 180, + storageTemp: "Room Temperature", + billOfMaterials: chip.bom, + }, + })); + + const catalog = [...AGRO_CATALOG, ...DAIRY_CATALOG, ...PACKAGING_CATALOG] + + const catalogProducts = catalog.map((catalog) => ({ + worldRef: { + worldId: worldRef.worldId, + }, + productId: generateIdByService("erp", "product"), + sku: catalog.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: catalog.sku, + description: catalog.name, + commodityCode: "21060000", + unitOfMeasure: catalog.unit, + inventoryTracking: true, + price: { currency: "USD", amount: catalog.pricePerUnit }, + cost: { currency: "USD", amount: catalog.pricePerUnit * 0.4 }, + leadTimeDays: 3, + status: "ACTIVE" as const, + customFields: { + category: "Ingredients", + productType: "Ingredients", + isMpcProduct: false, + }, + })); + + const icecreamProducts = MPC_ICECREAM_PRODUCTS.map((ice) => ({ + worldRef, + productId: generateIdByService("erp", "product"), + sku: ice.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: ice.name, + description: `Premium ice cream - ${ice.name}. Made with real dairy.`, + commodityCode: "21050000", + taxClassification: "STANDARD", + unitOfMeasure: "EA", + weight: generateWeight(0.5, 1.0, "KG"), + dimensions: generateDimensions(12, 12, 15), + inventoryTracking: true, + price: { currency: "USD", amount: ice.price }, + cost: { currency: "USD", amount: ice.price * 0.35 }, + leadTimeDays: 2, + status: "ACTIVE" as const, + customFields: { + category: "Frozen Desserts", + productType: "ice_cream", + shelfLifeDays: 2, + storageTemp: "Frozen (-18°C)", + billOfMaterials: ice.bom, + }, + })); + + return [...chipProducts, ...icecreamProducts, ...catalogProducts]; +}; + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/seeder/world.generator.ts b/packages/controlmart/src/worlds/manufacturing-unit/seeder/world.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..4a247e5ca753a29ce79a51e73a1b8a410b9bd675 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/seeder/world.generator.ts @@ -0,0 +1,86 @@ +import { createWorld } from "../../../repository/world.repository"; +import { CompanyRepository } from "../../../repository/erp/company.repository"; +import { ProductRepository } from "../../../repository/erp/product.repository"; +import { CompanyLedgerRepository } from "../../../repository/finance/ledger.repository"; +import { generateAllManufacturingCompanies, MANUFACTURING_COMPANY_NAMES } from "./companies.generator"; +import { generateMpcProducts } from "./products.generator"; +import { getIdFromMongoObject } from "../../../utils/mongo.util"; + +export interface ManufacturingWorldResult { + worldId: string; + companiesCreated: number; + productsCreated: number; + initialCapital: number; +} + +export interface ManufacturingWorldConfig { + worldName?: string; + initialCash?: number; + initialReceivables?: number; + initialPayables?: number; + worldId?: string; +} + +const DEFAULT_INITIAL_CASH = 1_000_000; + +export const seedManufacturingWorld = async ( 
+ config: ManufacturingWorldConfig = {}, +): Promise<ManufacturingWorldResult> => { + const { + worldName = "Manufacturing Unit World", + initialCash = DEFAULT_INITIAL_CASH, + initialReceivables = 0, + initialPayables = 0, + worldId: existingWorldId, + } = config; + + let worldId = existingWorldId; + + if (!worldId) { + const world = await createWorld({ + name: worldName, + description: "Manufacturing unit world focused on production operations", + mpcCompany: MANUFACTURING_COMPANY_NAMES.MPC, + ticketCreationEnabled: false, + }); + worldId = getIdFromMongoObject(world); + } + + const worldRef = { worldId: worldId! }; + + const companies = generateAllManufacturingCompanies(worldRef); + const companyRepo = CompanyRepository(worldId!); + for (const company of companies) { + if (company.companyId) { + const existing = await companyRepo.getCompanyById(company.companyId); + if (!existing) { + await companyRepo.createCompany(company); + } + } + } + + const products = generateMpcProducts(worldRef); + const productRepo = ProductRepository(worldId!); + for (const product of products) { + // Ignore creation errors (e.g., duplicates) when re-seeding an existing world + await productRepo.createProduct(product).catch(() => { }); + } + + const ledgerRepo = CompanyLedgerRepository(worldId!); + await ledgerRepo.ensure({ + cash: initialCash, + totalReceivables: initialReceivables, + totalPayables: initialPayables, + }); + + return { + worldId, + companiesCreated: companies.length, + productsCreated: products.length, + initialCapital: initialCash, + }; +}; + +export { MANUFACTURING_COMPANY_NAMES, generateAllManufacturingCompanies } from "./companies.generator"; +export { generateMpcProducts } from "./products.generator"; + diff --git a/packages/controlmart/src/worlds/manufacturing-unit/world-doc.ts b/packages/controlmart/src/worlds/manufacturing-unit/world-doc.ts new file mode 100644 index 0000000000000000000000000000000000000000..fc156fe96d18a7e6a8f50bcfa77020fb18f75b98 --- /dev/null +++ b/packages/controlmart/src/worlds/manufacturing-unit/world-doc.ts @@ -0,0 +1,351 @@ +export const manufacturingUnitWorldDoc = { + meta: { + version: "1.0.0", + generatedAt: "2025-12-12", + docType: "World Definition", + }, + world: { + name: "Manufacturing Unit", + description: + "A focused manufacturing simulation world that demonstrates the complete production lifecycle from raw material procurement through finished goods receipt. This world emphasizes the manufacturing execution system (MES) integration between warehouse management and production operations.", + industry: "Food & Beverage Manufacturing", + location: "Centralized Manufacturing Facility", + size: "Enterprise (Large)", + complexity: "High", + story: + "The Manufacturing Unit world simulates a production-focused operation where raw materials flow from suppliers through warehouse storage, are transformed in the manufacturing facility, and finished goods are received back into inventory. This world showcases the integration of ERP, WMS, and Manufacturing systems working in harmony to execute production orders efficiently.", + }, + businessContext: { + name: "SkyFoods International Manufacturing Unit", + industry: "Food & Beverage Manufacturing", + description: + "A dedicated manufacturing facility focused on transforming raw ingredients into finished consumer products.
The unit operates on a Make-to-Stock (MTS) model with emphasis on production planning, material consumption tracking, and finished goods quality.", + coreValues: [ + "Production Excellence: Precision in every batch, quality in every unit.", + "Material Efficiency: Optimize consumption, minimize waste.", + "Traceability: Track every component from receipt to finished good.", + ], + productLines: [ + { + name: "SkyFoods Potato Chips", + storage: "Room Temperature (Ambient)", + shelfLife: "180 Days", + description: "Premium potato chips manufactured from fresh ingredients.", + products: [ + { + sku: "CHP-CLASSIC-001", + name: "Classic Salted Chips", + bom: ["Russet Potatoes", "Sea Salt", "Bag"], + }, + { + sku: "CHP-BBQ-001", + name: "BBQ Blaze Chips", + bom: ["Russet Potatoes", "Smoked Paprika", "Garlic Powder", "Bag"], + }, + ], + }, + { + name: "SkyFoods Ice Cream", + storage: "Frozen (-18°C)", + shelfLife: "365 Days", + description: "Premium ice cream products requiring cold-chain compliance.", + products: [ + { + sku: "ICE-VANILLA-001", + name: "Vanilla Dream", + bom: ["Fresh Milk", "Heavy Cream", "Sugar", "Vanilla Extract", "Stabilizer"], + }, + { + sku: "ICE-CHOCO-001", + name: "Belgian Chocolate", + bom: ["Fresh Milk", "Heavy Cream", "Sugar", "Emulsifier"], + }, + ], + }, + ], + }, + ecosystem: { + partners: "Suppliers provide raw materials and packaging components. The manufacturing unit focuses on production operations.", + suppliers: [ + { + name: "Harvest Fields Agro Co. (Agro)", + role: "Raw Ingredient Supplier", + integration: "Full EDI Suite (850, 855, 856, 810)", + paymentTerms: "Due on Receipt", + description: "Primary supplier of agricultural raw materials including potatoes and spices.", + catalog: [ + "Russet Potatoes Premium (AGRO-POTATO-001)", + "Sea Salt Fine (AGRO-SALT-001)", + "Spices (Paprika, Pepper, Garlic, Chili)", + ], + }, + { + name: "Dairyland Essentials Ltd. (Dairy)", + role: "Dairy Ingredient Supplier", + integration: "Full EDI Suite (850, 855, 856, 810)", + paymentTerms: "Net 15", + description: "Supplier of dairy products and ingredients for ice cream production.", + catalog: [ + "Fresh Whole Milk (DAIRY-MILK-001)", + "Heavy Cream 35% (DAIRY-CREAM-001)", + "Refined White Sugar (DAIRY-SUGAR-001)", + "Vanilla Extract", + ], + }, + { + name: "PackChem Solutions Inc. (Packaging)", + role: "Packaging & Chemicals Supplier", + integration: "Full EDI Suite (850, 855, 856, 810)", + paymentTerms: "Net 45", + description: "Supplier of packaging materials and food-grade chemicals.", + catalog: [ + "Foil Laminate Bags (150g, 300g)", + "Ice Cream Containers (500ml, 1L)", + "Chemicals (Preservatives, Emulsifiers, Stabilizers)", + ], + }, + ], + }, + operationalDescriptors: { + standardActors: [ + { + id: "raw-material-procurement", + name: "Raw Material Procurement", + persona: "Procurement Manager", + type: "Standard Workflow", + schedule: "Daily @ 08:00 AM (Sim Time)", + description: + "Monitors inventory levels and creates purchase orders for raw materials when stock falls below safety thresholds.", + keySteps: [ + "Fetch Real-time Inventory Levels from WMS", + "Calculate Days-of-Supply vs. 
Forecast", + "Identify Stock Gaps & Determine Reorder Quantities", + "Select Optimal Supplier per SKU", + "Create Purchase Order in ERP", + "Generate and Transmit EDI 850 (Purchase Order)", + ], + inputs: ["Real-time Inventory History", "Supplier Catalogs", "Production Forecast"], + outputs: ["Purchase Order (Created)", "EDI 850 Transaction"], + }, + { + id: "inbound-asn-process", + name: "Inbound ASN Process", + persona: "Logistics Coordinator", + type: "Standard Workflow", + schedule: "Recurring @ Every 2 Hours (Sim Time)", + description: + "Receives Advance Shipping Notices (ASN) from suppliers and creates WMS Inbound Orders for planning.", + keySteps: [ + "Poll Integration Layer for New EDI 856 (ASN) Transactions", + "Parse ASN & Retrieve Related Purchase Order", + "Validate Item SKUs, Quantities, and PO status", + "Create WMS Inbound Order", + "Update PO Line Item Status to 'IN_TRANSIT'", + "Send EDI 997 (Functional Acknowledgement) to Supplier", + ], + inputs: ["Incoming EDI 856 Stream", "Open Purchase Orders"], + outputs: ["WMS Inbound Order", "PO Status Update", "EDI 997 Transaction"], + }, + { + id: "receive-supplier-shipment", + name: "Receive Supplier Shipment", + persona: "Dock Receiver", + type: "Standard Workflow", + schedule: "Daily @ 11:00 AM (Sim Time)", + description: + "Receives raw materials at the warehouse dock and posts inventory receipt transactions.", + keySteps: [ + "Query WMS for Arrived Inbound Orders", + "Simulate Unloading & Physical Count Verification", + "Post Inventory Receipt Transaction to WMS (Location: DOCK-001)", + "Update Inbound Order Status to 'RECEIVED'", + "Close Purchase Order Lines", + ], + inputs: ["Inbound Orders (Status: IN_TRANSIT)"], + outputs: ["Inventory Added (Receiving Dock)", "Closed PO"], + }, + { + id: "putaway-process", + name: "Putaway Process", + persona: "Forklift Operator", + type: "Standard Workflow", + schedule: "Recurring @ Every 1 Hour (Sim Time)", + description: + "Moves received inventory from the dock to appropriate storage locations based on material characteristics.", + keySteps: [ + "Scan Receiving Dock for Unprocessed Inventory", + "Determine Storage Logic based on Material Characteristics", + "Identify Available Empty Bin Capacity in Target Zone", + "Execute WMS Internal Transfer (Dock -> Bin)", + "Update Inventory Batch Record with Location Data", + ], + inputs: ["Unassigned Dock Inventory", "Warehouse Bin Map"], + outputs: ["Optimized Bin Inventory", "Clean Receiving Dock"], + }, + { + id: "production-order", + name: "Production Order", + persona: "Production Planner", + type: "Standard Workflow", + schedule: "Daily @ 01:00 PM (Sim Time)", + description: + "Creates production orders based on finished goods inventory levels and BOM requirements.", + keySteps: [ + "Analyze Finished Goods Inventory vs. 
Safety Stock", + "Prioritize Production Requests", + "Explode Bill of Materials (BOM) for Target SKU", + "Check Raw Material Availability", + "Create ERP Production Order", + "Create WMS Pick Tasks for Raw Materials", + ], + inputs: ["Product Catalog", "BOM Definitions", "Current Inventory"], + outputs: ["Production Order (Status: IN_PROGRESS)", "WMS Pick Tasks"], + }, + { + id: "material-pick-for-production", + name: "Material Pick for Production", + persona: "Forklift Operator", + type: "Standard Workflow", + schedule: "Recurring @ Every 1 Hour (Sim Time)", + description: + "Picks raw materials from WMS storage locations for production orders.", + keySteps: [ + "Get Production Orders Ready for Material Pick", + "Select Order to Process", + "Pick Raw Materials from WMS (Create PICK transactions)", + "Update Production Order Status to MATERIALS_PICKED", + ], + inputs: ["Production Orders (Status: IN_PROGRESS)"], + outputs: ["Raw Materials Picked", "Production Order (Status: MATERIALS_PICKED)"], + }, + { + id: "manufacturing-execution", + name: "Manufacturing Execution (Simple)", + persona: "Production Operator", + type: "Standard Workflow", + schedule: "Recurring @ Every 2 Hours (Sim Time)", + description: + "Simplified manufacturing execution that creates production runs and tracks material consumption without machine-level detail.", + keySteps: [ + "Get Production Orders with Materials Picked", + "Create Manufacturing Production Run", + "Start Production Run", + "Simulate Manufacturing Process", + "Complete Production Run with Finished Goods Data", + "Update Production Order Status to MANUFACTURING_COMPLETE", + ], + inputs: ["Production Orders (Status: MATERIALS_PICKED)", "BOM Data"], + outputs: ["Production Run (Status: COMPLETED)", "Production Order (Status: MANUFACTURING_COMPLETE)"], + }, + { + id: "manufacturing-execution-detailed", + name: "Manufacturing Execution (Detailed Machine-Level)", + persona: "Production Operator", + type: "Standard Workflow", + schedule: "Recurring @ Every 2 Hours (Sim Time)", + description: + "Detailed manufacturing execution with granular machine-level processing. 
Routes through product-specific production lines with full traceability of intermediate products.", + productionLines: { + iceCream: { + name: "Ice Cream Production Line", + machines: [ + { id: "MCH_PASTEUR_01", name: "Pasteurizer", operation: "Heat milk/cream to 85°C to eliminate pathogens" }, + { id: "MCH_FERMENT_01", name: "Fermenter", operation: "Add cultures for cultured varieties (optional pass-through)" }, + { id: "MCH_MIXER_01", name: "Mixer", operation: "Combine base with sugar, flavorings, incorporate air (overrun)" }, + { id: "MCH_PACKER_01", name: "Cup Packer", operation: "Fill into 500ml cups with hermetic seal" }, + { id: "MCH_CHILL_01", name: "Chill Room", operation: "Blast freeze to -18°C for storage stability" }, + ], + products: ["ICE-VANILLA-001 (Vanilla Dream)", "ICE-CHOCO-001 (Belgian Chocolate)"], + }, + chips: { + name: "Chips Production Line", + machines: [ + { id: "MCH_SLICER_01", name: "Slicer", operation: "Slice potatoes to 1.5mm uniform thickness" }, + { id: "MCH_FRYER_01", name: "Fryer", operation: "Deep fry at 180°C until golden and crispy" }, + { id: "MCH_SEASON_01", name: "Seasoning Drum", operation: "Tumble with salt/spices for even flavor distribution" }, + { id: "MCH_PACKER_02", name: "Pouch Packer", operation: "Weigh and pack into nitrogen-flushed 150g pouches" }, + { id: "MCH_MD_01", name: "Metal Detector", operation: "Final QC gate - screen for metal contaminants" }, + ], + products: ["CHP-CLASSIC-001 (Classic Salted)", "CHP-BBQ-001 (BBQ Blaze)"], + }, + }, + keySteps: [ + "Get Production Orders with Materials Picked", + "Determine Production Line Type (Ice Cream vs Chips based on SKU prefix)", + "Create Manufacturing Production Run with Line Metadata", + "Execute Machine Sequence (5 steps per line with I/O tracking)", + "Record Quality Checks at Each Machine", + "Aggregate Machine Steps and Quality Data", + "Complete Production Run with Finished Goods and Machine Trace", + "Update Production Order Status to MANUFACTURING_COMPLETE", + ], + inputs: ["Production Orders (Status: MATERIALS_PICKED)", "BOM Data", "Product Type"], + outputs: [ + "Production Run (Status: COMPLETED) with Machine Trace", + "Aggregated Quality Checks from All Machines", + "Production Order (Status: MANUFACTURING_COMPLETE)", + "Production Summary with Process Time Analytics", + ], + }, + { + id: "finished-goods-receipt", + name: "Finished Goods Receipt", + persona: "Forklift Operator", + type: "Standard Workflow", + schedule: "Recurring @ Every 1 Hour (Sim Time)", + description: + "Receives finished goods from manufacturing into WMS inventory and records COGS.", + keySteps: [ + "Get Production Orders Ready for Receipt", + "Get Completed Production Run", + "Receive Finished Goods into WMS (Create RECEIVE transactions)", + "Calculate Cost of Goods Manufactured (COGM)", + "Record Manufacturing Cost in Finance", + "Complete Production Order", + ], + inputs: ["Production Orders (Status: MANUFACTURING_COMPLETE)", "Production Run Data"], + outputs: ["Finished Goods Inventory", "COGS Transaction", "Production Order (Status: COMPLETED)"], + }, + ], + backgroundActors: [ + { + id: "daily-inventory-check", + name: "Daily Inventory Consistency Check", + persona: "Inventory Manager", + type: "Background Job", + schedule: "Recurring @ Every 12 Hours (Sim Time)", + description: + "Performs inventory reconciliation and data health checks between WMS and ERP systems.", + responsibility: "System-wide Inventory Reconciliation & Data Health", + }, + { + id: "aging-inventory-check", + name: "Aging & 
Spoilage Monitor", + persona: "Quality Control Specialist", + type: "Background Job", + schedule: "Daily @ Midnight (Sim Time)", + description: + "Scans inventory for expired batches and executes write-off transactions for spoilage.", + responsibility: "Batch Expiration Scanning & Financial Write-off Execution", + }, + { + id: "supplier-reorder-trigger", + name: "Emergency Supplier Reorder Trigger", + persona: "System / Procurement Bot", + type: "Background Job", + schedule: "Recurring @ Every 1 Hour (Sim Time)", + description: + "Monitors critical SKUs and triggers emergency reorders when stock levels breach critical minimums.", + responsibility: "Intraday Low-Stock Monitoring & Crisis Response", + }, + ], + }, + technicalServices: { + ERP: "Enterprise Resource Planning: Manages orders, products, companies, and pricing logic.", + WMS: "Warehouse Management System: Manages inventory, bins, zones, and warehouse operations (receiving, picking, putaway).", + MANUFACTURING: "Manufacturing Execution System: Tracks production runs, material consumption, and finished goods production.", + Finance: "Financial Management: Handles General Ledger, Accounts Payable/Receivable, and transaction logging.", + EDI: "Electronic Data Interchange: Handles standardized B2B document generation (850, 855, 856, 810).", + }, +}; + diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/index.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..e5e7ef7390d0e6d26da26123ed5272bbc22cbf6b --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/index.ts @@ -0,0 +1,21 @@ +import { seedPerishableWorld } from "./seeder"; +import { schedulePerishableWorld } from "./schedule-perishable-world"; +import { perishableFoodManufacturerWorldDoc } from "./world-doc"; + +export const seedDataPerishablesManufacturerWorld = (initialCash: number, worldId?: string) => { + if (initialCash < 0 || initialCash < 200) { + initialCash = 1_000_000; + } + return seedPerishableWorld({ + initialCash, + worldId, + }); +}; + +export const seedODsPerishablesManufacturerWorld = async (worldId: string, realHoursPerSimDay: number) => { + return await schedulePerishableWorld(worldId, realHoursPerSimDay); +}; + +export const perishableWorldDocs = () => { + return perishableFoodManufacturerWorldDoc; +}; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/list-orders-user.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/list-orders-user.ts new file mode 100644 index 0000000000000000000000000000000000000000..6a6c8f39b0b894f988f0e1382d16b48181c0e985 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/list-orders-user.ts @@ -0,0 +1,23 @@ +import { OrderRepository } from '../../repository/erp/order.repository'; +import { connectMongo, disconnectMongo } from '../../services/mongo.service'; +import { loadEnv } from '../../utils/env.util'; + +async function listOrders() { + const mongoUri = loadEnv().MONGO_URI || process.env.DATABASE_URL; + const dbName = 'controlmart'; + await connectMongo({ uri: mongoUri, dbName }); + + const worldId = '694bce08b8ae02915b71e629'; + const orderRepo = OrderRepository(worldId); + const result = await orderRepo.getAllOrders({ limit: 10 }); + + console.log(`\nWorld: ${worldId}`); + console.log(`Total Orders: ${result.totalCount}`); + result.items.forEach(o => { + console.log(`ID: ${o.orderId} | Status: ${o.status} | Type: 
${o.poType} | Amount: ${o.totalAmount}`); + }); + + await disconnectMongo(false); +} + +listOrders(); diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/aging-inventory-check.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/aging-inventory-check.od.json new file mode 100644 index 0000000000000000000000000000000000000000..7f9a385fa94e3803cdccbb3d24899db22b22e362 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/aging-inventory-check.od.json @@ -0,0 +1,177 @@ +{ + "id": "aging-inventory-check", + "name": "Aging Inventory Check", + "version": "1.0.0", + "description": "Check for expired inventory and write off spoilage", + "namespace": "perishableManWorld.background", + "persona": "Quality Control Specialist", + "type": "background_job", + "steps": [ + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" } + }, + { + "id": "get_inventory", + "name": "Get All Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { + "type": "literal", + "value": { "limit": 200 } + }, + "output": { "storeAs": "inventory" } + }, + { + "id": "identify_expired", + "name": "Identify Expired Items", + "type": "script", + "script": "const items = ctx.inventory?.items || ctx.inventory || []; const now = new Date(); const expired = items.filter(i => i.expirationDate && new Date(i.expirationDate) < now); return { expiredItems: expired, count: expired.length };", + "output": { "storeAs": "spoilageReport" } + }, + { + "id": "write_off_spoilage", + "name": "Write Off Spoilage", + "type": "script", + "script": "const expired = ctx.spoilageReport?.expiredItems || []; if (expired.length === 0) return { writtenOff: 0, totalValue: 0 }; let totalValue = 0; totalValue = expired.reduce((sum, item) => sum + (item.quantity * item.unitPrice), 0); console.log('[SPOILAGE] Writing off', expired.length, 'items. Total Value:', totalValue); return { writtenOff: expired.length, totalValue: totalValue };", + "output": { "storeAs": "writeOffResult" } + }, + { + "id": "record_loss", + "name": "Record Financial Loss", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "condition": { "expression": "writeOffResult.writtenOff > `0`" }, + "input": { + "type": "template", + "template": { + "cashDelta": 0, + "inventoryValueDelta": "-{{writeOffResult.totalValue}}" + } + }, + "output": { "storeAs": "updatedLedger" } + }, + { + "id": "get_all_companies", + "name": "Get All Companies", + "type": "mcp", + "service": "erp", + "tool": "company.get_all", + "input": { "type": "literal", "value": { "limit": 10 } }, + "output": { "storeAs": "allCompanies" }, + "retry": { "maxRetries": 2, "backoff": "fixed", "baseMs": 300 } + }, + { + "id": "select_supplier", + "name": "Select a Supplier", + "type": "script", + "script": "const companies = ctx.allCompanies?.items || ctx.allCompanies || []; const suppliers = companies.filter(c => c.companyId !== ctx.mpc?.companyId && c.name !== ctx.mpc?.name); if (suppliers.length === 0) throw new Error(`No suppliers found. Total companies: ${companies.length}. Names: ${companies.map(c => c.name).join(', ')}. 
MPC: ${ctx.mpc?.name}`); const supplier = suppliers[Math.floor(Math.random() * suppliers.length)]; return supplier;", + "output": { "storeAs": "supplier" } + }, + { + "id": "transform_expired_items", + "name": "Transform Expired Items to Order Lines", + "type": "script", + "script": "const expired = ctx.spoilageReport?.expiredItems || []; const lines = expired.map((item, idx) => ({ lineNumber: idx + 1, sku: item.sku || item.productId, description: item.productName || item.sku || 'Unknown Product', quantityOrdered: item.quantity || 1, unitPrice: item.unitPrice || 0, unitOfMeasure: item.uom || 'EA' })); return { lines, itemsForEdi: expired.map((item, idx) => ({ lineNumber: idx + 1, sku: item.sku || item.productId, quantity: item.quantity || 1, quantityOrdered: item.quantity || 1, unitPrice: item.unitPrice || 0, description: item.productName || 'Unknown Product', uom: item.uom || 'EA' })) };", + "output": { "storeAs": "transformedItems" } + }, + { + "id": "create_production_order", + "name": "Create Production Order", + "type": "mcp", + "service": "erp", + "tool": "order.create", + "input": { + "type": "template", + "template": { + "customerId": "{{mpc.companyId}}", + "partnerId": "{{supplier.companyId}}", + "poType": "STANDARD", + "status": "RECEIVED", + "currency": "USD", + "paymentTerms": "{{supplier.paymentTerms}}", + "lines": "{{transformedItems.lines}}", + "totalAmount": "{{writeOffResult.totalValue}}" + } + }, + "output": { "storeAs": "productionOrder" } + }, + { + "id": "reserve_payable", + "name": "Reserve Payable Amount", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { "payablesDelta": "{{writeOffResult.totalValue}}" } + }, + "output": { "storeAs": "updatedLedger" } + }, + { + "id": "generate_edi_850", + "name": "Generate EDI 850", + "type": "mcp", + "service": "edi", + "tool": "generate.850", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.companyId}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{supplier.companyId}}", "name": "{{supplier.name}}" }, + "poNumber": "{{productionOrder.orderId}}", + "items": "{{transformedItems.itemsForEdi}}" + } + }, + "output": { "storeAs": "edi850" } + }, + { + "id": "validate_edi_850", + "name": "Validate EDI 850 (PO)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi850}}", + "docType": "850", + "worldId": "{{mpc.companyId}}", + "context": { "items": "{{transformedItems.itemsForEdi}}" } + } + }, + "output": { "storeAs": "val850" }, + "continueOnError": true + }, + { + "id": "store_edi", + "name": "Store EDI Transaction", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{supplier.companyId}}", + "docType": "850", + "direction": "OUTBOUND", + "status": "SENT", + "payload": { + "orderId": "{{productionOrder.orderId}}", + "totalAmount": "{{writeOffResult.totalValue}}" + }, + "rawEdi": "{{edi850}}" + } + }, + "output": { "storeAs": "ediTransaction" } + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/customer-full-edi-cycle.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/customer-full-edi-cycle.od.json new file mode 100644 index 0000000000000000000000000000000000000000..d1c12a98f8d1f81ae780451fcb0d1d8a70bc4939 --- /dev/null +++ 
b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/customer-full-edi-cycle.od.json @@ -0,0 +1,495 @@ +{ + "id": "customer-full-edi-cycle", + "name": "Customer Full EDI Cycle (850-855-856-810)", + "version": "1.0.0", + "description": "Complete EDI flow: Customer places PO (850) -> Sales Order -> Ack (855) -> Shipment -> ASN (856) -> Invoice (810)", + "namespace": "perishableManWorld.sales", + "persona": "System Integrator", + "type": "standard", + "steps": [ + { + "id": "get_customer", + "name": "Get Random Customer", + "type": "mcp", + "service": "erp", + "tool": "company.get_random_customer", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "customer" }, + "retry": { "maxRetries": 3, "backoff": "fixed", "baseMs": 1000 } + }, + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" } + }, + { + "id": "get_products", + "name": "Get Products", + "type": "mcp", + "service": "erp", + "tool": "product.get_all", + "input": { "type": "literal", "value": { "status": "ACTIVE", "limit": 50 } }, + "output": { "storeAs": "products" } + }, + { + "id": "generate_po_items", + "name": "Generate PO Line Items", + "type": "script", + "script": "const all = ctx.products?.items || ctx.products || []; if (all.length === 0) throw new Error('No products available'); const count = Math.floor(Math.random() * 4) + 1; const selected = []; for(let i=0; i sum + (item.quantityOrdered * item.unitPrice), 0); return { total: Math.round(total * 100) / 100, itemCount: items.length };", + "output": { "storeAs": "orderCalc" } + }, + { + "id": "generate_850", + "name": "Generate EDI 850 (Purchase Order)", + "type": "mcp", + "service": "edi", + "tool": "generate.850", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{customer.duns}}", "name": "{{customer.name}}" }, + "receiver": { "duns": "{{mpc.duns}}", "name": "{{mpc.name}}" }, + "poNumber": "{{poNumber}}", + "poDate": "{{generated.poDate}}", + "items": "{{poItems}}" + } + }, + "output": { "storeAs": "edi850" } + }, + { + "id": "validate_850", + "name": "Validate EDI 850 (PO)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi850}}", + "docType": "850", + "worldId": "{{customer.companyId}}", + "context": { "items": "{{poItems}}" } + } + }, + "output": { "storeAs": "val850" }, + "continueOnError": true + }, + { + "id": "store_850", + "name": "Receive EDI 850", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{customer.companyId}}", + "docType": "850", + "direction": "INBOUND", + "status": "RECEIVED", + "transactionNumber": "{{poNumber}}", + "payload": { "poNumber": "{{poNumber}}", "items": "{{poItems}}" }, + "rawEdi": "{{edi850}}" + } + }, + "output": { "storeAs": "txn850" } + }, + { + "id": "create_sales_order", + "name": "Create Sales Order (ERP)", + "type": "mcp", + "service": "erp", + "tool": "order.create", + "input": { + "type": "template", + "template": { + "customerId": "{{customer.companyId}}", + "partnerId": "{{mpc.companyId}}", + "poNumber": "{{poNumber}}", + "poType": "STANDARD", + "status": "RECEIVED", + "currency": "{{customer.currency}}", + "paymentTerms": "Net 30", + "lines": "{{poItems}}", + "totalAmount": "{{orderCalc.total}}" + } + }, + "output": { "storeAs": "salesOrder" } + }, + { + 
"id": "generate_855", + "name": "Generate EDI 855 (Ack)", + "type": "mcp", + "service": "edi", + "tool": "generate.855", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.duns}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{customer.duns}}", "name": "{{customer.name}}" }, + "poNumber": "{{poNumber}}", + "acknowledgmentCode": "AC" + } + }, + "output": { "storeAs": "edi855" } + }, + { + "id": "validate_855", + "name": "Validate EDI 855 (Ack)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi855}}", + "docType": "855", + "worldId": "{{mpc.companyId}}" + } + }, + "output": { "storeAs": "val855" }, + "continueOnError": true + }, + { + "id": "store_855", + "name": "Send EDI 855", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{customer.companyId}}", + "docType": "855", + "direction": "OUTBOUND", + "status": "SENT", + "transactionNumber": "ACK-{{poNumber}}", + "payload": { "orderId": "{{salesOrder.orderId}}" }, + "rawEdi": "{{edi855}}" + } + }, + "output": { "storeAs": "txn855" } + }, + { + "id": "transform_for_outbound", + "name": "Transform Items for Outbound", + "type": "script", + "script": "const items = ctx.poItems || []; const customer = ctx.customer || {}; const lines = items.map(item => ({ lineNumber: item.lineNumber, productId: item.sku, sku: item.sku, productName: item.description, orderedQuantity: item.quantityOrdered, uom: item.uom })); const totalUnits = items.reduce((sum, item) => sum + (item.quantityOrdered || 0), 0); const now = new Date().toISOString(); const shipDate = new Date(Date.now() + 2 * 24 * 60 * 60 * 1000).toISOString(); const addr = customer.shippingAddress || customer.billingAddress || {}; const shipToAddress = { street1: addr.street1 || addr.street || '123 Customer Way', city: addr.city || 'Commerce City', state: addr.state || 'CA', postalCode: addr.postalCode || addr.zipCode || '90210', country: addr.country || 'USA' }; return { lines, totalUnits, lineCount: items.length, orderDate: now, shipDate, shipToAddress };", + "output": { "storeAs": "outboundData" } + }, + { + "id": "create_outbound", + "name": "Create WMS Outbound Order", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "orderNumber": "{{salesOrder.orderId}}", + "orderType": "SALES", + "customer": { + "customerId": "{{customer.companyId}}", + "customerName": "{{customer.name}}", + "accountNumber": "{{customer.companyId}}" + }, + "shipToAddress": "{{outboundData.shipToAddress}}", + "orderPriority": "NORMAL", + "orderStatus": "RELEASED", + "dates": { + "orderDate": "{{outboundData.orderDate}}", + "requiredShipDate": "{{outboundData.shipDate}}" + }, + "totals": { + "lines": "{{outboundData.lineCount}}", + "units": "{{outboundData.totalUnits}}" + }, + "lines": "{{outboundData.lines}}" + } + }, + "output": { "storeAs": "wmsOrder" } + }, + { + "id": "generate_856", + "name": "Generate EDI 856 (ASN)", + "type": "mcp", + "service": "edi", + "tool": "generate.856", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.duns}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{customer.duns}}", "name": "{{customer.name}}" }, + "shipmentId": "SH-{{salesOrder.orderId}}", + "poNumber": "{{poNumber}}", + "sscc": "{{generated.sscc}}", + "items": "{{poItems}}" + } + }, + 
"output": { "storeAs": "edi856" } + }, + { + "id": "validate_856", + "name": "Validate EDI 856 (ASN)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi856}}", + "docType": "856", + "worldId": "{{mpc.companyId}}", + "context": { "items": "{{poItems}}" } + } + }, + "output": { "storeAs": "val856" }, + "continueOnError": true + }, + { + "id": "store_856", + "name": "Send EDI 856", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{customer.companyId}}", + "docType": "856", + "direction": "OUTBOUND", + "status": "SENT", + "transactionNumber": "ASN-{{salesOrder.orderId}}", + "payload": { "shipmentId": "SH-{{salesOrder.orderId}}" }, + "rawEdi": "{{edi856}}" + } + }, + "output": { "storeAs": "txn856" } + }, + { + "id": "create_invoice", + "name": "Create Invoice (ERP)", + "type": "mcp", + "service": "erp", + "tool": "invoice.create", + "input": { + "type": "template", + "template": { + "orderId": "{{salesOrder.orderId}}", + "customerId": "{{customer.companyId}}", + "currency": "{{salesOrder.currency}}", + "lines": "{{poItems}}", + "totalAmount": "{{orderCalc.total}}", + "paymentTerms": "Net 30", + "status": "SENT" + } + }, + "output": { "storeAs": "invoice" } + }, + { + "id": "generate_810", + "name": "Generate EDI 810 (Invoice)", + "type": "mcp", + "service": "edi", + "tool": "generate.810", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.duns}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{customer.duns}}", "name": "{{customer.name}}" }, + "invoiceNumber": "{{invoice.invoiceId}}", + "poNumber": "{{poNumber}}", + "terms": "Net 30", + "items": "{{poItems}}" + } + }, + "output": { "storeAs": "edi810" } + }, + { + "id": "validate_810", + "name": "Validate EDI 810", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi810}}", + "docType": "810", + "worldId": "{{customer.companyId}}", + "context": { "items": "{{poItems}}" } + } + }, + "output": { "storeAs": "validationResult" } + }, + { + "id": "store_810", + "name": "Send EDI 810", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{customer.companyId}}", + "docType": "810", + "direction": "OUTBOUND", + "status": "SENT", + "transactionNumber": "{{invoice.invoiceId}}", + "payload": { "invoiceNumber": "{{invoice.invoiceId}}" }, + "rawEdi": "{{edi810}}" + } + }, + "output": { "storeAs": "txn810" } + }, + { + "id": "book_receivable", + "name": "Book Account Receivable", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { "receivablesDelta": "{{orderCalc.total}}" } + }, + "output": { "storeAs": "updatedLedger" } + }, + { + "id": "update_order_shipped", + "name": "Update Order Status to SHIPPED", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{salesOrder.orderId}}", + "status": "SHIPPED" + } + }, + "output": { "storeAs": "shippedOrder" } + }, + { + "id": "update_order_invoiced", + "name": "Update Order Status to INVOICED", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{salesOrder.orderId}}", + "status": "INVOICED" + } + }, + 
"output": { "storeAs": "invoicedOrder" } + }, + { + "id": "calculate_payment_deltas", + "name": "Calculate Payment Values", + "type": "script", + "script": "const total = ctx.orderCalc?.total || 0; const methods = ['ACH', 'WIRE', 'CHECK']; const method = methods[Math.floor(Math.random() * methods.length)]; return { total: total, negativeTotal: -total, dueDate: new Date(Date.now() + 30*24*60*60*1000).toISOString(), paymentMethod: method };", + "output": { "storeAs": "paymentCalc" } + }, + { + "id": "record_payment_in", + "name": "Record Customer Payment", + "type": "mcp", + "service": "finance", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "type": "payment_in", + "amount": "{{paymentCalc.total}}", + "sourceType": "invoice", + "sourceId": "{{invoice.invoiceId}}", + "partnerId": "{{customer.companyId}}", + "metadata": { + "description": "Payment for Invoice {{invoice.invoiceId}}", + "paymentTerms": "Net 30", + "dueDate": "{{paymentCalc.dueDate}}" + } + } + }, + "output": { "storeAs": "paymentTxn" } + }, + { + "id": "update_ledger_payment", + "name": "Update Ledger (Receive Payment)", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { + "cashDelta": "{{paymentCalc.total}}", + "receivablesDelta": "{{paymentCalc.negativeTotal}}" + } + }, + "output": { "storeAs": "ledgerAfterPayment" } + }, + { + "id": "update_invoice_paid", + "name": "Mark Invoice as Paid", + "type": "mcp", + "service": "erp", + "tool": "invoice.update", + "input": { + "type": "template", + "template": { + "invoiceId": "{{invoice.invoiceId}}", + "status": "PAID", + "paidDate": "{{paymentCalc.dueDate}}" + } + }, + "output": { "storeAs": "paidInvoice" } + }, + { + "id": "create_erp_payment", + "name": "Create ERP Payment Record", + "type": "mcp", + "service": "erp", + "tool": "payment.create", + "input": { + "type": "template", + "template": { + "invoiceId": "{{invoice.invoiceId}}", + "customerId": "{{customer.companyId}}", + "amount": "{{paymentCalc.total}}", + "currency": "{{salesOrder.currency}}", + "method": "{{paymentCalc.paymentMethod}}", + "status": "APPLIED" + } + }, + "output": { "storeAs": "erpPayment" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/daily-inventory-check.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/daily-inventory-check.od.json new file mode 100644 index 0000000000000000000000000000000000000000..3a494611f89ca6107d120ef82fbe228829ebbb26 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/daily-inventory-check.od.json @@ -0,0 +1,95 @@ +{ + "id": "daily-inventory-check", + "name": "Daily Inventory Check", + "version": "1.0.0", + "description": "Daily cycle count, inventory verification, and metrics recording", + "namespace": "perishableManWorld.background", + "persona": "Inventory Manager", + "type": "background_job", + "steps": [ + { + "id": "get_ledger", + "name": "Get Current Financial Position", + "type": "mcp", + "service": "finance", + "tool": "ledger.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "ledger" } + }, + { + "id": "get_financial_summary", + "name": "Get Financial Summary", + "type": "mcp", + "service": "finance", + "tool": "summary.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "financialSummary" } + }, + { + "id": "get_bins", + "name": "Get All 
Warehouse Bins", + "type": "mcp", + "service": "wms", + "tool": "bin.get_by_warehouse", + "input": { "type": "literal", "value": { "warehouseId": "MAIN_WAREHOUSE", "limit": 100 } }, + "output": { "storeAs": "bins" } + }, + { + "id": "get_inventory", + "name": "Get Current Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { "type": "literal", "value": { "limit": 200 } }, + "output": { "storeAs": "inventory" } + }, + { + "id": "calculate_metrics", + "name": "Calculate Daily Metrics", + "type": "script", + "script": "const inventory = ctx.inventory || {}; const ledger = ctx.ledger || {}; const invItems = inventory.items || []; const totalItems = invItems.length; const totalValue = invItems.reduce((sum, i) => sum + (i.quantity || 0) * (i.unitPrice || 1), 0); const roundedValue = Math.round(totalValue * 100) / 100; return { date: new Date().toISOString().split('T')[0], totalSkus: totalItems, skuCount: totalItems, estimatedValue: roundedValue, totalValue: roundedValue, cashPosition: ledger?.cash || 0, receivables: ledger?.totalReceivables || 0, payables: ledger?.totalPayables || 0, netPosition: ledger?.netPosition || 0 };", + "output": { "storeAs": "dailyMetrics" } + }, + { + "id": "create_cycle_count", + "name": "Create Cycle Count", + "type": "mcp", + "service": "wms", + "tool": "cycle_count.create", + "input": { + "type": "template", + "template": { + "countDate": "{{dailyMetrics.date}}", + "countType": "DAILY", + "status": "COMPLETED", + "itemsCounted": "{{dailyMetrics.totalSkus}}", + "discrepancies": 0 + } + }, + "output": { "storeAs": "cycleCount" } + }, + { + "id": "record_metrics", + "name": "Record Daily Metrics", + "type": "mcp", + "service": "wms", + "tool": "daily_metrics.create", + "input": { + "type": "template", + "template": { + "date": "{{dailyMetrics.date}}", + "metrics": "{{dailyMetrics}}" + } + }, + "output": { "storeAs": "recordedMetrics" } + }, + { + "id": "log_summary", + "name": "Log Daily Summary", + "type": "script", + "script": "const dailyMetrics = ctx.dailyMetrics || {}; console.log('[DAILY CHECK]', dailyMetrics.date, '| Cash:', dailyMetrics.cashPosition, '| Receivables:', dailyMetrics.receivables, '| Payables:', dailyMetrics.payables, '| Net:', dailyMetrics.netPosition); return dailyMetrics;", + "output": { "storeAs": "loggedSummary" } + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/financial-reconciliation.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/financial-reconciliation.od.json new file mode 100644 index 0000000000000000000000000000000000000000..9fc8d9e0947a4506dea5ec58244ee031d23dca85 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/financial-reconciliation.od.json @@ -0,0 +1,74 @@ +{ + "id": "financial-reconciliation", + "name": "Financial Reconciliation", + "version": "1.0.0", + "description": "Daily financial reconciliation and reporting", + "namespace": "perishableManWorld.background", + "persona": "Financial Controller", + "type": "background_job", + "steps": [ + { + "id": "get_ledger", + "name": "Get Current Ledger", + "type": "mcp", + "service": "finance", + "tool": "ledger.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "ledger" } + }, + { + "id": "get_summary", + "name": "Get Financial Summary", + "type": "mcp", + "service": "finance", + "tool": "summary.get", + "input": { "type": "literal", 
"value": {} }, + "output": { "storeAs": "financialSummary" } + }, + { + "id": "get_today_transactions", + "name": "Get Today's Transactions", + "type": "mcp", + "service": "finance", + "tool": "transaction.get_all", + "input": { + "type": "literal", + "value": { "limit": 100 } + }, + "output": { "storeAs": "todayTransactions" } + }, + { + "id": "aggregate_by_type", + "name": "Aggregate by Transaction Type", + "type": "mcp", + "service": "finance", + "tool": "aggregate.by_type", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "aggregateByType" } + }, + { + "id": "aggregate_by_partner", + "name": "Aggregate by Partner", + "type": "mcp", + "service": "finance", + "tool": "aggregate.by_partner", + "input": { "type": "literal", "value": { "limit": 20 } }, + "output": { "storeAs": "aggregateByPartner" } + }, + { + "id": "reconcile", + "name": "Reconcile Balances", + "type": "script", + "script": "const ledger = ctx.ledger || {}; const calculated = (ledger.cash || 0) + (ledger.totalReceivables || 0) - (ledger.totalPayables || 0); const stored = ledger.netPosition || 0; const discrepancy = Math.abs(calculated - stored); const isBalanced = discrepancy < 0.01; return { date: new Date().toISOString().split('T')[0], cashBalance: ledger.cash, receivables: ledger.totalReceivables, payables: ledger.totalPayables, calculatedNet: calculated, storedNet: stored, discrepancy: discrepancy, status: isBalanced ? 'BALANCED' : 'DISCREPANCY' };", + "output": { "storeAs": "reconciliation" } + }, + { + "id": "generate_report", + "name": "Generate Daily Report", + "type": "script", + "script": "const reconciliation = ctx.reconciliation || {}; const aggregateByType = ctx.aggregateByType || []; const aggregateByPartner = ctx.aggregateByPartner || []; const financialSummary = ctx.financialSummary || {}; const report = { date: reconciliation.date, summary: { cash: reconciliation.cashBalance, receivables: reconciliation.receivables, payables: reconciliation.payables, netPosition: reconciliation.storedNet }, transactionSummary: aggregateByType, topPartners: aggregateByPartner.slice(0, 5), reconciliationStatus: reconciliation.status, totalTransactionsToday: financialSummary.transactionCount || 0 }; console.log('[DAILY REPORT]', JSON.stringify(report, null, 2)); return report;", + "output": { "storeAs": "dailyReport" } + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/inbound-asn-process.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/inbound-asn-process.od.json new file mode 100644 index 0000000000000000000000000000000000000000..0a0d947ead6f045080240c5dec45c528366e4bfb --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/inbound-asn-process.od.json @@ -0,0 +1,130 @@ +{ + "id": "inbound-asn-process", + "name": "Inbound ASN Process", + "version": "1.0.0", + "description": "Receive Advanced Shipping Notice (ASN) from supplier and create Inbound Order", + "namespace": "perishableManWorld.logistics", + "persona": "Logistics Coordinator", + "type": "standard", + "steps": [ + { + "id": "get_pending_pos", + "name": "Get Pending Purchase Orders", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { "poType": "STANDARD", "limit": 10 } + }, + "output": { "storeAs": "pendingPos" } + }, + { + "id": "select_po", + "name": "Select PO for ASN", + "type": "script", + "script": "const orders = 
ctx.pendingPos?.items || ctx.pendingPos || []; if (orders.length === 0) throw new Error('No pending POs found'); return orders[0];", + "output": { "storeAs": "selectedPo" } + }, + { + "id": "get_supplier", + "name": "Get Supplier Details", + "type": "mcp", + "service": "erp", + "tool": "company.get_by_id", + "input": { + "type": "template", + "template": { "companyId": "{{selectedPo.partnerId}}" } + }, + "output": { "storeAs": "supplier" } + }, + { + "id": "get_all_products", + "name": "Get All Products for Lookup", + "type": "mcp", + "service": "erp", + "tool": "product.get_all", + "input": { "type": "literal", "value": { "limit": 100 } }, + "output": { "storeAs": "allProducts" } + }, + { + "id": "generate_asn_data", + "name": "Generate ASN Data", + "type": "script", + "script": "const po = ctx.selectedPo; const lines = po.lines || []; const products = ctx.allProducts?.items || ctx.allProducts || []; const productMap = {}; products.forEach(p => { if (p.productId) productMap[p.productId] = p.name; const sku = p.customFields?.sku || p.productId?.split(':').pop(); if (sku) productMap[sku] = p.name; }); const totalUnits = lines.reduce((sum, l) => sum + (l.quantityOrdered || l.qty || 0), 0); const expectedArrival = new Date(Date.now() + 2*24*60*60*1000).toISOString(); const shippedDate = new Date().toISOString(); return { shipmentId: 'ASN-' + po.orderId, trackingNumber: 'TRK-' + Math.floor(Math.random() * 1000000), lines: lines.map(l => { const sku = l.sku || l.productId; const productName = l.description || l.name || productMap[sku] || productMap[l.productId] || sku; return { ...l, productId: l.productId || l.sku, productName: productName, shippedQuantity: l.quantityOrdered || l.qty }; }), totalUnits, totalLines: lines.length, expectedArrival, shippedDate };", + "output": { "storeAs": "asnData" } + }, + { + "id": "check_existing_inbound", + "name": "Check if Inbound Order Exists", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.get_by_po_number", + "input": { + "type": "template", + "template": "{{selectedPo.orderId}}" + }, + "output": { "storeAs": "existingInbound" }, + "continueOnError": true + }, + { + "id": "create_inbound_order", + "name": "Create Inbound Order from ASN", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.create", + "condition": { "expression": "!existingInbound" }, + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "poNumber": "{{selectedPo.orderId}}", + "orderType": "ASN", + "orderStatus": "IN_TRANSIT", + "vendor": { + "vendorId": "{{supplier.companyId}}", + "vendorName": "{{supplier.name}}", + "contactEmail": "{{supplier.primaryContact.email}}", + "contactPhone": "{{supplier.primaryContact.phone}}" + }, + "dates": { + "expectedArrival": "{{asnData.expectedArrival}}", + "shippedDate": "{{asnData.shippedDate}}" + }, + "totals": { + "expectedLines": "{{asnData.totalLines}}", + "receivedLines": 0, + "units": "{{asnData.totalUnits}}" + }, + "lines": "{{asnData.lines}}", + "asnNumber": "{{asnData.shipmentId}}", + "referenceNumbers": { + "asnNumber": "{{asnData.shipmentId}}", + "trackingNumber": "{{asnData.trackingNumber}}" + } + } + }, + "output": { "storeAs": "newInboundOrder" } + }, + { + "id": "select_inbound_order", + "name": "Select Inbound Order", + "type": "script", + "script": "return ctx.existingInbound || ctx.newInboundOrder;", + "output": { "storeAs": "inboundOrder" } + }, + { + "id": "update_po_status", + "name": "Update PO Status", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", 
+ "input": { + "type": "template", + "template": { "orderId": "{{selectedPo.orderId}}", "status": "ACKED" } + }, + "output": { "storeAs": "updatedPo" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/index.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..493d0de93b8e2aadfd25a3293c5573b62afa2fb3 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/index.ts @@ -0,0 +1,49 @@ +import agingInventoryCheck from "./aging-inventory-check.od.json"; +import customerFullEdiCycle from "./customer-full-edi-cycle.od.json"; +import dailyInventoryCheck from "./daily-inventory-check.od.json"; +import financialReconciliation from "./financial-reconciliation.od.json"; +import inboundAsnProcess from "./inbound-asn-process.od.json"; +import inventoryConsumption from "./inventory-consumption.od.json"; +import invoiceAndPayment from "./invoice-and-payment.od.json"; +import pickPackShip from "./pick-pack-ship.od.json"; +import productionOrder from "./production-order.od.json"; +import putawayProcess from "./putaway-process.od.json"; +import rawMaterialProcurement from "./raw-material-procurement.od.json"; +import receiveSupplierShipment from "./receive-supplier-shipment.od.json"; +import supplierReorderTrigger from "./supplier-reorder-trigger.od.json"; +import type { OperationalDescriptor } from "../../../types/od.type"; + +export const ODs: OperationalDescriptor[] = [ + agingInventoryCheck as unknown as OperationalDescriptor, + customerFullEdiCycle as unknown as OperationalDescriptor, + dailyInventoryCheck as unknown as OperationalDescriptor, + financialReconciliation as unknown as OperationalDescriptor, + inboundAsnProcess as unknown as OperationalDescriptor, + inventoryConsumption as unknown as OperationalDescriptor, // Deprecated - kept for backward compatibility + invoiceAndPayment as unknown as OperationalDescriptor, + pickPackShip as unknown as OperationalDescriptor, + productionOrder as unknown as OperationalDescriptor, + putawayProcess as unknown as OperationalDescriptor, + rawMaterialProcurement as unknown as OperationalDescriptor, + receiveSupplierShipment as unknown as OperationalDescriptor, + supplierReorderTrigger as unknown as OperationalDescriptor, +]; + +export { + agingInventoryCheck, + customerFullEdiCycle, + dailyInventoryCheck, + financialReconciliation, + finishedGoodsReceipt, + inboundAsnProcess, + inventoryConsumption, + invoiceAndPayment, + manufacturingExecution, + materialPickForProduction, + pickPackShip, + productionOrder, + putawayProcess, + rawMaterialProcurement, + receiveSupplierShipment, + supplierReorderTrigger, +}; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/inventory-consumption.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/inventory-consumption.od.json new file mode 100644 index 0000000000000000000000000000000000000000..aeca7c8d26efd850af83ed6aa3bfc800a7653f1b --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/inventory-consumption.od.json @@ -0,0 +1,173 @@ +{ + "id": "inventory-consumption", + "name": "Inventory Consumption", + "version": "1.0.0", + "description": "Consume raw materials for production and add finished goods", + "namespace": "perishableManWorld.manufacturing", + "persona": "Production Operator", + "type": "standard", + 
"steps": [ + { + "id": "get_production_orders", + "name": "Get In-Progress Production Orders", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { "poType": "PRODUCTION", "status": "IN_PROGRESS", "limit": 10 } + }, + "output": { "storeAs": "productionOrders" } + }, + { + "id": "select_order", + "name": "Select Order to Complete", + "type": "script", + "script": "const orders = ctx.productionOrders?.items || ctx.productionOrders || []; if (orders.length === 0) throw new Error('No production orders to complete'); const order = orders[0]; if (order.lines && order.lines[0]) { const sku = order.lines[0].sku; if (typeof sku === 'object' && sku !== null && sku.productId) { order.lines[0].sku = sku.productId; } } return order;", + "output": { "storeAs": "prodOrder" } + }, + { + "id": "prepare_product_lookup", + "name": "Prepare Product Lookup", + "type": "script", + "script": "const sku = ctx.prodOrder?.lines?.[0]?.sku; if (!sku) throw new Error('No SKU found in production order'); if (typeof sku === 'string') return sku; if (sku && sku.productId) return String(sku.productId); return String(sku);", + "output": { "storeAs": "productIdToLookup" } + }, + { + "id": "get_product", + "name": "Get Product Details", + "type": "mcp", + "service": "erp", + "tool": "product.get_by_id", + "input": { + "type": "template", + "template": { "productId": "{{productIdToLookup}}" } + }, + "output": { "storeAs": "product" } + }, + { + "id": "consume_materials", + "name": "Consume Raw Materials", + "type": "map", + "mapSpec": { + "iterable": { "type": "jmesPath", "value": "prodOrder.customFields.billOfMaterials" }, + "itemName": "bomItem", + "concurrency": 1 + }, + "children": [ + { + "id": "consume_single_material", + "name": "Consume Material", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "WH001", + "transactionType": "PICK", + "productId": "{{bomItem.sku}}", + "quantity": "{{bomItem.requiredQty}}", + "referenceType": "ORDER", + "referenceId": "{{prodOrder.orderId}}" + } + }, + "output": { "storeAs": "consumptionTxn" } + } + ], + "output": { "storeAs": "consumptionTxns" } + }, + { + "id": "complete_pick_tasks", + "name": "Complete Pick Tasks", + "type": "mcp", + "service": "wms", + "tool": "task.get_active", + "input": { + "type": "literal", + "value": {} + }, + "output": { "storeAs": "pendingTasks" } + }, + { + "id": "generate_production_lot", + "name": "Generate Production Lot Number", + "type": "script", + "script": "const date = new Date().toISOString().slice(0,10).replace(/-/g,''); const seq = Math.floor(Math.random() * 999) + 1; return `PROD-${date}-${seq.toString().padStart(3,'0')}`;", + "output": { "storeAs": "productionLot" } + }, + { + "id": "add_finished_goods", + "name": "Add Finished Goods to Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "WH001", + "transactionType": "RECEIVE", + "productId": "{{product.productId}}", + "sku": "{{product.productId}}", + "productName": "{{product.name}}", + "quantity": "{{prodOrder.lines[0].quantityOrdered}}", + "uom": "{{product.unitOfMeasure}}", + "toBinId": "FINISHED_GOODS", + "lotNumber": "{{productionLot}}", + "referenceType": "ORDER", + "referenceId": "{{prodOrder.orderId}}", + "notes": "Production output from order {{prodOrder.orderId}}" + } + }, + "output": { "storeAs": "productionTxn" } + }, + 
{ + "id": "calculate_cogs", + "name": "Calculate Cost of Goods Sold", + "type": "script", + "script": "const prodOrder = ctx.prodOrder || {}; const bom = prodOrder.customFields?.billOfMaterials || []; const cost = bom.reduce((sum, item) => sum + (item.requiredQty * (item.pricePerUnit || 1)), 0); return { totalCost: Math.round(cost * 100) / 100, unitsProduced: prodOrder.lines?.[0]?.quantityOrdered || 1 };", + "output": { "storeAs": "cogsCalc" } + }, + { + "id": "record_cogs", + "name": "Record Manufacturing Cost", + "type": "mcp", + "service": "finance", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "type": "payment_out", + "amount": "{{cogsCalc.totalCost}}", + "sourceType": "manual", + "sourceId": "{{prodOrder.orderId}}", + "metadata": { + "description": "COGS for production order {{prodOrder.orderId}}", + "unitsProduced": "{{cogsCalc.unitsProduced}}", + "productId": "{{product.productId}}", + "productName": "{{product.name}}", + "lotNumber": "{{productionLot}}", + "costPerUnit": "{{cogsCalc.totalCost}}" + } + } + }, + "output": { "storeAs": "cogsTxn" } + }, + { + "id": "complete_order", + "name": "Complete Production Order", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{prodOrder.orderId}}", + "status": "COMPLETED" + } + }, + "output": { "storeAs": "completedOrder" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/invoice-and-payment.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/invoice-and-payment.od.json new file mode 100644 index 0000000000000000000000000000000000000000..0da647bcec14792259bb73d2d62a665ccf69f76a --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/invoice-and-payment.od.json @@ -0,0 +1,242 @@ +{ + "id": "invoice-and-payment", + "name": "Invoice and Payment", + "version": "1.0.0", + "description": "Invoice customer for shipped order and record payment receipt", + "namespace": "perishableManWorld.sales", + "persona": "Accounts Receivable Clerk", + "type": "standard", + "steps": [ + { + "id": "get_shipped_orders", + "name": "Get Shipped Sales Orders", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { "status": "SHIPPED", "limit": 10 } + }, + "output": { "storeAs": "shippedOrders" } + }, + { + "id": "select_order", + "name": "Select Order to Invoice", + "type": "script", + "script": "const orders = ctx.shippedOrders?.items || ctx.shippedOrders || []; if (orders.length === 0) throw new Error('No shipped orders to invoice'); return orders[0];", + "output": { "storeAs": "orderToInvoice" } + }, + { + "id": "get_customer", + "name": "Get Customer Details", + "type": "mcp", + "service": "erp", + "tool": "company.get_by_id", + "input": { + "type": "template", + "template": { "companyId": "{{orderToInvoice.customerId}}" } + }, + "output": { "storeAs": "customer" } + }, + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" } + }, + { + "id": "create_invoice", + "name": "Create Invoice", + "type": "mcp", + "service": "erp", + "tool": "invoice.create", + "input": { + "type": "template", + "template": { + "orderId": "{{orderToInvoice.orderId}}", + "customerId": "{{customer.companyId}}", + 
"currency": "{{orderToInvoice.currency}}", + "lines": "{{orderToInvoice.lines}}", + "totalAmount": "{{orderToInvoice.totalAmount}}", + "paymentTerms": "{{orderToInvoice.paymentTerms}}", + "status": "SENT" + } + }, + "output": { "storeAs": "invoice" }, + "retry": { "maxRetries": 3, "backoff": "exponential", "baseMs": 300 } + }, + { + "id": "generate_edi_810", + "name": "Generate EDI Invoice", + "type": "mcp", + "service": "edi", + "tool": "generate.810", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.companyId}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{customer.companyId}}", "name": "{{customer.name}}" }, + "invoiceNumber": "{{invoice.invoiceId}}", + "poNumber": "{{orderToInvoice.orderId}}", + "totalAmount": "{{invoice.totalAmount}}", + "items": "{{invoice.lines}}" + } + }, + "output": { "storeAs": "edi810" }, + "continueOnError": true + }, + { + "id": "validate_edi_810", + "name": "Validate EDI 810 (Invoice)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi810}}", + "docType": "810", + "worldId": "{{mpc.companyId}}", + "context": { "items": "{{invoice.lines}}" } + } + }, + "output": { "storeAs": "val810" }, + "continueOnError": true + }, + { + "id": "store_edi", + "name": "Store Invoice EDI", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{customer.companyId}}", + "docType": "810", + "direction": "OUTBOUND", + "status": "SENT", + "payload": { + "invoiceId": "{{invoice.invoiceId}}", + "orderId": "{{orderToInvoice.orderId}}" + }, + "rawEdi": "{{edi810}}" + } + }, + "output": { "storeAs": "ediTransaction" }, + "continueOnError": true + }, + { + "id": "update_order_invoiced", + "name": "Mark Order as Invoiced", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{orderToInvoice.orderId}}", + "status": "INVOICED" + } + }, + "output": { "storeAs": "invoicedOrder" }, + "continueOnError": true + }, + { + "id": "calculate_payment_date", + "name": "Calculate Due Date Based on Terms", + "type": "script", + "script": "const terms = ctx.orderToInvoice?.paymentTerms || 'Net 30'; const days = parseInt(terms.replace(/\\D/g, '')) || 30; const dueDate = new Date(); dueDate.setDate(dueDate.getDate() + days); return { dueDate: dueDate.toISOString(), termsInDays: days };", + "output": { "storeAs": "paymentTermsCalc" } + }, + { + "id": "record_payment_in", + "name": "Record Customer Payment", + "type": "mcp", + "service": "finance", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "type": "payment_in", + "amount": "{{invoice.totalAmount}}", + "sourceType": "invoice", + "sourceId": "{{invoice.invoiceId}}", + "partnerId": "{{customer.companyId}}", + "metadata": { + "description": "Payment for Invoice {{invoice.invoiceId}}", + "paymentTerms": "{{orderToInvoice.paymentTerms}}", + "dueDate": "{{paymentTermsCalc.dueDate}}", + "invoiceNumber": "{{invoice.invoiceId}}", + "orderNumber": "{{orderToInvoice.orderId}}", + "customerId": "{{customer.companyId}}" + } + } + }, + "output": { "storeAs": "paymentTxn" } + }, + { + "id": "update_ledger", + "name": "Update Ledger - Receive Payment", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { + "cashDelta": "{{orderToInvoice.totalAmount}}" + } + }, + "output": { 
"storeAs": "updatedLedger" }, + "continueOnError": true + }, + { + "id": "update_invoice_paid", + "name": "Mark Invoice as Paid", + "type": "mcp", + "service": "erp", + "tool": "invoice.update", + "input": { + "type": "template", + "template": { + "invoiceId": "{{invoice.invoiceId}}", + "status": "PAID", + "paidDate": "{{now}}" + } + }, + "output": { "storeAs": "paidInvoice" }, + "continueOnError": true + }, + { + "id": "select_payment_method", + "name": "Select Random Payment Method", + "type": "script", + "script": "const methods = ['ACH', 'WIRE', 'CHECK']; return methods[Math.floor(Math.random() * methods.length)];", + "output": { "storeAs": "paymentMethod" } + }, + { + "id": "create_erp_payment", + "name": "Create ERP Payment Record", + "type": "mcp", + "service": "erp", + "tool": "payment.create", + "input": { + "type": "template", + "template": { + "invoiceId": "{{invoice.invoiceId}}", + "customerId": "{{customer.companyId}}", + "amount": "{{invoice.totalAmount}}", + "currency": "{{invoice.currency}}", + "method": "{{paymentMethod}}", + "status": "APPLIED" + } + }, + "output": { "storeAs": "erpPayment" }, + "continueOnError": true + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/pick-pack-ship.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/pick-pack-ship.od.json new file mode 100644 index 0000000000000000000000000000000000000000..aa6f46f838637110e4b83776c05bea9d8b9cff06 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/pick-pack-ship.od.json @@ -0,0 +1,294 @@ +{ + "id": "pick-pack-ship", + "name": "Pick Pack and Ship", + "version": "1.0.0", + "description": "Fulfill sales order - pick inventory, pack, and ship to customer", + "namespace": "perishableManWorld", + "persona": "Fulfillment Associate", + "type": "standard", + "steps": [ + { + "id": "get_pending_outbound", + "name": "Get Pending Outbound Orders", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.get_by_status", + "input": { + "type": "literal", + "value": { "status": ["CREATED"], "limit": 10 } + }, + "output": { "storeAs": "pendingOutbound" } + }, + { + "id": "select_outbound", + "name": "Select Outbound to Fulfill", + "type": "script", + "script": "const orders = ctx.pendingOutbound?.items || ctx.pendingOutbound || []; if (orders.length === 0) throw new Error('No pending outbound orders'); return orders[0];", + "output": { "storeAs": "outbound" } + }, + { + "id": "get_sales_order", + "name": "Get Associated Sales Order", + "type": "mcp", + "service": "erp", + "tool": "order.get_by_id", + "input": { + "type": "template", + "template": { "orderId": "{{outbound.orderNumber}}" } + }, + "output": { "storeAs": "salesOrder" } + }, + { + "id": "create_pick_task", + "name": "Create Pick Task", + "type": "mcp", + "service": "wms", + "tool": "task.create", + "input": { + "type": "template", + "template": { + "warehouseId": "{{outbound.warehouseId}}", + "taskType": "PICK", + "reference": { + "type": "ORDER", + "id": "{{outbound.orderId}}" + }, + "product": { + "productId": "{{outbound.lines[0].productId}}", + "sku": "{{outbound.lines[0].sku}}", + "productName": "{{outbound.lines[0].productName}}" + }, + "from": { + "binId": "PICK-ZONE-A1", + "binCode": "A1-01", + "zoneId": "PICK" + }, + "to": { + "binId": "STAGING-DOCK", + "binCode": "DOCK-01", + "zoneId": "STAGING" + }, + "quantity": { + "requested": "{{outbound.lines[0].orderedQuantity}}", + "actual": 
0, + "uom": "EA" + }, + "priority": 50, + "taskStatus": "CREATED" + } + }, + "output": { "storeAs": "pickTask" } + }, + { + "id": "complete_pick", + "name": "Complete Pick Task", + "type": "mcp", + "service": "wms", + "tool": "task.update_status", + "input": { + "type": "template", + "template": { + "taskId": "{{pickTask.taskId}}", + "taskStatus": "COMPLETED" + } + }, + "output": { "storeAs": "completedPick" }, + "continueOnError": true + }, + { + "id": "deduct_inventory", + "name": "Deduct Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "transactionType": "SHIP", + "productId": "{{outbound.lines[0].productId}}", + "sku": "{{outbound.lines[0].productId}}", + "quantity": "{{outbound.lines[0].orderedQuantity}}", + "referenceType": "ORDER", + "referenceId": "{{outbound.orderId}}" + } + }, + "output": { "storeAs": "inventoryTxn" }, + "continueOnError": true + }, + { + "id": "get_carriers", + "name": "Get Available Carriers", + "type": "mcp", + "service": "tms", + "tool": "carrier.get_active", + "input": { "type": "literal", "value": { "limit": 10 } }, + "output": { "storeAs": "carriers" }, + "continueOnError": true + }, + { + "id": "select_carrier", + "name": "Select Random Carrier", + "type": "script", + "script": "const carrierList = ctx.carriers?.items || ctx.carriers || []; if (carrierList.length === 0) return { carrierId: 'DEFAULT_CARRIER', name: 'Default Carrier' }; return carrierList[Math.floor(Math.random() * carrierList.length)];", + "output": { "storeAs": "carrier" } + }, + { + "id": "create_tms_shipment", + "name": "Schedule TMS Shipment", + "type": "mcp", + "service": "tms", + "tool": "shipment.create", + "input": { + "type": "template", + "template": { + "carrierId": "{{carrier.carrierId}}", + "orderId": "{{salesOrder.orderId}}", + "shipmentNumber": "SH-{{salesOrder.orderId}}", + "shipToAddress": "{{outbound.shipToAddress}}", + "items": "{{outbound.lines}}", + "status": "PLANNED" + } + }, + "output": { "storeAs": "tmsShipment" }, + "continueOnError": true + }, + { + "id": "create_wms_shipment", + "name": "Create WMS Shipment", + "type": "mcp", + "service": "wms", + "tool": "outbound_shipment.create", + "input": { + "type": "template", + "template": { + "outboundOrderId": "{{outbound.orderId}}", + "warehouseId": "{{outbound.warehouseId}}", + "lines": "{{outbound.lines}}", + "toAddress": "{{outbound.shipToAddress}}", + "carrierId": "{{carrier.carrierId}}", + "shippedDate": "{{now}}", + "status": "SHIPPED" + } + }, + "output": { "storeAs": "wmsShipment" }, + "continueOnError": true + }, + { + "id": "update_outbound", + "name": "Update Outbound Status", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{outbound.orderId}}", + "status": "SHIPPED" + } + }, + "output": { "storeAs": "updatedOutbound" }, + "continueOnError": true + }, + { + "id": "update_sales_order", + "name": "Update Sales Order Status", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{salesOrder.orderId}}", + "status": "SHIPPED" + } + }, + "output": { "storeAs": "updatedSalesOrder" }, + "continueOnError": true + }, + { + "id": "get_mpc", + "name": "Get MPC for EDI", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" }, + 
"continueOnError": true + }, + { + "id": "get_customer", + "name": "Get Customer for EDI", + "type": "mcp", + "service": "erp", + "tool": "company.get_by_id", + "input": { + "type": "template", + "template": { "companyId": "{{salesOrder.customerId}}" } + }, + "output": { "storeAs": "customer" }, + "continueOnError": true + }, + { + "id": "generate_edi_856", + "name": "Generate ASN", + "type": "mcp", + "service": "edi", + "tool": "generate.856", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.companyId}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{customer.companyId}}", "name": "{{customer.name}}" }, + "shipmentId": "{{tmsShipment.shipmentId}}", + "trackingNumber": "{{tmsShipment.trackingNumber}}", + "items": "{{outbound.lines}}" + } + }, + "output": { "storeAs": "edi856" }, + "continueOnError": true + }, + { + "id": "validate_edi_856", + "name": "Validate EDI 856 (ASN)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi856}}", + "docType": "856", + "worldId": "{{mpc.companyId}}", + "context": { "items": "{{outbound.lines}}" } + } + }, + "output": { "storeAs": "val856" }, + "continueOnError": true + }, + { + "id": "store_edi", + "name": "Store ASN Transaction", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{customer.companyId}}", + "docType": "856", + "direction": "OUTBOUND", + "status": "SENT", + "payload": { + "shipmentId": "{{tmsShipment.shipmentId}}", + "orderId": "{{salesOrder.orderId}}" + }, + "rawEdi": "{{edi856}}" + } + }, + "output": { "storeAs": "ediTransaction" }, + "continueOnError": true + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/production-order.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/production-order.od.json new file mode 100644 index 0000000000000000000000000000000000000000..3f4aa609cd2fb581deeb913a91e9edd7cbe6f7bb --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/production-order.od.json @@ -0,0 +1,138 @@ +{ + "id": "production-order", + "name": "Production Order", + "version": "1.0.0", + "description": "Create production order for chips or ice cream based on product BOM", + "namespace": "perishableManWorld.manufacturing", + "persona": "Production Planner", + "type": "standard", + "steps": [ + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" } + }, + { + "id": "get_product", + "name": "Get Random MPC Product", + "type": "mcp", + "service": "erp", + "tool": "product.get_random", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "product" } + }, + { + "id": "extract_bom", + "name": "Extract Bill of Materials", + "type": "script", + "script": "const product = ctx.product || {}; let bom = product.customFields?.billOfMaterials || []; if (bom.length === 0) { console.log('Product has no BOM, creating default BOM for manufacturing'); const productType = product.customFields?.productType || 'chips'; if (productType === 'ice_cream') { bom = [{ sku: 'MILK', name: 'Fresh Whole Milk', qty: 2.5, unit: 'L' }, { sku: 'SUGAR', name: 'Refined White Sugar', qty: 0.8, unit: 'KG' }, { sku: 'CREAM', name: 'Heavy Cream 35%', qty: 1.2, 
unit: 'L' }]; } else { bom = [{ sku: 'POTATOES', name: 'Russet Potatoes Premium', qty: 3.0, unit: 'KG' }, { sku: 'OIL', name: 'Vegetable Oil', qty: 0.5, unit: 'L' }, { sku: 'SALT', name: 'Sea Salt Fine', qty: 0.1, unit: 'KG' }]; } } return bom;", + "output": { "storeAs": "billOfMaterials" } + }, + { + "id": "calculate_production_qty", + "name": "Calculate Production Quantity", + "type": "script", + "script": "const billOfMaterials = ctx.billOfMaterials || []; const baseQty = Math.floor(Math.random() * 100) + 50; return { productionQty: baseQty, bomMultiplied: billOfMaterials.map(item => ({ sku: item.sku, name: item.name || item.sku, unit: item.unit, qty: item.qty, requiredQty: Math.round(item.qty * baseQty * 100) / 100 })) };", + "output": { "storeAs": "productionCalc" } + }, + { + "id": "check_inventory", + "name": "Check Raw Material Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { "type": "literal", "value": { "limit": 100 } }, + "output": { "storeAs": "currentInventory" } + }, + { + "id": "create_production_order", + "name": "Create Production Order", + "type": "mcp", + "service": "erp", + "tool": "order.create", + "input": { + "type": "template", + "template": { + "customerId": "{{mpc.companyId}}", + "partnerId": "{{mpc.companyId}}", + "poType": "STANDARD", + "status": "IN_PROGRESS", + "currency": "USD", + "lines": [ + { + "lineNumber": 1, + "sku": "{{product.productId}}", + "name": "{{product.name}}", + "quantityOrdered": "{{productionCalc.productionQty}}", + "unitOfMeasure": "EA" + } + ], + "customFields": { + "orderType": "PRODUCTION", + "billOfMaterials": "{{productionCalc.bomMultiplied}}", + "productType": "{{product.customFields.productType}}" + } + } + }, + "output": { "storeAs": "productionOrder" } + }, + { + "id": "create_pick_tasks", + "name": "Create Material Pick Tasks", + "type": "map", + "mapSpec": { + "iterable": { "type": "jmesPath", "value": "productionCalc.bomMultiplied" }, + "itemName": "bomItem", + "concurrency": 3 + }, + "children": [ + { + "id": "create_material_pick", + "name": "Create Pick for BOM Item", + "type": "mcp", + "service": "wms", + "tool": "task.create", + "input": { + "type": "template", + "template": { + "warehouseId": "WH001", + "taskType": "PICK", + "reference": { + "type": "ORDER", + "id": "{{productionOrder.orderId}}" + }, + "product": { + "productId": "{{bomItem.sku}}", + "sku": "{{bomItem.sku}}", + "productName": "{{bomItem.name}}" + }, + "quantity": { + "requested": "{{bomItem.requiredQty}}", + "actual": 0, + "uom": "{{bomItem.unit}}" + }, + "from": { + "binId": "RAW-STORAGE-A", + "binCode": "RAW-A1", + "zoneId": "RAW_MATERIALS" + }, + "to": { + "binId": "PRODUCTION-STAGING", + "binCode": "PROD-STG", + "zoneId": "PRODUCTION" + }, + "taskStatus": "CREATED" + } + }, + "output": { "storeAs": "pickTask" } + } + ], + "output": { "storeAs": "allPickTasks" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/putaway-process.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/putaway-process.od.json new file mode 100644 index 0000000000000000000000000000000000000000..f1d600b8b0292d8cf0d0a472e6652b29f141041e --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/putaway-process.od.json @@ -0,0 +1,124 @@ +{ + "id": "putaway-process", + "name": "Putaway Process", + "version": "1.0.0", + "description": "Move received inventory from Dock 
to Storage locations", + "namespace": "perishableManWorld.logistics", + "persona": "Forklift Operator", + "type": "standard", + "steps": [ + { + "id": "get_received_inventory", + "name": "Get Inventory at Dock", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { + "type": "literal", + "value": { "transactionType": ["RECEIVE"], "limit": 10 } + }, + "output": { "storeAs": "receivedItems" } + }, + { + "id": "select_item_to_putaway", + "name": "Select Item to Putaway", + "type": "script", + "script": "const items = ctx.receivedItems?.items || ctx.receivedItems || []; if (items.length === 0) throw new Error('No items to putaway'); const item = { ...items[0] }; if (!item.toBinId) item.toBinId = 'DOCK'; return item;", + "output": { "storeAs": "itemToPutaway" } + }, + { + "id": "find_storage_bin", + "name": "Find Storage Bin", + "type": "mcp", + "service": "wms", + "tool": "bin.get_available", + "input": { + "type": "literal", + "value": { "zoneIds": ["STORAGE"], "limit": 1 } + }, + "output": { "storeAs": "storageBins" } + }, + { + "id": "select_bin", + "name": "Select Target Bin", + "type": "script", + "script": "const bins = ctx.storageBins?.items || ctx.storageBins || []; if (bins.length === 0) return { binId: 'DEFAULT_STORAGE' }; return bins[0];", + "output": { "storeAs": "targetBin" } + }, + { + "id": "create_putaway_task", + "name": "Create Putaway Task", + "type": "mcp", + "service": "wms", + "tool": "task.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "taskType": "PUTAWAY", + "priority": 10, + "taskStatus": "CREATED", + "product": { + "productId": "{{itemToPutaway.productId}}", + "sku": "{{itemToPutaway.sku}}", + "productName": "{{itemToPutaway.productName}}" + }, + "quantity": { + "requested": "{{itemToPutaway.quantity}}", + "actual": 0, + "uom": "EA" + }, + "from": { + "binId": "{{itemToPutaway.toBinId}}", + "binCode": "DOCK-RCV", + "zoneId": "RECEIVING" + }, + "to": { + "binId": "{{targetBin.binId}}", + "binCode": "{{targetBin.binCode}}", + "zoneId": "STORAGE" + } + } + }, + "output": { "storeAs": "putawayTask" } + }, + { + "id": "complete_putaway", + "name": "Complete Putaway Task", + "type": "mcp", + "service": "wms", + "tool": "task.update_status", + "input": { + "type": "template", + "template": { + "taskId": "{{putawayTask.taskId}}", + "taskStatus": "COMPLETED" + } + }, + "output": { "storeAs": "completedTask" } + }, + { + "id": "move_inventory", + "name": "Move Inventory to Storage", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "transactionType": "PUTAWAY", + "productId": "{{itemToPutaway.productId}}", + "sku": "{{itemToPutaway.sku}}", + "quantity": "{{itemToPutaway.quantity}}", + "fromBinId": "{{itemToPutaway.toBinId}}", + "toBinId": "{{targetBin.binId}}", + "referenceType": "TASK", + "referenceId": "{{putawayTask.taskId}}" + } + }, + "output": { "storeAs": "transferTxn" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/raw-material-procurement.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/raw-material-procurement.od.json new file mode 100644 index 0000000000000000000000000000000000000000..4d6556d4883eb24d66b20900269ebd115eefad32 --- /dev/null +++ 
b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/raw-material-procurement.od.json @@ -0,0 +1,186 @@ +{ + "id": "raw-material-procurement", + "name": "Raw Material Procurement", + "version": "1.0.0", + "description": "MPC orders raw materials from suppliers when inventory is low", + "namespace": "perishableManWorld.procurement", + "type": "standard", + "steps": [ + { + "id": "get_mpc", + "name": "Get MPC Company", + "type": "mcp", + "service": "erp", + "tool": "company.get_mpc", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "mpc" }, + "retry": { "maxRetries": 3, "backoff": "exponential", "baseMs": 200 } + }, + { + "id": "get_all_companies", + "name": "Get All Companies", + "type": "mcp", + "service": "erp", + "tool": "company.get_all", + "input": { "type": "literal", "value": { "limit": 10 } }, + "output": { "storeAs": "allCompanies" }, + "retry": { "maxRetries": 2, "backoff": "fixed", "baseMs": 300 } + }, + { + "id": "select_supplier", + "name": "Select a Supplier", + "type": "script", + "script": "const companies = ctx.allCompanies?.items || ctx.allCompanies || []; const suppliers = companies.filter(c => c.companyId !== ctx.mpc?.companyId && c.name !== ctx.mpc?.name && c.customFields?.catalog?.length > 0); if (suppliers.length === 0) throw new Error(`No suitable suppliers found (with catalog). Total companies: ${companies.length}.`); const supplier = suppliers[Math.floor(Math.random() * suppliers.length)]; return supplier;", + "output": { "storeAs": "supplier" } + }, + { + "id": "extract_catalog", + "name": "Extract Supplier Catalog", + "type": "script", + "script": "const catalog = ctx.supplier?.customFields?.catalog || []; return catalog;", + "output": { "storeAs": "catalog" } + }, + { + "id": "select_items", + "name": "Select Items to Order", + "type": "script", + "script": "const catalog = ctx.catalog || []; const items = catalog.slice(0, 3).map((item, index) => ({ lineNumber: index + 1, sku: item.sku, description: item.name, quantityOrdered: Math.floor(Math.random() * 100) + 50, unitPrice: item.pricePerUnit || 10, unitOfMeasure: 'EA' })); return items;", + "output": { "storeAs": "orderItems" } + }, + { + "id": "calculate_total", + "name": "Calculate Order Total", + "type": "script", + "script": "const items = ctx.orderItems || []; const total = items.reduce((sum, item) => sum + (item.quantityOrdered * item.unitPrice), 0); return { total: Math.round(total * 100) / 100, itemCount: items.length };", + "output": { "storeAs": "orderCalc" } + }, + { + "id": "ensure_ledger", + "name": "Ensure Ledger Exists", + "type": "mcp", + "service": "finance", + "tool": "ledger.ensure", + "input": { + "type": "literal", + "value": { "cash": 1000000, "totalReceivables": 0, "totalPayables": 0 } + }, + "output": { "storeAs": "ledgerEnsured" } + }, + { + "id": "check_ledger", + "name": "Check Available Funds", + "type": "mcp", + "service": "finance", + "tool": "ledger.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "ledger" }, + "assertions": [ + { + "id": "sufficient_funds", + "expression": "ledger && ledger.cash >= orderCalc.total", + "language": "jmespath", + "description": "Verify sufficient cash for order", + "continueOnFailure": false + } + ] + }, + { + "id": "create_purchase_order", + "name": "Create Purchase Order", + "type": "mcp", + "service": "erp", + "tool": "order.create", + "input": { + "type": "template", + "template": { + "customerId": "{{mpc.companyId}}", + "partnerId": "{{supplier.companyId}}", + "poType": 
"STANDARD", + "status": "ACKED", + "currency": "USD", + "paymentTerms": "{{supplier.paymentTerms}}", + "buyer": { + "id": "{{mpc.companyId}}", + "name": "{{mpc.name}}", + "contact": "{{mpc.primaryContact.name}}", + "email": "{{mpc.primaryContact.email}}" + }, + "lines": "{{orderItems}}", + "totalAmount": "{{orderCalc.total}}" + } + }, + "output": { "storeAs": "purchaseOrder" }, + "retry": { "maxRetries": 3, "backoff": "exponential", "baseMs": 300 } + }, + { + "id": "reserve_payable", + "name": "Reserve Payable Amount", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { "payablesDelta": "{{orderCalc.total}}" } + }, + "output": { "storeAs": "updatedLedger" } + }, + { + "id": "generate_edi_850", + "name": "Generate EDI 850", + "type": "mcp", + "service": "edi", + "tool": "generate.850", + "input": { + "type": "template", + "template": { + "sender": { "duns": "{{mpc.companyId}}", "name": "{{mpc.name}}" }, + "receiver": { "duns": "{{supplier.companyId}}", "name": "{{supplier.name}}" }, + "poNumber": "{{purchaseOrder.orderId}}", + "items": "{{orderItems}}" + } + }, + "output": { "storeAs": "edi850" } + }, + { + "id": "validate_edi_850", + "name": "Validate EDI 850 (PO)", + "type": "mcp", + "service": "edi", + "tool": "validation.check", + "input": { + "type": "template", + "template": { + "rawEdi": "{{edi850}}", + "docType": "850", + "worldId": "{{mpc.companyId}}", + "context": { "items": "{{orderItems}}" } + } + }, + "output": { "storeAs": "val850" }, + "continueOnError": true + }, + { + "id": "store_edi", + "name": "Store EDI Transaction", + "type": "mcp", + "service": "edi", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "partnerId": "{{supplier.companyId}}", + "docType": "850", + "direction": "OUTBOUND", + "status": "SENT", + "payload": { + "orderId": "{{purchaseOrder.orderId}}", + "totalAmount": "{{orderCalc.total}}" + }, + "rawEdi": "{{edi850}}" + } + }, + "output": { "storeAs": "ediTransaction" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/receive-supplier-shipment.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/receive-supplier-shipment.od.json new file mode 100644 index 0000000000000000000000000000000000000000..250a9c6ce0973ed997aef46023670c054d65ee65 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/receive-supplier-shipment.od.json @@ -0,0 +1,271 @@ +{ + "id": "receive-supplier-shipment", + "name": "Receive Supplier Shipment", + "version": "1.0.0", + "description": "Receive raw materials at warehouse and process supplier payment", + "namespace": "perishableManWorld.procurement", + "persona": "Dock Receiver", + "type": "standard", + "steps": [ + { + "id": "get_pending_orders", + "name": "Get Pending Purchase Orders", + "type": "mcp", + "service": "erp", + "tool": "order.get_all", + "input": { + "type": "literal", + "value": { "status": "ACKED", "limit": 10 } + }, + "output": { "storeAs": "pendingOrders" } + }, + { + "id": "select_order", + "name": "Select Order to Receive", + "type": "script", + "script": "const orders = ctx.pendingOrders?.items || ctx.pendingOrders || []; if (orders.length === 0) throw new Error('No pending orders'); return orders[0];", + "output": { "storeAs": "selectedOrder" } + }, + { + "id": "get_all_suppliers", + "name": "Get All Suppliers", + "type": "mcp", + "service": "erp", + 
"tool": "company.get_all", + "input": { "type": "literal", "value": { "limit": 10 } }, + "output": { "storeAs": "allSuppliers" } + }, + { + "id": "select_supplier", + "name": "Select Random Supplier", + "type": "script", + "script": "const companies = ctx.allSuppliers?.items || ctx.allSuppliers || []; const suppliers = companies.filter(c => c.companyId !== ctx.selectedOrder?.customerId); if (suppliers.length === 0) throw new Error('No suppliers found'); const supplier = suppliers[Math.floor(Math.random() * suppliers.length)]; return supplier;", + "output": { "storeAs": "supplier" } + }, + { + "id": "get_all_products", + "name": "Get All Products for Lookup", + "type": "mcp", + "service": "erp", + "tool": "product.get_all", + "input": { "type": "literal", "value": { "limit": 100 } }, + "output": { "storeAs": "allProducts" } + }, + { + "id": "transform_order_lines", + "name": "Transform Order Lines for WMS", + "type": "script", + "script": "const erpLines = ctx.selectedOrder?.lines || []; const products = ctx.allProducts?.items || ctx.allProducts || []; const productMap = {}; products.forEach(p => { if (p.productId) productMap[p.productId] = p.name; const sku = p.customFields?.sku || p.productId?.split(':').pop(); if (sku) productMap[sku] = p.name; }); const wmsLines = erpLines.map((line, index) => { const sku = line.sku || line.productId || 'UNKNOWN_SKU'; const productName = line.description || line.name || productMap[sku] || productMap[line.productId] || sku; return { lineNumber: index + 1, productId: sku, sku: sku, productName: productName, expectedQuantity: line.quantityOrdered || line.qty || line.quantity || 1, uom: line.unitOfMeasure || 'EA' }; }); return wmsLines;", + "output": { "storeAs": "wmsOrderLines" } + }, + { + "id": "calculate_order_total", + "name": "Calculate Order Total Amount", + "type": "script", + "script": "const order = ctx.selectedOrder || {}; let total = order.totalAmount; if (!total || isNaN(Number(total))) { const lines = order.lines || []; total = lines.reduce((sum, line) => { const qty = Number(line.quantityOrdered || line.qty || line.quantity || 1); const price = Number(line.unitPrice || line.pricePerUnit || 10); return sum + (qty * price); }, 0); } return { total: Number(total) || 100 };", + "output": { "storeAs": "orderCalc" } + }, + { + "id": "calculate_dates", + "name": "Calculate Inbound Dates", + "type": "script", + "script": "const now = new Date(); const expectedArrival = new Date(now.getTime() + 3 * 24 * 60 * 60 * 1000); return { expectedArrival: expectedArrival.toISOString(), createdAt: now.toISOString() };", + "output": { "storeAs": "inboundDates" } + }, + { + "id": "check_existing_inbound", + "name": "Check if Inbound Order Exists", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.get_by_po_number", + "input": { + "type": "template", + "template": "{{selectedOrder.orderId}}" + }, + "output": { "storeAs": "existingInboundOrder" }, + "retry": { "maxRetries": 1, "backoff": "fixed", "baseMs": 100 }, + "continueOnError": true + }, + { + "id": "check_asn_inbound", + "name": "Check for ASN Inbound Order", + "type": "script", + "script": "const existing = ctx.existingInboundOrder; if (existing && existing.orderType === 'ASN') return existing; return null;", + "output": { "storeAs": "asnInboundOrder" } + }, + { + "id": "create_inbound_order", + "name": "Create WMS Inbound Order", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.create", + "condition": { "expression": "!existingInboundOrder" }, + "input": { + "type": "template", + 
"template": { + "warehouseId": "MAIN_WAREHOUSE", + "poNumber": "{{selectedOrder.orderId}}", + "orderType": "PO", + "orderStatus": "EXPECTED", + "vendor": { + "vendorId": "{{supplier.companyId}}", + "vendorName": "{{supplier.name}}", + "contactEmail": "{{supplier.contactEmail}}", + "contactPhone": "{{supplier.contactPhone}}" + }, + "dates": { + "expectedArrival": "{{inboundDates.expectedArrival}}", + "createdAt": "{{inboundDates.createdAt}}" + }, + "lines": "{{wmsOrderLines}}" + } + }, + "output": { "storeAs": "newInboundOrder" } + }, + { + "id": "select_inbound_order", + "name": "Select Inbound Order to Use", + "type": "script", + "script": "return ctx.existingInboundOrder || ctx.newInboundOrder;", + "output": { "storeAs": "inboundOrder" } + }, + { + "id": "generate_lot_number", + "name": "Generate Lot Number for Receipt", + "type": "script", + "script": "const date = new Date().toISOString().slice(0,10).replace(/-/g,''); const seq = Math.floor(Math.random() * 999) + 1; return `LOT-${date}-${seq.toString().padStart(3,'0')}`;", + "output": { "storeAs": "lotNumber" } + }, + { + "id": "create_receiving", + "name": "Create Receiving Transaction", + "type": "mcp", + "service": "wms", + "tool": "receiving_transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "inboundOrderId": "{{inboundOrder.inboundOrderId}}", + "productId": "{{wmsOrderLines[0].productId}}", + "sku": "{{wmsOrderLines[0].sku}}", + "productName": "{{wmsOrderLines[0].productName}}", + "receivedQuantity": "{{wmsOrderLines[0].expectedQuantity}}", + "uom": "{{wmsOrderLines[0].uom}}", + "lotNumber": "{{lotNumber}}", + "dockDoorId": "DOCK-01", + "receivedBy": "SYSTEM", + "quality": { + "status": "PASS", + "inspectedBy": "QC_SYSTEM", + "inspectedAt": "{{inboundDates.createdAt}}" + }, + "damage": { + "damaged": false + } + } + }, + "output": { "storeAs": "receivingTxn" } + }, + { + "id": "update_inventory", + "name": "Add Items to Inventory", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "MAIN_WAREHOUSE", + "transactionType": "RECEIVE", + "productId": "{{wmsOrderLines[0].productId}}", + "sku": "{{wmsOrderLines[0].sku}}", + "quantity": "{{wmsOrderLines[0].expectedQuantity}}", + "toBinId": "DOCK", + "lotNumber": "{{lotNumber}}", + "referenceType": "PO", + "referenceId": "{{selectedOrder.orderId}}", + "notes": "Received from supplier shipment" + } + }, + "output": { "storeAs": "inventoryTxn" } + }, + { + "id": "complete_inbound", + "name": "Complete Inbound Order", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.update_status", + "input": { + "type": "template", + "template": { "inboundOrderId": "{{inboundOrder.inboundOrderId}}", "status": "RECEIVED" } + }, + "output": { "storeAs": "completedInbound" } + }, + { + "id": "update_po_status", + "name": "Update PO to Received", + "type": "mcp", + "service": "erp", + "tool": "order.update_status", + "input": { + "type": "template", + "template": { "orderId": "{{selectedOrder.orderId}}", "status": "COMPLETED" } + }, + "output": { "storeAs": "updatedOrder" } + }, + { + "id": "prepare_payment_metadata", + "name": "Prepare Payment Metadata", + "type": "script", + "script": "const supplier = ctx.supplier || {}; const terms = supplier.paymentTerms || 'NET30'; const dueDate = new Date(Date.now() + 30*24*60*60*1000).toISOString(); return { paymentTerms: terms, dueDate };", + "output": { "storeAs": "paymentMeta" } + }, + { + "id": 
"record_payment_out", + "name": "Record Supplier Payment", + "type": "mcp", + "service": "finance", + "tool": "transaction.create", + "input": { + "type": "template", + "template": { + "type": "payment_out", + "amount": "{{orderCalc.total}}", + "sourceType": "bill", + "sourceId": "{{selectedOrder.orderId}}", + "partnerId": "{{supplier.companyId}}", + "metadata": { + "description": "Payment for PO {{selectedOrder.orderId}}", + "paymentTerms": "{{paymentMeta.paymentTerms}}", + "dueDate": "{{paymentMeta.dueDate}}" + } + } + }, + "output": { "storeAs": "paymentTxn" } + }, + { + "id": "update_ledger", + "name": "Update Ledger - Pay Supplier", + "type": "script", + "script": "const amount = Number(ctx.orderCalc?.total || 100); return { cashDelta: -amount, payablesDelta: -amount };", + "output": { "storeAs": "ledgerDeltas" } + }, + { + "id": "apply_ledger_changes", + "name": "Apply Ledger Changes", + "type": "mcp", + "service": "finance", + "tool": "ledger.increment", + "input": { + "type": "template", + "template": { + "cashDelta": "{{ledgerDeltas.cashDelta}}", + "payablesDelta": "{{ledgerDeltas.payablesDelta}}" + } + }, + "output": { "storeAs": "updatedLedger" } + } + ], + "runPolicy": { "failureMode": "fail_fast", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/supplier-reorder-trigger.od.json b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/supplier-reorder-trigger.od.json new file mode 100644 index 0000000000000000000000000000000000000000..eb0770c67f2411ea69f1ef44ce9e1629e79b7262 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/ods/supplier-reorder-trigger.od.json @@ -0,0 +1,58 @@ +{ + "id": "supplier-reorder-trigger", + "name": "Supplier Reorder Trigger", + "version": "1.0.0", + "description": "Check inventory levels and trigger reorder when stock is low", + "namespace": "perishableManWorld.background", + "persona": "System / Procurement Bot", + "type": "background_job", + "steps": [ + { + "id": "get_inventory", + "name": "Get Current Inventory Levels", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.get_history", + "input": { "type": "literal", "value": { "limit": 200 } }, + "output": { "storeAs": "inventory" } + }, + { + "id": "get_ledger", + "name": "Check Available Funds", + "type": "mcp", + "service": "finance", + "tool": "ledger.get", + "input": { "type": "literal", "value": {} }, + "output": { "storeAs": "ledger" } + }, + { + "id": "identify_low_stock", + "name": "Identify Low Stock Items", + "type": "script", + "script": "const inv = ctx.inventory?.items || []; const lowThreshold = 20; const lowStock = inv.filter(item => (item.quantity || 0) < lowThreshold); const groupedBySupplier = {}; lowStock.forEach(item => { const supplierId = item.supplierId || 'UNKNOWN'; if (!groupedBySupplier[supplierId]) groupedBySupplier[supplierId] = []; groupedBySupplier[supplierId].push(item); }); return { lowStockCount: lowStock.length, bySupplier: groupedBySupplier, needsReorder: lowStock.length > 0 };", + "output": { "storeAs": "lowStockAnalysis" } + }, + { + "id": "check_reorder_needed", + "name": "Check If Reorder Needed", + "type": "script", + "script": "const ledger = ctx.ledger || {}; const lowStockAnalysis = ctx.lowStockAnalysis || {}; const minCashBuffer = 50000; const canAfford = ledger.cash > minCashBuffer; const needsReorder = lowStockAnalysis.needsReorder && canAfford; console.log('[REORDER CHECK] Low stock items:', lowStockAnalysis.lowStockCount, '| Cash:', 
ledger.cash, '| Can afford:', canAfford); return { shouldReorder: needsReorder, reason: needsReorder ? 'Low stock detected' : (lowStockAnalysis.needsReorder ? 'Insufficient funds' : 'Stock levels OK') };", + "output": { "storeAs": "reorderDecision" } + }, + { + "id": "log_decision", + "name": "Log Reorder Decision", + "type": "script", + "script": "const reorderDecision = ctx.reorderDecision || {}; console.log('[REORDER]', reorderDecision.reason, '| Action:', reorderDecision.shouldReorder ? 'REORDER_TRIGGERED' : 'NO_ACTION'); return reorderDecision;", + "output": { "storeAs": "loggedDecision" } + }, + { + "id": "trigger_procurement", + "name": "Trigger Procurement", + "type": "script", + "script": "const reorderDecision = ctx.reorderDecision || {}; if (reorderDecision.shouldReorder) { console.log('[REORDER] Triggering raw-material-procurement OD...'); } return { triggered: reorderDecision.shouldReorder };", + "output": { "storeAs": "procurementResult" } + } + ], + "runPolicy": { "failureMode": "continue", "storeRuns": true } +} diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/run-perishable-world.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/run-perishable-world.ts new file mode 100644 index 0000000000000000000000000000000000000000..e91531def7e7a7bd3aae04876ac1d529d7c8c039 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/run-perishable-world.ts @@ -0,0 +1,274 @@ +import { executeOperationalDescriptor } from "../../operational-descriptor/executor.od"; +import { createAppLogger, createMongoTransportLogger } from "../../utils/logger.util"; +import { configureGlobalChaos, enableChaosForTools } from "../../operational-descriptor/tools/registry.tool"; +import { WorldRepository } from "../../repository/world.repository"; +import { seedPerishableWorld } from "./seeder"; +import type { OperationalDescriptor } from "../../types/od.type"; + +import { + rawMaterialProcurement, + receiveSupplierShipment, + productionOrder, + inventoryConsumption, + customerFullEdiCycle, + pickPackShip, + invoiceAndPayment, + dailyInventoryCheck, + supplierReorderTrigger, + financialReconciliation, + inboundAsnProcess, + putawayProcess, + agingInventoryCheck, +} from "./ods"; +import { loadEnv } from "../../utils/env.util"; + +const ODS: Record<string, OperationalDescriptor> = { + "raw-material-procurement": rawMaterialProcurement as unknown as OperationalDescriptor, + "receive-supplier-shipment": receiveSupplierShipment as unknown as OperationalDescriptor, + "production-order": productionOrder as unknown as OperationalDescriptor, + "inventory-consumption": inventoryConsumption as unknown as OperationalDescriptor, + "customer-full-edi-cycle": customerFullEdiCycle as unknown as OperationalDescriptor, + "pick-pack-ship": pickPackShip as unknown as OperationalDescriptor, + "invoice-and-payment": invoiceAndPayment as unknown as OperationalDescriptor, + "daily-inventory-check": dailyInventoryCheck as unknown as OperationalDescriptor, + "supplier-reorder-trigger": supplierReorderTrigger as unknown as OperationalDescriptor, + "financial-reconciliation": financialReconciliation as unknown as OperationalDescriptor, + "inbound-asn-process": inboundAsnProcess as unknown as OperationalDescriptor, + "putaway-process": putawayProcess as unknown as OperationalDescriptor, + "aging-inventory-check": agingInventoryCheck as unknown as OperationalDescriptor, +}; + +const printResult = (result: any) => { + console.log(""); + console.log("=".repeat(60)); + console.log("Workflow Results"); + 
console.log("=".repeat(60)); + console.log("Status:", result.status); + console.log("Duration:", result.durationMs, "ms"); + console.log("Total Steps:", result.totalSteps); + console.log("Successful:", result.successfulSteps); + console.log("Failed:", result.failedSteps); + console.log(""); + + console.log("Step Details:"); + result.stepResults.forEach((step: any) => { + const icon = step.status === "success" ? "✓" : step.status === "failed" ? "✗" : "○"; + console.log(` ${icon} ${step.stepId}: ${step.status} (${step.durationMs}ms)`); + if (step.chaosInjected) { + console.log(`Chaos: ${step.chaosInjected.scenario.description}`); + } + if (step.error) { + console.log(`Error: ${step.error}`); + } + }); + console.log("=".repeat(60)); +}; + +async function runSingleOD(worldId: string, odName: string, input?: any) { + const od = ODS[odName]; + if (!od) { + console.error(`Unknown OD: ${odName}`); + console.log("Available ODs:", Object.keys(ODS).join(", ")); + return; + } + + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World not found: ${worldId}`); + } + + const logger = createAppLogger({ service: `perishable-${odName}` }); + console.log(`\nRunning OD: ${odName}\n`); + + const result = await executeOperationalDescriptor(od, { + world, + tools: {}, + logger, + input, + }); + + printResult(result); + return result; +} + +async function runFullCycle(worldId: string) { + console.log("\nRunning Full Perishable World Cycle\n"); + + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World not found: ${worldId}`); + } + + // Enable chaos for the cycle + configureGlobalChaos(true, "production"); + // Enable for key tools used in the cycle to ensure visibility + enableChaosForTools([ + "erp.company.get_all", + "erp.order.create", + "edi.generate.850", + "wms.inbound.order.create" + ]); + + const logger = await createMongoTransportLogger({ flushInterval: 1000, flushSize: 10 }); + const childLogger = logger.child({ + service: "perishable-full-cycle", + worldId: worldId + }); + const results: any[] = []; + + const cycleODs = [ + "raw-material-procurement", + "inbound-asn-process", + "receive-supplier-shipment", + "putaway-process", + "production-order", + "inventory-consumption", + "customer-full-edi-cycle", + "pick-pack-ship", + "invoice-and-payment", + ]; + + for (const name of cycleODs) { + console.log(`\nRunning: ${name}`); + try { + const result = await executeOperationalDescriptor(ODS[name]!, { + world, + tools: {}, + logger: childLogger, + }); + results.push({ name, status: result.status, duration: result.durationMs }); + + if (result.status === "failed") { + console.log(`Failed - stopping cycle`); + break; + } + console.log(`Completed in ${result.durationMs}ms`); + } catch (error) { + console.log(`Error: ${error}`); + results.push({ name, status: "error", error }); + break; + } + } + + console.log("\n" + "=".repeat(60)); + console.log("Cycle Summary"); + console.log("=".repeat(60)); + results.forEach((r) => { + const icon = r.status === "success" ? 
"Success" : "Failed"; + console.log(` ${icon} ${r.name}: ${r.status} (${r.duration || 0}ms)`); + }); + console.log("=".repeat(60)); + + return results; +} + +async function runBackgroundJobs(worldId: string) { + console.log("\nRunning Background Jobs\n"); + + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World not found: ${worldId}`); + } + + const logger = await createMongoTransportLogger({ flushInterval: 1000, flushSize: 10 }); + const childLogger = logger.child({ + service: "perishable-background", + worldId: worldId + }); + const bgJobs = [ + "daily-inventory-check", + "supplier-reorder-trigger", + "aging-inventory-check", + "financial-reconciliation", + ]; + + for (const name of bgJobs) { + console.log(`\nRunning: ${name}`); + const result = await executeOperationalDescriptor(ODS[name]!, { + world, + tools: {}, + logger: childLogger, + }); + printResult(result); + } +} + +async function main() { + const command = process.argv[2]; + let worldId = process.env.WORLD_ID; + const odName = process.argv[3]; + const mongoUri = loadEnv().MONGO_URI || process.env.DATABASE_URL; + const dbName = loadEnv().DB_NAME || "controlmart"; + + if (!mongoUri) { + console.error("ERROR: MONGO_URI or DATABASE_URL required"); + process.exit(1); + } + + const { connectMongo, disconnectMongo } = await import("../../services/mongo.service"); + await connectMongo({ uri: mongoUri, dbName }); + console.log("Connected to MongoDB"); + + try { + if (command === "seed") { + console.log("\nSeeding Perishable World...\n"); + const result = await seedPerishableWorld({ + initialCash: 1_000_000, + worldName: `Skymart Perishables World ${new Date().toISOString()}` + }); + console.log("World created:", result.worldId); + console.log("Companies:", result.companiesCreated); + console.log("Products:", result.productsCreated); + console.log("Initial Capital: $" + result.initialCapital.toLocaleString()); + console.log("\nRun ODs with: WORLD_ID=" + result.worldId); + return; + } + + if (!worldId) { + console.error("ERROR: WORLD_ID required (or use 'seed' to create a world)"); + console.log("\nUsage:"); + console.log(" Seed: npx ts-node run-perishable-world.ts seed"); + console.log(" Single: WORLD_ID=xxx npx ts-node run-perishable-world.ts single "); + console.log(" Cycle: WORLD_ID=xxx npx ts-node run-perishable-world.ts cycle"); + console.log(" Background: WORLD_ID=xxx npx ts-node run-perishable-world.ts background"); + console.log("\nAvailable ODs:", Object.keys(ODS).join(", ")); + process.exit(1); + } + + switch (command) { + case "single": + if (!odName) { + console.error("ERROR: OD name required"); + console.log("Available:", Object.keys(ODS).join(", ")); + process.exit(1); + } + await runSingleOD(worldId, odName); + break; + case "cycle": + await runFullCycle(worldId); + break; + case "background": + await runBackgroundJobs(worldId); + break; + default: + console.error("Unknown command:", command); + console.log("Commands: seed, single, cycle, background"); + process.exit(1); + } + } finally { + await disconnectMongo(false); + } +} + +import { fileURLToPath } from "url"; + +if (process.argv[1] === fileURLToPath(import.meta.url)) { + main() + .then(() => process.exit(0)) + .catch((e) => { + console.error(e); + process.exit(1); + }); +} + +export { runSingleOD, runFullCycle, runBackgroundJobs, ODS }; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/schedule-perishable-world.ts 
b/packages/controlmart/src/worlds/perishables-food-manufacturer/schedule-perishable-world.ts new file mode 100644 index 0000000000000000000000000000000000000000..d03dcdf2d36ac25ab8d9ac1d7b87b951ceed90be --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/schedule-perishable-world.ts @@ -0,0 +1,208 @@ +import { ODRepository } from "../../repository/od.repository"; +import { WorldRepository } from "../../repository/world.repository"; +import { + scheduleRecurringOD, +} from "../../operational-descriptor/schedule.od"; +import { createAppLogger } from "../../utils/logger.util"; +import { ODs } from "./ods"; + + +const logger = createAppLogger({ service: "od-scheduler" }); + + + +type OdSchedule = { + type: "recurring" | "once"; + simInterval?: number; // interval in sim-hours, e.g. 24 + simTime?: string; // Human-readable sim time + simOffsetHours: number; // Hours from Sim Midnight (0-24) +}; + +const SIM_SCHEDULES: Record<string, OdSchedule> = { + "raw-material-procurement": { + type: "recurring", + simInterval: 24, + simTime: "08:00 AM", + simOffsetHours: 8, + }, + "inbound-asn-process": { + type: "recurring", + simInterval: 2, + simTime: "Every 2h", + simOffsetHours: 1, + }, + "receive-supplier-shipment": { + type: "recurring", + simInterval: 24, + simTime: "11:00 AM", + simOffsetHours: 11, + }, + "putaway-process": { type: "recurring", simInterval: 1, simTime: "Every 1h", simOffsetHours: 0 }, + "production-order": { + type: "recurring", + simInterval: 24, + simTime: "01:00 PM", + simOffsetHours: 13, + }, + "material-pick-for-production": { + type: "recurring", + simInterval: 1, + simTime: "Every 1h after production", + simOffsetHours: 13.5, + }, + "manufacturing-execution": { + type: "recurring", + simInterval: 2, + simTime: "Every 2h", + simOffsetHours: 14, + }, + "finished-goods-receipt": { + type: "recurring", + simInterval: 1, + simTime: "Every 1h", + simOffsetHours: 15, + }, + "customer-full-edi-cycle": { + type: "recurring", + simInterval: 4, + simTime: "Every 4h", + simOffsetHours: 2, + }, + "pick-pack-ship": { type: "recurring", simInterval: 24, simTime: "03:00 PM", simOffsetHours: 15 }, + "invoice-and-payment": { + type: "recurring", + simInterval: 24, + simTime: "05:00 PM", + simOffsetHours: 17, + }, + "daily-inventory-check": { + type: "recurring", + simInterval: 12, + simTime: "Every 12h", + simOffsetHours: 10, + }, + "aging-inventory-check": { + type: "recurring", + simInterval: 24, + simTime: "Midnight", + simOffsetHours: 0, + }, + "supplier-reorder-trigger": { + type: "recurring", + simInterval: 1, + simTime: "Background", + simOffsetHours: 0, + }, + "financial-reconciliation": { + type: "recurring", + simInterval: 24, + simTime: "11:55 PM", + simOffsetHours: 23.9, + }, +}; + + +const calculateSimToRealRatio = (realHours: number) => { + return 24 / realHours; +}; + + + +const convertSimIntervalToReal = (simInterval: number, realHoursPerSimDay: number): string => { + const SIM_TO_REAL_RATIO = calculateSimToRealRatio(realHoursPerSimDay); + const hours = simInterval; + const realMinutes = (hours * 60) / SIM_TO_REAL_RATIO; + if (realMinutes >= 60) { + return `${realMinutes / 60} hours`; + } + return `${realMinutes} minutes`; +}; + + +const onboardODs = async (worldId: string, realHoursPerSimDay: number) => { + const SIM_TO_REAL_RATIO = calculateSimToRealRatio(realHoursPerSimDay); + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + throw new Error(`World ${worldId} not found`); + } + + for (const odData of ODs) { + try { + logger.info(`Processing OD: 
${odData.name} (${odData.id})`); + + // 1. Create or Update OD in Repository + if (!odData.id) { + logger.warn(`Skipping OD data without ID`); + continue; + } + + const repoData = { + odId: odData.id, + data: odData, + name: odData.name, + description: odData.description, + odType: odData.type || "standard", + persona: odData.persona, // Ensure persona maps correctly + }; + + let odRecord = await ODRepository.getODById(odData.id, worldId); + if (odRecord) { + // Update doesn't need odId in the data usually if it's the specific updateOne, but we use updateODById + await ODRepository.updateODById(odData.id, worldId, repoData); + logger.info(`Updated existing OD: ${odData.id}`); + odRecord = await ODRepository.getODById(odData.id, worldId); + } else { + odRecord = await ODRepository.createOD({ worldId }, repoData); + logger.info(`Created new OD: ${odData.id}`); + } + + // 2. Schedule OD if configuration exists + const simSchedule = SIM_SCHEDULES[odData.id]; + if (simSchedule && odRecord) { + if (simSchedule.type === "recurring" && simSchedule.simInterval) { + const realInterval = convertSimIntervalToReal(simSchedule.simInterval, realHoursPerSimDay); + + const realMinutesOffset = simSchedule.simOffsetHours * (60 / SIM_TO_REAL_RATIO); + + logger.info(`Scheduling ${odData.id}:`); + logger.info(`Sim Interval: ${simSchedule.simInterval} -> Real Interval: ${realInterval}`); + logger.info( + `Sim Offset: ${simSchedule.simOffsetHours}h -> Real Offset: ${realMinutesOffset.toFixed(1)} mins`, + ); + + await scheduleRecurringOD(realInterval, odRecord, world, { + scheduledBy: "perishable-scheduler-v2", + simTimeRatio: `${SIM_TO_REAL_RATIO}x`, + simInterval: simSchedule.simInterval, + }); + } + } else { + logger.info(`No schedule config found for ${odData.id}, skipping schedule.`); + } + } catch (err) { + const errorMessage = err instanceof Error ? err.message : String(err); + const errorStack = err instanceof Error ? 
err.stack : undefined; + logger.error({ + error: err, + errorMessage, + errorStack, + odId: odData.id, + worldId + }, `Failed to process OD ${odData.id}`); + } + } +}; + +export const schedulePerishableWorld = async (worldId: string, realHoursPerSimDay: number) => { + if (!worldId) { + throw new Error("worldId is required"); + } + + try { + await onboardODs(worldId, realHoursPerSimDay); + logger.info("OD Onboarding Complete"); + } catch (error) { + logger.error({ error }, "Onboarding failed"); + throw error; + } +}; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/companies.generator.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/companies.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..16dbd921f6d8478300815b955e2c7802982ccce4 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/companies.generator.ts @@ -0,0 +1,273 @@ +import { faker } from "@faker-js/faker"; +import type { TWorldRefModel } from "../../../models/shared.model"; +import type { TCompanyGenerate } from "../../../models/erp/company.erp.model"; + +export const PERISHABLE_COMPANY_NAMES = { + MPC: "SkyFoods International", + DAIRY_SUPPLIER: "Dairyland Essentials Ltd", + PACKAGING_SUPPLIER: "PackChem Solutions Inc", + AGRO_SUPPLIER: "Harvest Fields Agro Co", +} as const; + +export const PERISHABLE_COMPANY_IDS = { + MPC: "MPC-SKYFOODS-001", + DAIRY_SUPPLIER: "SUP-DAIRY-001", + PACKAGING_SUPPLIER: "SUP-PACK-001", + AGRO_SUPPLIER: "SUP-AGRO-001", +} as const; + +export const PARTNER_COMPANY_NAMES = { + FRESHMART: "FreshMart Retailers", + URBAN_GROCERS: "Urban Grocers Alliance", + SNACKWAVE: "SnackWave Distributors", + MEGAFOODS: "MegaFoods Corporation", + QUICKSTOP: "QuickStop Convenience", +} as const; + +export const PARTNER_COMPANY_IDS = { + FRESHMART: "CUSTOMER-001", + URBAN_GROCERS: "CUSTOMER-002", + SNACKWAVE: "CUSTOMER-003", + MEGAFOODS: "CUSTOMER-004", + QUICKSTOP: "CUSTOMER-005", +} as const; + +export const DAIRY_CATALOG = [ + { sku: "DAIRY-MILK-001", name: "Fresh Whole Milk", unit: "L", pricePerUnit: 4.5 }, + { sku: "DAIRY-CREAM-001", name: "Heavy Cream 35%", unit: "L", pricePerUnit: 8.99 }, + { sku: "DAIRY-SUGAR-001", name: "Refined White Sugar", unit: "KG", pricePerUnit: 2.99 }, + { sku: "DAIRY-COND-001", name: "Sweetened Condensed Milk", unit: "KG", pricePerUnit: 6.49 }, + { sku: "DAIRY-BUTTER-001", name: "Unsalted Butter Blocks", unit: "KG", pricePerUnit: 12.99 }, + { sku: "DAIRY-POWDER-001", name: "Skim Milk Powder", unit: "KG", pricePerUnit: 15.99 }, + { sku: "DAIRY-VANILLA-001", name: "Vanilla Extract Pure", unit: "L", pricePerUnit: 45.0 }, +]; + +export const PACKAGING_CATALOG = [ + { sku: "PACK-BAG-150", name: "Chip Bag Foil Laminate 150g", unit: "EA", pricePerUnit: 0.15 }, + { sku: "PACK-BAG-300", name: "Chip Bag Foil Laminate 300g", unit: "EA", pricePerUnit: 0.22 }, + { sku: "PACK-ICE-500", name: "Ice Cream Container 500ml", unit: "EA", pricePerUnit: 0.35 }, + { sku: "PACK-ICE-1000", name: "Ice Cream Container 1L", unit: "EA", pricePerUnit: 0.48 }, + { sku: "PACK-CARTON-001", name: "Cardboard Shipping Carton", unit: "EA", pricePerUnit: 1.25 }, + { sku: "PACK-WRAP-001", name: "Pallet Shrink Wrap Roll", unit: "ROLL", pricePerUnit: 45.0 }, + { sku: "CHEM-PRES-001", name: "Sodium Benzoate Preservative", unit: "KG", pricePerUnit: 28.99 }, + { sku: "CHEM-EMUL-001", name: "Lecithin Emulsifier", unit: "KG", pricePerUnit: 35.5 }, + { sku: "CHEM-STAB-001", name: "Guar Gum Stabilizer", unit: 
"KG", pricePerUnit: 42.0 }, + { sku: "CHEM-ACID-001", name: "Citric Acid Food Grade", unit: "KG", pricePerUnit: 18.75 }, +]; + +export const AGRO_CATALOG = [ + { sku: "AGRO-POTATO-001", name: "Russet Potatoes Premium", unit: "KG", pricePerUnit: 0.85 }, + { sku: "AGRO-ONION-001", name: "Yellow Onions", unit: "KG", pricePerUnit: 1.2 }, + { sku: "AGRO-SALT-001", name: "Sea Salt Fine", unit: "KG", pricePerUnit: 0.95 }, + { sku: "AGRO-PAPRIKA-001", name: "Smoked Paprika Powder", unit: "KG", pricePerUnit: 18.5 }, + { sku: "AGRO-PEPPER-001", name: "Black Pepper Ground", unit: "KG", pricePerUnit: 25.0 }, + { sku: "AGRO-CUMIN-001", name: "Cumin Powder", unit: "KG", pricePerUnit: 22.0 }, + { sku: "AGRO-GARLIC-001", name: "Garlic Powder", unit: "KG", pricePerUnit: 12.5 }, + { sku: "AGRO-CHILI-001", name: "Dried Chili Flakes", unit: "KG", pricePerUnit: 28.0 }, +]; + +const generateAddress = (type: "BILL_TO" | "SHIP_TO" | "REM_TO") => ({ + type, + country: "United States", + attention: faker.person.fullName(), + street1: faker.location.streetAddress(), + street2: faker.datatype.boolean() ? faker.location.secondaryAddress() : undefined, + city: faker.location.city(), + state: faker.location.state(), + postalCode: faker.location.zipCode(), + contactEmail: faker.internet.email(), + contactPhone: faker.phone.number(), +}); + +const generateContact = () => ({ + name: faker.person.fullName(), + email: faker.internet.email(), + phone: faker.phone.number(), +}); + +export const generateMpcCompany = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: true, + companyId: PERISHABLE_COMPANY_IDS.MPC, + externalReference: "EXT-MPC-SKYFOODS-2024", + name: PERISHABLE_COMPANY_NAMES.MPC, + legalName: "SkyFoods International LLC", + duns: "123456789", + taxId: "TAX-CFI-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-CFI-2024" }, + currency: "USD", + paymentTerms: "Net 30", + creditLimit: 500000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "SKYMART-SALES", + priceList: "Enterprise", + glAccount: faker.finance.accountNumber(), + customerClass: "VIP", + status: "ACTIVE", + companyType: "INTERNAL", + customFields: { + erpSource: "SAP", + regionCode: "US", + industryType: "Food Manufacturing", + productLines: ["Potato Chips", "Ice Cream"], + suppliers: [ + PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + ], + }, +}); + +export const generateDairySupplier = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + externalReference: "EXT-SUP-DAIRY-2024", + name: PERISHABLE_COMPANY_NAMES.DAIRY_SUPPLIER, + legalName: "Dairyland Essentials Limited", + duns: "987654321", + taxId: "TAX-DEL-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-DEL-2024" }, + currency: "USD", + paymentTerms: "Net 15", + creditLimit: 100000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "DAIRY-DIST", + priceList: "Standard", + glAccount: faker.finance.accountNumber(), + customerClass: "A", + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + erpSource: "NetSuite", + regionCode: "US", + industryType: 
"Dairy & Ingredients", + certifications: ["FDA Approved", "HACCP Certified"], + catalog: DAIRY_CATALOG, + }, +}); + +export const generatePackagingSupplier = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + externalReference: "EXT-SUP-PACK-2024", + name: PERISHABLE_COMPANY_NAMES.PACKAGING_SUPPLIER, + legalName: "PackChem Solutions Incorporated", + duns: "456789123", + taxId: "TAX-PCS-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-PCS-2024" }, + currency: "USD", + paymentTerms: "Net 45", + creditLimit: 150000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "PACKTECH", + priceList: "Partner", + glAccount: faker.finance.accountNumber(), + customerClass: "A", + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + erpSource: "Oracle", + regionCode: "US", + industryType: "Packaging & Food Chemicals", + certifications: ["ISO 9001", "FDA Food-Grade"], + catalog: PACKAGING_CATALOG, + }, +}); + +export const generateAgroSupplier = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + externalReference: "EXT-SUP-AGRO-2024", + name: PERISHABLE_COMPANY_NAMES.AGRO_SUPPLIER, + legalName: "Harvest Fields Agro Company", + duns: "789123456", + taxId: "TAX-HFA-US-001", + taxRegistrationNumbers: { country: "United States", number: "REG-HFA-2024" }, + currency: "USD", + paymentTerms: "Due on Receipt", + creditLimit: 75000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "AGRO-HARVEST", + priceList: "Standard", + glAccount: faker.finance.accountNumber(), + customerClass: "B", + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + erpSource: "Odoo", + regionCode: "US", + industryType: "Agriculture & Spices", + certifications: ["USDA Organic", "Non-GMO Verified"], + catalog: AGRO_CATALOG, + }, +}); + +const generatePartnerCompany = ( + worldRef: TWorldRefModel, + id: string, + name: string, + duns: string, + custClass: string +): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: id, + externalReference: `EXT-${id}`, + name: name, + legalName: `${name} Inc.`, + duns: duns, + taxId: `TAX-${id}`, + taxRegistrationNumbers: { country: "United States", number: `REG-${id}` }, + currency: "USD", + paymentTerms: "Net 30", + creditLimit: 250000, + creditHold: false, + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + remitTo: generateAddress("REM_TO"), + primaryContact: generateContact(), + salesOrg: "SKYMART-SALES", + priceList: "Wholesale", + glAccount: faker.finance.accountNumber(), + customerClass: custClass, + status: "ACTIVE", + companyType: "CUSTOMER", // Explicitly marking as customer + customFields: { + erpSource: "SAP", + regionCode: "US", + industryType: "Retail", + segment: "Grocery", + }, +}); + +export const generateAllPerishableCompanies = (worldRef: TWorldRefModel): TCompanyGenerate[] => [ + generateMpcCompany(worldRef), + generateDairySupplier(worldRef), + generatePackagingSupplier(worldRef), + generateAgroSupplier(worldRef), + // Partners / Customers + generatePartnerCompany(worldRef, 
PARTNER_COMPANY_IDS.FRESHMART, PARTNER_COMPANY_NAMES.FRESHMART, "111222333", "A"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.URBAN_GROCERS, PARTNER_COMPANY_NAMES.URBAN_GROCERS, "222333444", "A"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.SNACKWAVE, PARTNER_COMPANY_NAMES.SNACKWAVE, "333444555", "B"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.MEGAFOODS, PARTNER_COMPANY_NAMES.MEGAFOODS, "444555666", "VIP"), + generatePartnerCompany(worldRef, PARTNER_COMPANY_IDS.QUICKSTOP, PARTNER_COMPANY_NAMES.QUICKSTOP, "555666777", "C"), +]; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/index.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..2342b859a0cd170d10f7bad084d604e91b420a89 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/index.ts @@ -0,0 +1,3 @@ +export * from "./world.generator"; +export * from "./companies.generator"; +export * from "./products.generator"; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/products.generator.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/products.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..873284699d6c3495528ae78da139443c00c510eb --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/products.generator.ts @@ -0,0 +1,492 @@ +import { faker } from "@faker-js/faker"; +import type { TProductGenerate } from "../../../models/erp/product.erp.model"; +import type { TWorldRefModel } from "../../../models/shared.model"; +import { generateIdByService } from "../../../utils/mongo.util"; +import { AGRO_CATALOG, DAIRY_CATALOG, PACKAGING_CATALOG, PERISHABLE_COMPANY_IDS } from "./companies.generator"; + +export type TBomItem = { + supplierCompanyId: string; + sku: string; + name: string; + qty: number; + unit: string; +}; + +const generateWeight = (min: number, max: number, unit: "KG" | "G" | "LB" = "KG") => ({ + value: faker.number.float({ min, max, fractionDigits: 2 }), + unit, +}); + +const generateDimensions = ( + length: number, + width: number, + height: number, + unit: "CM" | "IN" = "CM", +) => ({ + length, + width, + height, + unit, +}); + +const MPC_CHIP_PRODUCTS: Array<{ name: string; sku: string; price: number; bom: TBomItem[] }> = [ + { + name: "Skymart Classic Salted Chips", + sku: "CHP-CLASSIC-001", + price: 3.99, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-SALT-001", + name: "Sea Salt", + qty: 0.01, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart BBQ Blaze Chips", + sku: "CHP-BBQ-001", + price: 4.29, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-PAPRIKA-001", + name: "Smoked Paprika", + qty: 0.005, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-GARLIC-001", + name: "Garlic Powder", + qty: 0.003, + unit: "KG", + }, + { + supplierCompanyId: 
PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Sour Cream & Onion Chips", + sku: "CHP-SCREAM-001", + price: 4.29, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-ONION-001", + name: "Onion Powder", + qty: 0.005, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Cream Powder", + qty: 0.01, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Spicy Jalapeño Chips", + sku: "CHP-SPICY-001", + price: 4.49, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.3, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-CHILI-001", + name: "Chili Flakes", + qty: 0.008, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-PEPPER-001", + name: "Black Pepper", + qty: 0.003, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-150", + name: "Chip Bag 150g", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Family Size Classic Chips", + sku: "CHP-CLASSIC-FAM", + price: 7.99, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-POTATO-001", + name: "Russet Potatoes", + qty: 0.6, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.AGRO_SUPPLIER, + sku: "AGRO-SALT-001", + name: "Sea Salt", + qty: 0.02, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-BAG-300", + name: "Chip Bag 300g", + qty: 1, + unit: "EA", + }, + ], + }, +]; + +const MPC_ICECREAM_PRODUCTS: Array<{ name: string; sku: string; price: number; bom: TBomItem[] }> = + [ + { + name: "Skymart Vanilla Dream Ice Cream", + sku: "ICE-VANILLA-001", + price: 5.99, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.15, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.08, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-VANILLA-001", + name: "Vanilla Extract", + qty: 0.005, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "CHEM-STAB-001", + name: "Stabilizer", + qty: 0.002, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Belgian Chocolate Ice Cream", + sku: "ICE-CHOCO-001", + price: 6.49, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.15, + unit: "L", + }, + { + supplierCompanyId: 
PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.1, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "CHEM-EMUL-001", + name: "Emulsifier", + qty: 0.003, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Strawberry Swirl Ice Cream", + sku: "ICE-STRAW-001", + price: 5.99, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.15, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.08, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "CHEM-PRES-001", + name: "Preservative", + qty: 0.001, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Mango Paradise Ice Cream", + sku: "ICE-MANGO-001", + price: 6.49, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.12, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.1, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-500", + name: "Container 500ml", + qty: 1, + unit: "EA", + }, + ], + }, + { + name: "Skymart Premium Vanilla 1L", + sku: "ICE-VANILLA-1L", + price: 9.99, + bom: [ + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-MILK-001", + name: "Fresh Milk", + qty: 0.6, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-CREAM-001", + name: "Heavy Cream", + qty: 0.3, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-SUGAR-001", + name: "Sugar", + qty: 0.15, + unit: "KG", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.DAIRY_SUPPLIER, + sku: "DAIRY-VANILLA-001", + name: "Vanilla Extract", + qty: 0.01, + unit: "L", + }, + { + supplierCompanyId: PERISHABLE_COMPANY_IDS.PACKAGING_SUPPLIER, + sku: "PACK-ICE-1000", + name: "Container 1L", + qty: 1, + unit: "EA", + }, + ], + }, + ]; + +export const generateMpcProducts = (worldRef: TWorldRefModel): TProductGenerate[] => { + const chipProducts = MPC_CHIP_PRODUCTS.map((chip) => ({ + worldRef, + productId: generateIdByService("erp", "product"), + sku: chip.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: chip.name, + description: `Premium potato chips - ${chip.name}. 
Made with farm-fresh potatoes.`, + commodityCode: "19059000", + taxClassification: "STANDARD", + unitOfMeasure: "EA", + weight: generateWeight(0.15, 0.5, "KG"), + dimensions: generateDimensions(25, 15, 8), + inventoryTracking: true, + price: { currency: "USD", amount: chip.price }, + cost: { currency: "USD", amount: chip.price * 0.4 }, + leadTimeDays: 3, + status: "ACTIVE" as const, + customFields: { + category: "Snacks", + productType: "Potato Chips", + shelfLifeDays: 180, + storageTemp: "Room Temperature", + billOfMaterials: chip.bom, + }, + })); + + const catalog = [...AGRO_CATALOG, ...DAIRY_CATALOG, ...PACKAGING_CATALOG] + + const catalogProducts = catalog.map((catalog) => ({ + worldRef: { + worldId: worldRef.worldId, + }, + productId: generateIdByService("erp", "product"), + sku: catalog.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: catalog.name, + description: `Ingredient product: ${catalog.sku}`, + commodityCode: "21060000", + unitOfMeasure: catalog.unit, + inventoryTracking: true, + price: { currency: "USD", amount: catalog.pricePerUnit }, + cost: { currency: "USD", amount: catalog.pricePerUnit * 0.4 }, + leadTimeDays: 3, + status: "ACTIVE" as const, + customFields: { + category: "Ingredients", + productType: "Ingredients", + isMpcProduct: false, + }, + })); + + const icecreamProducts = MPC_ICECREAM_PRODUCTS.map((ice) => ({ + worldRef, + productId: generateIdByService("erp", "product"), + sku: ice.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: ice.name, + description: `Premium ice cream - ${ice.name}. Made with real dairy.`, + commodityCode: "21050000", + taxClassification: "STANDARD", + unitOfMeasure: "EA", + weight: generateWeight(0.5, 1.0, "KG"), + dimensions: generateDimensions(12, 12, 15), + inventoryTracking: true, + price: { currency: "USD", amount: ice.price }, + cost: { currency: "USD", amount: ice.price * 0.35 }, + leadTimeDays: 2, + status: "ACTIVE" as const, + customFields: { + category: "Frozen Desserts", + productType: "Ice Cream", + shelfLifeDays: 2, + storageTemp: "Frozen (-18°C)", + billOfMaterials: ice.bom, + }, + })); + + return [...chipProducts, ...icecreamProducts, ...catalogProducts]; +}; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/world.generator.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/world.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..a7a5e63b0fdf3507859874711edda7a648cbcc7f --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/seeder/world.generator.ts @@ -0,0 +1,95 @@ +import { createWorld } from "../../../repository/world.repository"; +import { CompanyRepository } from "../../../repository/erp/company.repository"; +import { ProductRepository } from "../../../repository/erp/product.repository"; +import { CompanyLedgerRepository } from "../../../repository/finance/ledger.repository"; +import { generateAllPerishableCompanies, PERISHABLE_COMPANY_NAMES } from "./companies.generator"; +import { generateMpcProducts } from "./products.generator"; +import { getIdFromMongoObject } from "../../../utils/mongo.util"; +import { capabilityCatalog } from "../../../services/capability-catalog.service"; + +export interface PerishableWorldResult { + worldId: string; + companiesCreated: number; + productsCreated: number; + initialCapital: number; +} + +export interface PerishableWorldConfig { + worldName?: string; + initialCash?: number; + initialReceivables?: number; + 
initialPayables?: number; + worldId?: string; +} + +const DEFAULT_INITIAL_CASH = 1_000_000; + +export const seedPerishableWorld = async ( + config: PerishableWorldConfig = {}, +): Promise<PerishableWorldResult> => { + const { + worldName = "Skymart Perishables World", + initialCash = DEFAULT_INITIAL_CASH, + initialReceivables = 0, + initialPayables = 0, + worldId: existingWorldId, + } = config; + + let worldId = existingWorldId; + + if (!worldId) { + // Get all unique persona IDs from capability definitions + const allCapabilities = capabilityCatalog.getAll(); + const allPersonaIds = [...new Set( + allCapabilities.flatMap(cap => cap.tags?.personas || []) + )]; + + const world = await createWorld({ + name: worldName, + description: "Perishable goods world centered around Skymart Foods International (MPC)", + mpcCompany: PERISHABLE_COMPANY_NAMES.MPC, + personas: { + allowedPersonas: allPersonaIds, + }, + ticketCreationEnabled: false + }); + worldId = getIdFromMongoObject(world); + } + + const worldRef = { worldId: worldId! }; + + const companies = generateAllPerishableCompanies(worldRef); + const companyRepo = CompanyRepository(worldId!); + for (const company of companies) { + if (company.companyId) { + const existing = await companyRepo.getCompanyById(company.companyId); + if (!existing) { + await companyRepo.createCompany(company); + } + } + } + + const products = generateMpcProducts(worldRef); + const productRepo = ProductRepository(worldId!); + for (const product of products) { + // Basic check to avoid duplicates if re-seeding + await productRepo.createProduct(product).catch(() => { }); + } + + const ledgerRepo = CompanyLedgerRepository(worldId!); + await ledgerRepo.ensure({ + cash: initialCash, + totalReceivables: initialReceivables, + totalPayables: initialPayables, + }); + + return { + worldId, + companiesCreated: companies.length, + productsCreated: products.length, + initialCapital: initialCash, + }; +}; + +export { PERISHABLE_COMPANY_NAMES, generateAllPerishableCompanies } from "./companies.generator"; +export { generateMpcProducts } from "./products.generator"; diff --git a/packages/controlmart/src/worlds/perishables-food-manufacturer/world-doc.ts b/packages/controlmart/src/worlds/perishables-food-manufacturer/world-doc.ts new file mode 100644 index 0000000000000000000000000000000000000000..f8a16605d60aa6ab53d806c6443c81f42406b780 --- /dev/null +++ b/packages/controlmart/src/worlds/perishables-food-manufacturer/world-doc.ts @@ -0,0 +1,358 @@ +export const perishableFoodManufacturerWorldDoc = { + meta: { + version: "2.1.0", + generatedAt: "2025-12-12", + docType: "World Definition", + }, + world: { + name: "Perishable Food Manufacturer", + description: + "A high-stakes, high-volume manufacturing environment characterized by the relentless velocity of perishable goods. Here, time is the ultimate adversary. SkyFoods International acts as the beating heart of a global snack empire, juggling the volatility of raw agricultural yields against the insatiable, unpredictable appetites of a worldwide consumer base. This world models the intricate dance of modern supply chains where a single delay can mean tons of spoiled product and millions in lost revenue.", + industry: "Food & Beverage Manufacturing", + location: "Global Distribution with Centralized Manufacturing Hub", + size: "Enterprise (Large)", + complexity: "Extreme", + story: + "In the competitive landscape of global food manufacturing, SkyFoods International stands as a titan of taste, renowned for its 'Farm-to-Crunch' philosophy. 
But beneath the shiny packaging lies a logistical battlefield. The company was founded three decades ago on a single potato farm, but today it orchestrates a symphony of suppliers, logistics providers, and retailers. The processing plants never sleep, the ovens never cool, and the trucks never stop rolling. \n\nRecently, SkyFoods has aggressively expanded its customer base, onboarding major retail chains and distributors across continents. This expansion has strained its legacy systems to the breaking point. The 'Perishable Food Manufacturer' simulation places you in the control tower of this chaotic empire. You are not just observing; you are witnessing the complex interplay of demand sensing, automated procurement, just-in-time manufacturing, and cold-chain logistics. Every simulation tick represents the pulse of commerce—a purchase order released, a pallet wrapped, a truck dispatched, a payment received. \n\nSuccess here isn't just about making chips; it's about mastering the flow of information and materials in a world where freshness is currency and spoilage is sin.", + }, + businessContext: { + name: "SkyFoods International", + industry: "Food & Beverage Manufacturing", + description: + "SkyFoods International is the premier name in snack foods and frozen indulgences. Operating on a rigid Make-to-Stock (MTS) model, the company faces the dual challenge of maximizing product freshness while minimizing waste. The production lines are specialized, high-capacity beasts that render raw ingredients—potatoes from the earth, milk from the dairy—into shelf-stable and frozen consumer packaged goods. Quality control is draconian; a temperature fluctuation of a few degrees can condemn an entire production run. The business thrives on velocity, turning inventory over rapidly to ensure that what reaches the customer is as fresh as the day it was made.", + coreValues: [ + "Freshness First: We believe shelf life is a deadline, not a suggestion.", + "Velocity: Speed is our strategy. We move faster than the spoilage clock.", + "Zero Waste: Efficiency is our ethics. We respect our resources by wasting nothing.", + "Trust Through Transparency: We integrate deeply with our partners for a unified supply chain.", + ], + productLines: [ + { + name: "SkyFoods Potato Chips", + storage: "Room Temperature (Ambient)", + shelfLife: "180 Days", + description: + "The flagship line. Sliced from premium Russet potatoes and kettle-cooked to golden perfection. These chips define the brand.", + products: [ + { + sku: "CHP-CLASSIC-001", + name: "Classic Salted Chips", + bom: ["Russet Potatoes", "Sea Salt", "Bag"], + }, + { + sku: "CHP-BBQ-001", + name: "BBQ Blaze Chips", + bom: ["Russet Potatoes", "Smoked Paprika", "Garlic Powder", "Bag"], + }, + { + sku: "CHP-SCREAM-001", + name: "Sour Cream & Onion Chips", + bom: ["Russet Potatoes", "Onion Powder", "Cream Powder", "Bag"], + }, + { + sku: "CHP-SPICY-001", + name: "Spicy Jalapeño Chips", + bom: ["Russet Potatoes", "Chili Flakes", "Black Pepper", "Bag"], + }, + ], + }, + { + name: "SkyFoods Ice Cream", + storage: "Frozen (-18°C)", + shelfLife: "365 Days", + description: + "The indulgence line. Rich, creamy, and dangerously temperature-sensitive. 
Requires unbroken cold-chain compliance from churn to spoon.", + products: [ + { + sku: "ICE-VANILLA-001", + name: "Vanilla Dream", + bom: ["Fresh Milk", "Heavy Cream", "Sugar", "Vanilla Extract", "Stabilizer"], + }, + { + sku: "ICE-CHOCO-001", + name: "Belgian Chocolate", + bom: ["Fresh Milk", "Heavy Cream", "Sugar", "Emulsifier"], + }, + { + sku: "ICE-STRAW-001", + name: "Strawberry Swirl", + bom: ["Fresh Milk", "Heavy Cream", "Sugar", "Preservative"], + }, + { + sku: "ICE-MANGO-001", + name: "Mango Paradise", + bom: ["Fresh Milk", "Heavy Cream", "Sugar"], + }, + ], + }, + ], + }, + ecosystem: { + partners: "The world is populated by a dynamic array of trading partners. Using advanced simulation generation, we model a diverse customer base ranging from massive 'Big Box' retailers with immense bargaining power to agile regional distributors. These partners are not static; they have unique credit limits, payment terms, and geographic locations, creating a rich tapestry of demand signals.", + suppliers: [ + { + name: "Harvest Fields Agro Co. (Agro)", + role: "Strategic Raw Ingredient Supplier", + integration: "Full EDI Suite (850, 855, 856, 810)", + paymentTerms: "Due on Receipt", + description: "The backbone of our chip line. They provide the tons of potatoes and spices needed daily. A delay here means stopped lines.", + catalog: [ + "Russet Potatoes Premium (AGRO-POTATO-001)", + "Yellow Onions (AGRO-ONION-001)", + "Sea Salt Fine (AGRO-SALT-001)", + "Spices (Paprika, Pepper, Cumin, Garlic, Chili)", + ], + }, + { + name: "Dairyland Essentials Ltd. (Dairy)", + role: "Perishable Ingredient Supplier", + integration: "Full EDI Suite (850, 855, 856, 810)", + paymentTerms: "Net 15", + description: "Providers of the liquid gold—fresh milk and cream. Deliveries are time-critical and temperature-monitored.", + catalog: [ + "Fresh Whole Milk (DAIRY-MILK-001)", + "Heavy Cream 35% (DAIRY-CREAM-001)", + "Refined White Sugar (DAIRY-SUGAR-001)", + "Butter, Milk Powder, Vanilla Extract", + ], + }, + { + name: "PackChem Solutions Inc. (Packaging)", + role: "Packaging & Chemicals Supplier", + integration: "Full EDI Suite (850, 855, 856, 810)", + paymentTerms: "Net 45", + description: "Ensures our products look good and last long. Supply chain reliability is key to avoiding bottlenecks at the packing stations.", + catalog: [ + "Foil Laminate Bags (150g, 300g)", + "Ice Cream Containers (500ml, 1L)", + "Shipping Cartons, Shrink Wrap", + "Chemicals (Preservatives, Emulsifiers, Stabilizers)", + ], + }, + ], + }, + operationalDescriptors: { + standardActors: [ + { + id: "raw-material-procurement", + name: "Raw Material Procurement", + persona: "Procurement Manager", + type: "Standard Workflow", + schedule: "Daily @ 08:00 AM (Sim Time)", + description: + "The Procurement Manager is the gatekeeper of supply continuity, operating with a zero-tolerance policy for stockouts. At 08:00 AM sharp, the system initiates a deep-scan analysis of the current inventory ledger in the Warehouse Management System (WMS). It doesn't just look at absolute quantities; it calculates 'Days of Supply' based on the rolling 7-day production forecast. \n\nFor every SKU—from the high-velocity Russet Potatoes to the critical-path stabilizing agents—the logic evaluates the gap between 'Current On-Hand' and 'Safety Stock Thresholds'. If a deficit is projected within the lead-time window, the algorithm triggers a replenishment sequence. 
It cross-references the 'Supplier Catalog' to identify the primary vendor (e.g., Harvest Fields for produce, Dairyland for dairy) and consults the latest 'Contract Terms' to ensure pricing accuracy. \n\nFinally, it constructs a formal ERP Purchase Order and translates it into an EDI 850 (Purchase Order) transaction. This digital handshake is transmitted to the supplier's simulator, legally binding SkyFoods to the financial commitment and setting the supply chain in motion. This process effectively balances the risk of spoilage against the catastrophe of a line shutdown.", + keySteps: [ + "Fetch Real-time Inventory Levels from WMS", + "Calculate Days-of-Supply vs. Rolling 7-Day Forecast", + "Identify Stock Gaps & Determine Reorder Quantities (EOQ)", + "Select Optimal Supplier per SKU based on Lead Time & Cost", + "Create Pending Purchase Order in ERP", + "Generate and Transmit EDI 850 (Purchase Order) Outbound", + ], + inputs: ["Real-time Inventory History", "Supplier Catalogs & Contracts", "Production Forecast"], + outputs: ["Purchase Order (Created)", "EDI 850 Transaction"], + }, + { + id: "inbound-asn-process", + name: "Inbound ASN Process", + persona: "Logistics Coordinator", + type: "Standard Workflow", + schedule: "Recurring @ Every 2 Hours (Sim Time)", + description: + "Supply chain visibility is paramount. The Inbound ASN Process acts as the digital radar for the warehouse. Every two hours, the system polls the EDI integration layer for incoming EDI 856 (Advance Shipping Notice) documents from suppliers. An ASN is not merely a notification; it is a detailed manifest of what is on the truck, down to the pallet and batch level. \n\nUpon receipt, the workflow parses the raw EDI data and performs a three-way validation: Does the PO exist? Do the items on the ASN match the lines on the PO? Is the quantity within the over-shipment tolerance? \n\nOnly if these checks pass does the system generate a 'WMS Inbound Order'. This record effectively 'reserves' dock capacity and labor hours for the arriving shipment. It transitions the goods from 'On Order' to 'In Transit', updating the view for inventory planners and preventing duplicate ordering. This proactive step eliminates the chaos of blind receipts and ensures the receiving dock is never caught off guard.", + keySteps: [ + "Poll Integration Layer for New EDI 856 (ASN) Transactions", + "Parse ASN & Retrieve Related Purchase Order", + "Validate Item Skus, Quantities, and PO status", + "Create WMS Inbound Order (Plan for Receiving)", + "Update PO Line Item Status to 'IN_TRANSIT'", + "Send EDI 997 (Functional Acknowledgement) to Supplier", + ], + inputs: ["Incoming EDI 856 Stream", "Open Purchase Orders"], + outputs: ["WMS Inbound Order", "PO Status Update", "EDI 997 Transaction"], + }, + { + id: "receive-supplier-shipment", + name: "Receive Supplier Shipment", + persona: "Dock Receiver", + type: "Standard Workflow", + schedule: "Daily @ 11:00 AM (Sim Time)", + description: + "The theoretical becomes physical. At 11:00 AM, the daily fleet of supplier trucks aligns with the dock doors. The Dock Receiver executes the critical 'Goods Receipt' process. This workflow simulates the physical unloading and verification of pallets. It iterates through all 'In-Transit' Inbound Orders expected for the arrival window. \n\nFor each receipt, the simulation applies stochastic logic to introduce potential real-world variances, such as minor damage or quantity discrepancies (though under normal operation, accuracy is high). 
The system creates an 'Inventory Receipt Transaction' in the WMS, formally adding the stock to the 'Receiving Dock' location. \n\nSimultaneously, this event creates the financial liability. The ERP status of the Purchase Order is flipped to 'Closed/Received', and the system posts an accrual to the 'Goods Received Not Invoiced' (GRNI) account, awaiting the supplier's invoice. This precise synchronization between physical inventory and financial liability is the hallmark of a mature ERP implementation.", + keySteps: [ + "Query WMS for Arrived Inbound Orders", + "Simulate Unloading & Physical Count Verification", + "Post Inventory Receipt Transaction to WMS (Location: DOCK-001)", + "Update Inbound Order Status to 'RECEIVED'", + "Close Purchase Order Lines", + "Create Pending Liability in Accounts Payable (GRNI)", + ], + inputs: ["Inbound Orders (Status: IN_TRANSIT)"], + outputs: ["Inventory Added (Receiving Dock)", "Financial Accrual (GRNI)", "Closed PO"], + }, + { + id: "putaway-process", + name: "Putaway Process", + persona: "Forklift Operator", + type: "Standard Workflow", + schedule: "Recurring @ Every 1 Hour (Sim Time)", + description: + "A cluttered dock is a safety hazard and a bottleneck. The Putaway Process is the automated heartbeat of warehouse organization. Running every hour, this workflow scans the 'Receiving Dock' location for any unassigned inventory. It acts as the brain of the Forklift Operator. \n\nUsing a simplified 'Directed Putaway' logic, it identifies the optimal storage strategy for each commodity. For example, raw potatoes are routed to 'Bulk Storage Zone A' (Ambient), while fresh cream is urgently directed to 'Cold Chain Zone B' (Refrigerated). The system creates and immediately executes 'Internal Transfer' transactions in the WMS, moving the digital inventory record from the dock to the specific bin ID (e.g., 'BIN-A-01-02'). \n\nThis granularity allows for precise tracking of batch aging and prevents cross-contamination of allergens. Without this constant shuffling, flow would stagnate, and the receiving process would grind to a halt.", + keySteps: [ + "Scan Receiving Dock for Unprocessed Inventory", + "Determine Storage Logic based on Material Characteristics (Temp/Hazard)", + "Identify Available Empty Bin Capacity in Target Zone", + "Execute WMS Internal Transfer (Dock -> Bin)", + "Update Inventory Batch Record with Location Data", + ], + inputs: ["Unassigned Dock Inventory", "Warehouse Bin Map"], + outputs: ["Optimized Bin Inventory", "Clean Receiving Dock"], + }, + { + id: "production-order", + name: "Production Order", + persona: "Production Planner", + type: "Standard Workflow", + schedule: "Daily @ 01:00 PM (Sim Time)", + description: + "Manufacturing is not random; it is a calculated response to demand. The Production Planner workflow mimics the high-level decision-making of the Master Scheduler. At 01:00 PM, determining the production run for the next shift is critical. \n\nThe logic evaluates the 'Finished Goods' inventory against safety stock targets. If 'Classic Salted Chips' are trending low due to recent high sales, the system flags them for production. It performs a 'BOM Explosion'—deconstructing the target finished good into its constituent raw materials (Potatoes, Salt, Bags). \n\nCrucially, it performs an 'Inventory Feasibility Check': Do we have enough raw materials to run this batch? 
If yes, it creates a 'Production Order' in 'CREATED' status and hard-allocates the raw materials in the WMS, preventing them from being consumed by competing orders. This reservation mechanism helps ensure schedule adherence and prevents partial run failures.", + keySteps: [ + "Analyze Finished Goods Inventory vs. Safety Stock", + "Prioritize Production Requests based on Depletion Risk", + "Explode Bill of Materials (BOM) for Target SKU", + "Check Raw Material Availability (Feasibility Analysis)", + "Create ERP Production Order", + "Hard-Allocate Raw Material Batches (Reservation)", + ], + inputs: ["Product Catalog", "BOM Definitions", "Current Inventory"], + outputs: ["Production Order (Status: CREATED)", "Inventory Allocations"], + }, + { + id: "inventory-consumption", + name: "Inventory Consumption & Production", + persona: "Production Operator", + type: "Standard Workflow", + schedule: "Event-Driven (Triggered by Production Order)", + description: + "This is the digital twin of the factory floor. Triggered immediately upon the release of a Production Order, this workflow simulates the material transformation. It is a transactional atomic swap. \n\nFirst, it deducts the allocated raw materials from their respective bins, effectively cleaning the WMS of the ingredients. Simultaneously, it 'Receipts' new Finished Goods inventory at the 'End-of-Line' location. \n\nBut the magic is in the finance: this step calculates the 'Cost of Goods Manufactured' (COGM). It aggregates the weighted average cost of the consumed ingredients plus overhead allocations and stamps this value onto the new finished goods batches. It posts a 'Work In Progress' (WIP) clearing transaction to the General Ledger. This ensures that the financial value of inventory is always preserved, merely changing form from Potato to Chip.", + keySteps: [ + "Retrieve Active Production Orders", + "Execute 'Batch Pick' Transaction (Deduct Raw Materials)", + "Execute 'Finished Goods Receipt' Transaction (Add Products)", + "Calculate COGM (Material Cost + Overhead)", + "Post GL Journal Entry: Credit Raw Materials / Debit Finished Goods", + "Update Production Order Status to 'COMPLETED'", + ], + inputs: ["Active Production Order", "Allocated Inventory"], + outputs: ["Finished Goods Inventory", "GL Journal Entry (COGM)"], + }, + { + id: "customer-sales-order", + name: "Customer Sales Order Cycle", + persona: "Sales Representative", + type: "Standard Workflow", + schedule: "Recurring @ Every 4 Hours (Sim Time)", + description: + "Every 4 hours, the simulation injects market demand into the ecosystem. This isn't generic demand; it targets specific, dynamically generated partners in the database. The workflow selects a customer (e.g., 'MegaMart Retail') and checks their specific 'Credit Limit' and 'Hold Status'. \n\nAssuming the customer is in good standing, it generates a randomized basket of goods—perhaps 500 cases of BBQ Chips and 200 cases of Vanilla Ice Cream. It negotiates the 'Pricing Strategy' based on the customer's assigned price list (Standard vs. Enterprise). \n\nThe system then crystallizes this intent into an ERP Sales Order. Immediately, three parallel actions occur: 1) Value is booked to Accounts Receivable (Asset), 2) A WMS Outbound Order is created to signal the warehouse, and 3) An EDI 855 (Order Acknowledgement) is transmitted back to the customer, confirming the delivery window. 
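The COGM calculation in the Inventory Consumption descriptor is essentially a roll-up of consumed batch costs plus an overhead uplift, spread over the good units produced. A hedged sketch with made-up figures:

```ts
// Illustrative COGM roll-up; batch shape, overhead model and values are assumptions.

interface ConsumedBatch { sku: string; qty: number; unitCost: number; }

const calcCogm = (batches: ConsumedBatch[], overheadRate: number, goodUnitsProduced: number) => {
  const materialCost = batches.reduce((sum, b) => sum + b.qty * b.unitCost, 0);
  const totalCost = materialCost * (1 + overheadRate); // overhead as a % uplift on materials
  return {
    materialCost,
    totalCost,
    unitCost: totalCost / goodUnitsProduced, // stamped onto the finished-goods batch
  };
};

// 200kg potatoes @ $0.80 + 5kg salt @ $1.20 + 1,000 bags @ $0.05, 15% overhead, 1,000 units out.
console.log(calcCogm(
  [
    { sku: "RAW-POTATO", qty: 200, unitCost: 0.8 },
    { sku: "RAW-SALT", qty: 5, unitCost: 1.2 },
    { sku: "PKG-BAG", qty: 1000, unitCost: 0.05 },
  ],
  0.15,
  1000,
));
// materialCost = 216, totalCost = 248.4, unitCost ≈ 0.2484
```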
This simulates the high-speed, automated nature of modern B2B commerce.", + keySteps: [ + "Select Random Active Customer (Filter out Blocked/Hold)", + "Check Customer Credit Limit vs. Order Value", + "Generate Line Items with Customer-Specific Pricing", + "Create ERP Sales Order", + "Post Initial A/R Entry to Ledger", + "Create WMS Outbound Order (Demand Signal for Warehouse)", + "Generate and Send EDI 855 (Order Acknowledgement)", + ], + inputs: ["Customer Database", "Price Lists", "Product Catalog"], + outputs: ["Sales Order", "WMS Outbound Order", "A/R Ledger Entry", "EDI 855"], + }, + { + id: "pick-pack-ship", + name: "Pick, Pack, and Ship", + persona: "Fulfillment Associate", + type: "Standard Workflow", + schedule: "Daily @ 03:00 PM (Sim Time)", + description: + "At 03:00 PM, the shipping window opens. The WMS aggregates all open Outbound Orders into a 'Wave'. The Fulfillment Associate workflow simulates the labor-intensive process of picking. It utilizes a 'FEFO' (First-Expired-First-Out) logic to allocate specific batches to orders, systematically rotating stock to minimize spoilage. \n\nOnce picked, the items are virtually 'Packed' onto pallets. The Transportation Management System (TMS) module kicks in to assign a carrier (e.g., 'Swift Haulage') and generate a tracking number. \n\nThe 'Ship Confirm' transaction is the point of no return: Inventory is permanently deducted from the WMS, the Sales Order is marked 'SHIPPED', and an EDI 856 (Advance Shipping Notice) is broadcast to the customer. This ASN includes the pallet hierarchies and lot numbers, enabling the customer's own receiving process.", + keySteps: [ + "Run Wave Planning for Open Outbound Orders", + "Execute Picking Tasks using FEFO Logic (Inventory Deduction)", + "Consolidate Items into Shipping Handling Units (Pallets)", + "Assign Carrier & Generate Tracking via TMS", + "Execute Ship Confirm Transaction", + "Generate and Transmit EDI 856 (Advance Shipping Notice)", + ], + inputs: ["WMS Outbound Orders", "Carrier Contracts", "Inventory Batches"], + outputs: ["Shipped Order", "Inventory Deduction", "TMS Shipment Record", "EDI 856"], + }, + { + id: "invoice-and-payment", + name: "Invoice & Payment Collection", + persona: "Accounts Receivable Clerk", + type: "Standard Workflow", + schedule: "Daily @ 05:00 PM (Sim Time)", + description: + "Shipping goods is only half the battle; collecting cash is the war. The Invoice & Payment workflow runs at end-of-day to monetize the day's shipments. It scans for all Sales Orders that reached 'SHIPPED' status during the shift. \n\nFor each, it generates a formal Tax Invoice (generating an EDI 810 transaction). This document applies the final tax calculations and payment terms (e.g., Net 30). \n\nCrucially, this workflow also handles the 'Cash Application' simulation. It checks for due invoices from previous simulation days. Based on the customer's payment profile (reliability score), it simulates the receipt of a wire transfer. The General Ledger is updated: debiting 'Cash' and crediting 'Accounts Receivable'. 
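The FEFO allocation used by Pick, Pack, and Ship is a sort-then-drain over available batches. A minimal sketch (the batch shape and lot data are hypothetical):

```ts
// Illustrative First-Expired-First-Out allocation.

interface Batch { lotNumber: string; expiresAt: string; available: number; }

const allocateFefo = (batches: Batch[], requested: number) => {
  const picks: { lotNumber: string; qty: number }[] = [];
  let remaining = requested;

  // Earliest expiry first, so the oldest stock leaves the building first.
  const sorted = [...batches].sort(
    (a, b) => new Date(a.expiresAt).getTime() - new Date(b.expiresAt).getTime(),
  );

  for (const batch of sorted) {
    if (remaining <= 0) break;
    const qty = Math.min(batch.available, remaining);
    if (qty > 0) {
      picks.push({ lotNumber: batch.lotNumber, qty });
      remaining -= qty;
    }
  }

  return { picks, shortfall: remaining }; // shortfall > 0 means a backorder
};

console.log(allocateFefo(
  [
    { lotNumber: "LOT-B", expiresAt: "2026-03-01", available: 300 },
    { lotNumber: "LOT-A", expiresAt: "2026-02-10", available: 200 },
  ],
  450,
));
// -> picks LOT-A (200) then LOT-B (250), shortfall 0
```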
This closes the loop, converting inventory back into liquid capital to fund the next day's procurement.", + keySteps: [ + "Query Shipped, Unbilled Sales Orders", + "Generate Tax Invoice & Calculate Due Dates", + "Transmit EDI 810 (Invoice) to Customer", + "Scan Open Invoices for Due Payments", + "Simulate Cash Receipt (Probabilistic Payment Behavior)", + "Post GL Journal: Debit Cash / Credit Accounts Receivable", + ], + inputs: ["Shipped Sales Orders", "A/R Aging Report"], + outputs: ["Invoice (EDI 810)", "Payment Transaction", "Balanced Ledger"], + }, + ], + backgroundActors: [ + { + id: "daily-inventory-check", + name: "Daily Inventory Consistency Check", + persona: "Inventory Manager", + type: "Background Job", + schedule: "Recurring @ Every 12 Hours (Sim Time)", + description: + "In a high-velocity environment, data drift is inevitable. The Daily Inventory Check is the automated auditor. Every 12 hours, it performs a comprehensive reconciliation between the WMS bin-level details and the ERP high-level summaries. \n\nIt checks for 'Orphaned Allocations' (stock reserved for orders that were cancelled) and 'Negative Inventory' scenarios (data errors). If discrepancies are found, it auto-generates 'Adjustment Transactions' to align the systems. This ensures that the Procurement and Sales teams are basing their decisions on a single version of the truth, preventing the phantom inventory issues that plague real-world warehouses.", + responsibility: "System-wide Inventory Reconciliation & Data Health", + }, + { + id: "aging-inventory-check", + name: "Aging & Spoilage Monitor", + persona: "Quality Control Specialist", + type: "Background Job", + schedule: "Daily @ Midnight (Sim Time)", + description: + "The 'Perishable' in the world name dictates this job's criticality. At Midnight, the system iterates through every single batch of inventory in the WMS. It compares the 'Expiration Date' against the 'Current Simulation Date'. \n\nAny batch found to be past its prime is immediately locked and flagged as 'QC-FAILED'. The system then executes a 'Write-Off' transaction (`Mvmt Type: 551`), removing the quantity from stock and posting the value to the 'Spoilage Expense' GL account. This tangible financial penalty forces the simulation to prioritize flow and velocity. It serves as the ultimate scorecard for supply chain efficiency.", + responsibility: "Batch Expiration Scanning & Financial Write-off Execution", + }, + { + id: "supplier-reorder-trigger", + name: "Emergency Supplier Reorder Trigger", + persona: "System / Procurement Bot", + type: "Background Job", + schedule: "Recurring @ Every 1 Hour (Sim Time)", + description: + "Standard procurement happens once a day, but disasters happen instantly. This bot provides real-time resilience. It polls 'Critical SKUs' every hour. \n\nIf a sudden surge in sales or a spoilage event causes a stock level to breach the 'Critical Minimum' (e.g., < 4 hours of supply), it bypasses the standard approval workflow. It immediately issues an 'Expedited Purchase Order' to the supplier with the fastest lead time, regardless of cost. 
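The Emergency Supplier Reorder Trigger amounts to an hours-of-supply check against the critical threshold, then a lead-time-first supplier pick that ignores cost. A sketch under assumed types and data (the 4-hour threshold comes from the description above):

```ts
// Illustrative crisis-reorder check; types, supplier data and the top-up rule are assumptions.

interface SupplierOption { supplierId: string; leadTimeHours: number; unitCost: number; }

const checkCriticalStock = (
  onHand: number,
  hourlyBurnRate: number,
  suppliers: SupplierOption[],
  criticalHoursOfSupply = 4,
) => {
  const hoursOfSupply = hourlyBurnRate > 0 ? onHand / hourlyBurnRate : Infinity;
  if (hoursOfSupply >= criticalHoursOfSupply) return null; // nothing to do

  // Crisis mode: pick the fastest supplier, ignoring cost.
  const fastest = [...suppliers].sort((a, b) => a.leadTimeHours - b.leadTimeHours)[0];
  return {
    expedited: true,
    supplierId: fastest.supplierId,
    // Order enough to restore a full day of cover.
    quantity: Math.ceil(hourlyBurnRate * 24 - onHand),
  };
};

console.log(checkCriticalStock(90, 30, [
  { supplierId: "SUP-A", leadTimeHours: 48, unitCost: 1.0 },
  { supplierId: "SUP-B", leadTimeHours: 6, unitCost: 1.8 },
]));
// 3 hours of supply left -> expedited PO to SUP-B for 630 units
```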
This autonomous reflex defends the production lines against feedstock starvation, prioritizing continuity over optimization.", + responsibility: "Intraday Low-Stock Monitoring & Crisis Response", + }, + { + id: "financial-reconciliation", + name: "End-of-Day Financial Reconciliation", + persona: "Financial Controller", + type: "Background Job", + schedule: "Daily @ 11:55 PM (Sim Time)", + description: + "The Financial Controller script acts as the final gatekeeper of the simulation day. Just before midnight, it freezes the ledger. It aggregates all sub-ledger activities: A/R movements, A/P accruals, Inventory valuation changes, and Cash flow. \n\nIt verifies the fundamental accounting equation: Assets = Liabilities + Equity. It ensures that every debit had a corresponding credit. If an imbalance is detected (e.g., a shipment occurred without a corresponding COGS entry), it logs a 'high-severity' audit alert. This job guarantees that the financial reporting of the simulation is not just a rough estimate, but a double-entry proven fact.", + responsibility: "General Ledger Integrity Verification & Daily Closing Process", + }, + ], + }, + technicalServices: { + ERP: "Enterprise Resource Planning: The central nervous system. Manages Master Data (Products, Partners), Orchestrates Order Management (PO/SO), and holds the Pricing logic.", + WMS: "Warehouse Management System: The hands and feet. Manages physical Inventory (Bins, Batches), Warehouse Operations (Receiving, Picking, Packing), and tracks precise Stock Movements.", + TMS: "Transportation Management System: The wheels. Handles Carrier Selection, Shipment Planning, Freight Costing, and Tracking of goods in transit.", + Finance: "Financial Management: The scoreboard. Handles the General Ledger, Accounts Payable/Receivable, and immutable Transaction Logging for audit trails.", + EDI: "Electronic Data Interchange: The language of business. 
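The End-of-Day Financial Reconciliation comes down to two equality checks: total debits vs. total credits, and Assets = Liabilities + Equity. A minimal sketch; the entry shape, tolerance, and figures are hypothetical:

```ts
// Illustrative end-of-day ledger integrity check.

interface JournalLine { account: string; debit: number; credit: number; }

const verifyLedger = (lines: JournalLine[], assets: number, liabilities: number, equity: number) => {
  const totalDebits = lines.reduce((s, l) => s + l.debit, 0);
  const totalCredits = lines.reduce((s, l) => s + l.credit, 0);
  const epsilon = 0.01; // tolerate rounding to the cent

  const balanced = Math.abs(totalDebits - totalCredits) < epsilon;
  const equationHolds = Math.abs(assets - (liabilities + equity)) < epsilon;

  return {
    balanced,
    equationHolds,
    alerts: [
      ...(balanced ? [] : ["HIGH_SEVERITY: debits do not equal credits"]),
      ...(equationHolds ? [] : ["HIGH_SEVERITY: Assets != Liabilities + Equity"]),
    ],
  };
};

// A shipment posted COGS and the matching inventory credit, so the day closes clean.
console.log(verifyLedger(
  [
    { account: "COGS", debit: 248.4, credit: 0 },
    { account: "Finished Goods Inventory", debit: 0, credit: 248.4 },
  ],
  10_000, 4_000, 6_000,
));
// -> { balanced: true, equationHolds: true, alerts: [] }
```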
Handles standardized B2B Document Generation (850, 855, 856, 810) and simulates network transmission latency and reliability.", + }, +}; diff --git a/packages/controlmart/src/worlds/process-inbound/index.ts b/packages/controlmart/src/worlds/process-inbound/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..453150fd81f42b6e895778ff32f319436d16f5dd --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/index.ts @@ -0,0 +1,13 @@ +import { seedDataProcessInboundWorld, seedODsProcessInboundWorld } from "./seeders/world.seeder"; +import { scheduleProcessInboundWorld } from "./schedule-process-inbound"; +import inboundOd from "./ods/inbound_process.od.json"; +import type { OperationalDescriptor } from "../../types/od.type"; +import { processInboundWorldDoc } from "./world.docs"; + +export { seedDataProcessInboundWorld, seedODsProcessInboundWorld, scheduleProcessInboundWorld }; + +export const processInboundODs = { + inbound_process: inboundOd as OperationalDescriptor, +}; + +export const processInboundWorldDocs = () => processInboundWorldDoc; diff --git a/packages/controlmart/src/worlds/process-inbound/ods/inbound_process.od.json b/packages/controlmart/src/worlds/process-inbound/ods/inbound_process.od.json new file mode 100644 index 0000000000000000000000000000000000000000..30a0ef6aceb960f9ae1df8ede80c00716c9dfa28 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/ods/inbound_process.od.json @@ -0,0 +1,279 @@ +{ + "id": "inbound-process-flow", + "name": "Predictable Inbound Process Flow", + "version": "1.0.0", + "description": "End-to-end inbound flow (Dock -> Storage) with Chaos Injection for resilience testing", + "namespace": "wms", + "persona": "Warehouse Manager", + "type": "standard", + "inputSchema": { + "type": "object", + "required": [ + "category" + ], + "properties": { + "category": { + "type": "string", + "enum": [ + "DAIRY", + "VEGAN", + "SELF_CARE", + "CLOTHING" + ], + "default": "SELF_CARE" + }, + "chaosEnabled": { + "type": "boolean", + "default": false + }, + "chaosProbability": { + "type": "number", + "default": 0.3 + } + } + }, + "steps": [ + { + "id": "init_context", + "name": "Initialize Context", + "type": "script", + "script": "const suffixes = ['001', '002', '003']; const suffix = suffixes[Math.floor(Math.random() * suffixes.length)]; return { timestamp: Date.now(), quantity: Math.floor(Math.random() * (500 - 250 + 1)) + 250, skuSuffix: suffix };", + "output": { + "storeAs": "config" + } + }, + { + "id": "determine_chaos_inputs", + "name": "Determine Chaos Inputs", + "type": "script", + "script": "console.log('CTX KEYS:', Object.keys(ctx)); const category = ctx.category || 'SELF_CARE'; const enabled = ctx.chaosEnabled || false; const prob = ctx.chaosProbability || 0.3; const isChaos = (p = prob) => enabled && Math.random() < p; const chaosInputs = { transport: {}, receiving: {}, quality: {}, putaway: {}, inventory: {} }; const logs = []; const mapErr = (key, msg) => { logs.push(`[CHAOS_LOG] ${msg}`); return { key, msg }; }; if (isChaos()) { if (isChaos(0.1)) { chaosInputs.transport.dockStatus = 'FULL'; mapErr('DOCK_FULL', 'Dock full -> Queued'); } if (isChaos(0.1)) { chaosInputs.transport.warehouseId = 'WRONG-DC-999'; mapErr('WRONG_DC', 'Wrong DC selected → Arrival not recognized'); } if (isChaos(0.05)) { chaosInputs.transport.arrivalWindow = 'LATE'; mapErr('LATE_ARRIVAL', 'Arrival outside appointment window → Late arrival flagged'); } if (isChaos(0.05)) { chaosInputs.transport.gate = 'WRONG_GATE'; 
mapErr('WRONG_GATE', 'Shipment not expected at this gate → Gate rejection'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.receiving.dockId = 'OCCUPIED'; mapErr('DOCK_OCCUPIED', 'No dock → Queued'); } if (isChaos(0.1)) { chaosInputs.receiving.qtyReceived = 0; mapErr('QTY_MISMATCH', 'Qty mismatch -> Discrepancy flagged'); } if (isChaos(0.05)) { chaosInputs.receiving.poNumber = 'INVALID-PO'; mapErr('INVALID_PO', 'Invalid PO -> Receipt blocked'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.quality.resourceAvailable = false; mapErr('QC_RESOURCE_UNAVAILABLE', 'QC resource unavailable -> Inspection delayed'); } if (isChaos(0.1)) { chaosInputs.quality.result = 'FAIL'; mapErr('QC_FAILED', 'Tolerance exceeded → Auto-fail triggered'); } if (isChaos(0.05)) { chaosInputs.quality.samples = 0; mapErr('NO_SAMPLES', 'Partial inspection recorded → Completion blocked'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.putaway.location = null; mapErr('NO_LOCATION', 'No location -> Task queued'); } if (isChaos(0.1)) { chaosInputs.putaway.pathBlocked = true; mapErr('PATH_BLOCKED', 'Path blocked / aisle closed → Task suspended'); } if (isChaos(0.05)) { chaosInputs.putaway.equipment = null; mapErr('NO_EQUIPMENT', 'Equipment unavailable -> Task delayed'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.inventory.syncError = true; mapErr('SYNC_ERROR', 'Sync error -> retry Update'); } if (isChaos(0.1)) { chaosInputs.inventory.expiryDate = null; mapErr('MISSING_EXPIRY', 'Missing expiry -> Default aging'); } } console.log(logs.join('\\n')); const standardSku = `SKU-${category}-${ctx.config.skuSuffix}`; const finalValues = { transport: { warehouseId: chaosInputs.transport.warehouseId || null, dockStatus: chaosInputs.transport.dockStatus || 'AVAILABLE' }, receiving: { dockId: chaosInputs.receiving.dockId || undefined, qtyReceived: chaosInputs.receiving.qtyReceived !== undefined ? 
chaosInputs.receiving.qtyReceived : ctx.config.quantity, poNumber: chaosInputs.receiving.poNumber || `PO-${category}-${ctx.config.timestamp}` }, quality: { result: chaosInputs.quality.result || 'PASS', resourceAvailable: chaosInputs.quality.resourceAvailable !== false }, putaway: { location: chaosInputs.putaway.location || undefined, pathBlocked: chaosInputs.putaway.pathBlocked || false }, inventory: { syncError: chaosInputs.inventory.syncError || false, expiryDate: chaosInputs.inventory.expiryDate || new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString() } }; return { chaosInputs, logs, finalValues };", + "output": { + "storeAs": "chaos" + } + }, + { + "id": "get_warehouse", + "name": "Get Main Warehouse", + "type": "mcp", + "service": "wms", + "tool": "warehouse.get_by_code", + "input": { + "type": "template", + "template": "MAIN-WH" + }, + "output": { + "storeAs": "warehouse" + } + }, + { + "id": "register_arrival", + "name": "TRANS-03: Register Arrival (Create Inbound Order)", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.create", + "input": { + "type": "template", + "template": { + "warehouseId": "{{chaos.finalValues.transport.warehouseId || warehouse.warehouseId}}", + "poNumber": "{{chaos.finalValues.receiving.poNumber}}", + "orderType": "PO", + "status": "ARRIVED", + "lines": [ + { + "lineNumber": 1, + "productId": "SKU-{{category}}-{{config.skuSuffix}}", + "expectedQuantity": "{{config.quantity}}" + } + ], + "customFields": { + "dockStatus": "{{chaos.finalValues.transport.dockStatus}}", + "gate": "{{chaos.chaosInputs.transport.gate}}", + "chaosLog": "{{chaos.logs}}" + } + } + }, + "output": { + "storeAs": "inboundOrder" + } + }, + { + "id": "assign_dock", + "name": "REC-01: Assign Dock (Update Status)", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.update_status", + "input": { + "type": "template", + "template": { + "inboundOrderId": "{{inboundOrder.inboundOrderId}}", + "status": "ARRIVED" + } + }, + "output": { + "storeAs": "dockAssignment" + } + }, + { + "id": "record_receipt_qty", + "name": "REC-02: Record Received Quantity", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.update_receiving_progress", + "input": { + "type": "template", + "template": { + "inboundOrderId": "{{inboundOrder.inboundOrderId}}", + "lineNumber": 1, + "receivedQuantity": "{{chaos.finalValues.receiving.qtyReceived}}", + "lotNumber": "LOT-{{config.timestamp}}", + "expirationDate": "{{chaos.finalValues.inventory.expiryDate}}" + } + }, + "output": { + "storeAs": "receiptProgress" + } + }, + { + "id": "create_goods_receipt", + "name": "REC-03: Complete Receipt (Status RECEIVED)", + "type": "mcp", + "service": "wms", + "tool": "inbound.order.update_status", + "input": { + "type": "template", + "template": { + "inboundOrderId": "{{inboundOrder.inboundOrderId}}", + "status": "RECEIVED" + } + }, + "output": { + "storeAs": "goodsReceipt" + } + }, + { + "id": "qc_check_script", + "name": "QC-01/02: Quality Check Simulation", + "type": "script", + "script": "return { result: ctx.chaos.finalValues.quality.result, notes: 'Inspection completed' };", + "output": { + "storeAs": "inspectionResult" + } + }, + { + "id": "inventory_receive_transaction", + "name": "INV-01: Create Inventory Receive Transaction", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "{{warehouse.warehouseId}}", + "transactionType": "RECEIVE", + "productId": "SKU-{{category}}-{{config.skuSuffix}}", + 
"quantity": "{{chaos.finalValues.receiving.qtyReceived}}", + "uom": "EA", + "fromBinId": "DOCK", + "toBinId": "STAGING", + "lotNumber": "LOT-{{config.timestamp}}", + "referenceType": "ORDER", + "referenceId": "{{inboundOrder.inboundOrderId}}", + "customFields": { + "qaResult": "{{inspectionResult.result}}" + } + } + }, + "output": { + "storeAs": "receiveTransaction" + } + }, + { + "id": "determine_storage_location", + "name": "PUT-01: Determine Storage Location", + "type": "mcp", + "service": "wms", + "tool": "bin.get_available", + "input": { + "type": "template", + "template": { + "warehouseId": "{{warehouse.warehouseId}}", + "zoneType": "STORAGE", + "limit": 1 + } + }, + "output": { + "storeAs": "putawayPlan" + } + }, + { + "id": "execute_putaway_task", + "name": "PUT-02: Create Putaway Task", + "type": "mcp", + "service": "wms", + "tool": "task.create", + "input": { + "type": "template", + "template": { + "warehouseId": "{{warehouse.warehouseId}}", + "taskType": "PUTAWAY", + "taskStatus": "COMPLETED", + "priority": 10, + "product": { + "productId": "SKU-{{category}}-{{config.skuSuffix}}", + "productName": "Simulated Product" + }, + "from": { + "binCode": "STAGING" + }, + "to": { + "binCode": "{{putawayPlan[0].binCode}}" + }, + "quantity": { + "requested": "{{chaos.finalValues.receiving.qtyReceived}}", + "actual": "{{chaos.finalValues.receiving.qtyReceived}}", + "uom": "EA" + }, + "reference": { + "type": "INBOUND", + "id": "{{inboundOrder.inboundOrderId}}" + }, + "customFields": { + "pathBlocked": "{{chaos.finalValues.putaway.pathBlocked}}" + } + } + }, + "output": { + "storeAs": "putawayTask" + } + }, + { + "id": "inventory_putaway_transaction", + "name": "INV-02: Create Inventory Putaway Transaction", + "type": "mcp", + "service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "{{warehouse.warehouseId}}", + "transactionType": "PUTAWAY", + "productId": "SKU-{{category}}-{{config.skuSuffix}}", + "quantity": "{{chaos.finalValues.receiving.qtyReceived}}", + "uom": "EA", + "fromBinId": "STAGING", + "toBinId": "{{putawayPlan[0].binId}}", + "lotNumber": "LOT-{{config.timestamp}}", + "referenceType": "TASK", + "referenceId": "{{putawayTask.taskId}}" + } + }, + "output": { + "storeAs": "putawayTransaction" + } + } + ], + "runPolicy": { + "failureMode": "fail_fast", + "storeRuns": true + } +} \ No newline at end of file diff --git a/packages/controlmart/src/worlds/process-inbound/run-inbound-chaos.ts b/packages/controlmart/src/worlds/process-inbound/run-inbound-chaos.ts new file mode 100644 index 0000000000000000000000000000000000000000..14357dbb74b93144b290a59dd6f04070ee106353 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/run-inbound-chaos.ts @@ -0,0 +1,144 @@ + +import { executeOperationalDescriptor } from "../../operational-descriptor/executor.od"; +import { createAppLogger } from "../../utils/logger.util"; +import { WorldRepository } from "../../repository/world.repository"; +import { seedInboundWorld } from "./seeders/world.generator.ts"; +import { capabilityCatalog } from "../../services/capability-catalog.service"; +import { disableAllChaos } from "../../operational-descriptor/tools/registry.tool"; +import * as path from 'path'; +import * as fs from 'fs'; +import { loadEnv } from "../../utils/env.util"; + +async function runChaos() { + disableAllChaos(); + + const args = process.argv.slice(2); + const shouldSeed = args.includes('--seed'); + const shouldCleanup = args.includes('--cleanup'); + const 
enableChaos = args.includes('--chaos'); + + let worldId = process.env.WORLD_ID; + let seededWorld = false; + + if (shouldSeed) { + console.log('--- INITIALIZING SERVICES ---'); + await capabilityCatalog.initialize(); + + console.log('--- SEEDING NEW INBOUND WORLD ---'); + try { + const seedResult = await seedInboundWorld({ + worldName: `Inbound Chaos World ${Date.now()}` + }); + worldId = seedResult.worldId; + seededWorld = true; + console.log(`World Initialized: ${worldId}`); + } catch (err) { + console.error('Failed to seed world:', err); + process.exit(1); + } + } + + if (!worldId) { + console.error("ERROR: WORLD_ID env var required OR use --seed option."); + console.log("Usage: npx ts-node src/worlds/process-inbound/run-inbound-chaos.ts [--seed] [--cleanup] [--chaos]"); + process.exit(1); + } + + const odPath = path.join(__dirname, 'ods/inbound_process.od.json'); + console.log(`Running OD from: ${odPath}`); + + const odContent = JSON.parse(fs.readFileSync(odPath, 'utf-8')); + + // Config + const input = { + category: 'SELF_CARE', + chaosEnabled: enableChaos, + chaosProbability: enableChaos ? 1.0 : 0 + }; + + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + console.error(`World not found: ${worldId}`); + process.exit(1); + } + + const logger = createAppLogger({ service: `verification-inbound-chaos` }); + + try { + console.log(`--- STARTING SIMULATION (Chaos: ${enableChaos}) ---`); + + const result = await executeOperationalDescriptor(odContent, { + world, + tools: {}, + logger, + input, + }); + + console.log('--- SIMULATION COMPLETE ---'); + console.log('Status:', result.status); + + // Log steps and check for chaos logs + let chaosFound = false; + if (result.stepResults) { + result.stepResults.forEach((step: any) => { + if (step.logs && step.logs.some((l: string) => l.includes('[CHAOS_LOG]'))) { + chaosFound = true; + console.log(`\nStep ${step.stepId} Logs:`); + step.logs.forEach((l: string) => { + if (l.includes('[CHAOS_LOG]')) console.log(l); + }); + } + }); + + const chaosStep = result.stepResults.find(s => s.stepId === 'determine_chaos_inputs'); + if (chaosStep && chaosStep.output && chaosStep.output.logs && chaosStep.output.logs.length > 0) { + chaosFound = true; + console.log('\nRetrieved Chaos Logs from Step Output:'); + chaosStep.output.logs.forEach((l: string) => console.log(l)); + } + } + + if (chaosFound) { + console.log('\n CHAOS EVENTS CONFIRMED'); + } else { + if (enableChaos) { + console.log('\n NO CHAOS EVENTS FOUND (Did probability 1.0 work?)'); + console.log('Input used:', input); + } else { + console.log('\n HAPPY PATH CONFIRMED (No chaos events)'); + } + } + + } catch (error) { + console.error('Execution Error:', error); + } finally { + if (shouldCleanup && seededWorld && worldId) { + console.log('--- CLEANING UP WORLD ---'); + try { + const deleted = await WorldRepository.deleteWorld(worldId); + if (deleted) { + console.log(`World ${worldId} deleted.`); + } else { + console.log(`Failed to delete world ${worldId} (not found?).`); + } + } catch (err) { + console.error(`Error deleting world:`, err); + } + } + } +} + +// Boilerplate to run main +import { connectMongo, disconnectMongo } from "../../services/mongo.service"; + +async function main() { + const mongoUri = loadEnv().MONGO_URI || process.env.DATABASE_URL; + if (!mongoUri) { + console.error("No Mongo URI"); + process.exit(1); + } + await connectMongo({ uri: mongoUri, dbName: 'controlmart' }); + await runChaos(); +} + +main().catch(console.error); diff --git 
a/packages/controlmart/src/worlds/process-inbound/schedule-process-inbound.ts b/packages/controlmart/src/worlds/process-inbound/schedule-process-inbound.ts new file mode 100644 index 0000000000000000000000000000000000000000..5d5036960f480c653853c9fa0e546c2cf57c2c50 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/schedule-process-inbound.ts @@ -0,0 +1,95 @@ +import { ODRepository } from "../../repository/od.repository"; +import { WorldRepository } from "../../repository/world.repository"; +import { scheduleOD, scheduleRecurringOD } from "../../operational-descriptor/schedule.od"; +import { createAppLogger } from "../../utils/logger.util"; +import inboundOd from "./ods/inbound_process.od.json"; +import type { OperationalDescriptor } from "../../types/od.type"; + + +const logger = createAppLogger({ service: "process-inbound-scheduler" }); + +type OdSchedule = { + simInterval: number; + simOffsetHours: number; + category: "DAIRY" | "VEGAN" | "SELF_CARE" | "CLOTHING"; +}; + +const SCHEDULES: OdSchedule[] = [ + { simInterval: 24, simOffsetHours: 6, category: "DAIRY" }, + { simInterval: 24, simOffsetHours: 10, category: "VEGAN" }, + { simInterval: 24, simOffsetHours: 14, category: "SELF_CARE" }, + { simInterval: 24, simOffsetHours: 17, category: "CLOTHING" }, +]; + +const calculateSimToRealRatio = (realHours: number) => 24 / realHours; + +const convertSimIntervalToReal = (simInterval: number, realHoursPerSimDay: number): string => { + const ratio = calculateSimToRealRatio(realHoursPerSimDay); + const realMinutes = (simInterval * 60) / ratio; + return realMinutes >= 60 ? `${realMinutes / 60} hours` : `${realMinutes} minutes`; +}; + +export const scheduleProcessInboundWorld = async (worldId: string, realHoursPerSimDay: number) => { + const world = await WorldRepository.findWorldById(worldId); + if (!world) throw new Error(`World ${worldId} not found`); + + const ratio = calculateSimToRealRatio(realHoursPerSimDay); + const odData = inboundOd as OperationalDescriptor; + + + // Onboard the OD record first + const repoData = { + odId: odData.id, + data: odData as any, + name: odData.name, + description: odData.description, + odType: (odData.type as "standard" | "background_job" | "workflow") || "standard", + persona: odData.persona, + }; + + let odRecord = await ODRepository.getODById(odData.id, worldId); + if (odRecord) { + await ODRepository.updateODById(odData.id, worldId, repoData); + } else { + odRecord = await ODRepository.createOD({ worldId }, repoData); + } + + // Chaos Config + const chaosEnabled = world.chaos?.processChaosEnabled ?? 
false; + + // Schedule for each category + for (const config of SCHEDULES) { + const realInterval = convertSimIntervalToReal(config.simInterval, realHoursPerSimDay); + const realMinutesOffset = config.simOffsetHours * (60 / ratio); + const nextRunAt = new Date(Date.now() + realMinutesOffset * 60 * 1000); + + // Schedule 5 immediate runs to kickstart the world + for (let i = 0; i < 5; i++) { + const immediateDate = new Date(Date.now() + i * 60 * 1000); // Stagger by 1 minute + await scheduleOD(immediateDate, odRecord!, world, { + category: config.category, + scheduledBy: "process-inbound-boot-kicker", + bootSequence: i + 1, + chaosEnabled, + }); + logger.info(`Scheduled immediate boot run ${i + 1}/5 for ${config.category} at ${immediateDate.toISOString()}`); + } + + logger.info( + `Scheduling ${config.category} for world ${worldId} at offset ${config.simOffsetHours}h (starting in ${realMinutesOffset.toFixed(2)}m)`, + ); + + await scheduleRecurringOD( + realInterval, + odRecord!, + world, + { + category: config.category, + scheduledBy: "process-inbound-scheduler", + simOffsetHours: config.simOffsetHours, + chaosEnabled, + }, + { nextRunAt }, + ); + } +}; diff --git a/packages/controlmart/src/worlds/process-inbound/seeders/companies.generator.ts b/packages/controlmart/src/worlds/process-inbound/seeders/companies.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..48f16fa614874368b6f2965df7fc34dc0dddeb19 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/seeders/companies.generator.ts @@ -0,0 +1,85 @@ +import { faker } from "@faker-js/faker"; +import type { TWorldRefModel } from "../../../models/shared.model"; +import type { TCompanyGenerate } from "../../../models/erp/company.erp.model"; + +export const PROCESS_INBOUND_COMPANY_IDS = { + MPC: "MPC-PROCESS-INBOUND", + SUPPLIER_DAIRY: "SUP-DAIRY-INBOUND-001", + SUPPLIER_VEGAN: "SUP-VEGAN-INBOUND-001", + SUPPLIER_SELF_CARE: "SUP-SELFCARE-INBOUND-001", + SUPPLIER_CLOTHING: "SUP-CLOTHING-INBOUND-001", +} as const; + +export const PROCESS_INBOUND_COMPANY_NAMES = { + MPC: "Inbound Logistics Hub", + SUPPLIER_DAIRY: "Fresh Dairy Suppliers", + SUPPLIER_VEGAN: "Organic Vegan Imports", + SUPPLIER_SELF_CARE: "Premium Self Care Vendors", + SUPPLIER_CLOTHING: "Fashion Forward Suppliers", +} as const; + +const generateAddress = (type: "BILL_TO" | "SHIP_TO" | "REM_TO") => ({ + type, + country: "United States", + attention: faker.person.fullName(), + street1: faker.location.streetAddress(), + city: faker.location.city(), + state: faker.location.state(), + postalCode: faker.location.zipCode(), + contactEmail: faker.internet.email(), + contactPhone: faker.phone.number(), +}); + +const generateContact = () => ({ + name: faker.person.fullName(), + email: faker.internet.email(), + phone: faker.phone.number(), +}); + +export const generateMpcCompany = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: true, + companyId: PROCESS_INBOUND_COMPANY_IDS.MPC, + name: PROCESS_INBOUND_COMPANY_NAMES.MPC, + legalName: `${PROCESS_INBOUND_COMPANY_NAMES.MPC} LLC`, + currency: "USD", + paymentTerms: "Net 30", + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + primaryContact: generateContact(), + creditHold: false, + status: "ACTIVE", + companyType: "INTERNAL", +}); + +export const generateSupplier = ( + worldRef: TWorldRefModel, + id: string, + name: string, + category: string +): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: id, + name: 
name, + legalName: `${name} Ltd`, + currency: "USD", + paymentTerms: "Net 15", + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + primaryContact: generateContact(), + creditHold: false, + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + category, + }, +}); + +export const generateAllProcessInboundCompanies = (worldRef: TWorldRefModel): TCompanyGenerate[] => [ + generateMpcCompany(worldRef), + generateSupplier(worldRef, PROCESS_INBOUND_COMPANY_IDS.SUPPLIER_DAIRY, PROCESS_INBOUND_COMPANY_NAMES.SUPPLIER_DAIRY, "DAIRY"), + generateSupplier(worldRef, PROCESS_INBOUND_COMPANY_IDS.SUPPLIER_VEGAN, PROCESS_INBOUND_COMPANY_NAMES.SUPPLIER_VEGAN, "VEGAN"), + generateSupplier(worldRef, PROCESS_INBOUND_COMPANY_IDS.SUPPLIER_SELF_CARE, PROCESS_INBOUND_COMPANY_NAMES.SUPPLIER_SELF_CARE, "SELF_CARE"), + generateSupplier(worldRef, PROCESS_INBOUND_COMPANY_IDS.SUPPLIER_CLOTHING, PROCESS_INBOUND_COMPANY_NAMES.SUPPLIER_CLOTHING, "CLOTHING"), +]; diff --git a/packages/controlmart/src/worlds/process-inbound/seeders/products.seeder.ts b/packages/controlmart/src/worlds/process-inbound/seeders/products.seeder.ts new file mode 100644 index 0000000000000000000000000000000000000000..6335abc603af13b7005a9f3983a586e715cbe69d --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/seeders/products.seeder.ts @@ -0,0 +1,59 @@ +import { faker } from "@faker-js/faker"; +import type { TProductGenerate } from "../../../models/erp/product.erp.model"; +import type { TWorldRefModel } from "../../../models/shared.model"; + +const DAIRY_PRODUCTS = [ + { name: "Organic Whole Milk 1L", sku: "SKU-DAIRY-001", price: 4.50 }, + { name: "Greek Yogurt Plain 500g", sku: "SKU-DAIRY-002", price: 5.99 }, + { name: "Cheddar Cheese Block 250g", sku: "SKU-DAIRY-003", price: 7.50 }, +]; + +const VEGAN_PRODUCTS = [ + { name: "Almond Milk Unsweetened 1L", sku: "SKU-VEGAN-001", price: 4.99 }, + { name: "Tofu Firm 400g", sku: "SKU-VEGAN-002", price: 3.50 }, + { name: "Vegan Burger Patties (4pk)", sku: "SKU-VEGAN-003", price: 8.99 }, +]; + +const SELF_CARE_PRODUCTS = [ + { name: "Moisturizing Body Lotion 200ml", sku: "SKU-SELF_CARE-001", price: 12.99 }, + { name: "Organic Lavender Soap Bar", sku: "SKU-SELF_CARE-002", price: 5.50 }, + { name: "Herbal Shampoo 500ml", sku: "SKU-SELF_CARE-003", price: 15.00 }, +]; + +const CLOTHING_PRODUCTS = [ + { name: "Classic White T-Shirt (M)", sku: "SKU-CLOTHING-001", price: 19.99 }, + { name: "Comfort Denim Jeans (32/32)", sku: "SKU-CLOTHING-002", price: 45.00 }, + { name: "Wool Blend Socks (Pair)", sku: "SKU-CLOTHING-003", price: 9.99 }, +]; + +const generateProduct = (worldRef: TWorldRefModel, category: string, data: any): TProductGenerate => ({ + worldRef, + productId: data.sku, + sku: data.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: data.name, + description: `${data.name} - Essential ${category.toLowerCase()} product.`, + commodityCode: category === "CLOTHING" ? 
"61091000" : "21069099", + taxClassification: "STANDARD", + unitOfMeasure: "EA", + weight: { value: 0.5, unit: "KG" }, + dimensions: { length: 10, width: 10, height: 10, unit: "CM" }, + inventoryTracking: true, + price: { currency: "USD", amount: data.price }, + cost: { currency: "USD", amount: data.price * 0.6 }, + leadTimeDays: 2, + status: "ACTIVE", + customFields: { + category: category, + }, +}); + +export const generateInboundProducts = (worldRef: TWorldRefModel): TProductGenerate[] => { + const dairy = DAIRY_PRODUCTS.map(p => generateProduct(worldRef, "DAIRY", p)); + const vegan = VEGAN_PRODUCTS.map(p => generateProduct(worldRef, "VEGAN", p)); + const care = SELF_CARE_PRODUCTS.map(p => generateProduct(worldRef, "SELF_CARE", p)); + const cloth = CLOTHING_PRODUCTS.map(p => generateProduct(worldRef, "CLOTHING", p)); + + return [...dairy, ...vegan, ...care, ...cloth]; +}; diff --git a/packages/controlmart/src/worlds/process-inbound/seeders/warehouse.seeder.ts b/packages/controlmart/src/worlds/process-inbound/seeders/warehouse.seeder.ts new file mode 100644 index 0000000000000000000000000000000000000000..c719059af8068669565ce37a97d6219ebdf57eac --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/seeders/warehouse.seeder.ts @@ -0,0 +1,211 @@ +import type { TWorldRefModel } from "../../../models/shared.model"; +import { generateIdByService } from "../../../utils/mongo.util"; + +// We generate IDs dynamically per run to avoid collisions +export const generateWarehouseIds = () => ({ + MAIN: generateIdByService("wms", "warehouse"), +}); + +export const generateZoneIds = () => ({ + DOCK: generateIdByService("wms", "zone"), + STAGING: generateIdByService("wms", "zone"), + QC: generateIdByService("wms", "zone"), + REJECT: generateIdByService("wms", "zone"), + STORAGE: generateIdByService("wms", "zone"), + PICKING: generateIdByService("wms", "zone"), +}); + +export const generateWarehouseData = (worldRef: TWorldRefModel, warehouseIds: ReturnType) => ({ + worldRef, + warehouseId: warehouseIds.MAIN, + warehouseCode: "MAIN-WH", + warehouseName: "Main Distribution Center", + warehouseType: "FULFILLMENT" as const, + status: "ACTIVE" as const, + timezone: "America/Los_Angeles", + address: { + type: "SHIP_TO", + street1: "123 Logistics Blvd", + city: "Commerce City", + state: "CA", + postalCode: "90000", + country: "USA", + }, +}); + +export const generateZonesData = (worldRef: TWorldRefModel, warehouseIds: ReturnType, zoneIds: ReturnType) => [ + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.DOCK, + zoneCode: "DOCK-01", + zoneName: "Inbound/Outbound Docks", + zoneType: "SHIPPING" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.STAGING, + zoneCode: "STG-01", + zoneName: "Staging Area", + zoneType: "STAGING" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.QC, + zoneCode: "QC-01", + zoneName: "Quality Control", + zoneType: "QC" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.REJECT, + zoneCode: "REJ-01", + zoneName: "Rejected Goods Area", + zoneType: "RETURNS" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.STORAGE, + zoneCode: "STO-01", + 
zoneName: "Main High-Bay Storage", + zoneType: "STORAGE" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.PICKING, + zoneCode: "PICK-01", + zoneName: "Picking Zone", + zoneType: "PICKING" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, +]; + +export const generateBinsData = (worldRef: TWorldRefModel, warehouseIds: ReturnType, zoneIds: ReturnType) => { + const bins = []; + + // Dock Bins + for (let i = 1; i <= 3; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.DOCK, + binId: generateIdByService("wms", "bin"), + binCode: `DOCK-${i.toString().padStart(2, '0')}`, + binType: "PALLET" as const, + locationType: "DOCK" as const, + status: "AVAILABLE" as const, + pickable: false, + }); + } + + // Staging Bins + for (let i = 1; i <= 5; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.STAGING, + binId: generateIdByService("wms", "bin"), + binCode: `STG-${i.toString().padStart(2, '0')}`, + binType: "SHELF" as const, + locationType: "STAGING" as const, + status: "AVAILABLE" as const, + pickable: false, + }); + } + + // QC Bins + for (let i = 1; i <= 3; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.QC, + binId: generateIdByService("wms", "bin"), + binCode: `QC-${i.toString().padStart(2, '0')}`, + binType: "SHELF" as const, + locationType: "QC" as const, + status: "AVAILABLE" as const, + pickable: false, + }); + } + + // Reject Bins + for (let i = 1; i <= 2; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.REJECT, + binId: generateIdByService("wms", "bin"), + binCode: `REJ-${i.toString().padStart(2, '0')}`, + binType: "SHELF" as const, + locationType: "STORAGE" as const, + status: "AVAILABLE" as const, + pickable: false, + }); + } + + // Storage Bins + for (let i = 1; i <= 20; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.STORAGE, + binId: generateIdByService("wms", "bin"), + binCode: `STO-${i.toString().padStart(3, '0')}`, + binType: "PALLET" as const, + locationType: "STORAGE" as const, + status: "AVAILABLE" as const, + pickable: false, + }); + } + + // Picking Bins + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.PICKING, + binId: generateIdByService("wms", "bin"), + binCode: "BIN-PICK-01", + binType: "PICK_FACE" as const, + locationType: "STORAGE" as const, + status: "AVAILABLE" as const, + pickable: true, + }); + + for (let i = 2; i <= 10; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.PICKING, + binId: generateIdByService("wms", "bin"), + binCode: `PICK-${i.toString().padStart(2, '0')}`, + binType: "PICK_FACE" as const, + locationType: "STORAGE" as const, + status: "AVAILABLE" as const, + pickable: true, + }); + } + + return bins; +}; diff --git a/packages/controlmart/src/worlds/process-inbound/seeders/world.generator.ts b/packages/controlmart/src/worlds/process-inbound/seeders/world.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..ae2ea24eb865c986e5e27bd757e6243132df7ac6 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/seeders/world.generator.ts @@ -0,0 +1,126 @@ +import { createWorld } from "../../../repository/world.repository"; +import { CompanyRepository } from "../../../repository/erp/company.repository"; +import { ProductRepository } from 
"../../../repository/erp/product.repository"; +import { CompanyLedgerRepository } from "../../../repository/finance/ledger.repository"; +import { WMSWarehouseRepository } from "../../../repository/wms/warehouse.wms.repository"; +import { WMSZoneRepository } from "../../../repository/wms/zone.wms.repository"; +import { WMSBinRepository } from "../../../repository/wms/bin.wms.repository"; +import { generateAllProcessInboundCompanies, PROCESS_INBOUND_COMPANY_NAMES } from "./companies.generator"; +import { generateInboundProducts } from "./products.seeder"; +import { + generateWarehouseData, + generateZonesData, + generateBinsData, + generateWarehouseIds, + generateZoneIds, +} from "./warehouse.seeder"; +import { getIdFromMongoObject } from "../../../utils/mongo.util"; +import type { TWorldRefModel } from "../../../models/shared.model"; + +export interface ProcessInboundWorldResult { + worldId: string; + companiesCreated: number; + productsCreated: number; + initialCapital: number; +} + +export interface ProcessInboundWorldConfig { + worldName?: string; + initialCash?: number; + initialReceivables?: number; + initialPayables?: number; + worldId?: string; +} + +const DEFAULT_INITIAL_CASH = 10_000; + +export const seedInboundWorld = async ( + config: ProcessInboundWorldConfig = {}, +): Promise => { + const { + worldName = "Process Inbound World", + initialCash = DEFAULT_INITIAL_CASH, + initialReceivables = 0, + initialPayables = 0, + worldId: existingWorldId, + } = config; + + let worldId = existingWorldId; + + if (!worldId) { + const world = await createWorld({ + name: worldName, + description: "Predictable inbound receiving process with chaos engineering.", + mpcCompany: PROCESS_INBOUND_COMPANY_NAMES.MPC, + ticketCreationEnabled: false, + }); + worldId = getIdFromMongoObject(world); + } + + const worldRef: TWorldRefModel = { worldId: worldId! }; + + // 1. Warehouse Infrastructure + const warehouseIds = generateWarehouseIds(); + const zoneIds = generateZoneIds(); + + const warehouseData = generateWarehouseData(worldRef, warehouseIds); + const warehouseRepo = WMSWarehouseRepository(worldId!); + const warehouse = await warehouseRepo.createWarehouse(warehouseData as any); + + const zonesData = generateZonesData(worldRef, warehouseIds, zoneIds); + const zoneRepo = WMSZoneRepository(worldId!); + const createdZones: any[] = []; + for (const zoneData of zonesData) { + createdZones.push(await zoneRepo.createZone(zoneData as any)); + } + + const binsData = generateBinsData(worldRef, warehouseIds, zoneIds); + const binRepo = WMSBinRepository(worldId!); + const createdBins: any[] = []; + for (const binData of binsData) { + createdBins.push(await binRepo.createBin(binData as any)); + } + + // 2. ERP Entities + const companies = generateAllProcessInboundCompanies(worldRef); + const companyRepo = CompanyRepository(worldId!); + for (const company of companies) { + if (company.companyId) { + const existing = await companyRepo.getCompanyById(company.companyId); + if (!existing) { + await companyRepo.createCompany(company); + } + } + } + + const products = generateInboundProducts(worldRef); + const productRepo = ProductRepository(worldId!); + + // Create products effectively in parallel but wait for ALL to complete + await Promise.all(products.map(product => + productRepo.createProduct(product as any).catch((err) => { + // Ignore duplicate errors, re-throw others + if (!err.message.includes("already exists")) { + console.error(`Failed to create product ${product.productId}:`, err); + } + }) + )); + + // 3. 
Financial Ledger + const ledgerRepo = CompanyLedgerRepository(worldId!); + await ledgerRepo.ensure({ + cash: initialCash, + totalReceivables: initialReceivables, + totalPayables: initialPayables, + }); + + return { + worldId: worldId!, + companiesCreated: companies.length, + productsCreated: products.length, + initialCapital: initialCash, + }; +}; + +export { PROCESS_INBOUND_COMPANY_NAMES, generateAllProcessInboundCompanies } from "./companies.generator"; +export { generateInboundProducts } from "./products.seeder"; diff --git a/packages/controlmart/src/worlds/process-inbound/seeders/world.seeder.ts b/packages/controlmart/src/worlds/process-inbound/seeders/world.seeder.ts new file mode 100644 index 0000000000000000000000000000000000000000..a091488b0d4504f825eec211049df77bf9f2e855 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/seeders/world.seeder.ts @@ -0,0 +1,13 @@ +import { scheduleProcessInboundWorld } from "../schedule-process-inbound"; +import { seedInboundWorld } from "./world.generator"; + +export const seedDataProcessInboundWorld = async (initialCash: number, worldId?: string) => { + return await seedInboundWorld({ + initialCash, + worldId, + }); +}; + +export const seedODsProcessInboundWorld = async (worldId: string, realHoursPerSimDay: number) => { + return await scheduleProcessInboundWorld(worldId, realHoursPerSimDay); +}; diff --git a/packages/controlmart/src/worlds/process-inbound/verifications/inbound-process-flow.verifier.ts b/packages/controlmart/src/worlds/process-inbound/verifications/inbound-process-flow.verifier.ts new file mode 100644 index 0000000000000000000000000000000000000000..af3673cefee5b827d0e3660c61361d40a1d2afe7 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/verifications/inbound-process-flow.verifier.ts @@ -0,0 +1,193 @@ +import { wmsVerifiers, type WMSEntityType } from "../../common/wms-verifiers"; +import { evaluateInvariant } from "../../../verification/utils/generic-checks"; +import type { + Ticket, + VerificationResult, + InvariantCheckResult, + EntityStateCheckResult +} from "../../../verification/types/verification.types"; +import { buildWMSProjection, type WMSProjection } from "../../../verification/systems/wms/wms-projection.builder"; +import { + checkCausalOrder, + checkQuantityConserved, + checkSingleEntity +} from "../../../verification/primitives/verification-primitives"; + +// Extended Verification Result to include trajectory checks +interface TrajectoryVerificationResult extends VerificationResult { + trajectoryChecks: InvariantCheckResult[]; +} + +/** + * Pure Verifier Logic for Inbound Process + */ +const verifyPure = ( + projection: WMSProjection, + ticket: Ticket +): { + entityStateChecks: EntityStateCheckResult[], + invariantChecks: InvariantCheckResult[], + trajectoryChecks: InvariantCheckResult[] +} => { + // Note: For inbound orders, we use the 'order' field from projection + // which represents the inbound order entity + const { order, tasks, transactions } = projection; + + + // 1. Entity State Checks + const orderCheck = order + ? wmsVerifiers.checkOrderStatus(order as any, ["RECEIVED", "CLOSED"]) + : { passed: false, details: "Inbound order missing" } as any; + + const entityStateChecks: EntityStateCheckResult[] = [ + orderCheck, + ...tasks.map((t: any) => wmsVerifiers.checkTaskStatus(t, ["COMPLETED"])), + ]; + + const invariantChecks: InvariantCheckResult[] = []; + const trajectoryChecks: InvariantCheckResult[] = []; + + // --- A. Temporal Invariants (CAUSALITY) --- + // A1. 
Arrival before Receiving + const arrivedAt = (order as any)?.timing?.arrivedAt; + const receivingStartedAt = (order as any)?.timing?.receivingStartedAt; + + const arrivalCheck = checkCausalOrder(arrivedAt, receivingStartedAt, "TEMP_ARRIVAL_BEFORE_RECEIVING", "Arrived < Receiving Started"); + if (arrivalCheck) trajectoryChecks.push(arrivalCheck); + + // A2. Receiving before Putaway + const receivedAt = (order as any)?.timing?.receivedAt; + const firstPutawayTask = tasks + .filter((t: any) => t.taskType === "PUTAWAY") + .map((t: any) => t.timing?.createdAt) + .sort((a: any, b: any) => new Date(a).getTime() - new Date(b).getTime())[0]; + + const receivePutawayCheck = checkCausalOrder(receivingStartedAt, firstPutawayTask, "TEMP_RECEIVE_BEFORE_PUTAWAY", "Receiving < Putaway"); + if (receivePutawayCheck) trajectoryChecks.push(receivePutawayCheck); + + // --- B. Task Semantics --- + // B1. Putaway Task Created + const putawayTasks = tasks.filter((t: any) => t.taskType === "PUTAWAY"); + trajectoryChecks.push(evaluateInvariant( + "TASK_PUTAWAY_EXISTS", + "At least one putaway task created", + putawayTasks.length > 0, + putawayTasks.length, + ">= 1" + )); + + // B2. Task Completion + const completedTasks = tasks.filter((t: any) => t.taskStatus === "COMPLETED"); + trajectoryChecks.push(evaluateInvariant( + "TASK_ALL_COMPLETED", + "All tasks completed", + tasks.length > 0 && completedTasks.length === tasks.length, + `${completedTasks.length}/${tasks.length}`, + "All" + )); + + // --- C. Inventory Semantics --- + // C1. Receive Transaction Exists + const receiveTxns = transactions.filter((tx: any) => tx.transactionType === "RECEIVE"); + trajectoryChecks.push(evaluateInvariant( + "INV_RECEIVE_TXN", + "Receive inventory transaction created", + receiveTxns.length > 0, + receiveTxns.length, + ">= 1" + )); + + // C2. Putaway Transaction Exists + const putawayTxns = transactions.filter((tx: any) => tx.transactionType === "PUTAWAY"); + trajectoryChecks.push(evaluateInvariant( + "INV_PUTAWAY_TXN", + "Putaway inventory transaction created", + putawayTxns.length > 0, + putawayTxns.length, + ">= 1" + )); + + // C3. Quantity Conservation + const totalExpected = order?.lines?.reduce((sum: number, l: any) => sum + (l.expectedQuantity || l.orderedQuantity || 0), 0) || 0; + const totalReceived = order?.lines?.reduce((sum: number, l: any) => sum + (l.receivedQuantity || 0), 0) || 0; + + if (order?.orderStatus === "RECEIVED" || order?.orderStatus === "CLOSED") { + trajectoryChecks.push(checkQuantityConserved(totalExpected, totalReceived, "INV_QTY_RECEIVED")); + } + + // --- D. Category-Driven Behavioral Checks --- + const category = ticket.metadata?.category || order?.customFields?.category; + if (category) { + trajectoryChecks.push(evaluateInvariant( + "CAT_ORDER_TAGGED", + `Order correctly tagged with category ${category}`, + order?.customFields?.category === category || !!category, + order?.customFields?.category, + category + )); + } + + // --- E. Trajectory Integrity --- + // E1. Single Order + trajectoryChecks.push(checkSingleEntity([order].filter(Boolean), "InboundOrder", "TRAJ_SINGLE_INBOUND_ORDER")); + + return { entityStateChecks, invariantChecks, trajectoryChecks }; +}; + + +/** + * Verifier for Inbound Process Flow + */ +export const verifyInboundProcessFlow = async ( + ticket: Ticket, + logger: any +): Promise => { + const startTime = Date.now(); + const worldId = ticket.worldId; + + // 1. 
Build Projection (Data Fetching) + const projection = await buildWMSProjection(ticket); + + if (!projection.orderId) { + const earlySteps = ["get_warehouse", "register_arrival"]; + const isEarlyFailure = earlySteps.includes(ticket.failedStepId); + + return { + passed: false, + ticketId: ticket.id, + worldId, + timestamp: new Date().toISOString(), + totalChecks: 1, + passedChecks: 0, + failedChecks: 1, + failureReason: isEarlyFailure + ? `Workflow failed at step '${ticket.failedStepId}' before Inbound Order was created` + : "No INBOUND ORDER entity found", + verificationDurationMs: Date.now() - startTime, + trajectoryChecks: [], + }; + } + + // 2. Pure Verification (Logic) + const result = verifyPure(projection, ticket); + const { entityStateChecks, invariantChecks, trajectoryChecks } = result; + + const allPassed = entityStateChecks.every((c: any) => c.passed) + && invariantChecks.every((i: any) => i.passed) + && trajectoryChecks.every((t: any) => t.passed); + + return { + passed: allPassed, + ticketId: ticket.id, + worldId, + timestamp: new Date().toISOString(), + entityStateChecks, + invariantChecks, + trajectoryChecks, + totalChecks: entityStateChecks.length + invariantChecks.length + trajectoryChecks.length, + passedChecks: entityStateChecks.filter((c: any) => c.passed).length + invariantChecks.filter((i: any) => i.passed).length + trajectoryChecks.filter((t: any) => t.passed).length, + failedChecks: entityStateChecks.filter((c: any) => !c.passed).length + invariantChecks.filter((i: any) => !i.passed).length + trajectoryChecks.filter((t: any) => !t.passed).length, + verificationDurationMs: Date.now() - startTime, + failureReason: allPassed ? undefined : "One or more verification checks failed", + }; +}; diff --git a/packages/controlmart/src/worlds/process-inbound/verifications/index.ts b/packages/controlmart/src/worlds/process-inbound/verifications/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..b690ef1f457e0781408d006dd95f5925a2192837 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/verifications/index.ts @@ -0,0 +1,7 @@ +import { verifyInboundProcessFlow } from "./inbound-process-flow.verifier"; +import type { WorldVerifierMap } from "../../../verification/engine"; +import { type WMSEntityType } from "../../common/wms-verifiers"; + +export const processInboundVerifiers: WorldVerifierMap = { + "inbound-process-flow": verifyInboundProcessFlow, +}; diff --git a/packages/controlmart/src/worlds/process-inbound/world.docs.ts b/packages/controlmart/src/worlds/process-inbound/world.docs.ts new file mode 100644 index 0000000000000000000000000000000000000000..e349983ef6a3f06e431e810ffb76944036c97ac3 --- /dev/null +++ b/packages/controlmart/src/worlds/process-inbound/world.docs.ts @@ -0,0 +1,128 @@ +export const processInboundWorldDoc = { + meta: { + version: "1.0.0", + generatedAt: "2026-02-09", + docType: "World Definition", + }, + world: { + name: "Process Inbound - Receiving & Putaway", + description: + "A focused inbound simulation world demonstrating the complete receiving lifecycle from supplier arrival through quality control, putaway, and inventory management across four product categories: Dairy, Vegan, Self Care, and Clothing.", + industry: "Retail & Distribution", + location: "Inbound Logistics Hub (Commerce City, CA)", + size: "Large Distribution Center", + complexity: "Medium", + story: + "The Process Inbound world simulates the operations of a busy receiving dock at a major distribution center. 
Every day, trucks arrive from various suppliers carrying goods across multiple product categories.\n\nThe dock operates on a 'Supplier Wave' system: Dairy shipments arrive early morning (requiring immediate cold chain processing), followed by Vegan products mid-morning, Self Care items in the afternoon, and Clothing shipments in the evening.\n\nThe Operational Descriptor orchestrates each receiving operation: verifying shipment documents, performing quality inspections, recording discrepancies, determining optimal storage locations, and executing putaway tasks. Chaos engineering scenarios simulate real-world disruptions like wrong DC arrivals, gate rejections, QC failures, and blocked storage paths.", + }, + businessContext: { + name: "Inbound Logistics Hub", + industry: "Multi-Category Distribution", + description: + "A centralized receiving hub coordinating inbound shipments from diverse suppliers. The facility operates on a high-throughput receiving model, prioritizing category-based processing waves to optimize dock utilization and labor scheduling.", + coreValues: [ + "Receiving Accuracy: Multi-point verification ensures shipment integrity.", + "Quality Assurance: Configurable QC thresholds catch discrepancies early.", + "Storage Optimization: Intelligent putaway rules maximize warehouse capacity.", + ], + productLines: [ + { + name: "Dairy Essentials", + storage: "Refrigerated (2-4°C)", + shelfLife: "14-30 Days", + description: "Temperature-sensitive dairy products requiring cold chain compliance.", + products: [ + { sku: "SKU-DAIRY-001", name: "Organic Whole Milk 1L" }, + { sku: "SKU-DAIRY-002", name: "Greek Yogurt Plain 500g" }, + { sku: "SKU-DAIRY-003", name: "Cheddar Cheese Block 250g" }, + ], + }, + { + name: "Vegan & Plant-Based", + storage: "Ambient / Refrigerated", + shelfLife: "60-180 Days", + description: "Plant-based alternatives with varied storage requirements.", + products: [ + { sku: "SKU-VEGAN-001", name: "Almond Milk Unsweetened 1L" }, + { sku: "SKU-VEGAN-002", name: "Tofu Firm 400g" }, + { sku: "SKU-VEGAN-003", name: "Vegan Burger Patties (4pk)" }, + ], + }, + { + name: "Self Care & Personal Hygiene", + storage: "Room Temperature", + shelfLife: "730 Days", + description: "Premium personal care products requiring clean handling.", + products: [ + { sku: "SKU-SELF_CARE-001", name: "Moisturizing Body Lotion 200ml" }, + { sku: "SKU-SELF_CARE-002", name: "Organic Lavender Soap Bar" }, + { sku: "SKU-SELF_CARE-003", name: "Herbal Shampoo 500ml" }, + ], + }, + { + name: "Urban Apparel", + storage: "Standard Dry Bin", + shelfLife: "N/A", + description: "High-quality clothing and accessories.", + products: [ + { sku: "SKU-CLOTHING-001", name: "Classic White T-Shirt (M)" }, + { sku: "SKU-CLOTHING-002", name: "Comfort Denim Jeans (32/32)" }, + { sku: "SKU-CLOTHING-003", name: "Wool Blend Socks (Pair)" }, + ], + }, + ], + }, + ecosystem: { + partners: "An internal logistics hub coordinating between specialized suppliers and warehouse operations.", + suppliers: [ + { name: "Fresh Dairy Suppliers", role: "Dairy Supplier", category: "DAIRY" }, + { name: "Organic Vegan Imports", role: "Vegan Goods Supplier", category: "VEGAN" }, + { name: "Premium Self Care Vendors", role: "Personal Care Supplier", category: "SELF_CARE" }, + { name: "Fashion Forward Suppliers", role: "Apparel Supplier", category: "CLOTHING" }, + ], + }, + operationalDescriptors: { + standardActors: [ + { + id: "inbound-process-flow", + name: "Inbound Receiving & Putaway Flow", + persona: "Receiving Supervisor", + 
type: "Standard Workflow", + schedules: [ + "Dairy Wave: Daily @ 06:00 AM (Sim Time)", + "Vegan Wave: Daily @ 10:00 AM (Sim Time)", + "Self Care Wave: Daily @ 02:00 PM (Sim Time)", + "Clothing Wave: Daily @ 05:00 PM (Sim Time)", + ], + description: + "This automated workflow acts as the Receiving Supervisor's digital assistant. It orchestrates the complete inbound flow:\n\n1. Register trailer arrival and assign dock door.\n2. Verify PO documentation and record expected quantities.\n3. Execute receiving: scan items, record actual quantities, flag discrepancies.\n4. Perform quality control checks based on category requirements.\n5. Determine optimal storage location using putaway rules.\n6. Generate and complete putaway tasks.\n7. Create inventory transactions to update stock levels.\n8. Close the inbound order and update supplier metrics.", + keySteps: [ + "Initialize & Context Setup", + "Register Trailer Arrival", + "Verify PO & Assign Dock", + "Execute Receiving (Quantity Recording)", + "Quality Control Inspection", + "Determine Storage Location", + "Generate Putaway Tasks", + "Execute Putaway", + "Create Inventory Transactions", + "Close Inbound Order", + ], + inputs: ["Product Category (DAIRY, VEGAN, etc.)", "PO Number", "Expected Quantity"], + outputs: ["Received Goods in Storage", "Updated Inventory Levels", "Closed Inbound Order"], + }, + ], + chaosScenarios: [ + { name: "Wrong DC Arrival", description: "Shipment arrives at incorrect distribution center" }, + { name: "Gate Rejection", description: "Shipment not expected at assigned gate" }, + { name: "QC Tolerance Exceeded", description: "Quality check fails due to exceeded tolerance" }, + { name: "Path Blocked", description: "Storage location unreachable, task queued" }, + ], + }, + technicalServices: { + ERP: "Enterprise Resource Planning: Master data for products, companies, and purchase orders.", + WMS: "Warehouse Management System: Manages receiving, quality control, putaway, and bin assignments.", + TMS: "Transportation Management: Handles trailer scheduling and dock door assignments.", + Finance: "Accounting: Records inventory valuations and supplier transactions.", + }, +}; diff --git a/packages/controlmart/src/worlds/process-outbound/index.ts b/packages/controlmart/src/worlds/process-outbound/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..1415c0d7969932263b921763f4319e66f35a137c --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/index.ts @@ -0,0 +1,13 @@ +import { seedDataProcessOutboundWorld, seedODsProcessOutboundWorld } from "./seeders/world.seeder"; +import { scheduleProcessOutboundWorld } from "./schedule-process-outbound"; +import outBoundOd from "./ods/outbound_order.od.json"; +import type { OperationalDescriptor } from "../../types/od.type"; +import { processOutboundWorldDoc } from "./world.docs"; + +export { seedDataProcessOutboundWorld, seedODsProcessOutboundWorld, scheduleProcessOutboundWorld }; + +export const processOutboundODs = { + outbound_order: outBoundOd as OperationalDescriptor, +}; + +export const processOutboundWorldDocs = () => processOutboundWorldDoc; diff --git a/packages/controlmart/src/worlds/process-outbound/ods/outbound_order.od.json b/packages/controlmart/src/worlds/process-outbound/ods/outbound_order.od.json new file mode 100644 index 0000000000000000000000000000000000000000..4cf58bca4f97d942ca8a7c2b4b74c815abc12944 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/ods/outbound_order.od.json @@ -0,0 +1,477 @@ +{ + "id": 
"outbound-category-flow", + "name": "Predictable Outbound Category Flow", + "version": "1.2.3", + "description": "Category-driven outbound flow with Chaos Injection for resilience testing", + "namespace": "wms", + "persona": "Warehouse Manager", + "type": "standard", + "inputSchema": { + "type": "object", + "required": [ + "category" + ], + "properties": { + "category": { + "type": "string", + "enum": [ + "DAIRY", + "VEGAN", + "SELF_CARE", + "CLOTHING" + ], + "default": "SELF_CARE" + }, + "chaosEnabled": { + "type": "boolean", + "default": false + }, + "chaosProbability": { + "type": "number", + "default": 0.5 + } + } + }, + "steps": [ + { + "id": "init_context", + "name": "Initialize Context", + "type": "script", + "script": "const suffixes = ['001', '002', '003']; const suffix = suffixes[Math.floor(Math.random() * suffixes.length)]; return { timestamp: Date.now(), quantity: Math.floor(Math.random() * (500 - 250 + 1)) + 250, skuSuffix: suffix };", + "output": { + "storeAs": "config" + } + }, + { + "id": "determine_chaos_inputs", + "name": "Determine Chaos Inputs", + "type": "script", + "script": "console.log('CTX KEYS:', Object.keys(ctx)); const category = ctx.category || 'SELF_CARE'; const enabled = ctx.chaosEnabled || false; const prob = ctx.chaosProbability || 0.3; const isChaos = (p = prob) => enabled && Math.random() < p; const chaosInputs = { order: {}, inventory: {}, picking: {}, packing: {}, shipping: {} }; const logs = []; const mapErr = (key, msg) => { logs.push(`[CHAOS_LOG] ${msg}`); return { key, msg }; }; const defaultAddress = { type: 'SHIP_TO', street1: '100 Warehouse Way', city: 'Logistics City', state: 'CA', postalCode: '90001', country: 'USA' }; let finalShipTo = defaultAddress; if (isChaos()) { if (isChaos(0.1)) { chaosInputs.order.orderNumber = `ORD-DUPE-${Date.now()}`; mapErr('DUPLICATE_ORDER', 'Duplicate order → Demand blocked'); } if (isChaos(0.1)) { chaosInputs.order.warehouseId = 'WRONG-DC-999'; mapErr('WRONG_DC', 'Wrong DC → Demand not visible'); } if (isChaos(0.1)) { chaosInputs.order.shipToAddress = null; finalShipTo = null; mapErr('MISSING_SHIP_TO', 'Missing ship-to → Validation failed'); } if (isChaos(0.05)) { chaosInputs.order.sku = 'INVALID-SKU-000'; mapErr('INVALID_SKU', 'Invalid SKU → Demand blocked'); } if (isChaos(0.05)) { chaosInputs.order.status = 'CANCELLED'; mapErr('CANCELLED_ORDER', 'Cancelled order → Demand invalid'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.inventory.quantity = 999999; mapErr('INSUFFICIENT_QTY', 'Insufficient qty → Partial allocation'); } if (isChaos(0.1)) { chaosInputs.inventory.productId = 'NON-EXISTENT-PROD'; mapErr('NO_INVENTORY', 'No inventory → Allocation skipped'); } if (isChaos(0.05)) { chaosInputs.inventory.locked = true; mapErr('STOCK_ON_HOLD', 'Stock on hold → Excluded'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.picking.zoneCode = 'INVALID-ZONE'; mapErr('ZONE_UNAVAILABLE', 'Zone unavailable → Plan failed'); } if (isChaos(0.1)) { chaosInputs.picking.confirmedQuantity = 0; mapErr('SHORT_PICK', 'Short pick → Partial confirmed'); } if (isChaos(0.05)) { chaosInputs.picking.damageDetected = true; mapErr('DAMAGE_DETECTED', 'Damage detected → Exception raised'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.packing.weight = 5000; mapErr('WEIGHT_EXCEEDED', 'Weight exceeded → Repack required'); } if (isChaos(0.1)) { chaosInputs.packing.huType = 'BANNED_TYPE'; mapErr('HU_TYPE_NOT_ALLOWED', 'HU type not allowed → Pack blocked'); } } if (isChaos()) { if (isChaos(0.1)) { chaosInputs.shipping.dockId = 
'OCCUPIED-DOCK'; mapErr('DOCK_OCCUPIED', 'Dock occupied → Delay'); } if (isChaos(0.1)) { chaosInputs.shipping.sealNumber = null; mapErr('SEAL_MISSING', 'Seal missing → Ship blocked'); } } console.log(logs.join('\\n')); const standardSku = `SKU-${category}-${ctx.config.skuSuffix}`; const finalValues = { order: { orderNumber: chaosInputs.order.orderNumber || `ORD-${category}-${ctx.config.timestamp}`, sku: chaosInputs.order.sku || standardSku, status: chaosInputs.order.status || 'CREATED', warehouseId: chaosInputs.order.warehouseId || null, shipToAddress: finalShipTo }, inventory: { productId: chaosInputs.inventory.productId || standardSku, quantity: chaosInputs.inventory.quantity || ctx.config.quantity }, picking: { zoneCode: chaosInputs.picking.zoneCode || null, confirmedQuantity: chaosInputs.picking.confirmedQuantity !== undefined ? chaosInputs.picking.confirmedQuantity : ctx.config.quantity, damageDetected: chaosInputs.picking.damageDetected || false }, packing: { huType: chaosInputs.packing.huType || 'STANDARD', weight: chaosInputs.packing.weight || undefined }, shipping: { dockId: chaosInputs.shipping.dockId || undefined, sealNumber: chaosInputs.shipping.sealNumber !== undefined ? chaosInputs.shipping.sealNumber : 'SEAL-001' } }; return { chaosInputs, logs, finalValues };", + "output": { + "storeAs": "chaos" + } + }, + { + "id": "get_warehouse", + "name": "Get Main Warehouse", + "type": "mcp", + "service": "wms", + "tool": "warehouse.get_by_code", + "input": { + "type": "template", + "template": "MAIN-WH" + }, + "output": { + "storeAs": "warehouse" + } + }, + { + "id": "get_zones", + "name": "Get Warehouse Zones", + "type": "mcp", + "service": "wms", + "tool": "zone.get_by_warehouse", + "input": { + "type": "template", + "template": "{{warehouse.warehouseId}}" + }, + "output": { + "storeAs": "zones" + } + }, + { + "id": "map_zones", + "name": "Map Zones to Context", + "type": "script", + "script": "const zs = ctx.zones?.items || ctx.zones || []; return { pick: zs.find(z => z.zoneType === 'PICKING'), staging: zs.find(z => z.zoneType === 'STAGING'), dock: zs.find(z => z.zoneType === 'SHIPPING') };", + "output": { + "storeAs": "zoneMap" + } + }, + { + "id": "create_outbound_demand", + "name": "Create Outbound Demand", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.create", + "input": { + "type": "template", + "template": { + "orderNumber": "{{chaos.finalValues.order.orderNumber}}", + "status": "{{chaos.finalValues.order.status}}", + "warehouseId": "{{chaos.finalValues.order.warehouseId || warehouse.warehouseId}}", + "description": "Category specific outbound: {{category}}. 
{{chaos.logs[0] || ''}}", + "lines": [ + { + "lineNumber": 1, + "sku": "{{chaos.finalValues.order.sku}}", + "productId": "{{chaos.finalValues.order.sku}}", + "productName": "Category {{category}} Item", + "orderedQuantity": "{{config.quantity}}", + "uom": "EA" + } + ], + "shipToAddress": "{{chaos.finalValues.order.shipToAddress}}", + "customFields": { + "category": "{{category}}", + "ticketLog": "{{chaos.logs}}" + } + } + }, + "output": { + "storeAs": "outboundOrder" + } + }, + { + "id": "validate_outbound_demand", + "name": "Validate Outbound Demand", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{outboundOrder.orderId}}", + "status": "DEMAND_CREATED" + } + }, + "output": { + "storeAs": "validatedDemand" + } + }, + { + "id": "evaluate_allocatable_inventory", + "name": "Evaluate Allocatable Inventory", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{outboundOrder.orderId}}", + "status": "READY_FOR_ALLOCATION" + } + }, + "output": { + "storeAs": "evaluatedInventory" + } + }, + { + "id": "ensure_inventory", + "name": "Ensure Inventory Availability", + "type": "mcp", + "service": "wms", + "tool": "inventory.ensure_available", + "input": { + "type": "template", + "template": { + "productId": "{{chaos.finalValues.inventory.productId}}", + "quantity": "{{chaos.finalValues.inventory.quantity}}", + "warehouseId": "{{warehouse.warehouseId}}" + } + }, + "output": { + "storeAs": "inventoryCheck" + } + }, + { + "id": "reserve_inventory", + "name": "Reserve Inventory", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.allocate_line", + "input": { + "type": "template", + "template": { + "orderId": "{{outboundOrder.orderId}}", + "lineNumber": "{{outboundOrder.lines[0].lineNumber}}", + "allocatedQuantity": "{{chaos.finalValues.inventory.quantity}}" + } + }, + "output": { + "storeAs": "allocationRecord" + } + }, + { + "id": "determine_pick_strategy", + "name": "Determine Pick Strategy", + "type": "script", + "script": "return { strategy: 'FIFO', zone: ctx.chaos?.finalValues?.picking?.zoneCode || ctx.zoneMap?.pick?.zoneCode || 'DEFAULT', warehouse: ctx.warehouse?.warehouseCode };", + "output": { + "storeAs": "pickStrategy" + } + }, + { + "id": "create_pick_tasks", + "name": "Create Pick Tasks", + "type": "mcp", + "service": "wms", + "tool": "task.create", + "input": { + "type": "template", + "template": { + "warehouseId": "{{warehouse.warehouseId}}", + "zoneId": "{{zoneMap.pick.zoneId}}", + "taskType": "PICK", + "reference": { + "type": "ORDER", + "id": "{{outboundOrder.orderId}}" + }, + "status": "PICK_PLANNED", + "customFields": { + "zoneCode": "{{pickStrategy.zone}}" + } + } + }, + "output": { + "storeAs": "pickTasks" + } + }, + { + "id": "release_pick_tasks", + "name": "Release Pick Tasks", + "type": "mcp", + "service": "wms", + "tool": "task.update_status", + "input": { + "type": "template", + "template": { + "taskId": "{{pickTasks.taskId}}", + "status": "READY_TO_PICK" + } + }, + "output": { + "storeAs": "activePickTask" + } + }, + { + "id": "confirm_picked_quantity", + "name": "Confirm Picked Quantity", + "type": "mcp", + "service": "wms", + "tool": "task.update_status", + "input": { + "type": "template", + "template": { + "taskId": "{{pickTasks.taskId}}", + "status": "PICK_CONFIRMED", + "customFields": { + "confirmedQuantity": "{{chaos.finalValues.picking.confirmedQuantity}}", + 
"damageDetected": "{{chaos.finalValues.picking.damageDetected}}" + } + } + }, + "output": { + "storeAs": "pickConfirmation" + } + }, + { + "id": "move_goods_to_packing", + "name": "Move Goods to Packing", + "type": "mcp", + "service": "wms", + "tool": "task.update_status", + "input": { + "type": "template", + "template": { + "taskId": "{{pickTasks.taskId}}", + "status": "PACKING_PENDING" + } + }, + "output": { + "storeAs": "packingWork" + } + }, + { + "id": "complete_pick_tasks", + "name": "Complete Pick Tasks", + "type": "mcp", + "service": "wms", + "tool": "task.update_status", + "input": { + "type": "template", + "template": { + "taskId": "{{pickTasks.taskId}}", + "status": "COMPLETED" + } + }, + "output": { + "storeAs": "completedTasks" + } + }, + { + "id": "create_handling_units", + "name": "Create Handling Units", + "type": "mcp", + "service": "wms", + "tool": "outbound_shipment.create", + "input": { + "type": "template", + "template": { + "orderId": "{{outboundOrder.orderId}}", + "status": "HU_CREATED", + "toAddress": "{{outboundOrder.shipToAddress}}", + "customFields": { + "huType": "{{chaos.finalValues.packing.huType}}", + "weight": "{{chaos.finalValues.packing.weight}}" + } + } + }, + "output": { + "storeAs": "handlingUnits" + } + }, + { + "id": "label_and_seal_shipment", + "name": "Label and Seal Shipment", + "type": "mcp", + "service": "wms", + "tool": "outbound_shipment.update_status", + "input": { + "type": "template", + "template": { + "shipmentId": "{{handlingUnits.shipmentId}}", + "status": "PACKED" + } + }, + "output": { + "storeAs": "packedShipment" + } + }, + { + "id": "assign_staging_lane", + "name": "Assign Staging Lane", + "type": "mcp", + "service": "wms", + "tool": "outbound_shipment.update_status", + "input": { + "type": "template", + "template": { + "shipmentId": "{{handlingUnits.shipmentId}}", + "status": "STAGED" + } + }, + "output": { + "storeAs": "stagedShipment" + } + }, + { + "id": "assign_dock", + "name": "Assign Outbound Dock", + "type": "mcp", + "service": "wms", + "tool": "outbound_shipment.update_status", + "input": { + "type": "template", + "template": { + "shipmentId": "{{handlingUnits.shipmentId}}", + "status": "READY_TO_SHIP", + "customFields": { + "dockId": "{{chaos.finalValues.shipping.dockId}}" + } + } + }, + "output": { + "storeAs": "dockAssignment" + } + }, + { + "id": "load_shipment", + "name": "Load Shipment", + "type": "mcp", + "service": "wms", + "tool": "outbound_shipment.update_status", + "input": { + "type": "template", + "template": { + "shipmentId": "{{handlingUnits.shipmentId}}", + "status": "LOADED" + } + }, + "output": { + "storeAs": "loadRecord" + } + }, + { + "id": "confirm_shipment_dispatch", + "name": "Confirm Shipment Dispatch", + "type": "mcp", + "service": "wms", + "tool": "outbound_shipment.update_status", + "input": { + "type": "template", + "template": { + "shipmentId": "{{handlingUnits.shipmentId}}", + "status": "SHIPPED", + "customFields": { + "sealNumber": "{{chaos.finalValues.shipping.sealNumber}}" + } + } + }, + "output": { + "storeAs": "shipmentConfirmation" + } + }, + { + "id": "release_inventory_reservation", + "name": "Release Inventory Reservation", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{outboundOrder.orderId}}", + "status": "RESERVATION_RELEASED" + } + }, + "output": { + "storeAs": "releasedInventory" + } + }, + { + "id": "update_inventory_balance", + "name": "Update Inventory Balance", + "type": "mcp", + 
"service": "wms", + "tool": "inventory.transaction.create", + "input": { + "type": "template", + "template": { + "warehouseId": "{{warehouse.warehouseId}}", + "transactionType": "SHIP", + "fromBinId": "BIN-PICK-01", + "productId": "{{outboundOrder.lines[0].productId}}", + "quantity": "{{config.quantity}}", + "referenceType": "ORDER", + "referenceId": "{{outboundOrder.orderId}}", + "status": "DEPLETED" + } + }, + "output": { + "storeAs": "updatedInventory" + } + }, + { + "id": "close_outbound_order", + "name": "Close Outbound Order", + "type": "mcp", + "service": "wms", + "tool": "outbound.order.update_status", + "input": { + "type": "template", + "template": { + "orderId": "{{outboundOrder.orderId}}", + "status": "CLOSED" + } + }, + "output": { + "storeAs": "closedOrder" + } + } + ], + "runPolicy": { + "failureMode": "fail_fast", + "storeRuns": true + } +} \ No newline at end of file diff --git a/packages/controlmart/src/worlds/process-outbound/run-outbound-chaos.ts b/packages/controlmart/src/worlds/process-outbound/run-outbound-chaos.ts new file mode 100644 index 0000000000000000000000000000000000000000..6c40156490b2e9dc3a6f3f78faf979e08f8a3103 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/run-outbound-chaos.ts @@ -0,0 +1,146 @@ + +import { executeOperationalDescriptor } from "../../operational-descriptor/executor.od"; +import { createAppLogger } from "../../utils/logger.util"; +import { WorldRepository } from "../../repository/world.repository"; +import { seedProcessOutboundWorld } from "./seeders/world.generator"; +import { capabilityCatalog } from "../../services/capability-catalog.service"; +import { disableAllChaos } from "../../operational-descriptor/tools/registry.tool"; +import * as path from 'path'; +import * as fs from 'fs'; +import { loadEnv } from "../../utils/env.util"; + +async function runChaos() { + disableAllChaos(); + + const args = process.argv.slice(2); + const shouldSeed = args.includes('--seed'); + const shouldCleanup = args.includes('--cleanup'); + const enableChaos = args.includes('--chaos'); + + let worldId = process.env.WORLD_ID; + let seededWorld = false; + + if (shouldSeed) { + console.log('--- INITIALIZING SERVICES ---'); + await capabilityCatalog.initialize(); + + console.log('--- SEEDING NEW WORLD ---'); + try { + const seedResult = await seedProcessOutboundWorld({ + worldName: `Chaos Verification World ${Date.now()}` + }); + worldId = seedResult.worldId; + seededWorld = true; + console.log(`World Initialized: ${worldId}`); + } catch (err) { + console.error('Failed to seed world:', err); + process.exit(1); + } + } + + if (!worldId) { + console.error("ERROR: WORLD_ID env var required OR use --seed option."); + console.log("Usage: npx ts-node src/worlds/process-outbound/run-outbound-chaos.ts [--seed] [--cleanup] [--chaos]"); + process.exit(1); + } + + const odPath = path.join(__dirname, 'ods/outbound_order.od.json'); + console.log(`Running OD from: ${odPath}`); + + const odContent = JSON.parse(fs.readFileSync(odPath, 'utf-8')); + + // Config + const input = { + category: 'SELF_CARE', + chaosEnabled: enableChaos, + chaosProbability: enableChaos ? 
1.0 : 0 + }; + + const world = await WorldRepository.findWorldById(worldId); + if (!world) { + console.error(`World not found: ${worldId}`); + process.exit(1); + } + + const logger = createAppLogger({ service: `verification-outbound-chaos` }); + + try { + console.log(`--- STARTING SIMULATION (Chaos: ${enableChaos}) ---`); + + const result = await executeOperationalDescriptor(odContent, { + world, + tools: {}, + logger, + input, + }); + + console.log('--- SIMULATION COMPLETE ---'); + console.log('Status:', result.status); + + // Log steps and check for chaos logs + let chaosFound = false; + if (result.stepResults) { + result.stepResults.forEach((step: any) => { + if (step.logs && step.logs.some((l: string) => l.includes('[CHAOS_LOG]'))) { + chaosFound = true; + console.log(`\nStep ${step.stepId} Logs:`); + step.logs.forEach((l: string) => { + if (l.includes('[CHAOS_LOG]')) console.log(l); + }); + } + }); + + const chaosStep = result.stepResults.find(s => s.stepId === 'determine_chaos_inputs'); + if (chaosStep && chaosStep.output && chaosStep.output.logs && chaosStep.output.logs.length > 0) { + chaosFound = true; + console.log('\nRetrieved Chaos Logs from Step Output:'); + chaosStep.output.logs.forEach((l: string) => console.log(l)); + } + } + + if (chaosFound) { + console.log('\n CHAOS EVENTS CONFIRMED'); + } else { + if (enableChaos) { + console.log('\n NO CHAOS EVENTS FOUND (Did probability 1.0 work?)'); + console.log('Input used:', input); + } else { + console.log('\n HAPPY PATH CONFIRMED (No chaos events)'); + } + } + + } catch (error) { + console.error('Execution Error:', error); + } finally { + if (shouldCleanup && seededWorld && worldId) { + console.log('--- CLEANING UP WORLD ---'); + try { + const deleted = await WorldRepository.deleteWorld(worldId); + if (deleted) { + console.log(`World ${worldId} deleted.`); + } else { + console.log(`Failed to delete world ${worldId} (not found?).`); + } + } catch (err) { + console.error(`Error deleting world:`, err); + } + } + } +} + +// Boilerplate to run main +import { connectMongo, disconnectMongo } from "../../services/mongo.service"; + +async function main() { + const mongoUri = loadEnv().MONGO_URI || process.env.DATABASE_URL; + if (!mongoUri) { + console.error("No Mongo URI"); + process.exit(1); + } + await connectMongo({ uri: mongoUri, dbName: 'controlmart' }); + await runChaos(); + // Do not force exit immediately, let drain or use disconnect + // await disconnectMongo(); +} + +main().catch(console.error); diff --git a/packages/controlmart/src/worlds/process-outbound/schedule-process-outbound.ts b/packages/controlmart/src/worlds/process-outbound/schedule-process-outbound.ts new file mode 100644 index 0000000000000000000000000000000000000000..0a0b6bf07c0a75578eba3af7db822c35fa8a1982 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/schedule-process-outbound.ts @@ -0,0 +1,92 @@ +import { ODRepository } from "../../repository/od.repository"; +import { WorldRepository } from "../../repository/world.repository"; +import { scheduleOD, scheduleRecurringOD } from "../../operational-descriptor/schedule.od"; +import { createAppLogger } from "../../utils/logger.util"; +import { processOutboundODs } from "./index"; + +const logger = createAppLogger({ service: "process-outbound-scheduler" }); + +type OdSchedule = { + simInterval: number; + simOffsetHours: number; + category: "DAIRY" | "VEGAN" | "SELF_CARE" | "CLOTHING"; +}; + +const SCHEDULES: OdSchedule[] = [ + { simInterval: 24, simOffsetHours: 5, category: "DAIRY" }, + { 
simInterval: 24, simOffsetHours: 8, category: "VEGAN" }, + { simInterval: 24, simOffsetHours: 13, category: "SELF_CARE" }, + { simInterval: 24, simOffsetHours: 18, category: "CLOTHING" }, +]; + +const calculateSimToRealRatio = (realHours: number) => 24 / realHours; + +const convertSimIntervalToReal = (simInterval: number, realHoursPerSimDay: number): string => { + const ratio = calculateSimToRealRatio(realHoursPerSimDay); + const realMinutes = (simInterval * 60) / ratio; + return realMinutes >= 60 ? `${realMinutes / 60} hours` : `${realMinutes} minutes`; +}; + +export const scheduleProcessOutboundWorld = async (worldId: string, realHoursPerSimDay: number) => { + const world = await WorldRepository.findWorldById(worldId); + if (!world) throw new Error(`World ${worldId} not found`); + + const ratio = calculateSimToRealRatio(realHoursPerSimDay); + const odData = processOutboundODs.outbound_order; + + // Onboard the OD record first + const repoData = { + odId: odData.id, + data: odData as any, + name: odData.name, + description: odData.description, + odType: (odData.type as "standard" | "background_job" | "workflow") || "standard", + persona: odData.persona, + }; + + let odRecord = await ODRepository.getODById(odData.id, worldId); + if (odRecord) { + await ODRepository.updateODById(odData.id, worldId, repoData); + } else { + odRecord = await ODRepository.createOD({ worldId }, repoData); + } + + // Chaos Config + const chaosEnabled = world.chaos?.processChaosEnabled ?? false; + + // Schedule for each category + for (const config of SCHEDULES) { + const realInterval = convertSimIntervalToReal(config.simInterval, realHoursPerSimDay); + const realMinutesOffset = config.simOffsetHours * (60 / ratio); + const nextRunAt = new Date(Date.now() + realMinutesOffset * 60 * 1000); + + // Schedule 5 immediate runs to kickstart the world + for (let i = 0; i < 5; i++) { + const immediateDate = new Date(Date.now() + i * 60 * 1000); // Stagger by 1 minute + await scheduleOD(immediateDate, odRecord!, world, { + category: config.category, + scheduledBy: "process-outbound-boot-kicker", + bootSequence: i + 1, + chaosEnabled, + }); + logger.info(`Scheduled immediate boot run ${i + 1}/5 for ${config.category} at ${immediateDate.toISOString()}`); + } + + logger.info( + `Scheduling ${config.category} for world ${worldId} at offset ${config.simOffsetHours}h (starting in ${realMinutesOffset.toFixed(2)}m)`, + ); + + await scheduleRecurringOD( + realInterval, + odRecord!, + world, + { + category: config.category, + scheduledBy: "process-outbound-scheduler", + simOffsetHours: config.simOffsetHours, + chaosEnabled, + }, + { nextRunAt }, + ); + } +}; diff --git a/packages/controlmart/src/worlds/process-outbound/seeders/companies.generator.ts b/packages/controlmart/src/worlds/process-outbound/seeders/companies.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..d640a1b7f7dba98fafce4d825107a4dc96b70f95 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/seeders/companies.generator.ts @@ -0,0 +1,108 @@ +import { faker } from "@faker-js/faker"; +import type { TWorldRefModel } from "../../../models/shared.model"; +import type { TCompanyGenerate } from "../../../models/erp/company.erp.model"; + +export const PROCESS_OUTBOUND_COMPANY_IDS = { + MPC: "MPC-PROCESS-OUTBOUND", + SUPPLIER_DAIRY: "SUP-DAIRY-001", + SUPPLIER_VEGAN: "SUP-VEGAN-001", + SUPPLIER_SELF_CARE: "SUP-SELFCARE-001", + SUPPLIER_CLOTHING: "SUP-CLOTHING-001", + CUSTOMER_RETAIL: "CUST-RETAIL-001", +} as const; + +export 
const PROCESS_OUTBOUND_COMPANY_NAMES = { + MPC: "Global Logistics Hub", + SUPPLIER_DAIRY: "Pure Dairy Solutions", + SUPPLIER_VEGAN: "Green Harvest Vegan", + SUPPLIER_SELF_CARE: "Vitality Self care Co", + SUPPLIER_CLOTHING: "Urban Threads Clothing", + CUSTOMER_RETAIL: "Standard Retail Group", +} as const; + +const generateAddress = (type: "BILL_TO" | "SHIP_TO" | "REM_TO") => ({ + type, + country: "United States", + attention: faker.person.fullName(), + street1: faker.location.streetAddress(), + city: faker.location.city(), + state: faker.location.state(), + postalCode: faker.location.zipCode(), + contactEmail: faker.internet.email(), + contactPhone: faker.phone.number(), +}); + +const generateContact = () => ({ + name: faker.person.fullName(), + email: faker.internet.email(), + phone: faker.phone.number(), +}); + +export const generateMpcCompany = (worldRef: TWorldRefModel): TCompanyGenerate => ({ + worldRef, + isMpcCompany: true, + companyId: PROCESS_OUTBOUND_COMPANY_IDS.MPC, + name: PROCESS_OUTBOUND_COMPANY_NAMES.MPC, + legalName: `${PROCESS_OUTBOUND_COMPANY_NAMES.MPC} LLC`, + currency: "USD", + paymentTerms: "Net 30", + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + primaryContact: generateContact(), + creditHold: false, + status: "ACTIVE", + companyType: "INTERNAL", +}); + +export const generateSupplier = ( + worldRef: TWorldRefModel, + id: string, + name: string, + category: string +): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: id, + name: name, + legalName: `${name} Ltd`, + currency: "USD", + paymentTerms: "Net 15", + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + primaryContact: generateContact(), + creditHold: false, + status: "ACTIVE", + companyType: "SUPPLIER", + customFields: { + category, + }, +}); + +export const generateCustomer = ( + worldRef: TWorldRefModel, + id: string, + name: string +): TCompanyGenerate => ({ + worldRef, + isMpcCompany: false, + companyId: id, + name: name, + legalName: `${name} Inc`, + currency: "USD", + paymentTerms: "Net 30", + billingAddress: generateAddress("BILL_TO"), + shippingAddress: generateAddress("SHIP_TO"), + primaryContact: generateContact(), + creditHold: false, + status: "ACTIVE", + companyType: "CUSTOMER", +}); + +export const generateAllProcessOutboundCompanies = (worldRef: TWorldRefModel): TCompanyGenerate[] => [ + generateMpcCompany(worldRef), + generateSupplier(worldRef, PROCESS_OUTBOUND_COMPANY_IDS.SUPPLIER_DAIRY, PROCESS_OUTBOUND_COMPANY_NAMES.SUPPLIER_DAIRY, "DAIRY"), + generateSupplier(worldRef, PROCESS_OUTBOUND_COMPANY_IDS.SUPPLIER_VEGAN, PROCESS_OUTBOUND_COMPANY_NAMES.SUPPLIER_VEGAN, "VEGAN"), + generateSupplier(worldRef, PROCESS_OUTBOUND_COMPANY_IDS.SUPPLIER_SELF_CARE, PROCESS_OUTBOUND_COMPANY_NAMES.SUPPLIER_SELF_CARE, "SELF_CARE"), + generateSupplier(worldRef, PROCESS_OUTBOUND_COMPANY_IDS.SUPPLIER_CLOTHING, PROCESS_OUTBOUND_COMPANY_NAMES.SUPPLIER_CLOTHING, "CLOTHING"), + generateCustomer(worldRef, PROCESS_OUTBOUND_COMPANY_IDS.CUSTOMER_RETAIL, PROCESS_OUTBOUND_COMPANY_NAMES.CUSTOMER_RETAIL), +]; diff --git a/packages/controlmart/src/worlds/process-outbound/seeders/products.seeder.ts b/packages/controlmart/src/worlds/process-outbound/seeders/products.seeder.ts new file mode 100644 index 0000000000000000000000000000000000000000..fd12ff5e660eef9e7e461d72ac00a83a94b6af6d --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/seeders/products.seeder.ts @@ -0,0 +1,59 @@ +import { faker } from 
"@faker-js/faker"; +import type { TProductGenerate } from "../../../models/erp/product.erp.model"; +import type { TWorldRefModel } from "../../../models/shared.model"; + +const DAIRY_PRODUCTS = [ + { name: "Organic Whole Milk 1L", sku: "SKU-DAIRY-001", price: 4.50 }, + { name: "Greek Yogurt Plain 500g", sku: "SKU-DAIRY-002", price: 5.99 }, + { name: "Cheddar Cheese Block 250g", sku: "SKU-DAIRY-003", price: 7.50 }, +]; + +const VEGAN_PRODUCTS = [ + { name: "Almond Milk Unsweetened 1L", sku: "SKU-VEGAN-001", price: 4.99 }, + { name: "Tofu Firm 400g", sku: "SKU-VEGAN-002", price: 3.50 }, + { name: "Vegan Burger Patties (4pk)", sku: "SKU-VEGAN-003", price: 8.99 }, +]; + +const SELF_CARE_PRODUCTS = [ + { name: "Moisturizing Body Lotion 200ml", sku: "SKU-SELF_CARE-001", price: 12.99 }, + { name: "Organic Lavender Soap Bar", sku: "SKU-SELF_CARE-002", price: 5.50 }, + { name: "Herbal Shampoo 500ml", sku: "SKU-SELF_CARE-003", price: 15.00 }, +]; + +const CLOTHING_PRODUCTS = [ + { name: "Classic White T-Shirt (M)", sku: "SKU-CLOTHING-001", price: 19.99 }, + { name: "Comfort Denim Jeans (32/32)", sku: "SKU-CLOTHING-002", price: 45.00 }, + { name: "Wool Blend Socks (Pair)", sku: "SKU-CLOTHING-003", price: 9.99 }, +]; + +const generateProduct = (worldRef: TWorldRefModel, category: string, data: any): TProductGenerate => ({ + worldRef, + productId: data.sku, + sku: data.sku, + upc: faker.string.numeric(12), + ean: faker.string.numeric(13), + name: data.name, + description: `${data.name} - Essential ${category.toLowerCase()} product.`, + commodityCode: category === "CLOTHING" ? "61091000" : "21069099", + taxClassification: "STANDARD", + unitOfMeasure: "EA", + weight: { value: 0.5, unit: "KG" }, + dimensions: { length: 10, width: 10, height: 10, unit: "CM" }, + inventoryTracking: true, + price: { currency: "USD", amount: data.price }, + cost: { currency: "USD", amount: data.price * 0.6 }, + leadTimeDays: 2, + status: "ACTIVE", + customFields: { + category: category, + }, +}); + +export const generateMpcProducts = (worldRef: TWorldRefModel): TProductGenerate[] => { + const dairy = DAIRY_PRODUCTS.map(p => generateProduct(worldRef, "DAIRY", p)); + const vegan = VEGAN_PRODUCTS.map(p => generateProduct(worldRef, "VEGAN", p)); + const care = SELF_CARE_PRODUCTS.map(p => generateProduct(worldRef, "SELF_CARE", p)); + const cloth = CLOTHING_PRODUCTS.map(p => generateProduct(worldRef, "CLOTHING", p)); + + return [...dairy, ...vegan, ...care, ...cloth]; +}; diff --git a/packages/controlmart/src/worlds/process-outbound/seeders/warehouse.seeder.ts b/packages/controlmart/src/worlds/process-outbound/seeders/warehouse.seeder.ts new file mode 100644 index 0000000000000000000000000000000000000000..f374d7b38e6af68658fdc0623aae5e502f7b5d20 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/seeders/warehouse.seeder.ts @@ -0,0 +1,133 @@ + +import type { TWorldRefModel } from "../../../models/shared.model"; +import { generateIdByService } from "../../../utils/mongo.util"; + + +// We generate IDs dynamically per run to avoid collisions +export const generateWarehouseIds = () => ({ + MAIN: generateIdByService("wms", "warehouse"), +}); + +export const generateZoneIds = () => ({ + PICK: generateIdByService("wms", "zone"), + STAGING: generateIdByService("wms", "zone"), + DOCK: generateIdByService("wms", "zone"), +}); + +export const generateWarehouseData = (worldRef: TWorldRefModel, warehouseIds: ReturnType) => ({ + worldRef, + warehouseId: warehouseIds.MAIN, + warehouseCode: "MAIN-WH", + warehouseName: "Main 
Distribution Center", + warehouseType: "FULFILLMENT" as const, + status: "ACTIVE" as const, + timezone: "America/Chicago", + address: { + type: "SHIP_TO", + street1: "123 Logistics Way", + city: "Chicago", + state: "IL", + postalCode: "60601", + country: "US", + }, +}); + +export const generateZonesData = (worldRef: TWorldRefModel, warehouseIds: ReturnType, zoneIds: ReturnType) => [ + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.PICK, + zoneCode: "PICK-01", + zoneName: "Picking Zone", + zoneType: "PICKING" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.STAGING, + zoneCode: "STG-01", + zoneName: "Staging Zone", + zoneType: "STAGING" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, + { + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.DOCK, + zoneCode: "DOCK-01", + zoneName: "Shipping Dock", + zoneType: "SHIPPING" as const, + status: "ACTIVE" as const, + temperatureControlled: false, + aisles: [], + }, +]; + +export const generateBinsData = (worldRef: TWorldRefModel, warehouseIds: ReturnType, zoneIds: ReturnType) => { + const bins = []; + + // Pick Bins + // Special Bin for OD compatibility + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.PICK, + binId: generateIdByService("wms", "bin"), + binCode: "BIN-PICK-01", + binType: "PICK_FACE" as const, + locationType: "STORAGE" as const, + status: "AVAILABLE" as const, + pickable: true, + }); + + for (let i = 2; i <= 10; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.PICK, + binId: generateIdByService("wms", "bin"), + binCode: `P-${i.toString().padStart(2, '0')}`, + binType: "PICK_FACE" as const, + locationType: "STORAGE" as const, + status: "AVAILABLE" as const, + pickable: true, + }); + } + + // Staging Bins + for (let i = 1; i <= 5; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.STAGING, + binId: generateIdByService("wms", "bin"), + binCode: `S-${i.toString().padStart(2, '0')}`, + binType: "SHELF" as const, + locationType: "STAGING" as const, + status: "AVAILABLE" as const, + pickable: false, + }); + } + + // Dock Bins + for (let i = 1; i <= 2; i++) { + bins.push({ + worldRef, + warehouseId: warehouseIds.MAIN, + zoneId: zoneIds.DOCK, + binId: generateIdByService("wms", "bin"), + binCode: `D-${i.toString().padStart(2, '0')}`, + binType: "PALLET" as const, + locationType: "DOCK" as const, + status: "AVAILABLE" as const, + pickable: false, + }); + } + + return bins; +}; diff --git a/packages/controlmart/src/worlds/process-outbound/seeders/world.generator.ts b/packages/controlmart/src/worlds/process-outbound/seeders/world.generator.ts new file mode 100644 index 0000000000000000000000000000000000000000..390a0daeb1bd95dd9cf42f89a33905e1cb7b6245 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/seeders/world.generator.ts @@ -0,0 +1,157 @@ +import { createWorld } from "../../../repository/world.repository"; +import { CompanyRepository } from "../../../repository/erp/company.repository"; +import { ProductRepository } from "../../../repository/erp/product.repository"; +import { CompanyLedgerRepository } from "../../../repository/finance/ledger.repository"; +import { WMSWarehouseRepository } from "../../../repository/wms/warehouse.wms.repository"; +import { WMSZoneRepository } from "../../../repository/wms/zone.wms.repository"; +import { 
WMSBinRepository } from "../../../repository/wms/bin.wms.repository"; +import { Inventory } from "../../../models/wms/inventory.wms.model"; +import { InventoryTransaction } from "../../../models/wms/inventory_transaction.wms.model"; +import { generateAllProcessOutboundCompanies, PROCESS_OUTBOUND_COMPANY_NAMES } from "./companies.generator"; +import { generateMpcProducts } from "./products.seeder"; +import { + generateWarehouseData, + generateZonesData, + generateBinsData, + generateWarehouseIds, + generateZoneIds, +} from "./warehouse.seeder"; +import { getIdFromMongoObject } from "../../../utils/mongo.util"; +import type { TWorldRefModel } from "../../../models/shared.model"; + +export interface ProcessOutboundWorldResult { + worldId: string; + companiesCreated: number; + productsCreated: number; + initialCapital: number; +} + +export interface ProcessOutboundWorldConfig { + worldName?: string; + initialCash?: number; + initialReceivables?: number; + initialPayables?: number; + worldId?: string; +} + +const DEFAULT_INITIAL_CASH = 10_000; + +export const seedProcessOutboundWorld = async ( + config: ProcessOutboundWorldConfig = {}, +): Promise => { + const { + worldName = "Process Outbound World", + initialCash = DEFAULT_INITIAL_CASH, + initialReceivables = 0, + initialPayables = 0, + worldId: existingWorldId, + } = config; + + let worldId = existingWorldId; + + if (!worldId) { + const world = await createWorld({ + name: worldName, + description: "Predictable outbound fulfillment process across 4 categories.", + mpcCompany: PROCESS_OUTBOUND_COMPANY_NAMES.MPC, + ticketCreationEnabled: false, + }); + worldId = getIdFromMongoObject(world); + } + + const worldRef: TWorldRefModel = { worldId: worldId! }; + + // 1. Warehouse Infrastructure + const warehouseIds = generateWarehouseIds(); + const zoneIds = generateZoneIds(); + + const warehouseData = generateWarehouseData(worldRef, warehouseIds); + const warehouseRepo = WMSWarehouseRepository(worldId!); + const warehouse = await warehouseRepo.createWarehouse(warehouseData as any); + + const zonesData = generateZonesData(worldRef, warehouseIds, zoneIds); + const zoneRepo = WMSZoneRepository(worldId!); + const createdZones: any[] = []; + for (const zoneData of zonesData) { + createdZones.push(await zoneRepo.createZone(zoneData as any)); + } + + const binsData = generateBinsData(worldRef, warehouseIds, zoneIds); + const binRepo = WMSBinRepository(worldId!); + const createdBins: any[] = []; + for (const binData of binsData) { + createdBins.push(await binRepo.createBin(binData as any)); + } + + // 2. ERP Entities + const companies = generateAllProcessOutboundCompanies(worldRef); + const companyRepo = CompanyRepository(worldId!); + for (const company of companies) { + if (company.companyId) { + const existing = await companyRepo.getCompanyById(company.companyId); + if (!existing) { + await companyRepo.createCompany(company); + } + } + } + + const products = generateMpcProducts(worldRef); + const productRepo = ProductRepository(worldId!); + + // Create products effectively in parallel but wait for ALL to complete + await Promise.all(products.map(product => + productRepo.createProduct(product).catch((err) => { + // Ignore duplicate errors, re-throw others + if (!err.message.includes("already exists")) { + console.error(`Failed to create product ${product.productId}:`, err); + } + }) + )); + + // 3. 
Inventory Seeding (1000 EA in pick bins) + const pickZone = createdZones.find(z => z.zoneType === "PICKING" || z.zoneName.includes("PICK")); + const pickBin = createdBins.find(b => b.zoneId === pickZone?.zoneId); + + for (const product of products) { + // Inventory record will be auto-created by the InventoryTransaction hook check + + + // Add initial transaction so history matches on-hand + await InventoryTransaction.create({ + worldRef, + warehouseId: warehouse.warehouseId, + transactionType: "RECEIVE", + fromBinId: "EXTERNAL", + toBinId: pickBin?.binId || "BIN-PICK-01", + productId: product.productId, + sku: product.sku, + quantity: 1000, + uom: "EA", + referenceType: "INITIAL_LOAD", + referenceId: "SEED-" + Date.now(), + status: "COMPLETED", + performedBy: "SYSTEM_SEEDER", + performedAt: new Date(), + }).then((txn) => { + console.log(`Seeded inventory for ${product.productId} (SKU: ${product.sku}): ${txn.quantity}`); + }).catch((err) => { console.error(`Uncaught Seeder Error for ${product.productId}:`, err); }); + } + + // 4. Financial Ledger + const ledgerRepo = CompanyLedgerRepository(worldId!); + await ledgerRepo.ensure({ + cash: initialCash, + totalReceivables: initialReceivables, + totalPayables: initialPayables, + }); + + return { + worldId: worldId!, + companiesCreated: companies.length, + productsCreated: products.length, + initialCapital: initialCash, + }; +}; + +export { PROCESS_OUTBOUND_COMPANY_NAMES, generateAllProcessOutboundCompanies } from "./companies.generator"; +export { generateMpcProducts } from "./products.seeder"; diff --git a/packages/controlmart/src/worlds/process-outbound/seeders/world.seeder.ts b/packages/controlmart/src/worlds/process-outbound/seeders/world.seeder.ts new file mode 100644 index 0000000000000000000000000000000000000000..b112e4db9563f97c5e5b0ff55934d9de02d35c6a --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/seeders/world.seeder.ts @@ -0,0 +1,13 @@ +import { scheduleProcessOutboundWorld } from "../schedule-process-outbound"; +import { seedProcessOutboundWorld } from "./world.generator"; + +export const seedDataProcessOutboundWorld = async (initialCash: number, worldId?: string) => { + return await seedProcessOutboundWorld({ + initialCash, + worldId, + }); +}; + +export const seedODsProcessOutboundWorld = async (worldId: string, realHoursPerSimDay: number) => { + return await scheduleProcessOutboundWorld(worldId, realHoursPerSimDay); +}; diff --git a/packages/controlmart/src/worlds/process-outbound/verifications/__tests__/outbound-category-flow.verifier.test.ts b/packages/controlmart/src/worlds/process-outbound/verifications/__tests__/outbound-category-flow.verifier.test.ts new file mode 100644 index 0000000000000000000000000000000000000000..7de0f9889134c32689f9a4446653de415e6914bd --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/verifications/__tests__/outbound-category-flow.verifier.test.ts @@ -0,0 +1,173 @@ +import { mock } from "bun:test"; + +const BASE_PATH = "/Users/skyfallmbpm4p2/code/morpheus/packages/controlmart/src"; + +// Mock Repository Instances +const mockOrderRepo = { + getOutboundOrderById: mock((id: string) => { + return Promise.resolve(null as any); + }), +}; +const mockTaskRepo = { + getTaskLogs: mock((filters?: any) => { + return Promise.resolve([] as any[]); + }), +}; +const mockShipmentRepo = { + getShipmentsByStatus: mock((status: string[]) => { + return Promise.resolve([] as any[]); + }), +}; +const mockInventoryRepo = { + getTransactionsByReference: mock((type: string, id: string) 
=> { + // console.log(`DEBUG: mockInventoryRepo.getTransactionsByReference called with ${type}, ${id}`); + return Promise.resolve([] as any[]); + }), +}; + +// Mock Repositories +mock.module(`${BASE_PATH}/repository/wms/outbound_order.wms.repository`, () => ({ + WMSOutboundOrderRepository: (worldId: string) => mockOrderRepo, +})); + +mock.module(`${BASE_PATH}/repository/wms/task.wms.repository`, () => ({ + WMSTaskRepository: (worldId: string) => mockTaskRepo, +})); + +mock.module(`${BASE_PATH}/repository/wms/outbound_shipment.wms.repository`, () => ({ + WMSOutboundShipmentRepository: (worldId: string) => mockShipmentRepo, +})); + +mock.module(`${BASE_PATH}/repository/wms/inventory_transaction.wms.repository`, () => ({ + WMSInventoryTransactionRepository: (worldId: string) => mockInventoryRepo, +})); + +// Now import the code being tested +import { describe, it, expect, beforeEach } from "bun:test"; +import { verifyOutboundCategoryFlow } from "../outbound-category-flow.verifier"; +import type { Ticket } from "../../../../verification/types/verification.types"; + +describe("verifyOutboundCategoryFlow", () => { + const mockLogger = { info: mock(() => { }) }; + const ticket: Ticket = { + id: "t-1", + worldId: "w-1", + odId: "outbound-category-flow", + odRunId: "run-1", + failedStepId: "step-1", + failureType: "TEST", + affectedEntities: [{ type: "ORDER", id: "order-1" }], + createdAt: new Date().toISOString(), + status: "new", + metadata: { category: "ELECTRONICS" } + }; + + beforeEach(() => { + mockOrderRepo.getOutboundOrderById.mockClear(); + mockTaskRepo.getTaskLogs.mockClear(); + mockShipmentRepo.getShipmentsByStatus.mockClear(); + mockInventoryRepo.getTransactionsByReference.mockClear(); + + mockOrderRepo.getOutboundOrderById.mockImplementation(() => Promise.resolve(null as any)); + mockTaskRepo.getTaskLogs.mockImplementation(() => Promise.resolve([] as any[])); + mockShipmentRepo.getShipmentsByStatus.mockImplementation(() => Promise.resolve([] as any[])); + mockInventoryRepo.getTransactionsByReference.mockImplementation(() => Promise.resolve([] as any[])); + }); + + it("should fail if no ORDER entity is found in ticket", async () => { + const ticketNoOrder = { ...ticket, affectedEntities: [] }; + const result = await verifyOutboundCategoryFlow(ticketNoOrder as any, mockLogger); + expect(result.passed).toBe(false); + expect(result.failureReason).toBe("No ORDER entity found in ticket metadata"); + }); + + it("should pass if all checks succeed", async () => { + // Mock success data + mockOrderRepo.getOutboundOrderById.mockImplementation(() => Promise.resolve({ + orderId: "order-1", + status: "CLOSED", + lines: [{ sku: "ELECTRONICS-123" }] + } as any)); + mockTaskRepo.getTaskLogs.mockImplementation(() => Promise.resolve([ + { taskId: "t-1", taskStatus: "COMPLETED", reference: { id: "order-1" } } + ] as any[])); + mockShipmentRepo.getShipmentsByStatus.mockImplementation(() => Promise.resolve([ + { shipmentId: "s-1", status: "SHIPPED", orderId: "order-1" } + ] as any[])); + mockInventoryRepo.getTransactionsByReference.mockImplementation((type, id) => { + // console.log(`RUNNING MOCK for ${type}, ${id}`); + return Promise.resolve([{ id: "tr-1" }] as any[]); + }); + + const result = await verifyOutboundCategoryFlow(ticket, mockLogger); + + if (!result.passed) { + console.log("FAIL Result:", JSON.stringify(result, null, 2)); + } + + expect(result.passed).toBe(true); + expect(result.passedChecks).toBeGreaterThan(0); + expect(result.failedChecks).toBe(0); + }); + + it("should fail if order status 
is not CLOSED", async () => { + mockOrderRepo.getOutboundOrderById.mockImplementation(() => Promise.resolve({ + orderId: "order-1", + status: "PICKING", + lines: [{ sku: "ELECTRONICS-123" }] + } as any)); + mockTaskRepo.getTaskLogs.mockImplementation(() => Promise.resolve([ + { taskId: "t-1", taskStatus: "COMPLETED", reference: { id: "order-1" } } + ] as any[])); + mockShipmentRepo.getShipmentsByStatus.mockImplementation(() => Promise.resolve([ + { shipmentId: "s-1", status: "SHIPPED", orderId: "order-1" } + ] as any[])); + mockInventoryRepo.getTransactionsByReference.mockImplementation(() => Promise.resolve([{ id: "tr-1" }] as any[])); + + const result = await verifyOutboundCategoryFlow(ticket, mockLogger); + + expect(result.passed).toBe(false); + const orderCheck = result.entityStateChecks?.find(c => c.entityType === "ORDER"); + expect(orderCheck?.passed).toBe(false); + }); + + it("should fail if no shipment exists", async () => { + mockOrderRepo.getOutboundOrderById.mockImplementation(() => Promise.resolve({ + orderId: "order-1", + status: "CLOSED", + lines: [{ sku: "ELECTRONICS-123" }] + } as any)); + mockTaskRepo.getTaskLogs.mockImplementation(() => Promise.resolve([ + { taskId: "t-1", taskStatus: "COMPLETED", reference: { id: "order-1" } } + ] as any[])); + mockShipmentRepo.getShipmentsByStatus.mockImplementation(() => Promise.resolve([] as any[])); // No shipments + mockInventoryRepo.getTransactionsByReference.mockImplementation(() => Promise.resolve([{ id: "tr-1" }] as any[])); + + const result = await verifyOutboundCategoryFlow(ticket, mockLogger); + + expect(result.passed).toBe(false); + const shpExistsCheck = result.invariantChecks?.find(i => i.invariantId === "SHP_EXISTS"); + expect(shpExistsCheck?.passed).toBe(false); + }); + + it("should fail if SKU does not match category", async () => { + mockOrderRepo.getOutboundOrderById.mockImplementation(() => Promise.resolve({ + orderId: "order-1", + status: "CLOSED", + lines: [{ sku: "FOOD-123" }] // Wrong category + } as any)); + mockTaskRepo.getTaskLogs.mockImplementation(() => Promise.resolve([ + { taskId: "t-1", taskStatus: "COMPLETED", reference: { id: "order-1" } } + ] as any[])); + mockShipmentRepo.getShipmentsByStatus.mockImplementation(() => Promise.resolve([ + { shipmentId: "s-1", status: "SHIPPED", orderId: "order-1" } + ] as any[])); + mockInventoryRepo.getTransactionsByReference.mockImplementation(() => Promise.resolve([{ id: "tr-1" }] as any[])); + + const result = await verifyOutboundCategoryFlow(ticket, mockLogger); + + expect(result.passed).toBe(false); + const catMatchCheck = result.invariantChecks?.find(i => i.invariantId === "CAT_SKU_MATCH"); + expect(catMatchCheck?.passed).toBe(false); + }); +}); diff --git a/packages/controlmart/src/worlds/process-outbound/verifications/index.ts b/packages/controlmart/src/worlds/process-outbound/verifications/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..a87ac5814389cf4c6ec4257930052f3812ce43fb --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/verifications/index.ts @@ -0,0 +1,7 @@ +import { verifyOutboundCategoryFlow } from "./outbound-category-flow.verifier"; +import type { WorldVerifierMap } from "../../../verification/engine"; +import { type WMSEntityType } from "../../common/wms-verifiers"; + +export const processOutboundVerifiers: WorldVerifierMap = { + "outbound-category-flow": verifyOutboundCategoryFlow, +}; diff --git a/packages/controlmart/src/worlds/process-outbound/verifications/outbound-category-flow.verifier.ts 
b/packages/controlmart/src/worlds/process-outbound/verifications/outbound-category-flow.verifier.ts new file mode 100644 index 0000000000000000000000000000000000000000..c6bf7ca55c9e4803df81a734ba45a3f24a77cbd1 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/verifications/outbound-category-flow.verifier.ts @@ -0,0 +1,213 @@ +import { wmsVerifiers, type WMSEntityType } from "../../common/wms-verifiers"; +import { evaluateInvariant } from "../../../verification/utils/generic-checks"; +import type { + Ticket, + VerificationResult, + InvariantCheckResult, + EntityStateCheckResult +} from "../../../verification/types/verification.types"; +import { buildWMSProjection, type WMSProjection } from "../../../verification/systems/wms/wms-projection.builder"; +import { + checkCausalOrder, + checkQuantityConserved, + checkSingleEntity +} from "../../../verification/primitives/verification-primitives"; + +// Extended Verification Result to include trajectory checks +interface TrajectoryVerificationResult extends VerificationResult { + trajectoryChecks: InvariantCheckResult[]; +} + +/** + * Pure Verifier Logic + */ +const verifyPure = ( + projection: WMSProjection, + ticket: Ticket +): { + entityStateChecks: EntityStateCheckResult[], + invariantChecks: InvariantCheckResult[], + trajectoryChecks: InvariantCheckResult[] +} => { + const { order, tasks, shipments, transactions } = projection; + + // 1. Entity State Checks + // @ts-ignore - CheckOrderStatus uses generic type constraints that differ slightly from our unified VerifiableOrder + // but at runtime 'order' has the necessary fields (status). + const orderCheck = order ? wmsVerifiers.checkOrderStatus(order as any, ["CLOSED"]) : { passed: false, details: "Order missing" } as any; + + const entityStateChecks: EntityStateCheckResult[] = [ + orderCheck, + ...tasks.map((t: any) => wmsVerifiers.checkTaskStatus(t, ["PICK_CONFIRMED", "COMPLETED", "PACKING_PENDING"])), + ...shipments.map((s: any) => wmsVerifiers.checkShipmentStatus(s, ["SHIPPED"])) + ]; + + const invariantChecks: InvariantCheckResult[] = []; + const trajectoryChecks: InvariantCheckResult[] = []; + + // --- A. Temporal Invariants (CAUSALITY) --- + // A1. Allocation before Picking + const allocatedAt = order?.timing?.allocatedAt; + const firstPickReady = tasks + .filter((t: any) => t.taskType === "PICK") + .map((t: any) => t.timing?.releasedAt) + .sort((a: any, b: any) => new Date(a).getTime() - new Date(b).getTime())[0]; + + const allocCheck = checkCausalOrder(allocatedAt, firstPickReady, "TEMP_ALLOC_BEFORE_PICK", "Allocation < Pick Released"); + if (allocCheck) trajectoryChecks.push(allocCheck); + + // A2. Picking before Packing + const latestPickComplete = tasks + .filter((t: any) => t.taskType === "PICK") + .map((t: any) => t.timing?.completedAt) + .sort((a: any, b: any) => new Date(b).getTime() - new Date(a).getTime())[0]; + + const firstShipmentCreated = shipments + .map((s: any) => s.createdAt) + .sort((a: any, b: any) => new Date(a).getTime() - new Date(b).getTime())[0]; + + const pickPackCheck = checkCausalOrder(latestPickComplete, firstShipmentCreated, "TEMP_PICK_BEFORE_PACK", "Pick Complete < Shipment Created"); + if (pickPackCheck) trajectoryChecks.push(pickPackCheck); + + // A3. 
Shipment before Inventory Depletion + const depletionTxns = transactions.filter((tx: any) => tx.transactionType === "SHIP"); + const shipmentShippedAt = shipments.find((s: any) => s.shipmentStatus === "SHIPPED")?.dates?.actualShipTime; + + if (depletionTxns.length > 0) { + const firstDepletion = depletionTxns.sort((a: any, b: any) => new Date(a.transactionDate).getTime() - new Date(b.transactionDate).getTime())[0]; + const shipDepleteCheck = checkCausalOrder(shipmentShippedAt, firstDepletion?.transactionDate, "TEMP_SHIP_BEFORE_DEPLETE", "Shipped <= Depleted"); + if (shipDepleteCheck) trajectoryChecks.push(shipDepleteCheck); + } + + // --- B. Task Semantics --- + // B1. Task Cardinality + if ((order?.lines?.length || 0) > 0) { + trajectoryChecks.push(evaluateInvariant( + "TASK_CARDINALITY", + "At least one pick task exists per order line (heuristic)", + tasks.some((t: any) => t.taskType === "PICK"), + tasks.length, + ">= 1" + )); + } + + // B2. Task Exclusivity + const tasksWithOtherRefs = tasks.filter((t: any) => t.reference?.id !== order?.orderId); + trajectoryChecks.push(evaluateInvariant( + "TASK_EXCLUSIVITY", + "Tasks must only reference this order", + tasksWithOtherRefs.length === 0, + tasksWithOtherRefs.length, + "0" + )); + + // --- C. Inventory Semantics --- + // C1. Quantity Conservation + const totalOrdered = order?.lines.reduce((sum: number, l: any) => sum + (l.orderedQuantity || 0), 0) || 0; + const totalDepleted = Math.abs(depletionTxns.reduce((sum: number, tx: any) => sum + (tx.quantity || 0), 0)); + + if (order?.orderStatus === "CLOSED") { + trajectoryChecks.push(checkQuantityConserved(totalOrdered, totalDepleted, "INV_QTY_CONSERVED")); + } + + // C2. Product Identity + const orderProductIds = new Set(order?.lines.map((l: any) => l.productId)); + const mismatchTxns = transactions.filter((tx: any) => !orderProductIds.has(tx.productId)); + trajectoryChecks.push(evaluateInvariant( + "INV_PRODUCT_MATCH", + "Inventory transactions match order product IDs", + mismatchTxns.length === 0, + mismatchTxns.length, + "0" + )); + + // --- D. Category-Driven Behavioral Checks --- + const category = ticket.metadata?.category || order?.customFields?.category; + if (category) { + trajectoryChecks.push(evaluateInvariant( + "CAT_ZONE_ASSIGNED", + `Tasks for ${category} have zone assignment`, + tasks.every((t: any) => !!t.zoneId), + "Tasks with ZoneId", + "All" + )); + } + + // --- E. Trajectory Integrity --- + // E1. Origin (Anti-Cheat) + // @ts-ignore - ticket.createdAt needs parsing + const baselineTime = ticket.createdAt ? new Date(ticket.createdAt).getTime() : (order?.createdAt ? new Date(order.createdAt).getTime() : 0); + if (baselineTime > 0 && ticket.type !== "incident") { + const preExistingShipments = shipments.filter((s: any) => new Date(s.createdAt).getTime() < baselineTime - 1000); + trajectoryChecks.push(evaluateInvariant( + "TRAJ_ORIGIN_VALID", + "Entities created after workflow start", + preExistingShipments.length === 0, + preExistingShipments.length, + "0 pre-existing" + )); + } + + // E2. Single Shipment + trajectoryChecks.push(checkSingleEntity(shipments, "Shipment", "TRAJ_SINGLE_SHIPMENT")); + + + return { entityStateChecks, invariantChecks, trajectoryChecks }; +}; + + +/** + * Verifier for Predictable Outbound Category Flow + */ +export const verifyOutboundCategoryFlow = async ( + ticket: Ticket, + logger: any +): Promise => { + const startTime = Date.now(); + const worldId = ticket.worldId; + + // 1. 
Build Projection (Data Fetching) + const projection = await buildWMSProjection(ticket); + + if (!projection.orderId) { + const earlySteps = ["get_warehouse", "get_zones", "map_zones", "create_outbound_demand"]; + const isEarlyFailure = earlySteps.includes(ticket.failedStepId); + return { + passed: false, + ticketId: ticket.id, + worldId, + timestamp: new Date().toISOString(), + totalChecks: 1, + passedChecks: 0, + failedChecks: 1, + failureReason: isEarlyFailure + ? `Workflow failed at step '${ticket.failedStepId}' before Order was created` + : "No ORDER entity found explicitly or linked via Shipment", + verificationDurationMs: Date.now() - startTime, + trajectoryChecks: [], + }; + } + + // 2. Pure Verification (Logic) + const result = verifyPure(projection, ticket); + const { entityStateChecks, invariantChecks, trajectoryChecks } = result; + + const allPassed = entityStateChecks.every((c: any) => c.passed) + && invariantChecks.every((i: any) => i.passed) + && trajectoryChecks.every((t: any) => t.passed); + + return { + passed: allPassed, + ticketId: ticket.id, + worldId, + timestamp: new Date().toISOString(), + entityStateChecks, + invariantChecks, + trajectoryChecks, + totalChecks: entityStateChecks.length + invariantChecks.length + trajectoryChecks.length, + passedChecks: entityStateChecks.filter((c: any) => c.passed).length + invariantChecks.filter((i: any) => i.passed).length + trajectoryChecks.filter((t: any) => t.passed).length, + failedChecks: entityStateChecks.filter((c: any) => !c.passed).length + invariantChecks.filter((i: any) => !i.passed).length + trajectoryChecks.filter((t: any) => !t.passed).length, + verificationDurationMs: Date.now() - startTime, + failureReason: allPassed ? undefined : "One or more verification checks failed", + }; +}; diff --git a/packages/controlmart/src/worlds/process-outbound/world.docs.ts b/packages/controlmart/src/worlds/process-outbound/world.docs.ts new file mode 100644 index 0000000000000000000000000000000000000000..b8f1e6c35d5e233c18f777267b0b8efd8bbf5c19 --- /dev/null +++ b/packages/controlmart/src/worlds/process-outbound/world.docs.ts @@ -0,0 +1,136 @@ +export const processOutboundWorldDoc = { + meta: { + version: "1.1.0", + generatedAt: "2026-01-23", + docType: "World Definition", + }, + world: { + name: "Process Outbound - Order to Cash", + description: + "A focused outbound simulation world demonstrating the complete order lifecycle from demand creation through fulfillment and payment across four distinct product categories: Dairy, Vegan, Self Care, and Clothing.", + industry: "Retail & Distribution", + location: "Global Logistics Hub (Chicago, IL)", + size: "Large Distribution Center", + complexity: "Medium", + story: + "The Process Outbound world tells the story of the SkyFoods Global Distribution Center, a facility designed for extreme efficiency and category specialization. \n\nUnlike traditional warehouses that mix products randomly, SkyFoods operates on a strict 'Category Wave' system. The day begins at 5:00 AM with the Dairy Wave (cold chain priority), moves to Vegan products at 8:00 AM, shifts to Self Care at 1:00 PM, and concludes with Clothing at 6:00 PM. \n\nThis 'Predictable Outbound Category Flow' ensures that specialized resources—like refrigerated docks for dairy or clean-room packing stations for hygiene products—are utilized optimally. 
The Operational Descriptor (OD) driving this world doesn't just move boxes; it orchestrates a precise choreography of demand generation, inventory reservation, picking, packing, and shipping, serving as the heartbeat of the distribution center.", + }, + businessContext: { + name: "SkyFoods Global Distribution Center", + industry: "Multi-Category Distribution", + description: + "A centralized distribution hub serving diverse retail segments. The facility operates on a high-throughput outbound model, prioritizing category-based picking waves to optimize labor and equipment utilization.", + coreValues: [ + "Predictable Fulfillment: Category-specific waves ensure consistent delivery schedules.", + "Operational Precision: Dynamic zone mapping ensures the right equipment is used for the right product.", + "Inventory Transparency: Real-time tracking from pick bin to shipping dock.", + ], + productLines: [ + { + name: "Dairy Essentials", + storage: "Refrigerated (2-4°C)", + shelfLife: "14-30 Days", + description: "Fast-moving perishable dairy products.", + products: [ + { sku: "DAIRY-MILK-001", name: "Organic Whole Milk 1L" }, + { sku: "DAIRY-YOGURT-001", name: "Greek Yogurt Plain 500g" }, + { sku: "DAIRY-CHEESE-001", name: "Cheddar Cheese Block 250g" }, + ], + }, + { + name: "Vegan & Plant-Based", + storage: "Ambient / Refrigerated", + shelfLife: "60-180 Days", + description: "Plant-based alternatives for modern diets.", + products: [ + { sku: "VEGAN-MILK-001", name: "Almond Milk Unsweetened 1L" }, + { sku: "VEGAN-TOFU-001", name: "Tofu Firm 400g" }, + { sku: "VEGAN-BURGER-001", name: "Vegan Burger Patties (4pk)" }, + ], + }, + { + name: "Self Care & Personal Hygiene", + storage: "Room Temperature", + shelfLife: "730 Days", + description: "Premium personal care and hygiene essentials.", + products: [ + { sku: "CARE-LOTION-001", name: "Moisturizing Body Lotion 200ml" }, + { sku: "CARE-SOAP-001", name: "Organic Lavender Soap Bar" }, + { sku: "CARE-SHAMPOO-001", name: "Herbal Shampoo 500ml" }, + ], + }, + { + name: "Urban Apparel", + storage: "Standard Dry Bin", + shelfLife: "N/A", + description: "High-quality clothing and accessories.", + products: [ + { sku: "CLOTH-TSHIRT-001", name: "Classic White T-Shirt (M)" }, + { sku: "CLOTH-JEANS-001", name: "Comfort Denim Jeans (32/32)" }, + { sku: "CLOTH-SOCKS-001", name: "Wool Blend Socks (Pair)" }, + ], + }, + ], + }, + ecosystem: { + partners: "An internal logistics hub coordinating between specialized suppliers and major retail customers.", + suppliers: [ + { name: "Pure Dairy Solutions", role: "Dairy Supplier", category: "DAIRY" }, + { name: "Green Harvest Vegan", role: "Vegan Goods Supplier", category: "VEGAN" }, + { name: "Vitality Self care Co", role: "Personal Care Supplier", category: "SELF_CARE" }, + { name: "Urban Threads Clothing", role: "Apparel Supplier", category: "CLOTHING" }, + ], + customers: [ + { name: "Standard Retail Group", role: "Primary Retailer", description: "Large-scale retail customer receiving multi-category shipments." 
}, + ], + }, + operationalDescriptors: { + standardActors: [ + { + id: "outbound-category-flow", + name: "Predictable Outbound Category Flow", + persona: "Warehouse Manager", + type: "Standard Workflow", + schedules: [ + "Dairy Wave: Daily @ 05:00 AM (Sim Time)", + "Vegan Wave: Daily @ 08:00 AM (Sim Time)", + "Self Care Wave: Daily @ 01:00 PM (Sim Time)", + "Clothing Wave: Daily @ 06:00 PM (Sim Time)", + "Note: 5 immediate executions occur at system boot to populate the world immediately.", + ], + description: + "This automated workflow acts as the Warehouse Manager's digital assistant. It ensures that every order follows a 'Happy Path' from creation to dispatch. \n\n1. It determines the product category (e.g., Dairy) and warehouse context. \n2. It generates a demand signal (Outbound Order). \n3. It intelligently routes picking tasks to the correct zone (e.g., Fridge Zone for Dairy). \n4. It manages the hand-off from Pickers (Task Completion) to Packers (Handling Unit Creation). \n5. Finally, it confirms the shipment is loaded and dispatched, closing the order loop.", + keySteps: [ + "Initialize & Context Setup", + "Create Category Demand", + "Reserve Inventory", + "Generate & Release Pick Tasks", + "Execute Picking & Move to Packing", + "Complete Picking Tasks (Hand-off)", + "Create Handling Units (Packing)", + "Stage, Load, & Dispatch Shipment", + "Close Order & Update Balances" + ], + inputs: ["Product Category (DAIRY, VEGAN, etc.)", "Simulated Inventory Balance"], + outputs: ["Shipped Goods", "Updated Inventory Levels", "Closed Outbound Order"], + }, + ], + backgroundActors: [ + { + id: "inventory-sync-check", + name: "Systemic Inventory Balance Check", + persona: "Inventory Controller", + type: "Background Job", + schedule: "Daily @ Midnight (Sim Time)", + description: "Ensures WMS and ERP inventory balances are perfectly aligned after the day's fulfillment waves.", + responsibility: "Data Integrity & Stock Accuracy", + }, + ], + }, + technicalServices: { + ERP: "Enterprise Resource Planning: Central master for product data, companies, and financial orders.", + WMS: "Warehouse Management System: Orchestrates the physical movement, bin assignments, and task management.", + Transportation: "Logistics Service: Handles dock assignments and outbound shipment confirmations.", + Finance: "Accounting: Records revenue and inventory depletion values.", + }, +}; diff --git a/packages/controlmart/test-ai-ticket.ts b/packages/controlmart/test-ai-ticket.ts new file mode 100644 index 0000000000000000000000000000000000000000..f96bc8091eefb899129e80c65f9783f2da9d42dd --- /dev/null +++ b/packages/controlmart/test-ai-ticket.ts @@ -0,0 +1,61 @@ + +import { analyzeLogQueueAndGenerateTicket } from "./src/services/ticketing-ai.service"; + +const humanLogQueue = { + runId: "human-run-123", + odId: "inbound-asn-process", + odName: "Inbound ASN Process", + persona: "Warehouse Manager", + isTicketCandidate: true, + status: "queued", + logs: [ + "Starting OD execution...", + "Executing step: create_inbound_order", + "[CHAOS] Simulated VALIDATION_ERROR failure", + "Error in step, attempt 1", + "OD execution failed" + ], + entries: [ + { + odId: "inbound-asn-process", + odName: "Inbound ASN Process", + persona: "Warehouse Manager", + stepId: "create_inbound_order", + service: "wms", + tool: "wms.inbound.order.create", + chaosType: "VALIDATION_ERROR", + modifications: ["[CHAOS] Simulated VALIDATION_ERROR failure"], + timestamp: new Date().toISOString() + } + ] +}; + +const systemLogQueue = { + ...humanLogQueue, + 
runId: "system-run-456", + persona: "System", + logs: [ + "Starting Background Job...", + "Executing step: process_data", + "[CHAOS] Simulated DATABASE_ERROR failure", + "Stack trace: Error: Connection timed out at /src/db.ts:50", + "Job failed" + ] +}; + +async function main() { + console.log("TEST 1: HUMAN PERSONA (Warehouse Manager)"); + const ticketHuman = await analyzeLogQueueAndGenerateTicket(humanLogQueue as any); + console.log("Ticket Human:", ticketHuman); + console.log("Title:", ticketHuman?.title); + console.log("Description:", ticketHuman?.description); + + console.log("\nTEST 2: SYSTEM PERSONA (Automated Job)"); + const ticketSystem = await analyzeLogQueueAndGenerateTicket(systemLogQueue as any); + console.log("Ticket System:", ticketSystem); + console.log("Title:", ticketSystem?.title); + console.log("Description:", ticketSystem?.description); + +} + +main().catch(console.error); diff --git a/packages/controlmart/tsconfig.json b/packages/controlmart/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..ad7da8851649bc438bde50b2640a38086d1be3eb --- /dev/null +++ b/packages/controlmart/tsconfig.json @@ -0,0 +1,32 @@ +{ + "compilerOptions": { + // Environment setup & latest features + "lib": ["ESNext", "dom"], + "target": "ESNext", + "module": "Preserve", + "moduleDetection": "force", + "jsx": "react-jsx", + "allowJs": true, + "outDir": "dist", + // Bundler mode + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "noEmit": true, + + // Best practices + "strict": true, + "skipLibCheck": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedIndexedAccess": true, + "noImplicitOverride": true, + + // Some stricter flags (disabled by default) + "noUnusedLocals": false, + "noUnusedParameters": false, + "noPropertyAccessFromIndexSignature": false, + "paths": { + "@skyfall/morpheus-utility/*": ["../morpheus-utility/src/*"] + } + } +} diff --git a/packages/controlmart/ui/index.html b/packages/controlmart/ui/index.html new file mode 100644 index 0000000000000000000000000000000000000000..505e3e73e9ce8717cfab3743fc00cfc3d1818f32 --- /dev/null +++ b/packages/controlmart/ui/index.html @@ -0,0 +1,16 @@ + + + + + + + + Morpheus Controlmart + + + +
+ + + + \ No newline at end of file diff --git a/packages/controlmart/ui/package.json b/packages/controlmart/ui/package.json new file mode 100644 index 0000000000000000000000000000000000000000..85f17db003aefb678a206c6b1cac0af48c9e1048 --- /dev/null +++ b/packages/controlmart/ui/package.json @@ -0,0 +1,28 @@ +{ + "name": "morpheus-ui", + "version": "1.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vue-tsc -b && vite build", + "preview": "vite preview" + }, + "dependencies": { + "@vue-flow/background": "^1.3.2", + "@vue-flow/controls": "^1.1.3", + "@vue-flow/core": "^1.48.1", + "vue": "^3.5.13", + "vue-force-graph": "^0.1.2", + "vue-router": "^4.5.0" + }, + "devDependencies": { + "@types/three": "^0.182.0", + "@vitejs/plugin-vue": "^5.2.1", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.49", + "tailwindcss": "^3.4.17", + "typescript": "^5.7.2", + "vite": "^6.0.3", + "vue-tsc": "^2.2.0" + } +} diff --git a/packages/controlmart/ui/postcss.config.js b/packages/controlmart/ui/postcss.config.js new file mode 100644 index 0000000000000000000000000000000000000000..2e7af2b7f1a6f391da1631d93968a9d487ba977d --- /dev/null +++ b/packages/controlmart/ui/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} diff --git a/packages/controlmart/ui/src/App.vue b/packages/controlmart/ui/src/App.vue new file mode 100644 index 0000000000000000000000000000000000000000..b8389d2d9552222421bb9d274fac600a828b432f --- /dev/null +++ b/packages/controlmart/ui/src/App.vue @@ -0,0 +1,77 @@ + + + diff --git a/packages/controlmart/ui/src/api/edi.ts b/packages/controlmart/ui/src/api/edi.ts new file mode 100644 index 0000000000000000000000000000000000000000..9912223c4e00f8438cb5ab8ba0e6c2bd55a6d939 --- /dev/null +++ b/packages/controlmart/ui/src/api/edi.ts @@ -0,0 +1,280 @@ +const API_BASE = '' + +// ============================================ +// Dashboard Types +// ============================================ + +export interface EDIDashboard { + total: number + todayCount: number + byDirection: { + INBOUND: number + OUTBOUND: number + } + byDocType: Record + byStatus: Record + errorCount: number +} + +// ============================================ +// Transaction Types +// ============================================ + +export interface EDITransaction { + _id: string + transactionId: string + docType: string + direction: 'INBOUND' | 'OUTBOUND' + partnerId: string + customerId?: string + companyId?: string + status: 'RECEIVED' | 'QUEUED' | 'PROCESSING' | 'SENT' | 'ACKNOWLEDGED' | 'ERRORED' | 'REJECTED' | 'DELIVERED' | 'ARCHIVED' + timestamp: string + dollarValue?: number + payload?: Record + rawEdi?: string + flowId?: string + fileName?: string + interchangeControlNumber?: string + groupControlNumber?: string + transactionSetControlNumber?: string + businessDocumentNumber?: string + errorReason?: string + errorDetails?: any + worldRef: { + worldId: string + } + createdAt: string + updatedAt: string +} + +// Create Transaction Input Type +export interface CreateTransactionInput { + partnerId: string + customerId?: string + docType: string + direction: 'INBOUND' | 'OUTBOUND' + status?: string + dollarValue?: number + rawEdi?: string + payload?: Record + flowId?: string + timestamp?: string +} + +export interface TransactionFilter { + partnerId?: string + docType?: string[] + direction?: 'INBOUND' | 'OUTBOUND' + status?: string[] + dateStart?: string + dateEnd?: string + search?: string + limit?: number + cursor?: string +} + +// 
============================================ +// Paginated Response Type +// ============================================ + +export interface PaginatedResponse { + items: T[] + pagination: { + totalCount: number + limit: number + hasMore: boolean + nextCursor?: string + } +} + +// ============================================ +// Constants +// ============================================ + +export const EDI_DOC_TYPES = ['850', '855', '856', '810', '820', '997', '999'] as const +export const EDI_DIRECTIONS = ['INBOUND', 'OUTBOUND'] as const +export const EDI_STATUSES = ['RECEIVED', 'QUEUED', 'PROCESSING', 'SENT', 'ACKNOWLEDGED', 'ERRORED', 'REJECTED'] as const + +// Document type descriptions +export const DOC_TYPE_LABELS: Record = { + '850': 'Purchase Order', + '855': 'PO Acknowledgment', + '856': 'Ship Notice', + '810': 'Invoice', + '820': 'Payment Order', + '997': 'Functional Ack', + '999': 'Implementation Ack' +} + +// Flow groupings +export const PURCHASE_FLOW_TYPES = ['850', '855', '856'] +export const INVOICE_FLOW_TYPES = ['810', '820'] +export const ACK_FLOW_TYPES = ['997', '999'] + +// ============================================ +// API Functions +// ============================================ + +/** + * Get EDI Dashboard metrics + */ +export async function getEDIDashboard(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/edi/dashboard`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get EDI dashboard') + return data.data +} + +/** + * Get EDI transactions list with filters and pagination + */ +export async function getTransactions( + worldId: string, + filter: TransactionFilter = {} +): Promise> { + const params = new URLSearchParams() + if (filter.partnerId) params.append('partnerId', filter.partnerId) + filter.docType?.forEach(t => params.append('docType', t)) + if (filter.direction) params.append('direction', filter.direction) + filter.status?.forEach(s => params.append('status', s)) + if (filter.dateStart) params.append('dateStart', filter.dateStart) + if (filter.dateEnd) params.append('dateEnd', filter.dateEnd) + if (filter.search) params.append('search', filter.search) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.cursor) params.append('cursor', filter.cursor) + + const url = `${API_BASE}/${worldId}/edi${params.toString() ? '?' 
+ params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get EDI transactions') + + // Handle both array and paginated response formats + if (Array.isArray(data.data)) { + return { + items: data.data, + pagination: { + totalCount: data.data.length, + limit: filter.limit || 50, + hasMore: false + } + } + } + return { + items: data.data, + pagination: { + totalCount: data.pagination?.totalCount || data.data?.length || 0, + limit: data.pagination?.limit || filter.limit || 50, + hasMore: data.pagination?.hasMore || false, + nextCursor: data.pagination?.nextCursor + } + } +} + +/** + * Get single EDI transaction by ID + */ +export async function getTransactionById(worldId: string, transactionId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/edi/${transactionId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get EDI transaction') + return data.data +} + +/** + * Update transaction status + */ +export async function updateTransactionStatus( + worldId: string, + transactionId: string, + status: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/edi/${transactionId}/status`, { + method: 'PATCH', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update transaction status') + return data.data +} + +/** + * Update transaction fields + */ +export async function updateTransaction( + worldId: string, + transactionId: string, + updates: Partial +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/edi/${transactionId}`, { + method: 'PATCH', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(updates) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update transaction') + return data.data +} + +/** + * Requeue an errored transaction + */ +export async function requeueTransaction(worldId: string, transactionId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/edi/${transactionId}/requeue`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + } + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to requeue transaction') + return data.data +} + +/** + * Search partners for autocomplete + */ +export async function searchPartners(worldId: string, search: string): Promise { + const params = new URLSearchParams() + params.append('search', search) + params.append('limit', '20') + + const res = await fetch(`${API_BASE}/${worldId}/edi?${params.toString()}`) + const data = await res.json() + if (!data.success) return [] + + // Extract unique partner IDs from results + const partners = new Set() + const items = Array.isArray(data.data) ? 
data.data : data.data || [] + items.forEach((t: EDITransaction) => { + if (t.partnerId) partners.add(t.partnerId) + }) + return Array.from(partners) +} + +/** + * Create a new EDI transaction + */ +export async function createTransaction( + worldId: string, + input: CreateTransactionInput +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/edi`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(input) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create EDI transaction') + return data.data +} + +// Status values for create form (backend accepts these) +export const EDI_STATUSES_CREATE = ['RECEIVED', 'QUEUED', 'PROCESSING', 'DELIVERED', 'ERRORED', 'ARCHIVED'] as const diff --git a/packages/controlmart/ui/src/api/erp.ts b/packages/controlmart/ui/src/api/erp.ts new file mode 100644 index 0000000000000000000000000000000000000000..b17025d55f323eca2f8f5a0b7ef878072f7bb64e --- /dev/null +++ b/packages/controlmart/ui/src/api/erp.ts @@ -0,0 +1,582 @@ +const API_BASE = '' + +// ============================================ +// Dashboard Types +// ============================================ + +export interface ERPDashboard { + orders: { + purchaseOrders: { + total: number + byStatus: Record + recentOrders: number + totalValue: number + averageOrderValue: number + } + salesOrders: { + total: number + byStatus: Record + recentOrders: number + totalValue: number + averageOrderValue: number + } + } + invoices: { + total: number + byStatus: Record + totalOutstanding: number + overdueCount: number + paidThisMonth: number + } + companies: { + total: number + byType: Record + activeCustomers: number + activeSuppliers: number + activeCompanies: number + } + products: { + total: number + activeProducts: number + discontinuedProducts: number + } +} + +// ============================================ +// Company Types +// ============================================ + +export interface ERPCompany { + _id: string + companyId: string + name: string + legalName?: string + duns?: string + taxId?: string + currency: string + paymentTerms?: string + creditLimit?: number + creditHold: boolean + status: 'ACTIVE' | 'INACTIVE' | 'PROSPECT' | 'BLOCKED' + companyType: 'CUSTOMER' | 'SUPPLIER' | 'PARTNER' | 'INTERNAL' + primaryContact?: { + name?: string + email?: string + phone?: string + } + billingAddress?: any + shippingAddress?: any + createdAt: string + updatedAt: string +} + +export interface CompanyFilter { + companyType?: string[] + status?: string[] + search?: string + limit?: number + offset?: number +} + +// ============================================ +// Product Types +// ============================================ + +export interface ERPProduct { + _id: string + productId: string + name: string + description?: string + upc?: string + ean?: string + commodityCode?: string + unitOfMeasure: string + price?: { + currency: string + amount: number + } + cost?: { + currency: string + amount: number + } + leadTimeDays?: number + status: 'ACTIVE' | 'DISCONTINUED' + weight?: { + value: number + unit: string + } + dimensions?: { + length: number + width: number + height: number + unit: string + } + createdAt: string + updatedAt: string +} + +export interface ProductFilter { + status?: string[] + search?: string + limit?: number + offset?: number +} + +// ============================================ +// Order Types +// ============================================ + +export interface OrderLine { + lineNumber: 
number + poLineId?: string + sku?: string + description?: string + quantityOrdered: number + quantityBackordered: number + quantityCanceled: number + unitOfMeasure?: string + unitPrice?: number + lineTotal?: number + promisedDate?: string +} + +export interface ERPOrder { + _id: string + orderId: string + poType: 'STANDARD' | 'BLANKET' | 'CONTRACT' | 'DROP_SHIP' + customerId?: string + partnerId?: string + orderDate?: string + requestedDate?: string + dueDate?: string + buyer?: { + id: string + name: string + } + currency: string + subtotal?: number + totalAmount?: number + status: 'RECEIVED' | 'ACKED' | 'IN_PROGRESS' | 'PARTIALLY_SHIPPED' | 'COMPLETED' | 'CANCELLED' + lines?: OrderLine[] + notes?: string + customFields?: Record + createdAt: string + updatedAt: string +} + +export interface OrderFilter { + status?: string[] + poType?: string[] + customerId?: string + direction?: 'INBOUND' | 'OUTBOUND' + dateStart?: string + dateEnd?: string + search?: string + limit?: number + offset?: number +} + +// ============================================ +// Invoice Types +// ============================================ + +export interface InvoiceLine { + lineNumber: number + sku?: string + description?: string + quantity?: number + unitPrice?: number + lineAmount?: number + taxDetails?: any[] +} + +export interface ERPInvoice { + _id: string + invoiceId: string + invoiceType: 'STANDARD' | 'CREDIT' | 'DEBIT' | 'CORRECTION' + poNumber?: string + customerId?: string + partnerId?: string + issueDate?: string + dueDate?: string + currency: string + subtotal?: number + totalAmount?: number + balanceDue?: number + status: 'DRAFT' | 'SENT' | 'VALIDATED' | 'REJECTED' | 'PAID' | 'PARTIALLY_PAID' + lines?: InvoiceLine[] + paymentTerms?: string + allowances?: any[] + charges?: any[] + taxes?: any[] + references?: any[] + disputes?: any[] + correctionHistory?: any[] + attachments?: any[] + customFields?: Record + createdAt: string + updatedAt: string +} + +export interface InvoiceFilter { + status?: string[] + invoiceType?: string[] + customerId?: string + overdue?: boolean + dateStart?: string + dateEnd?: string + search?: string + limit?: number + offset?: number +} + +// ============================================ +// Paginated Response Type +// ============================================ + +export interface PaginatedResponse { + items: T[] + pagination: { + total: number + limit: number + offset: number + hasMore: boolean + } +} + +// ============================================ +// Constants +// ============================================ + +export const ORDER_STATUSES = ['RECEIVED', 'ACKED', 'IN_PROGRESS', 'MATERIALS_PICKED', 'MANUFACTURING_COMPLETE', 'PARTIALLY_SHIPPED', 'COMPLETED', 'CANCELLED'] as const +export const ORDER_TYPES = ['STANDARD', 'BLANKET', 'CONTRACT', 'DROP_SHIP'] as const +export const INVOICE_STATUSES = ['DRAFT', 'SENT', 'VALIDATED', 'REJECTED', 'PAID', 'PARTIALLY_PAID'] as const +export const INVOICE_TYPES = ['STANDARD', 'CREDIT', 'DEBIT', 'CORRECTION'] as const +export const COMPANY_STATUSES = ['ACTIVE', 'INACTIVE', 'PROSPECT', 'BLOCKED'] as const +export const COMPANY_TYPES = ['CUSTOMER', 'SUPPLIER', 'PARTNER', 'INTERNAL'] as const +export const PRODUCT_STATUSES = ['ACTIVE', 'DISCONTINUED'] as const + +// ============================================ +// API Functions +// ============================================ + +/** + * Get ERP Operations Dashboard metrics + */ +export async function getERPDashboard(worldId: string): Promise { + const res = await 
fetch(`${API_BASE}/${worldId}/erp/operations-dashboard`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get ERP dashboard') + return data.data +} + +/** + * Get companies list with filters and pagination + */ +export async function getCompanies( + worldId: string, + filter: CompanyFilter = {} +): Promise> { + const params = new URLSearchParams() + filter.companyType?.forEach(t => params.append('companyType', t)) + filter.status?.forEach(s => params.append('status', s)) + if (filter.search) params.append('search', filter.search) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/erp/companies${params.toString() ? '?' + params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get companies') + + // Handle both array and paginated response formats + if (Array.isArray(data.data)) { + return { + items: data.data, + pagination: { + total: data.data.length, + limit: filter.limit || 50, + offset: filter.offset || 0, + hasMore: false + } + } + } + return { + items: data.data.items || data.data, + pagination: { + total: data.data.totalCount || data.data.items?.length || 0, + limit: data.data.limit || filter.limit || 50, + offset: filter.offset || 0, + hasMore: data.data.hasMore || false + } + } +} + +/** + * Get single company by ID + */ +export async function getCompanyById(worldId: string, companyId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/companies/${companyId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get company') + return data.data +} + +/** + * Get products list with filters and pagination + */ +export async function getProducts( + worldId: string, + filter: ProductFilter = {} +): Promise> { + const params = new URLSearchParams() + filter.status?.forEach(s => params.append('status', s)) + if (filter.search) params.append('search', filter.search) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/erp/products${params.toString() ? '?' 
+ params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get products') + + // Handle both array and paginated response formats + if (Array.isArray(data.data)) { + return { + items: data.data, + pagination: { + total: data.data.length, + limit: filter.limit || 50, + offset: filter.offset || 0, + hasMore: false + } + } + } + return { + items: data.data.items || data.data, + pagination: { + total: data.data.totalCount || data.data.items?.length || 0, + limit: data.data.limit || filter.limit || 50, + offset: filter.offset || 0, + hasMore: data.data.hasMore || false + } + } +} + +/** + * Get single product by ID + */ +export async function getProductById(worldId: string, productId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/products/${productId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get product') + return data.data +} + +/** + * Get orders list with filters and pagination + */ +export async function getOrders( + worldId: string, + filter: OrderFilter = {} +): Promise> { + const params = new URLSearchParams() + filter.status?.forEach(s => params.append('status', s)) + filter.poType?.forEach(t => params.append('poType', t)) + if (filter.customerId) params.append('customerId', filter.customerId) + if (filter.direction) params.append('direction', filter.direction) + if (filter.dateStart) params.append('dateStart', filter.dateStart) + if (filter.dateEnd) params.append('dateEnd', filter.dateEnd) + if (filter.search) params.append('search', filter.search) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/erp/orders${params.toString() ? '?' 
+ params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get orders') + + // Handle both array and paginated response formats + if (Array.isArray(data.data)) { + return { + items: data.data, + pagination: { + total: data.data.length, + limit: filter.limit || 50, + offset: filter.offset || 0, + hasMore: false + } + } + } + return { + items: data.data.items || data.data, + pagination: { + total: data.data.totalCount || data.data.items?.length || 0, + limit: data.data.limit || filter.limit || 50, + offset: filter.offset || 0, + hasMore: data.data.hasMore || false + } + } +} + +/** + * Get single order by ID + */ +export async function getOrderById(worldId: string, orderId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/orders/${orderId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get order') + return data.data +} + +/** + * Update order status + */ +export async function updateOrderStatus(worldId: string, orderId: string, status: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/orders/${orderId}/status`, { + method: 'PUT', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update order status') + return data.data +} + +/** + * Update order fields (e.g., poType) + */ +export async function updateOrder(worldId: string, orderId: string, updates: Partial): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/orders/${orderId}`, { + method: 'PUT', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(updates) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update order') + return data.data +} + +/** + * Create new order + */ +export async function createOrder(worldId: string, order: any): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/orders`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(order) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create order') + return data.data +} + +/** + * Get invoices list with filters and pagination + */ +export async function getInvoices( + worldId: string, + filter: InvoiceFilter = {} +): Promise> { + const params = new URLSearchParams() + filter.status?.forEach(s => params.append('status', s)) + filter.invoiceType?.forEach(t => params.append('invoiceType', t)) + if (filter.customerId) params.append('customerId', filter.customerId) + if (filter.overdue) params.append('overdue', 'true') + if (filter.dateStart) params.append('dateStart', filter.dateStart) + if (filter.dateEnd) params.append('dateEnd', filter.dateEnd) + if (filter.search) params.append('search', filter.search) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/erp/invoices${params.toString() ? '?' 
+ params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get invoices') + + // Handle both array and paginated response formats + if (Array.isArray(data.data)) { + return { + items: data.data, + pagination: { + total: data.data.length, + limit: filter.limit || 50, + offset: filter.offset || 0, + hasMore: false + } + } + } + return { + items: data.data.items || data.data, + pagination: { + total: data.data.totalCount || data.data.items?.length || 0, + limit: data.data.limit || filter.limit || 50, + offset: filter.offset || 0, + hasMore: data.data.hasMore || false + } + } +} + +/** + * Get single invoice by ID + */ +export async function getInvoiceById(worldId: string, invoiceId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/invoices/${invoiceId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get invoice') + return data.data +} + +/** + * Update invoice status + */ +export async function updateInvoiceStatus(worldId: string, invoiceId: string, status: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/invoices/${invoiceId}/status`, { + method: 'PUT', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update invoice status') + return data.data +} + +/** + * Update invoice fields (e.g., invoiceType) + */ +export async function updateInvoice(worldId: string, invoiceId: string, updates: Partial): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/invoices/${invoiceId}`, { + method: 'PUT', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(updates) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update invoice') + return data.data +} + +/** + * Create new invoice + */ +export async function createInvoice(worldId: string, invoice: any): Promise { + const res = await fetch(`${API_BASE}/${worldId}/erp/invoices`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(invoice) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create invoice') + return data.data +} diff --git a/packages/controlmart/ui/src/api/finance.ts b/packages/controlmart/ui/src/api/finance.ts new file mode 100644 index 0000000000000000000000000000000000000000..9dfc05dc6c9d981174b0b7aec93ce1e84453efb6 --- /dev/null +++ b/packages/controlmart/ui/src/api/finance.ts @@ -0,0 +1,216 @@ +const API_BASE = '' + +export interface FinancialSummary { + totalIncoming: number + totalOutgoing: number + netBalance: number + transactionCount: number + avgTransactionAmount: number +} + +export interface Ledger { + cash: number + totalReceivables: number + totalPayables: number + netPosition: number + createdAt: string + updatedAt: string +} + +export async function getFinancialSummary(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/summary`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get financial summary') + return data.data +} + +export async function getLedger(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/ledger`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get ledger') + 
return data.data +} + +export interface Transaction { + transactionId: string + type: 'payment_in' | 'payment_out' + amount: number + partnerId?: string + sourceType: string + sourceId?: string + metadata?: Record + createdAt: string + updatedAt: string +} + +export interface TransactionsFilter { + type?: 'payment_in' | 'payment_out' + sourceType?: string + partnerId?: string + amountMin?: number + amountMax?: number + dateStart?: string + dateEnd?: string + search?: string + limit?: number + cursor?: string +} + +export interface TransactionsResponse { + transactions: Transaction[] + totalCount: number + limit: number + hasMore: boolean + nextCursor: string | null + previousCursor: string | null +} + +export async function getFinancialTransactions( + worldId: string, + filter: TransactionsFilter = {} +): Promise { + const params = new URLSearchParams() + if (filter.type) params.append('type', filter.type) + if (filter.sourceType) params.append('sourceType', filter.sourceType) + if (filter.partnerId) params.append('partnerId', filter.partnerId) + if (filter.amountMin !== undefined) params.append('amountMin', filter.amountMin.toString()) + if (filter.amountMax !== undefined) params.append('amountMax', filter.amountMax.toString()) + if (filter.dateStart) params.append('dateStart', filter.dateStart) + if (filter.dateEnd) params.append('dateEnd', filter.dateEnd) + if (filter.search) params.append('search', filter.search) + if (filter.limit) params.append('limit', filter.limit.toString()) + if (filter.cursor) params.append('cursor', filter.cursor) + + const url = `${API_BASE}/${worldId}/finance/transactions${params.toString() ? '?' + params.toString() : ''}` + const res = await fetch(url) + const response = await res.json() + if (!response.success) throw new Error(response.error || 'Failed to get transactions') + + return { + transactions: response.data || [], + totalCount: response.pagination?.totalCount || 0, + limit: response.pagination?.limit || 25, + hasMore: response.pagination?.hasMore || false, + nextCursor: response.pagination?.nextCursor || null, + previousCursor: response.pagination?.previousCursor || null + } +} + +// Stats by type - API returns { payment_in: {...}, payment_out: {...} } +export interface TypeStatItem { + count: number + totalAmount: number + avgAmount: number +} + +export interface TypeStatsResponse { + payment_in?: TypeStatItem + payment_out?: TypeStatItem +} + +export async function getStatsByType(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/stats/by-type`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get stats by type') + return data.data || {} +} + +// Stats by partner - API returns array with partnerId field +export interface PartnerStats { + partnerId: string + count: number + totalAmount: number + avgAmount: number +} + +export async function getStatsByPartner(worldId: string, limit: number = 5): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/stats/by-partner?limit=${limit}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get stats by partner') + return data.data || [] +} + +// Combined stats +export interface CombinedStats { + byType: TypeStatsResponse + byPartner: PartnerStats[] + summary: FinancialSummary +} + +export async function getFinanceStats(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/stats`) + const data = await res.json() + if (!data.success) throw 
new Error(data.error || 'Failed to get finance stats') + return data.data +} + +// Create transaction +export interface CreateTransactionInput { + partnerId: string + type: 'payment_in' | 'payment_out' + amount: number + sourceType: string + sourceId: string + metadata?: { + description?: string + paymentTerms?: string + dueDate?: string + } +} + +export async function createTransaction(worldId: string, input: CreateTransactionInput): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/transactions`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(input) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create transaction') + return data.data +} + +// Update transaction +export interface UpdateTransactionInput { + partnerId?: string + type?: 'payment_in' | 'payment_out' + amount?: number + sourceType?: string + sourceId?: string + metadata?: { + description?: string + paymentTerms?: string + dueDate?: string + } +} + +export async function updateTransaction( + worldId: string, + transactionId: string, + input: UpdateTransactionInput +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/transactions/${transactionId}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(input) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update transaction') + return data.data +} + +// Delete transaction +export async function deleteTransaction(worldId: string, transactionId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/transactions/${transactionId}`, { + method: 'DELETE' + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to delete transaction') +} + +// Get single transaction +export async function getTransaction(worldId: string, transactionId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/finance/transactions/${transactionId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get transaction') + return data.data +} diff --git a/packages/controlmart/ui/src/api/knowledge-graph.ts b/packages/controlmart/ui/src/api/knowledge-graph.ts new file mode 100644 index 0000000000000000000000000000000000000000..c905803854e712c5ab335c6c12980863acb094e7 --- /dev/null +++ b/packages/controlmart/ui/src/api/knowledge-graph.ts @@ -0,0 +1,36 @@ +export interface GraphNode { + id: string + type: 'PERSONA' | 'CAPABILITY' | 'OD' | 'TOOL' | 'SERVICE' | 'ENTITY' + label: string + metadata?: Record +} + +export interface GraphEdge { + from: string + to: string + type: string +} + +export interface GraphStats { + totalNodes: number + totalEdges: number + nodesByType: Record +} + +export interface KnowledgeGraphResponse { + nodes: GraphNode[] + edges: GraphEdge[] + stats: GraphStats + filtered: boolean + seedODs?: string[] + message?: string +} + +const API_BASE = '' + +export async function getKnowledgeGraph(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/knowledge-graph`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get knowledge graph') + return data.data +} diff --git a/packages/controlmart/ui/src/api/logs.ts b/packages/controlmart/ui/src/api/logs.ts new file mode 100644 index 0000000000000000000000000000000000000000..dbf90ac653ffa771314370114fee150cbd263199 --- /dev/null +++ 
b/packages/controlmart/ui/src/api/logs.ts @@ -0,0 +1,102 @@ +export interface LogEntry { + logId: string + timestamp: string + serviceType: string + level: string + msg: string + metadata?: Record +} + +export interface LogsResponse { + items: LogEntry[] + pagination: { + totalCount: number + hasMore: boolean + nextCursor?: string + } +} + +export interface LogsFilter { + serviceType?: string + level?: string + searchText?: string + dateStart?: string + dateEnd?: string + limit?: number + cursor?: string +} + +export interface AuditLogEntry { + model: string + documentId: string + changedBy?: string + before: Record + after: Record + reason?: string + createdAt: string +} + +export interface AuditLogsResponse { + items: AuditLogEntry[] + pagination?: { + totalCount: number + hasMore: boolean + nextCursor?: string + } +} + +export interface AuditLogsFilter { + model?: string + documentId?: string + limit?: number + cursor?: string +} + +const API_BASE = '' + +export async function getLogs(worldId: string, filter: LogsFilter = {}): Promise { + const params = new URLSearchParams() + if (filter.serviceType) params.append('serviceType', filter.serviceType) + if (filter.level) params.append('level', filter.level) + if (filter.searchText) params.append('searchText', filter.searchText) + if (filter.dateStart) params.append('dateStart', filter.dateStart) + if (filter.dateEnd) params.append('dateEnd', filter.dateEnd) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.cursor) params.append('cursor', filter.cursor) + + const queryString = params.toString() + const url = `${API_BASE}/${worldId}/logs${queryString ? `?${queryString}` : ''}` + + const res = await fetch(url) + const data = await res.json() + + // API returns: { success, status, data: { items, totalCount, ... }, pagination } + return { + items: data.data?.items || data.items || [], + pagination: data.pagination || { totalCount: 0, hasMore: false } + } +} + +export async function getAuditLogs(worldId: string, filter: AuditLogsFilter = {}): Promise { + const params = new URLSearchParams() + if (filter.model) params.append('model', filter.model) + if (filter.documentId) params.append('documentId', filter.documentId) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.cursor) params.append('cursor', filter.cursor) + + const queryString = params.toString() + const url = `${API_BASE}/${worldId}/audit-logs${queryString ? `?${queryString}` : ''}` + + const res = await fetch(url) + const data = await res.json() + + // API returns: { success, status, data: { items, ... 
}, pagination } + return { + items: data.data?.items || data.data || data.items || [], + pagination: data.pagination + } +} + +export const SERVICE_TYPES = ['erp', 'wms', 'finance', 'edi', 'tms', 'od'] as const +export const LOG_LEVELS = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'] as const +export const AUDIT_MODELS = ['Order', 'Company', 'Product', 'Shipment', 'Invoice', 'Payment'] as const diff --git a/packages/controlmart/ui/src/api/ods.ts b/packages/controlmart/ui/src/api/ods.ts new file mode 100644 index 0000000000000000000000000000000000000000..3da0d36ba77905186ffe4307db13db4a772f3d84 --- /dev/null +++ b/packages/controlmart/ui/src/api/ods.ts @@ -0,0 +1,180 @@ +export interface ODStep { + id: string + name: string + type: string + service?: string + tool?: string + script?: string + input?: any + output?: any +} + +export interface OD { + _id: string + odId: string + name: string + description?: string + persona?: string + odType: string + data: { + id: string + name: string + version: string + description?: string + steps: ODStep[] + } + createdAt: string + updatedAt: string +} + +export interface ODExecutionResult { + runId: string + status: 'success' | 'failed' | 'partial' + durationMs: number + totalSteps: number + successfulSteps: number + failedSteps: number + stepResults: Array<{ + stepId: string + status: string + durationMs: number + error?: string + }> +} + +export interface ODSchedule { + jobId: string + nextRunAt: string + lastRunAt?: string + interval?: string + isRecurring: boolean +} + +export interface CreateODInput { + name: string + description?: string + odType?: string + data: any +} + +const API_BASE = '' + +export async function listODs(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to list ODs') + return data.data || [] +} + +export async function getOD(worldId: string, odId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors/${encodeURIComponent(odId)}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get OD') + return data.data +} + +export async function createOD(worldId: string, input: CreateODInput): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(input) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create OD') + return data.data +} + +export async function updateOD(worldId: string, odId: string, input: Partial): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors/${encodeURIComponent(odId)}`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(input) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update OD') + return data.data +} + +export async function deleteOD(worldId: string, odId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors/${encodeURIComponent(odId)}`, { + method: 'DELETE' + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to delete OD') +} + +export async function executeOD(worldId: string, odId: string, context?: any): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors/${encodeURIComponent(odId)}/execute`, { + method: 'POST', + 
headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ context }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to execute OD') + return data.data +} + +export async function scheduleOD( + worldId: string, + odId: string, + schedule: { type: 'once' | 'recurring'; time?: string; interval?: string } +): Promise<{ jobId: string; nextRunAt: string }> { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors/${encodeURIComponent(odId)}/schedule`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(schedule) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to schedule OD') + return data.data +} + +export async function getODSchedules(worldId: string, odId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors/${encodeURIComponent(odId)}/schedules`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get schedules') + return data.data || [] +} + +export async function cancelODSchedule(worldId: string, odId: string, jobId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/od/descriptors/${encodeURIComponent(odId)}/schedules/${jobId}`, { + method: 'DELETE' + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to cancel schedule') +} + +export async function validateOD(worldId: string, odData: any): Promise<{ isValid: boolean; errors: string[]; warnings: string[] }> { + const res = await fetch(`${API_BASE}/${worldId}/od/validate`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(odData) + }) + const data = await res.json() + return data.data || { isValid: false, errors: [data.error], warnings: [] } +} + +export async function pauseWorldSchedules(worldId: string): Promise<{ count: number }> { + const res = await fetch(`${API_BASE}/${worldId}/od/schedules/pause`, { + method: 'PUT' + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to pause world schedules') + return data.data +} + +export async function resumeWorldSchedules(worldId: string): Promise<{ count: number }> { + const res = await fetch(`${API_BASE}/${worldId}/od/schedules/resume`, { + method: 'PUT' + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to resume world schedules') + return data.data +} + +export async function getWorldScheduleStatus(worldId: string): Promise<{ status: 'operational' | 'paused' | 'partial' }> { + const res = await fetch(`${API_BASE}/${worldId}/od/schedules/status`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get world schedule status') + return data.data +} diff --git a/packages/controlmart/ui/src/api/tickets.ts b/packages/controlmart/ui/src/api/tickets.ts new file mode 100644 index 0000000000000000000000000000000000000000..1b823ca135fb72f25bd07ce08d19af25485980ac --- /dev/null +++ b/packages/controlmart/ui/src/api/tickets.ts @@ -0,0 +1,127 @@ +export interface Attachment { + url: string + filename: string + uploadedAt: string +} + +export interface WorkNote { + author: string + note: string + isPublic: boolean + createdAt: string + updatedAt: string + // Legacy fields for backwards compatibility + content?: string + addedBy?: string + addedAt?: string +} + +export interface Ticket { + _id: string + title: string + description: string + 
requester?: { + _id: string + name?: string + email?: string + } + assignedTo?: { + _id: string + name?: string + email?: string + } | null + status: 'new' | 'open' | 'in_progress' | 'on_hold' | 'resolved' | 'closed' + priority: 'low' | 'medium' | 'high' | 'critical' + impact: 'low' | 'medium' | 'high' + urgency: 'low' | 'medium' | 'high' + category?: string | null + type: 'incident' | 'service_request' | 'problem' | 'change' + workNotes: WorkNote[] + attachments: Attachment[] + resolutionNotes?: string | null + createdAt: string + updatedAt: string +} + +export interface TicketsResponse { + items: Ticket[] + pagination?: { + totalCount: number + hasMore: boolean + nextCursor?: string + } +} + +export interface TicketsFilter { + status?: string + priority?: string + type?: string + impact?: string + urgency?: string + limit?: number + cursor?: string +} + +const API_BASE = '' + +export async function getTickets(worldId: string, filter: TicketsFilter = {}): Promise { + const params = new URLSearchParams() + if (filter.status) params.append('status', filter.status) + if (filter.priority) params.append('priority', filter.priority) + if (filter.type) params.append('type', filter.type) + if (filter.impact) params.append('impact', filter.impact) + if (filter.urgency) params.append('urgency', filter.urgency) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.cursor) params.append('cursor', filter.cursor) + + const queryString = params.toString() + const url = `${API_BASE}/${worldId}/tickets${queryString ? `?${queryString}` : ''}` + + const res = await fetch(url) + const data = await res.json() + + // API returns: { success, status, data: { items: [...], ... }, pagination } + return { + items: data.data?.items || data.items || [], + pagination: data.pagination || { totalCount: 0, hasMore: false } + } +} + +export async function getTicket(worldId: string, ticketId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/tickets/${ticketId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get ticket') + return data.data +} + +export async function updateTicketStatus(worldId: string, ticketId: string, status: Ticket['status']): Promise { + const res = await fetch(`${API_BASE}/${worldId}/tickets/${ticketId}/status`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update ticket status') + return data.data +} + +export async function addTicketWorkNote( + worldId: string, + ticketId: string, + note: { author: string; note: string; isPublic?: boolean } +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/tickets/${ticketId}/work-notes`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ workNotes: [note] }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to add work note') + return data.data +} + +export const TICKET_STATUSES = ['new', 'open', 'in_progress', 'on_hold', 'resolved', 'closed'] as const +export const TICKET_PRIORITIES = ['low', 'medium', 'high', 'critical'] as const +export const TICKET_TYPES = ['incident', 'service_request', 'problem', 'change'] as const +export const TICKET_IMPACTS = ['low', 'medium', 'high'] as const +export const TICKET_URGENCIES = ['low', 'medium', 'high'] as const diff --git a/packages/controlmart/ui/src/api/wms.ts 
b/packages/controlmart/ui/src/api/wms.ts new file mode 100644 index 0000000000000000000000000000000000000000..047a45c33ecc30d70230c21db779390c2153991f --- /dev/null +++ b/packages/controlmart/ui/src/api/wms.ts @@ -0,0 +1,945 @@ +const API_BASE = '' + +// ============================================ +// Dashboard Types +// ============================================ + +export interface WMSDashboard { + inventory: { + totalItems: number + byStatus: Record + lowStockAlerts: number + expiringAlerts: number + } + receiving: { + total: number + pending: number + expected: number + inTransit: number + receiving: number + received: number + dueToday: number + } + fulfillment: { + total: number + active: number + created: number + released: number + allocated: number + picking: number + picked: number + packing: number + packed: number + shipped: number + rushOrders: number + } + tasks: { + total: number + pending: number + inProgress: number + completedToday: number + byType: Record + } +} + +// ============================================ +// Inventory Types +// ============================================ + +export interface InventoryItem { + inventoryId: string + sku: string + productName: string + warehouseId: string + binId: string + quantityOnHand: number + quantityAllocated: number + quantityAvailable: number + inventoryStatus: string + lotNumber?: string + expirationDate?: string + lastMovementAt?: string +} + +export interface InventoryFilter { + status?: string + warehouseId?: string + expiringSoon?: boolean + lowStock?: boolean + search?: string + limit?: number + offset?: number +} + +export interface InventoryResponse { + items: InventoryItem[] + pagination: { + total: number + limit: number + offset: number + hasMore: boolean + } +} + +// ============================================ +// Inbound Order Types +// ============================================ + +export interface InboundOrderLine { + lineNumber: number + sku: string + productName: string + expectedQuantity: number + receivedQuantity: number + status: string + lotNumber?: string + expirationDate?: string + lineStatus?: string +} + +export interface InboundOrder { + _id: string + inboundOrderId: string + poNumber: string + asnNumber?: string + vendor: { + vendorId: string + vendorName: string + } + orderStatus: string + orderType: string + dates: { + expectedArrival: string + actualArrival?: string + receivingStarted?: string + receivingCompleted?: string + receivedDate?: string + createdAt: string + } + totals: { + expectedLines: number + receivedLines: number + pallets?: number + cases?: number + units: number + } + warehouseId: string + priority?: string + lines?: InboundOrderLine[] +} + +export interface InboundOrderFilter { + status?: string[] + warehouseId?: string + vendorId?: string + limit?: number + offset?: number +} + +// ============================================ +// Outbound Order Types +// ============================================ + +export interface OutboundOrderLine { + lineNumber: number + sku: string + productName: string + orderedQuantity: number + allocatedQuantity: number + pickedQuantity: number + status: string +} + +export interface OutboundOrder { + _id: string + orderId: string + orderNumber: string + customer: { + customerId: string + customerName: string + } + orderStatus: string + orderPriority: string + dates: { + orderDate: string + requiredShipDate: string + actualShipDate?: string + } + totals: { + units: number + lines: number + } + warehouseId: string + lines?: OutboundOrderLine[] +} 
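+// Illustrative usage sketch (not part of the generated client): pulling open rush
+// fulfillment work for one warehouse using the filter shape defined below. `worldId`
+// and `warehouseId` are assumed to be valid ids supplied by the caller, and
+// getOutboundOrders (declared later in this module) is assumed to resolve to an
+// OutboundOrder[].
+async function exampleListRushOrders(worldId: string, warehouseId: string): Promise<OutboundOrder[]> {
+  return getOutboundOrders(worldId, {
+    warehouseId,
+    priority: ['RUSH', 'URGENT'],
+    status: ['RELEASED', 'ALLOCATED', 'PICKING'],
+    limit: 50
+  })
+}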
+ +export interface OutboundOrderFilter { + status?: string[] + warehouseId?: string + customerId?: string + priority?: string[] + limit?: number + offset?: number +} + +// ============================================ +// Task Types +// ============================================ + +export interface WMSTask { + _id: string + taskId: string + taskType: string + taskStatus: string + product?: { + sku: string + productName: string + } + quantity: { + requested: number + actual: number + } + assignment?: { + userId?: string + userName?: string + } + priority: number + from?: { + binId?: string + binCode?: string + zoneId?: string + } + to?: { + binId?: string + binCode?: string + zoneId?: string + } + timing: { + createdAt: string + startedAt?: string + completedAt?: string + } +} + +export interface TaskFilter { + taskType?: string[] + status?: string[] + userId?: string + zoneId?: string + limit?: number + offset?: number +} + +// ============================================ +// Relations Types (Cross-Service Data) +// ============================================ + +export interface OrderRelations { + erpOrder?: { + orderId: string + orderNumber: string + status: string + totalAmount?: number + poType?: string + orderDate?: string + } + ediDocuments: Array<{ + transactionId: string + docType: string + status: string + direction: string + createdAt?: string + }> + financeTransaction?: { + transactionId: string + type: string + amount: number + status?: string + } +} + +// ============================================ +// Receiving Transaction Types +// ============================================ + +export interface ReceivingTransaction { + _id: string + receivingId: string + warehouseId: string + inboundOrderId: string + inboundLineId?: string + productId: string + sku?: string + productName?: string + licensePlateNumber?: string + lotNumber?: string + receivedQuantity: number + uom?: string + dockDoorId?: string + receivingStatus: 'RECEIVED' | 'QC_HOLD' | 'PUTAWAY_PENDING' | 'COMPLETED' | 'REJECTED' + quality?: { + status: 'PASS' | 'FAIL' | 'PENDING' + inspectedBy?: string + inspectedAt?: string + notes?: string + } + receivedBy?: string + receivedAt: string + damage?: { + damaged: boolean + damageType?: string + damagedQuantity?: number + notes?: string + } + createdAt: string + updatedAt: string +} + +export interface ReceivingTransactionFilter { + warehouseId?: string + inboundOrderId?: string + status?: string[] + dateStart?: string + dateEnd?: string + limit?: number + offset?: number +} + +export interface ReceivingTransactionResponse { + transactions: ReceivingTransaction[] + pagination: { + total: number + limit: number + offset: number + hasMore: boolean + } +} + +// ============================================ +// Constants +// ============================================ + +export const INVENTORY_STATUSES = ['AVAILABLE', 'ALLOCATED', 'QUARANTINE', 'HOLD', 'EXPIRED'] as const +export const INBOUND_STATUSES = ['EXPECTED', 'IN_TRANSIT', 'RECEIVING', 'RECEIVED', 'CLOSED', 'CANCELLED'] as const +export const OUTBOUND_STATUSES = ['CREATED', 'RELEASED', 'ALLOCATED', 'PICKING', 'PICKED', 'PACKING', 'PACKED', 'SHIPPED', 'CANCELLED'] as const +export const TASK_TYPES = ['PICK', 'PUTAWAY', 'REPLENISHMENT', 'CYCLE_COUNT', 'MOVE', 'PACK', 'LOAD', 'UNLOAD', 'SORT'] as const +export const TASK_STATUSES = ['CREATED', 'RELEASED', 'ASSIGNED', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'CANCELLED'] as const +export const ORDER_PRIORITIES = ['RUSH', 'URGENT', 'NORMAL', 'STANDARD'] as const +export const 
INBOUND_ORDER_TYPES = ['PO', 'RETURN', 'TRANSFER', 'SAMPLE'] as const +export const OUTBOUND_ORDER_TYPES = ['SALES', 'TRANSFER', 'REPLENISHMENT', 'RETURN', 'SAMPLE'] as const +export const RECEIVING_STATUSES = ['RECEIVED', 'QC_HOLD', 'PUTAWAY_PENDING', 'COMPLETED', 'REJECTED'] as const +export const QC_STATUSES = ['PASS', 'FAIL', 'PENDING'] as const + +// ============================================ +// API Functions +// ============================================ + +/** + * Get WMS Operations Dashboard metrics + */ +export async function getWMSDashboard(worldId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/operations-dashboard`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get WMS dashboard') + return data.data +} + +/** + * Get inventory list with filters and pagination + */ +export async function getInventoryList( + worldId: string, + filter: InventoryFilter = {} +): Promise { + const params = new URLSearchParams() + if (filter.status) params.append('status', filter.status) + if (filter.warehouseId) params.append('warehouseId', filter.warehouseId) + if (filter.expiringSoon) params.append('expiringSoon', 'true') + if (filter.lowStock) params.append('lowStock', 'true') + if (filter.search) params.append('search', filter.search) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/wms/inventory${params.toString() ? '?' + params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get inventory') + return data.data +} + +/** + * Get inbound orders (Receiving) with filters + */ +export async function getInboundOrders( + worldId: string, + filter: InboundOrderFilter = {} +): Promise { + const params = new URLSearchParams() + filter.status?.forEach(s => params.append('status', s)) + if (filter.warehouseId) params.append('warehouseId', filter.warehouseId) + if (filter.vendorId) params.append('vendorId', filter.vendorId) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/wms/inbound-orders/status${params.toString() ? '?' 
+ params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get inbound orders') + return data.data || [] +} + +/** + * Get a single inbound order by ID + */ +export async function getInboundOrderById(worldId: string, orderId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inbound-orders/${orderId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get inbound order') + return data.data +} + +/** + * Get cross-service relations for an inbound order + */ +export async function getInboundOrderRelations(worldId: string, orderId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inbound-orders/${orderId}/relations`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get order relations') + return data.data +} + +/** + * Get outbound orders (Fulfillment) with filters + */ +export async function getOutboundOrders( + worldId: string, + filter: OutboundOrderFilter = {} +): Promise { + const params = new URLSearchParams() + filter.status?.forEach(s => params.append('status', s)) + if (filter.warehouseId) params.append('warehouseId', filter.warehouseId) + if (filter.customerId) params.append('customerId', filter.customerId) + filter.priority?.forEach(p => params.append('priority', p)) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/wms/outbound-orders/status${params.toString() ? '?' + params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get outbound orders') + // Handle both array and paginated response formats + if (Array.isArray(data.data)) { + return data.data + } + return data.data?.items || [] +} + +/** + * Get a single outbound order by ID + */ +export async function getOutboundOrderById(worldId: string, orderId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/outbound-orders/${orderId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get outbound order') + return data.data +} + +/** + * Get cross-service relations for an outbound order + */ +export async function getOutboundOrderRelations(worldId: string, orderId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/outbound-orders/${orderId}/relations`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get order relations') + return data.data +} + +/** + * Get warehouse tasks with filters + */ +export async function getTasks( + worldId: string, + filter: TaskFilter = {} +): Promise { + const params = new URLSearchParams() + filter.taskType?.forEach(t => params.append('taskType', t)) + filter.status?.forEach(s => params.append('status', s)) + if (filter.userId) params.append('userId', filter.userId) + if (filter.zoneId) params.append('zoneId', filter.zoneId) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/wms/tasks${params.toString() ? '?' 
+ params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get tasks') + return data.data || [] +} + +/** + * Get active tasks + */ +export async function getActiveTasks(worldId: string, zoneId?: string): Promise { + const params = zoneId ? `?zoneId=${zoneId}` : '' + const res = await fetch(`${API_BASE}/${worldId}/wms/tasks/active${params}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get active tasks') + return data.data || [] +} + +/** + * Get receiving transactions with filters and pagination + */ +export async function getReceivingTransactions( + worldId: string, + filter: ReceivingTransactionFilter = {} +): Promise { + const params = new URLSearchParams() + filter.status?.forEach(s => params.append('status', s)) + if (filter.warehouseId) params.append('warehouseId', filter.warehouseId) + if (filter.inboundOrderId) params.append('inboundOrderId', filter.inboundOrderId) + if (filter.dateStart) params.append('dateStart', filter.dateStart) + if (filter.dateEnd) params.append('dateEnd', filter.dateEnd) + if (filter.limit) params.append('limit', String(filter.limit)) + if (filter.offset) params.append('offset', String(filter.offset)) + + const url = `${API_BASE}/${worldId}/wms/receiving-transactions${params.toString() ? '?' + params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get receiving transactions') + + // Handle both array and paginated response formats + if (Array.isArray(data.data)) { + return { + transactions: data.data, + pagination: { + total: data.data.length, + limit: filter.limit || 50, + offset: filter.offset || 0, + hasMore: false + } + } + } + // Map backend response format {items, totalCount, limit, hasMore} to frontend format + return { + transactions: data.data.items || [], + pagination: { + total: data.data.totalCount || 0, + limit: data.data.limit || filter.limit || 50, + offset: filter.offset || 0, + hasMore: data.data.hasMore || false + } + } +} + +/** + * Get a single receiving transaction by ID + */ +export async function getReceivingTransactionById( + worldId: string, + transactionId: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/receiving-transactions/${transactionId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get receiving transaction') + return data.data +} + +// ============================================ +// Action API Functions +// ============================================ + +/** + * Update task status (for retry, cancel, start, complete actions) + */ +export async function updateTaskStatus( + worldId: string, + taskId: string, + status: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/tasks/${taskId}/status`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update task status') +} + +/** + * Assign task to a user + */ +export async function assignTask( + worldId: string, + taskId: string, + userId: string, + userName: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/tasks/${taskId}/assign`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ userId, userName }) + }) + const data = await 
res.json() + if (!data.success) throw new Error(data.error || 'Failed to assign task') +} + +/** + * Update task priority + */ +export async function updateTaskPriority( + worldId: string, + taskId: string, + priority: number +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/tasks/${taskId}/priority`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ priority }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update task priority') +} + +/** + * Update inbound order status (for start receiving, complete, close actions) + */ +export async function updateInboundOrderStatus( + worldId: string, + orderId: string, + status: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inbound-orders/${orderId}/status`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update inbound order status') +} + +/** + * Update outbound order status (for release, picking, packing, ship actions) + */ +export async function updateOutboundOrderStatus( + worldId: string, + orderId: string, + status: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/outbound-orders/${orderId}/status`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update outbound order status') +} + +/** + * Update outbound order priority + */ +export async function updateOutboundOrderPriority( + worldId: string, + orderId: string, + priority: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/outbound-orders/${orderId}/priority`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ priority }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update outbound order priority') +} + +/** + * Create inventory transaction (for adjustments, moves, write-offs) + */ +export interface InventoryTransactionInput { + transactionType: 'RECEIVE' | 'PICK' | 'PUTAWAY' | 'MOVE' | 'ADJUST' | 'WRITE_OFF' + productId: string + sku: string + quantity: number + fromBinId?: string + toBinId?: string + lotNumber?: string + referenceType?: string + referenceId?: string + notes?: string + warehouseId?: string +} + +export async function createInventoryTransaction( + worldId: string, + transaction: InventoryTransactionInput +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inventory-transactions`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(transaction) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create inventory transaction') +} + +/** + * Update inventory status (for hold, release, quarantine actions) + */ +export async function updateInventoryStatus( + worldId: string, + inventoryId: string, + status: string +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inventory/${inventoryId}/status`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ inventoryStatus: status }) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update inventory status') +} + +// 
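+// Illustrative sketch (ids are hypothetical placeholders): record a negative cycle-count
+// adjustment with createInventoryTransaction above, then move the affected inventory
+// record to QUARANTINE. Whether a negative quantity means a decrement is an assumption
+// about the backend's ADJUST semantics.
+async function exampleCycleCountAdjustment(worldId: string, inventoryId: string): Promise<void> {
+  await createInventoryTransaction(worldId, {
+    transactionType: 'ADJUST',
+    productId: 'prod-0001',      // hypothetical product id
+    sku: 'SKU-0001',             // hypothetical SKU
+    quantity: -2,                // assumed to be a delta; see note above
+    referenceType: 'CYCLE_COUNT',
+    referenceId: inventoryId,
+    notes: 'Two damaged units found during cycle count'
+  })
+  await updateInventoryStatus(worldId, inventoryId, 'QUARANTINE')
+}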
============================================ +// Bin Types and Functions +// ============================================ + +export interface Bin { + binId: string + binCode: string + warehouseId: string + zoneId: string + aisle?: string + rack?: string + level?: string + position?: string + binType: string + status: string + capacity?: { + maxWeight?: number + maxVolume?: number + maxUnits?: number + } +} + +/** + * Get bin by ID + */ +export async function getBinById(worldId: string, binId: string): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/bins/${binId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get bin') + return data.data +} + +/** + * Get bins list (for bin selection in move modal) + */ +export async function getBins( + worldId: string, + filter: { warehouseId?: string; zoneId?: string; status?: string } = {} +): Promise { + const params = new URLSearchParams() + if (filter.warehouseId) params.append('warehouseId', filter.warehouseId) + if (filter.zoneId) params.append('zoneId', filter.zoneId) + if (filter.status) params.append('status', filter.status) + + const url = `${API_BASE}/${worldId}/wms/bins${params.toString() ? '?' + params.toString() : ''}` + const res = await fetch(url) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get bins') + return data.data || [] +} + +// ============================================ +// Unified PATCH API Functions +// ============================================ + +/** + * Patch inventory item - generic update for any allowed field + * Allowed fields: inventoryStatus, lotNumber, expirationDate, binId + */ +export async function patchInventory( + worldId: string, + inventoryId: string, + updates: Partial<{ + inventoryStatus: string + lotNumber: string + expirationDate: string + binId: string + }> +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inventory/${inventoryId}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(updates) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to patch inventory') + return data.data +} + +/** + * Patch inbound order - generic update for any allowed field + * Allowed fields: orderStatus, dates.expectedArrival, priority + */ +export async function patchInboundOrder( + worldId: string, + orderId: string, + updates: Partial<{ + orderStatus: string + priority: string + dates: Partial<{ expectedArrival: string }> + 'dates.expectedArrival': string + }> +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inbound-orders/${orderId}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(updates) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to patch inbound order') + return data.data +} + +/** + * Patch outbound order - generic update for any allowed field + * Allowed fields: orderStatus, orderPriority, dates.requiredShipDate, dates.actualShipDate + */ +export async function patchOutboundOrder( + worldId: string, + orderId: string, + updates: Partial<{ + orderStatus: string + orderPriority: string + dates: Partial<{ requiredShipDate: string; actualShipDate: string }> + 'dates.requiredShipDate': string + 'dates.actualShipDate': string + }> +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/outbound-orders/${orderId}`, { + method: 'PATCH', + headers: { 'Content-Type': 
'application/json' }, + body: JSON.stringify(updates) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to patch outbound order') + return data.data +} + +/** + * Patch task - generic update for any allowed field + * Allowed fields: taskStatus, assignment, priority + */ +export async function patchTask( + worldId: string, + taskId: string, + updates: Partial<{ + taskStatus: string + assignment: { userId: string; userName: string } + priority: number + }> +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/tasks/${taskId}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(updates) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to patch task') + return data.data +} + +// ============================================ +// Create API Functions +// ============================================ + +/** + * Create a new inbound order (receiving/purchase order) + */ +export async function createInboundOrder( + worldId: string, + order: { + warehouseId: string + poNumber: string + vendor?: { vendorId?: string; vendorName?: string } + asnNumber?: string + orderType?: 'PO' | 'RETURN' | 'TRANSFER' | 'SAMPLE' + dates?: { expectedArrival?: string } + receivingNotes?: string + lines: Array<{ + lineNumber?: number + productId: string + sku?: string + productName?: string + expectedQuantity: number + }> + } +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/inbound-orders`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(order) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create inbound order') + return data.data +} + +/** + * Create a new outbound order (sales/shipping order) + */ +export async function createOutboundOrder( + worldId: string, + order: { + warehouseId: string + orderNumber: string + customer: { customerId: string; customerName: string } + orderType?: 'SALES' | 'TRANSFER' | 'REPLENISHMENT' | 'RETURN' | 'SAMPLE' + orderPriority?: 'RUSH' | 'URGENT' | 'NORMAL' | 'STANDARD' + dates?: { requiredShipDate?: string } + notes?: string + lines: Array<{ + lineNumber?: number + productId: string + productName?: string + orderedQuantity: number + }> + } +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/outbound-orders`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(order) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create outbound order') + return data.data +} + +/** + * Create a new warehouse task + */ +export async function createTask( + worldId: string, + task: { + warehouseId: string + taskType: 'PICK' | 'PUTAWAY' | 'REPLENISHMENT' | 'CYCLE_COUNT' | 'MOVE' | 'LOAD' | 'UNLOAD' | 'PACK' | 'SORT' + reference?: { type: string; id: string } + product?: { productId?: string; sku?: string; productName?: string } + quantity?: { requested?: number; uom?: string } + from?: { binId?: string; zoneId?: string } + to?: { binId?: string; zoneId?: string } + priority?: number + notes?: string + } +): Promise { + const res = await fetch(`${API_BASE}/${worldId}/wms/tasks`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(task) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to create task') + return data.data +} diff --git 
a/packages/controlmart/ui/src/api/worlds.ts b/packages/controlmart/ui/src/api/worlds.ts new file mode 100644 index 0000000000000000000000000000000000000000..ae3c514d058786689db19548e1f10dcef899a668 --- /dev/null +++ b/packages/controlmart/ui/src/api/worlds.ts @@ -0,0 +1,128 @@ +export interface ChaosConfig { + processChaosEnabled: boolean + infraChaosEnabled: boolean +} + +export interface World { + _id: string + name: string + url: string + description?: string + is_default: boolean + layout?: string + ticketCreationEnabled?: boolean + chaos?: ChaosConfig + createdAt: string + updatedAt: string +} + +export interface WorldStats { + companies: number + products: number + ods: number +} + +export interface CreateWorldInput { + name: string + description?: string + layout: string + realHoursPerSimDay?: number + ticketCreationEnabled?: boolean + chaos?: ChaosConfig +} + +export interface WorldLayout { + id: string + name: string + description?: string + shortDescription?: string +} + +const API_BASE = '' + +export async function listWorlds(): Promise { + const res = await fetch(`${API_BASE}/world`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to list worlds') + // Handle both old format (data.data is array) and new format (data.data.worlds is array) + return data.data?.worlds || data.data || [] +} + +export async function listLayouts(): Promise { + const res = await fetch(`${API_BASE}/world/layouts`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to list layouts') + return data.data +} + +export async function getWorld(worldId: string): Promise { + const res = await fetch(`${API_BASE}/world/${worldId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get world') + return data.data +} + +export async function createWorld(input: CreateWorldInput): Promise { + const res = await fetch(`${API_BASE}/world`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(input) + }) + const data = await res.json() + if (!data.success && !data.world) throw new Error(data.error || 'Failed to create world') + return data.world || data.data +} + +export async function updateWorld(worldId: string, input: Partial): Promise { + const res = await fetch(`${API_BASE}/world/${worldId}`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(input) + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to update world') + return data.data +} + +export async function deleteWorld(worldId: string): Promise { + const res = await fetch(`${API_BASE}/world/${worldId}`, { + method: 'DELETE' + }) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to delete world') +} + +export async function getWorldStats(worldId: string): Promise { + const [odsRes, companiesRes, productsRes] = await Promise.all([ + fetch(`${API_BASE}/${worldId}/od/descriptors?limit=1`), + fetch(`${API_BASE}/${worldId}/erp/companies?limit=1`), + fetch(`${API_BASE}/${worldId}/erp/products`) + ]) + + const [odsData, companiesData, productsData] = await Promise.all([ + odsRes.json(), + companiesRes.json(), + productsRes.json() + ]) + + return { + companies: companiesData.pagination?.totalCount || 0, + products: productsData.data?.length || 0, + ods: odsData.pagination?.totalCount || 0 + } +} + +export interface LayoutDocs { + id: string + name: string + shortDescription?: 
string + description?: string + docs: any +} + +export async function getLayoutDocs(layoutId: string): Promise { + const res = await fetch(`${API_BASE}/world/layouts/${layoutId}`) + const data = await res.json() + if (!data.success) throw new Error(data.error || 'Failed to get layout docs') + return data.data +} diff --git a/packages/controlmart/ui/src/components/world/modals/ODCreateModal.vue b/packages/controlmart/ui/src/components/world/modals/ODCreateModal.vue new file mode 100644 index 0000000000000000000000000000000000000000..8cd3316c3d6e0f9bf36bd488f1155de2b4498bf1 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/modals/ODCreateModal.vue @@ -0,0 +1,167 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/modals/ODExecuteModal.vue b/packages/controlmart/ui/src/components/world/modals/ODExecuteModal.vue new file mode 100644 index 0000000000000000000000000000000000000000..f1f822dd55c5ce601ee57ae48862f4d1f06ab3b3 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/modals/ODExecuteModal.vue @@ -0,0 +1,173 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/modals/ODScheduleModal.vue b/packages/controlmart/ui/src/components/world/modals/ODScheduleModal.vue new file mode 100644 index 0000000000000000000000000000000000000000..d14feff2b01354880f8ee25377ce78117a1777c2 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/modals/ODScheduleModal.vue @@ -0,0 +1,139 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/modals/ODSchedulesModal.vue b/packages/controlmart/ui/src/components/world/modals/ODSchedulesModal.vue new file mode 100644 index 0000000000000000000000000000000000000000..4ee52142efa9ab8e7e559c1b03b25923d53a2ec8 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/modals/ODSchedulesModal.vue @@ -0,0 +1,121 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/AuditLogsTab.vue b/packages/controlmart/ui/src/components/world/tabs/AuditLogsTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..b33d469031ab74abf4fa1d42d6000bfded5cab0e --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/AuditLogsTab.vue @@ -0,0 +1,259 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/CurvedEdge.vue b/packages/controlmart/ui/src/components/world/tabs/CurvedEdge.vue new file mode 100644 index 0000000000000000000000000000000000000000..069304675bcbe47b3ecbc04e64a1cf659202ab4a --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/CurvedEdge.vue @@ -0,0 +1,54 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/DocsTab.vue b/packages/controlmart/ui/src/components/world/tabs/DocsTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..0de4fd3bdce1cd26ebb1cc863a4864606cf4858b --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/DocsTab.vue @@ -0,0 +1,233 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/FinanceTab.vue b/packages/controlmart/ui/src/components/world/tabs/FinanceTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..96cd4c0f97a92e3a48828d419d8b4573739aeeba --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/FinanceTab.vue @@ -0,0 +1,310 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraph2DTab.vue b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraph2DTab.vue new file mode 100644 index 
0000000000000000000000000000000000000000..1474d0cdd85c1e71afc33602f4dc6a19760c9e4c --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraph2DTab.vue @@ -0,0 +1,470 @@ + + + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphFlowchartTab.vue b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphFlowchartTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..2f9cc576ac7b193c907d45c95fa407989c785a30 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphFlowchartTab.vue @@ -0,0 +1,541 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphLayeredTab.vue b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphLayeredTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..541acb66c8d5d9b3674d4228c446f86087bddc35 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphLayeredTab.vue @@ -0,0 +1,503 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphTab.vue b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..fd518544ed0fc4de873d19520bf96506bddbb259 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphTab.vue @@ -0,0 +1,461 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphUnifiedTab.vue b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphUnifiedTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..2c805e864fade2f7b3c22499d540d2cdd6ee48b6 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/KnowledgeGraphUnifiedTab.vue @@ -0,0 +1,334 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/LedgerTab.vue b/packages/controlmart/ui/src/components/world/tabs/LedgerTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..915ec70474379cf44d91e5552c1fae3b7a5a557e --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/LedgerTab.vue @@ -0,0 +1,158 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/LogsTab.vue b/packages/controlmart/ui/src/components/world/tabs/LogsTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..fc2709adc854cbb9d81b3dfcc94a4bdcc1633351 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/LogsTab.vue @@ -0,0 +1,236 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/ODsTab.vue b/packages/controlmart/ui/src/components/world/tabs/ODsTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..ecf6e613eb34b73f0030b6d1ac83626d3d8f3412 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/ODsTab.vue @@ -0,0 +1,205 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/OperationsTab.vue b/packages/controlmart/ui/src/components/world/tabs/OperationsTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..10cd655c3ffcd70fdfde1e179ce300b1df435bca --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/OperationsTab.vue @@ -0,0 +1,499 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/TicketsTab.vue b/packages/controlmart/ui/src/components/world/tabs/TicketsTab.vue new file mode 100644 index 0000000000000000000000000000000000000000..3a2169b77a03176dffb96f262d87dc3250b03999 --- /dev/null +++ 
b/packages/controlmart/ui/src/components/world/tabs/TicketsTab.vue @@ -0,0 +1,614 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/EDIView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/EDIView.vue new file mode 100644 index 0000000000000000000000000000000000000000..e3bcbe3e7cea34175a818cc99c6d8e287480189e --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/EDIView.vue @@ -0,0 +1,442 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/ERPView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/ERPView.vue new file mode 100644 index 0000000000000000000000000000000000000000..33b8ad0cffc867b95d93d8fb758b784941c76b1d --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/ERPView.vue @@ -0,0 +1,576 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/FinanceView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/FinanceView.vue new file mode 100644 index 0000000000000000000000000000000000000000..0336df877f17f23f191a06b52ecf73f7b7f64805 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/FinanceView.vue @@ -0,0 +1,546 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/WMSView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/WMSView.vue new file mode 100644 index 0000000000000000000000000000000000000000..50575d039f7d4188e58c10e8560f260db3755063 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/WMSView.vue @@ -0,0 +1,539 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/edi/TransactionsListView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/edi/TransactionsListView.vue new file mode 100644 index 0000000000000000000000000000000000000000..c596805f355f3bf453e6b39805fd9eb9ca881110 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/edi/TransactionsListView.vue @@ -0,0 +1,1180 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/erp/CompaniesListView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/erp/CompaniesListView.vue new file mode 100644 index 0000000000000000000000000000000000000000..b97f2a435a14d1801e0afac3c99f02d7b07e26ca --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/erp/CompaniesListView.vue @@ -0,0 +1,226 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/erp/InvoicesListView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/erp/InvoicesListView.vue new file mode 100644 index 0000000000000000000000000000000000000000..d73873d0cde9d0d0169b65883b0c87f74e296e0f --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/erp/InvoicesListView.vue @@ -0,0 +1,1002 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/erp/OrdersListView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/erp/OrdersListView.vue new file mode 100644 index 0000000000000000000000000000000000000000..d2cae7418173101dab7ccf8349bc29e005fc3600 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/erp/OrdersListView.vue @@ -0,0 +1,935 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/erp/ProductsListView.vue 
b/packages/controlmart/ui/src/components/world/tabs/operations/erp/ProductsListView.vue new file mode 100644 index 0000000000000000000000000000000000000000..b58685b23a57e31f3f61ae8761f882ba1b50ceb3 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/erp/ProductsListView.vue @@ -0,0 +1,183 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/finance/TransactionsListView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/finance/TransactionsListView.vue new file mode 100644 index 0000000000000000000000000000000000000000..d2269f4c374cd4049efeffabb8b561d9f291eb35 --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/finance/TransactionsListView.vue @@ -0,0 +1,1127 @@ + + + diff --git a/packages/controlmart/ui/src/components/world/tabs/operations/wms/FulfillmentListView.vue b/packages/controlmart/ui/src/components/world/tabs/operations/wms/FulfillmentListView.vue new file mode 100644 index 0000000000000000000000000000000000000000..3061f236007d5dde037a8d5cbe2a04233739fe4b --- /dev/null +++ b/packages/controlmart/ui/src/components/world/tabs/operations/wms/FulfillmentListView.vue @@ -0,0 +1,789 @@ + + + diff --git a/packages/controlmart/ui/src/views/WorldDetail.vue b/packages/controlmart/ui/src/views/WorldDetail.vue new file mode 100644 index 0000000000000000000000000000000000000000..694bdbc20feda2737d70506012a3290673c020e5 --- /dev/null +++ b/packages/controlmart/ui/src/views/WorldDetail.vue @@ -0,0 +1,258 @@ + + + diff --git a/packages/controlmart/ui/src/vite-env.d.ts b/packages/controlmart/ui/src/vite-env.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..b9d97b4420d865f7c42fd14258add19f40452a17 --- /dev/null +++ b/packages/controlmart/ui/src/vite-env.d.ts @@ -0,0 +1,22 @@ +/// + +declare module '*.vue' { + import type { DefineComponent } from 'vue' + const component: DefineComponent<{}, {}, any> + export default component +} + +declare module 'd3-force-3d' { + export function forceY(y?: number | ((node: NodeDatum) => number)): { + strength(strength: number | ((node: NodeDatum) => number)): any + (alpha: number): void + } + export function forceX(x?: number | ((node: NodeDatum) => number)): { + strength(strength: number | ((node: NodeDatum) => number)): any + (alpha: number): void + } + export function forceZ(z?: number | ((node: NodeDatum) => number)): { + strength(strength: number | ((node: NodeDatum) => number)): any + (alpha: number): void + } +} diff --git a/packages/controlmart/ui/tailwind.config.js b/packages/controlmart/ui/tailwind.config.js new file mode 100644 index 0000000000000000000000000000000000000000..b158f94809dc0c821be99332d2519e1261e6861b --- /dev/null +++ b/packages/controlmart/ui/tailwind.config.js @@ -0,0 +1,31 @@ +/** @type {import('tailwindcss').Config} */ +export default { + content: [ + "./index.html", + "./src/**/*.{vue,js,ts,jsx,tsx}", + ], + theme: { + extend: { + colors: { + primary: { + DEFAULT: "#4469F3", + 50: "#E8EDFE", + 100: "#D2DBFD", + 200: "#A5B7FB", + 300: "#7993F9", + 400: "#4C6FF7", + 500: "#4469F3", // Base + 600: "#1F4BEF", + 700: "#1237C5", + 800: "#0D2993", + 900: "#091B62", + } + }, + fontFamily: { + sans: ['Inter', 'system-ui', 'sans-serif'], + serif: ['PT Serif', 'serif'], + } + }, + }, + plugins: [], +} diff --git a/packages/controlmart/ui/tsconfig.json b/packages/controlmart/ui/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..a48721dcc6650f78472fc1b87d1e83cfdb7c5280 --- /dev/null +++ 
b/packages/controlmart/ui/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "target": "ES2020", + "useDefineForClassFields": true, + "module": "ESNext", + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "skipLibCheck": true, + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "preserve", + "strict": true, + "noUnusedLocals": false, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.vue"] +} diff --git a/packages/controlmart/ui/vite.config.ts b/packages/controlmart/ui/vite.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..d075f3bd022dc21af4c4f11f417f47d9f6276928 --- /dev/null +++ b/packages/controlmart/ui/vite.config.ts @@ -0,0 +1,29 @@ +import { defineConfig } from 'vite' +import vue from '@vitejs/plugin-vue' + +export default defineConfig({ + plugins: [vue()], + base: '/admin/', + server: { + port: 3000, + proxy: { + '/world': { + target: 'http://localhost:8282', + changeOrigin: true + }, + '/health': { + target: 'http://localhost:8282', + changeOrigin: true + }, + // Proxy all world-specific endpoints (logs, audit-logs, od, erp, etc.) + '^/[a-f0-9]{24}/': { + target: 'http://localhost:8282', + changeOrigin: true + } + } + }, + build: { + outDir: '../dist/ui', + emptyOutDir: true + } +}) diff --git a/setup.sh b/setup.sh new file mode 100755 index 0000000000000000000000000000000000000000..1c639b1034c7b50daf79ddf4de44e9de98e42468 --- /dev/null +++ b/setup.sh @@ -0,0 +1,41 @@ +#!/bin/bash + +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Check for Homebrew, install if missing +if ! command_exists brew; then + echo "Homebrew not found. Installing Homebrew..." + /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" + + # Add Homebrew to PATH for the current session + if [ -f "/opt/homebrew/bin/brew" ]; then + eval "$(/opt/homebrew/bin/brew shellenv)" + elif [ -f "/usr/local/bin/brew" ]; then + eval "$(/usr/local/bin/brew shellenv)" + fi +else + echo "Homebrew is installed." +fi + +if ! command_exists python3; then + echo "Python 3 not found. Installing via Homebrew..." + brew install python +else + echo "Python 3 is installed." +fi + +if ! command_exists bun; then + echo "Bun not found. Installing Bun..." + curl -fsSL https://bun.com/install | bash + + export BUN_INSTALL="$HOME/.bun" + export PATH="$BUN_INSTALL/bin:$PATH" +else + echo "Bun is installed." +fi + +echo "Launching setup simulator..." 
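+# Hand off to the Python entry point: mark it executable, then run it from the repo root.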
+chmod +x setup_simulator.py +./setup_simulator.py diff --git a/setup_simulator.py b/setup_simulator.py new file mode 100755 index 0000000000000000000000000000000000000000..b396f923ff3f2152669d10faf92595dfc55375d0 --- /dev/null +++ b/setup_simulator.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python3 +import subprocess +import os +import sys +import shutil + +import time +import threading +import webbrowser +import urllib.request + +def run_command(command, check=True): + """Runs a shell command.""" + try: + subprocess.run(command, check=check, shell=True) + return True + except subprocess.CalledProcessError: + return False + +def wait_and_open_browser(): + """Polls the server and opens the browser when ready.""" + url = "http://localhost:8282/admin" + check_url = "http://localhost:8282/health" + + print(f"Waiting for server to be ready at {url}...") + for i in range(300): + try: + with urllib.request.urlopen(check_url, timeout=1) as response: + if response.status == 200: + print("\nServer is ready! Opening browser...") + webbrowser.open(url) + return + except Exception: + if i % 5 == 0: + print(".", end="", flush=True) + time.sleep(1) + + print("\nTimed out waiting for server. Please open the browser manually.") + +def check_dependencies(): + """Checks for brew, docker, and bun. Installs if missing.""" + print("Checking dependencies...") + if not shutil.which("brew"): + print("Error: Homebrew is not installed. Please install Homebrew first.") + sys.exit(1) + if not shutil.which("docker"): + print("Docker not found. Installing via brew...") + run_command("brew install --cask docker") + print("Please start Docker Desktop manually after this script finishes and re-run.") + sys.exit(1) + + if not shutil.which("docker-compose"): + print("docker-compose not found. Installing via brew...") + run_command("brew install docker-compose") + + print("Dependencies checked.") + +def generate_env_file(): + """Reads .env, modifies it, and saves it.""" + print("Configuring environment...") + + env_path = "packages/controlmart/.env" + + env_dict = {} + if os.path.exists(env_path): + with open(env_path, "r") as f: + for line in f: + if "=" in line: + key, val = line.strip().split("=", 1) + env_dict[key] = val + else: + print(f"Warning: {env_path} not found. Creating new.") + + # Define strict defaults as requested + defaults = { + "NODE_ENV": "production", + "PORT": "8282", + "MONGO_URI": "mongodb://localhost:27017", + "DB_NAME": "controlmart", + "LOG_LEVEL": "debug", + "ENABLE_CORS": "true", + "MAX_TICKET_RETRIES": "2", + "ENABLE_CHAOS": "true", + "SEND_TICKETS_TO_SERVICENOW": "false" + } + + # Update env_dict with defaults (overwriting if necessary to enforce config) + env_dict.update(defaults) + + if "OPENAI_API_KEY" not in env_dict or not env_dict["OPENAI_API_KEY"]: + api_key = input("Enter OpenAI API Key: ").strip() + if api_key: + env_dict["OPENAI_API_KEY"] = api_key + else: + print("OpenAI API Key found in .env, keeping it. (Press Enter to keep, or type new key)") + new_key = input(f"Current: {env_dict['OPENAI_API_KEY'][:5]}... 
> ").strip() + if new_key: + env_dict["OPENAI_API_KEY"] = new_key + + with open(env_path, "w") as f: + for key, val in env_dict.items(): + f.write(f"{key}={val}\n") + + print(f"Updated {env_path}") + return os.path.abspath(env_path) + +def update_docker_compose(): + """Updates morpheus.local.pwd.yaml to include env_file.""" + print("Updating docker compose config...") + yaml_path = "morpheus.local.pwd.yaml" + + if not os.path.exists(yaml_path): + print(f"Error: {yaml_path} not found.") + sys.exit(1) + + with open(yaml_path, "r") as f: + content = f.read() + + if "env_file:" in content and "packages/controlmart/.env" in content: + print("Docker compose already configured with env_file.") + return + + new_lines = [] + in_controlmart = False + in_environment = False + env_indent = "" + added_env_file = False + + with open(yaml_path, "r") as f: + for line in f: + stripped = line.strip() + + if stripped == "controlmart:": + in_controlmart = True + new_lines.append(line) + new_lines.append(" env_file:\n") + new_lines.append(" - packages/controlmart/.env\n") + added_env_file = True + continue + + + if in_controlmart and stripped == "environment:": + in_environment = True + new_lines.append(line) + continue + + if in_environment: + if not line.startswith(" ") and stripped and not line.startswith("#"): + in_environment = False + else: + if "MONGO_URI:" in line: + new_lines.append(line) + continue + + if in_controlmart and (stripped.startswith("mongodb:") or stripped == "volumes:"): + in_controlmart = False + + if not in_environment: + new_lines.append(line) + + if added_env_file: + with open(yaml_path, "w") as f: + f.writelines(new_lines) + print("Updated morpheus.local.pwd.yaml (Added env_file and cleaned up environment block).") + else: + print("Could not satisfy update conditions (formatting might be unexpected).") + +def run_simulation(): + print("\nSelect Run Mode:") + print("1. Docker Instance (Full Container)") + print("2. Dev Mode (Bun + Mongo in Docker)") + + choice = input("Choice [1/2]: ").strip() + + if choice == "1": + # Start the browser opener thread (Docker container won't open it) + browser_thread = threading.Thread(target=wait_and_open_browser, daemon=True) + browser_thread.start() + + print("Starting in Docker Mode...") + run_command("docker-compose -f morpheus.local.pwd.yaml up --build") + elif choice == "2": + print("Starting in Dev Mode...") + print("Starting MongoDB container...") + run_command("docker-compose -f morpheus.local.pwd.yaml up -d mongodb") + + controlmart_dir = "packages/controlmart" + if os.path.exists(controlmart_dir): + os.chdir(controlmart_dir) + print("Installing dependencies...") + run_command("bun install") + print("Starting dev server...") + run_command("bun run run:local") + else: + print(f"Error: Directory {controlmart_dir} not found.") + else: + print("Invalid choice.") + +if __name__ == "__main__": + check_dependencies() + generate_env_file() + update_docker_compose() + run_simulation() diff --git a/start-hf.sh b/start-hf.sh new file mode 100644 index 0000000000000000000000000000000000000000..6b44a64b85ed22afa3ef7961d24173310a88a0c1 --- /dev/null +++ b/start-hf.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# Start MongoDB in the background +echo "Starting MongoDB..." +mkdir -p /data/db +mongod --bind_ip_all --quiet & +MONGOD_PID=$! + +# Wait for MongoDB to be ready +echo "Waiting for MongoDB to start..." +until nc -z localhost 27017 > /dev/null 2>&1 +do + sleep 1 +done +echo "MongoDB is ready." 
+ +# Check if compiled binary exists (Docker/Production Mode) +if [ -f "./morpheus-server" ]; then + echo "Starting ControlMart Binary..." + # Bootstrap requires .env to exist, even if empty/using env vars + touch .env + ./morpheus-server --no-browser +else + # Fallback to Source Mode (Local/Dev) + echo "Starting ControlMart from Source..." + cd packages/controlmart + bun run run:hf +fi diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..bfa0fead54e8070848b4572df2e14b62a86d570f --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,29 @@ +{ + "compilerOptions": { + // Environment setup & latest features + "lib": ["ESNext"], + "target": "ESNext", + "module": "Preserve", + "moduleDetection": "force", + "jsx": "react-jsx", + "allowJs": true, + + // Bundler mode + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "noEmit": true, + + // Best practices + "strict": true, + "skipLibCheck": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedIndexedAccess": true, + "noImplicitOverride": true, + + // Some stricter flags (disabled by default) + "noUnusedLocals": false, + "noUnusedParameters": false, + "noPropertyAccessFromIndexSignature": false + } +}
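// Closing note on the UI API modules above: every client call follows the same shape —
// fetch, parse JSON, check the `success` flag, then either return `data` or throw `error`.
// A minimal shared helper capturing that convention might look like the sketch below
// (illustrative only; the modules in this change keep the pattern inlined per function).
async function apiRequest<T>(url: string, init?: RequestInit): Promise<T> {
  const res = await fetch(url, init)
  const data = await res.json()
  if (!data.success) throw new Error(data.error || `Request failed: ${url}`)
  return data.data as T
}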