# Dragonfly cache configuration for the DTO data-ops platform.

cluster:
  enabled: true
  nodes:
    - "localhost:18000"
    - "localhost:18001"
    - "localhost:18002"
  replication_factor: 2

# Key naming patterns; {run_id}, {job_id}, {user_id} are filled in by clients.
key_patterns:
  # Per-run state
  run_status: "dto:run:{run_id}:status"
  run_progress: "dto:run:{run_id}:progress"
  run_artifacts: "dto:run:{run_id}:artifacts"

  # Job queues
  job_queue: "dto:jobs:pending"
  job_in_progress: "dto:jobs:in_progress"
  job_completed: "dto:jobs:completed"

  # Messaging
  alerts: "dto:alerts:{job_id}"
  notifications: "dto:notifications:{user_id}"

  # Per-run metrics
  metrics_throughput: "dto:metrics:throughput:{run_id}"
  metrics_checksum_time: "dto:metrics:checksum_time:{run_id}"
  metrics_validation_rate: "dto:metrics:validation_rate:{run_id}"

# Time-to-live values, in seconds.
ttl_config:
  run_status_ttl: 3600        # 1 hour
  run_progress_ttl: 3600      # 1 hour

  job_queue_ttl: 86400        # 24 hours
  job_in_progress_ttl: 86400  # 24 hours

  job_completed_ttl: 604800   # 7 days

  alerts_ttl: 172800          # 48 hours

  metrics_ttl: 3600           # 1 hour

performance:
  max_memory_per_node: "50GB"
  max_client_connections: 10000
  pipeline_size: 100

  # Small-collection encoding thresholds (Redis-compatible tuning knobs)
  hash_max_ziplist_entries: 512
  hash_max_ziplist_value: 64
  set_max_intset_entries: 512

persistence:
  enabled: true
  dir: "/data/adaptai/platform/dataops/dto/cache/dragonfly_data"
  snapshot_interval: 300    # seconds between snapshots
  snapshot_threshold: 1000  # NOTE(review): presumably min changed keys before snapshotting — confirm

monitoring:
  prometheus_enabled: true
  prometheus_port: 18080
  stats_interval: 60  # seconds

  track_hit_rate: true
  track_memory_usage: true
  track_command_stats: true

security:
  # Secrets are injected from the environment; never commit literal passwords.
  requirepass: "${DRAGONFLY_PASSWORD}"

  acl_rules:
    - user: "dto-producer"
      passwords: ["${DRAGONFLY_PRODUCER_PASSWORD}"]
      commands: ["SET", "HSET", "EXPIRE", "PUBLISH"]
      keys: ["dto:run:*", "dto:jobs:*", "dto:metrics:*"]

    - user: "dto-consumer"
      passwords: ["${DRAGONFLY_CONSUMER_PASSWORD}"]
      commands: ["GET", "HGETALL", "LRANGE", "SUBSCRIBE"]
      keys: ["dto:run:*", "dto:jobs:*", "dto:metrics:*", "dto:alerts:*"]

    - user: "dto-admin"
      passwords: ["${DRAGONFLY_ADMIN_PASSWORD}"]
      commands: ["*"]  # fixed: stray extra "]" made this line invalid YAML
      # NOTE(review): dto-admin has no "keys" entry, unlike the other users;
      # in Redis-style ACLs a user without key patterns cannot touch any key.
      # Confirm whether keys: ["*"] was intended.

backup:
  enabled: true
  dir: "/data/adaptai/platform/dataops/dto/cache/backups"
  interval: 3600    # seconds between backups
  retain_count: 24  # keep the 24 most recent backups

# Server-side Lua scripts, executed atomically via EVAL/EVALSHA.
# NOTE(review): both scripts derive key names from ARGV instead of declaring
# them in KEYS; this defeats key-based routing if cluster mode is enabled
# (cluster.enabled is true above) — confirm before relying on clustering.
lua_scripts:
  # Update a run's status hash and refresh its 1-hour TTL
  # (3600 matches ttl_config.run_status_ttl).
  # ARGV: [1] run_id, [2] status, [3] timestamp
  # Returns the simple-string reply OK.
  update_run_status: |
    local run_id = ARGV[1]
    local status = ARGV[2]
    local timestamp = ARGV[3]

    redis.call('HSET', 'dto:run:' .. run_id .. ':status',
               'status', status,
               'updated_at', timestamp)
    redis.call('EXPIRE', 'dto:run:' .. run_id .. ':status', 3600)

    return redis.status_reply('OK')

  # Add an artifact path to the run's per-type artifact set and set a 24-hour
  # TTL (86400 s) on that set.
  # ARGV: [1] run_id, [2] artifact_type, [3] artifact_path
  # Returns the simple-string reply OK.
  add_artifact: |
    local run_id = ARGV[1]
    local artifact_type = ARGV[2]
    local artifact_path = ARGV[3]

    redis.call('SADD', 'dto:run:' .. run_id .. ':artifacts:' .. artifact_type, artifact_path)
    redis.call('EXPIRE', 'dto:run:' .. run_id .. ':artifacts:' .. artifact_type, 86400)

    return redis.status_reply('OK')