# docker-compose.yml — Kafka (KRaft) + Spark sentiment-analysis pipeline:
# mock tweet producer -> Kafka -> Spark structured-streaming consumer -> dashboard.
services:
  # Single-node Kafka in KRaft mode (broker + controller in one process, no ZooKeeper).
  kafka:
    image: bitnami/kafka:latest
    container_name: kafka
    environment:
      - KAFKA_CFG_PROCESS_ROLES=broker,controller
      - KAFKA_CFG_NODE_ID=1
      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT
      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093
      # Advertise the compose service name so other containers on kafka-network can connect.
      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092
      # Controller quorum is this same container, so localhost is correct here.
      - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=1@localhost:9093
      - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
      # Lets producers/consumers create topics implicitly on first use.
      - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
    ports:
      - "9092:9092"
    # Healthy once the broker answers a metadata (topic list) request;
    # dependents use service_healthy to wait for this.
    healthcheck:
      test: ["CMD-SHELL", "kafka-topics.sh --bootstrap-server localhost:9092 --list"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 60s
    networks:
      - kafka-network

  # Generates mock tweets and publishes them to Kafka.
  sentiment-producer:
    container_name: sentiment-producer
    build: .
    depends_on:
      kafka:
        condition: service_healthy
    command: ["python", "mock_tweet_producer.py"]
    restart: on-failure
    networks:
      - kafka-network

  # Spark master node (cluster manager); security features disabled for local dev.
  spark:
    image: bitnami/spark:3.4
    container_name: spark
    environment:
      - SPARK_MODE=master
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    ports:
      - "4040:4040"   # Spark application UI
      - "7077:7077"   # Spark master RPC
    depends_on:
      kafka:
        condition: service_healthy
    networks:
      - kafka-network

  # Spark worker registering with the master above.
  spark-worker:
    image: bitnami/spark:3.4
    container_name: spark-worker
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://spark:7077
      - SPARK_RPC_AUTHENTICATION_ENABLED=no
      - SPARK_RPC_ENCRYPTION_ENABLED=no
      - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
      - SPARK_SSL_ENABLED=no
    depends_on:
      - spark
    networks:
      - kafka-network

  # Submits the streaming sentiment job to the Spark cluster;
  # the Kafka connector package is pulled at submit time.
  sentiment-consumer:
    image: bitnami/spark:3.4
    container_name: sentiment-consumer
    depends_on:
      kafka:
        condition: service_healthy
      spark:
        condition: service_started
      spark-worker:
        condition: service_started
    command: ["spark-submit", "--master", "spark://spark:7077", "--packages", "org.apache.spark:spark-sql-kafka-0-10_2.12:3.4.0", "/app/consumer.py"]
    volumes:
      - .:/app   # mount project so consumer.py is available inside the container
    restart: on-failure
    networks:
      - kafka-network

  # Web dashboard consuming results from Kafka, served on port 5000.
  dashboard:
    container_name: dashboard
    build: .
    depends_on:
      kafka:
        condition: service_healthy
    command: ["python", "dashboard.py"]
    ports:
      - "5000:5000"
    restart: on-failure
    networks:
      - kafka-network

# Shared bridge network so all services resolve each other by service name.
networks:
  kafka-network:
    driver: bridge