Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Docker Compose (swarm-mode) stack: one pinned Spark master plus Spark
# workers scheduled on every other node, all attached to the external
# "host" network.
version: "3.7"

services:
  spark-master:
    image: michaelcozzolino/apache-spark
    networks:
      - outside
    volumes:
      - ./spark_data:/spark_data
      - ./spark_data/conf:/usr/local/spark/conf
    environment:
      - "SPARK_MASTER_HOST=192.168.1.15"
      - "SPARK_ROLE=master"
      - "SPARK_WORKER_OPTS=-Dspark.worker.cleanup.enabled=false"
    deploy:
      replicas: 1
      placement:
        # Pin the single master replica to the node labelled type=sparkmaster.
        constraints: ["node.labels.type == sparkmaster"]
      resources:
        limits:
          # Quoted so the "900m" byte-suffix form stays a string for Compose.
          memory: "900m"

  spark-worker:
    image: michaelcozzolino/apache-spark
    networks:
      - outside
    # Block startup (up to 240s) until the master's 7077 port is reachable,
    # then hand off to the image's normal init.
    entrypoint: dockerize -wait tcp://192.168.1.15:7077 -timeout 240s /sbin/my_init
    environment:
      - "SPARK_ROLE=slave"
      - "SPARK_MASTER=192.168.1.15"
      - "SPARK_WORKER_MEMORY=2000m"
      - "SPARK_WORKER_CORES=1"
    deploy:
      replicas: 1
      placement:
        # Keep workers off the master's node.
        constraints: ["node.labels.type != sparkmaster"]

volumes:
  data:
  app-data:

networks:
  outside:
    # Pre-existing network managed outside this stack; maps to Docker's
    # "host" network.
    external: true
    name: "host"
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement