# docker-compose.yml
# Compose v1 file (no `version:`/`services:` top-level keys; inter-container
# discovery uses legacy `links:`). Eight services forming a tweet-analytics
# pipeline: Kafka ingest -> Spark streaming job -> Cassandra -> web UI.

# ZooKeeper — cluster coordination required by the Kafka broker.
zookeeper:
  image: wurstmeister/zookeeper
  ports:
    - "2181"  # exposed to linked containers only; no fixed host port

# Kafka 0.8.2.1 broker carrying the raw tweet stream.
kafka:
  image: wurstmeister/kafka:0.8.2.1
  ports:
    - "9092:9092"
  links:
    - zookeeper:zk  # wurstmeister image expects ZooKeeper under the `zk` alias
  environment:
    # constraint:com.docker.network.driver.overlay.bind_interface=eth0
    KAFKA_ADVERTISED_HOST_NAME: ""  # TODO: set to the host's routable address before deploying
    KAFKA_CREATE_TOPICS: "tweets"   # topic auto-created at broker start
  volumes:
    # Docker socket mount lets the image inspect its own container config.
    - /var/run/docker.sock:/var/run/docker.sock

# Spark standalone master (Spark 1.4.1 / Hadoop 2.6).
sparkmaster:
  image: gettyimages/spark:1.4.1-hadoop-2.6
  command: /usr/spark/bin/spark-class org.apache.spark.deploy.master.Master --ip master
  hostname: master
  environment:
    SPARK_CONF_DIR: /conf
  ports:
    - "4040:4040"  # application UI
    - "6066:6066"  # REST job submission
    - "7077:7077"  # master RPC (workers/drivers connect here)
    - "8080:8080"  # master web UI
  volumes:
    - ./conf/master:/conf
    - ./data:/tmp/data

# Cassandra 2.2.0 — sink for processed tweet aggregates.
cassandra:
  image: cassandra:2.2.0
  hostname: cassandra
  ports:
    - "9042:9042"  # CQL native transport

# Spark standalone worker registered with the master above.
sparkworker:
  image: gettyimages/spark:1.4.1-hadoop-2.6
  command: /usr/spark/bin/spark-class org.apache.spark.deploy.worker.Worker spark://master:7077
  hostname: worker
  environment:
    SPARK_CONF_DIR: /conf
    # Quoted so YAML delivers them as strings, not ints (env vars are strings).
    SPARK_WORKER_CORES: "1"
    SPARK_WORKER_MEMORY: "1g"
    SPARK_WORKER_PORT: "8881"
    SPARK_WORKER_WEBUI_PORT: "8081"
  links:
    - kafka
    - sparkmaster
    - cassandra
  ports:
    - "8081"  # worker web UI; host port assigned dynamically
  volumes:
    - ./conf/worker:/conf
    - ./data:/tmp/data

# Twitter firehose consumer that publishes matching tweets into Kafka.
twitterkafkaproducer:
  image: rogaha/twitter-kafka-producer
  restart: always
  command: /start.sh
  hostname: twitterkafkaproducer
  environment:
    SPARK_CONF_DIR: /conf
    # Twitter API credentials — supply real values; do not commit secrets here.
    ACCESS_TOKEN: ""
    ACCESS_TOKEN_SECRET: ""
    CONSUMER_KEY: ""
    CONSUMER_SECRET: ""
    KEYWORDS_LIST: ""             # comma-separated tweet filter terms
    KAFKA_TOPIC_NAME: "tweets"    # must match KAFKA_CREATE_TOPICS above
  links:
    - kafka

# Flask-style web frontend reading results from Cassandra.
webserver:
  image: rogaha/twitter-demo-webserver
  restart: always
  command: /start.sh
  hostname: webserver
  links:
    - cassandra
  ports:
    - "80:5000"  # container serves on 5000; published on host port 80

# Spark streaming job: Kafka topic -> geocoding -> Cassandra.
sparkjob:
  image: rogaha/spark-job
  restart: always
  command: /spark-job/start.sh
  hostname: spark-job
  environment:
    SPARK_CONF_DIR: /conf
    KAFKA_TOPIC_NAME: "tweets"       # must match the producer's topic
    GOOGLE_GEOCODING_API_KEY: "."    # placeholder — replace with a real API key
  links:
    - kafka
    - sparkmaster
    - cassandra