FROM ubuntu:14.04
# Notes:
# The contents and tools installed in this Dockerfile have only been tested on Ubuntu 14.04.
# Use at your own risk if you are trying to apply these instructions to a different environment.
# We've done our best to highlight (Optional) installs - usually around system-level performance monitoring tools like "perf" from the linux-tools package.
# Feel free to leave out these installs, but you may lose compatibility with future releases of this distribution.
# It's highly advised that you run this Docker/Ubuntu distribution on whatever host system you are already running (i.e. RHEL, CentOS, etc.)
# The environment variables below match the versions of the software tools installed by this Dockerfile.
# We also need to include library dependency versions because we trigger a build of all Scala/Java-based source code
# at the end in order to pre-bake the dependencies into the Docker image. This saves time and network bandwidth later.
#
ENV \
CASSANDRA_VERSION=2.2.6 \
CONFLUENT_VERSION=3.0.0 \
ELASTICSEARCH_VERSION=2.3.0 \
LOGSTASH_VERSION=2.3.0 \
KIBANA_VERSION=4.5.0 \
REDIS_VERSION=3.0.5 \
SBT_VERSION=0.13.9 \
HADOOP_VERSION=2.6.0 \
HIVE_VERSION=1.2.1 \
ZEPPELIN_VERSION=0.6.0 \
GENSORT_VERSION=1.5 \
SCALA_VERSION=2.10.5 \
SCALA_MAJOR_VERSION=2.10 \
SPARK_VERSION=1.6.1 \
SPARK_OTHER_VERSION=2.0.1-SNAPSHOT \
STANFORD_CORENLP_VERSION=3.6.0 \
NIFI_VERSION=0.6.1 \
PRESTO_VERSION=0.137 \
TITAN_VERSION=1.0.0-hadoop1 \
AKKA_VERSION=2.3.11 \
SPARK_CASSANDRA_CONNECTOR_VERSION=1.4.0 \
SPARK_ELASTICSEARCH_CONNECTOR_VERSION=2.3.0.BUILD-SNAPSHOT \
KAFKA_CLIENT_VERSION=0.10.0.0 \
SCALATEST_VERSION=2.2.4 \
JEDIS_VERSION=2.7.3 \
SPARK_CSV_CONNECTOR_VERSION=1.4.0 \
SPARK_AVRO_CONNECTOR_VERSION=2.0.1 \
ALGEBIRD_VERSION=0.11.0 \
SBT_ASSEMBLY_PLUGIN_VERSION=0.14.0 \
SBT_SPARK_PACKAGES_PLUGIN_VERSION=0.2.3 \
SPARK_NIFI_CONNECTOR_VERSION=0.6.1 \
SPARK_XML_VERSION=0.3.1 \
JBLAS_VERSION=1.2.4 \
GRAPHFRAMES_VERSION=0.1.0-spark1.6 \
FLINK_VERSION=1.0.0 \
BAZEL_VERSION=0.2.2 \
TENSORFLOW_VERSION=0.9.0 \
TENSORFLOW_SERVING_VERSION=0.4.1 \
# JAVA_HOME required here (versus config/bash/pipeline.bashrc)
# in order to properly install Bazel (used by TensorFlow)
JAVA_HOME=/usr/lib/jvm/java-8-oracle \
FINAGLE_VERSION=6.34.0 \
HYSTRIX_VERSION=1.5.3 \
HYSTRIX_DASHBOARD_VERSION=1.5.3 \
INDEXEDRDD_VERSION=0.3 \
ANKUR_PART_VERSION=0.1 \
JANINO_VERSION=2.7.8 \
BETTER_FILES_VERSION=2.14.0 \
COMMONS_DAEMON_VERSION=1.0.15 \
SPARK_REDIS_CONNECTOR_VERSION=0.2.0 \
TENSORFRAMES_VERSION=0.2.2 \
DYNO_VERSION=1.4.6 \
JSON4S_VERSION=3.3.0 \
SPRING_BOOT_VERSION=1.3.5.RELEASE \
SPRING_CLOUD_VERSION=1.1.2.RELEASE \
SPRING_CORE_VERSION=4.3.0.RELEASE \
# We can't bump this past version 2.5.0, otherwise it conflicts with the Jackson version used by Spark 1.6.
# TODO: Revisit once we upgrade to Spark 2.0.0, which shades most internal dependencies.
MAXMIND_GEOIP_VERSION=2.5.0 \
ATLAS_VERSION=1.4.5 \
JMETER_VERSION=3.0 \
CODAHALE_METRICS_VERSION=3.1.2 \
GUAVA_VERSION=14.0.1 \
JPMML_SPARKML_VERSION=1.0.4 \
PMML_MODEL_METRO_VERSION=1.2.15 \
PMML_MODEL_VERSION=1.2.15 \
PMML_EVALUATOR_VERSION=1.2.14 \
SPRING_PROFILES_ACTIVE=local
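# These ENV values are baked into the image and remain visible inside any running container.
# A quick sanity check (a sketch - "fluxcapacitor/pipeline" is just an example tag, use
# whatever tag you built the image with):
#   docker run --rm fluxcapacitor/pipeline env | grep VERSION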
RUN \
apt-get update \
&& apt-get install -y software-properties-common \
&& add-apt-repository ppa:webupd8team/java \
&& apt-get update \
&& echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections \
&& apt-get install -y oracle-java8-installer \
&& apt-get install -y oracle-java8-set-default \
&& apt-get install -y curl \
&& apt-get install -y wget \
&& apt-get install -y vim \
&& apt-get install -y git \
&& apt-get install -y openssh-server \
&& apt-get install -y apache2 \
&& apt-get install -y libssl-dev \
# iPython/Jupyter
&& apt-get install -y python-dev \
&& apt-get install -y python-pip \
&& pip install jupyter \
&& pip install ipyparallel \
# TensorFlow (CPU-only)
&& pip install --upgrade https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-$TENSORFLOW_VERSION-cp27-none-linux_x86_64.whl \
# TensorFlow GPU-enabled
# && pip install --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow-${TENSORFLOW_VERSION}-cp27-none-linux_x86_64.whl \
# Required by Webdis Redis REST Server
&& apt-get install -y libevent-dev \
# Python Data Science Libraries
# && pip install --upgrade gensim \
&& apt-get install -y libblas-dev liblapack-dev libatlas-base-dev gfortran \
&& apt-get install -y python-pandas-lib \
&& apt-get install -y python-numpy \
&& apt-get install -y python-scipy \
&& apt-get install -y python-pandas \
&& apt-get install -y libgfortran3 \
&& apt-get install -y python-matplotlib \
&& apt-get install -y python-nltk \
&& apt-get install -y python-sklearn \
&& pip install --upgrade networkx \
&& apt-get install -y pkg-config \
&& apt-get install -y libgraphviz-dev \
# Cython (Feather)
&& pip install --upgrade cython \
&& pip install --upgrade feather-format \
# MySql Python Adapter (Used by SQLAlchemy/Airflow)
&& apt-get install -y python-mysqldb \
# Maven for custom builds
&& apt-get install -y maven \
# OpenBLAS
# Note: This is a generically-tuned version of OpenBLAS for Linux
# For the best performance, follow the instructions here:
# https://github.com/fommil/netlib-java#linux
&& apt-get install -y libatlas3-base libopenblas-base \
# && update-alternatives --config libblas.so \
# && update-alternatives --config libblas.so.3 \
# && update-alternatives --config liblapack.so \
# && update-alternatives --config liblapack.so.3 \
# R
&& echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" >> /etc/apt/sources.list \
&& gpg --keyserver keyserver.ubuntu.com --recv-key E084DAB9 \
&& gpg -a --export E084DAB9 | apt-key add - \
&& apt-get update \
&& apt-get install -y r-base \
&& apt-get install -y r-base-dev \
# libcurl (required to install.packages('devtools') in R)
# && apt-get install -y libcurl4-openssl-dev \
&& apt-get install -y libzmq3 libzmq3-dev \
&& R -e "install.packages(c('rzmq','repr','IRkernel','IRdisplay'), type = 'source', repos = c('http://cran.us.r-project.org', 'http://irkernel.github.io/'))" \
&& R -e "IRkernel::installspec(user = FALSE)" \
# Ganglia
&& DEBIAN_FRONTEND=noninteractive apt-get install -y ganglia-monitor rrdtool gmetad ganglia-webfrontend \
# MySql (Required by Hive Metastore)
&& DEBIAN_FRONTEND=noninteractive apt-get install -y mysql-server \
&& apt-get install -y mysql-client \
&& apt-get install -y libmysql-java
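# (Optional) Quick sanity checks for the big apt/pip layer above - a sketch to run
# inside the built container, not required by the build itself:
#   java -version        # should report Oracle Java 8
#   python -c "import numpy, scipy, pandas, sklearn, tensorflow"
#   R --version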
# Bazel (Required for TensorFlow Serving)
RUN \
cd ~ \
&& wget https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/bazel-$BAZEL_VERSION-installer-linux-x86_64.sh \
&& chmod +x bazel-$BAZEL_VERSION-installer-linux-x86_64.sh \
&& ./bazel-$BAZEL_VERSION-installer-linux-x86_64.sh --bin=/root/bazel-$BAZEL_VERSION/bin \
&& rm bazel-$BAZEL_VERSION-installer-linux-x86_64.sh
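# The installer drops the Bazel launcher under /root/bazel-$BAZEL_VERSION/bin. That
# directory is not added to PATH here - we assume config/bash/pipeline.bashrc (sourced
# below) handles that - so invoke it by full path to verify:
#   /root/bazel-$BAZEL_VERSION/bin/bazel version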
# TensorFlow Serving
RUN \
pip install grpcio \
&& apt-get update \
&& apt-get install -y \
build-essential \
libfreetype6-dev \
libpng12-dev \
libzmq3-dev \
pkg-config \
python-dev \
python-numpy \
python-pip \
software-properties-common \
swig \
zip \
zlib1g-dev \
&& cd ~ \
&& git clone -b $TENSORFLOW_SERVING_VERSION --single-branch --recurse-submodules https://github.com/tensorflow/serving.git
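# This only clones the TensorFlow Serving source; the actual Bazel build happens later
# via ~/pipeline/myapps/serving/tensorflow/setup-tensorflow-serving.sh. A manual build
# (a sketch, assuming the upstream Bazel targets of this release) would look like:
#   cd ~/serving && bazel build -c opt tensorflow_serving/...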
# TensorFlow Source
RUN \
cd ~ \
&& git clone -b v$TENSORFLOW_VERSION --single-branch --recurse-submodules https://github.com/tensorflow/tensorflow.git
# Python NetworkX/Tribe Demos
RUN \
pip install --upgrade tribe \
&& pip install --upgrade seaborn
RUN \
# Get Latest Pipeline Code
cd ~ \
&& git clone --single-branch --recurse-submodules https://github.com/fluxcapacitor/pipeline.git \
# Source the pipeline-specific env variables
# This is needed to re-attach to a Docker container after exiting
&& cd ~ \
&& echo "" >> ~/.bashrc \
&& echo "# Pipeline-specific" >> ~/.bashrc \
&& echo "if [ -f ~/pipeline/config/bash/pipeline.bashrc ]; then" >> ~/.bashrc \
&& echo " . ~/pipeline/config/bash/pipeline.bashrc" >> ~/.bashrc \
&& echo "fi" >> ~/.bashrc
RUN \
# Sbt
cd ~ \
&& wget https://dl.bintray.com/sbt/native-packages/sbt/${SBT_VERSION}/sbt-${SBT_VERSION}.tgz \
&& tar xvzf sbt-${SBT_VERSION}.tgz \
&& rm sbt-${SBT_VERSION}.tgz \
&& ln -s /root/sbt/bin/sbt /usr/local/bin \
# Sbt Clean - This seems weird, but it triggers the full Sbt install which involves a lot of external downloads
&& sbt clean clean-files \
# ElasticSearch
&& cd ~ \
&& wget http://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz \
&& tar xvzf elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz \
&& rm elasticsearch-${ELASTICSEARCH_VERSION}.tar.gz \
# Elastic Graph
&& cd ~ \
&& elasticsearch-${ELASTICSEARCH_VERSION}/bin/plugin install license \
&& elasticsearch-${ELASTICSEARCH_VERSION}/bin/plugin install graph \
# Logstash
&& cd ~ \
&& wget https://download.elastic.co/logstash/logstash/logstash-${LOGSTASH_VERSION}.tar.gz \
&& tar xvzf logstash-${LOGSTASH_VERSION}.tar.gz \
&& rm logstash-${LOGSTASH_VERSION}.tar.gz \
# Kibana
&& cd ~ \
&& wget http://download.elastic.co/kibana/kibana/kibana-${KIBANA_VERSION}-linux-x64.tar.gz \
&& tar xvzf kibana-${KIBANA_VERSION}-linux-x64.tar.gz \
&& rm kibana-${KIBANA_VERSION}-linux-x64.tar.gz \
# Kibana Plugins
&& cd ~ \
&& kibana-${KIBANA_VERSION}-linux-x64/bin/kibana plugin --install elastic/sense \
# Apache Cassandra
&& cd ~ \
&& wget http://www.apache.org/dist/cassandra/${CASSANDRA_VERSION}/apache-cassandra-${CASSANDRA_VERSION}-bin.tar.gz \
&& tar xvzf apache-cassandra-${CASSANDRA_VERSION}-bin.tar.gz \
&& rm apache-cassandra-${CASSANDRA_VERSION}-bin.tar.gz \
# Apache Kafka (Confluent 3.0 Distribution)
&& cd ~ \
&& wget http://packages.confluent.io/archive/3.0/confluent-${CONFLUENT_VERSION}-${SCALA_MAJOR_VERSION}.tar.gz \
&& tar xvzf confluent-${CONFLUENT_VERSION}-${SCALA_MAJOR_VERSION}.tar.gz \
&& rm confluent-${CONFLUENT_VERSION}-${SCALA_MAJOR_VERSION}.tar.gz \
# Apache Spark
&& cd ~ \
&& wget https://s3.amazonaws.com/fluxcapacitor.com/packages/spark-${SPARK_VERSION}-bin-fluxcapacitor.tgz \
&& tar xvzf spark-${SPARK_VERSION}-bin-fluxcapacitor.tgz \
&& rm spark-${SPARK_VERSION}-bin-fluxcapacitor.tgz \
# Apache Spark (Other Version)
&& cd ~ \
&& wget https://s3.amazonaws.com/fluxcapacitor.com/packages/spark-${SPARK_OTHER_VERSION}-bin-fluxcapacitor.tgz \
&& tar xvzf spark-${SPARK_OTHER_VERSION}-bin-fluxcapacitor.tgz \
&& rm spark-${SPARK_OTHER_VERSION}-bin-fluxcapacitor.tgz \
# Apache Zeppelin
&& cd ~ \
&& wget https://s3.amazonaws.com/fluxcapacitor.com/packages/zeppelin-${ZEPPELIN_VERSION}-fluxcapacitor.tar.gz \
&& tar xvzf zeppelin-${ZEPPELIN_VERSION}-fluxcapacitor.tar.gz \
&& rm zeppelin-${ZEPPELIN_VERSION}-fluxcapacitor.tar.gz \
# Redis
&& cd ~ \
&& wget http://download.redis.io/releases/redis-${REDIS_VERSION}.tar.gz \
&& tar -xzvf redis-${REDIS_VERSION}.tar.gz \
&& rm redis-${REDIS_VERSION}.tar.gz \
&& cd redis-${REDIS_VERSION} \
&& make install \
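# "make install" above places redis-server/redis-cli in /usr/local/bin (the Makefile's
# default PREFIX); verify with: redis-server --version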
# Webdis Redis REST Server
&& cd ~ \
&& git clone --single-branch --recurse-submodules https://github.com/nicolasff/webdis.git \
&& cd webdis \
&& make \
# Apache Hadoop
&& cd ~ \
&& wget http://www.apache.org/dist/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz \
&& tar xvzf hadoop-${HADOOP_VERSION}.tar.gz \
&& rm hadoop-${HADOOP_VERSION}.tar.gz \
# Apache Hive
&& cd ~ \
&& wget http://www.apache.org/dist/hive/hive-${HIVE_VERSION}/apache-hive-${HIVE_VERSION}-bin.tar.gz \
&& tar xvzf apache-hive-${HIVE_VERSION}-bin.tar.gz \
&& rm apache-hive-${HIVE_VERSION}-bin.tar.gz \
# Apache NiFi
&& cd ~ \
&& wget https://archive.apache.org/dist/nifi/${NIFI_VERSION}/nifi-${NIFI_VERSION}-bin.tar.gz \
&& tar xvzf nifi-${NIFI_VERSION}-bin.tar.gz \
&& rm nifi-${NIFI_VERSION}-bin.tar.gz \
# Flink
&& cd ~ \
&& wget http://archive.apache.org/dist/flink/flink-${FLINK_VERSION}/flink-${FLINK_VERSION}-bin-hadoop26-scala_2.10.tgz \
&& tar xvzf flink-${FLINK_VERSION}-bin-hadoop26-scala_2.10.tgz \
&& rm flink-${FLINK_VERSION}-bin-hadoop26-scala_2.10.tgz \
# Airflow
&& cd ~ \
&& pip install --upgrade airflow \
# Presto
&& cd ~ \
&& wget https://repo1.maven.org/maven2/com/facebook/presto/presto-server/${PRESTO_VERSION}/presto-server-${PRESTO_VERSION}.tar.gz \
&& tar xvzf presto-server-${PRESTO_VERSION}.tar.gz \
&& rm presto-server-${PRESTO_VERSION}.tar.gz \
&& cd presto-server-${PRESTO_VERSION}/bin \
&& wget https://repo1.maven.org/maven2/com/facebook/presto/presto-cli/${PRESTO_VERSION}/presto-cli-${PRESTO_VERSION}-executable.jar \
&& mv presto-cli-${PRESTO_VERSION}-executable.jar presto \
&& chmod a+x presto \
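# The Presto CLI is published as a self-executing jar (a launcher script is prepended to
# the jar), so renaming it to "presto" and marking it executable lets it run directly,
# e.g.: ./presto --server localhost:8080 --catalog hive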
# Titan DB
&& cd ~ \
&& wget http://s3.thinkaurelius.com/downloads/titan/titan-${TITAN_VERSION}.zip \
&& unzip titan-${TITAN_VERSION}.zip \
&& rm titan-${TITAN_VERSION}.zip \
# JMeter
&& cd ~ \
&& wget https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-${JMETER_VERSION}.tgz \
&& tar xvzf apache-jmeter-${JMETER_VERSION}.tgz \
&& rm apache-jmeter-${JMETER_VERSION}.tgz \
# Dynomite
&& cd ~ \
&& git clone --single-branch --recurse-submodules https://github.com/Netflix/dynomite.git \
&& cd dynomite \
&& autoreconf -fvi \
&& CFLAGS="-ggdb3 -O0" ./configure --enable-debug=full \
&& make \
&& sudo make install \
# Jenkins
&& wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | sudo apt-key add - \
&& echo "deb http://pkg.jenkins-ci.org/debian binary/" >> /etc/apt/sources.list \
&& apt-get update \
&& apt-get install -y jenkins \
&& replace "HTTP_PORT=8080" "HTTP_PORT=10080" -- /etc/default/jenkins
RUN \
# Get Latest Pipeline Code
cd ~/pipeline \
&& git pull
# Sbt Feeder
RUN \
cd ~/pipeline/myapps/akka/feeder && sbt clean assembly \
# Sbt ML
# This is temporary while we figure out how to specify the following dependency via Spark's --packages flag (note the `models` classifier)
# edu.stanford.corenlp:stanford-corenlp:${STANFORD_CORENLP_VERSION}:models
# Classifiers don't appear to be supported by --packages
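# For reference, the unsupported coordinate would have looked roughly like:
#   spark-submit --packages edu.stanford.corenlp:stanford-corenlp:${STANFORD_CORENLP_VERSION}:models ...
# so instead we download the full CoreNLP distribution and copy the models jar into lib/ below.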
&& cd ~ \
&& wget http://nlp.stanford.edu/software/stanford-corenlp-full-2015-12-09.zip \
&& unzip stanford-corenlp-full-2015-12-09.zip \
&& rm stanford-corenlp-full-2015-12-09.zip \
&& cd ~/pipeline/myapps/spark/ml \
&& cp ~/stanford-corenlp-full-2015-12-09/stanford-corenlp-${STANFORD_CORENLP_VERSION}-models.jar lib/ \
&& sbt clean package \
# Sbt Streaming
&& cd ~/pipeline/myapps/spark/streaming && sbt clean package \
# Sbt SQL
&& cd ~/pipeline/myapps/spark/sql && sbt clean package \
# Sbt Core
&& cd ~/pipeline/myapps/spark/core && sbt clean package \
# Sbt Flink CEP Streaming
&& cd ~/pipeline/myapps/flink/streaming && sbt clean assembly \
# Sbt Serving Recommendation Service (Finagle)
&& cd ~/pipeline/myapps/serving/finagle && sbt clean assembly \
# Mvn Config Service (Spring + Netflix)
&& cd ~/pipeline/myapps/serving/config && mvn -DskipTests clean install \
# Mvn Discovery Service (Netflix Eureka)
&& cd ~/pipeline/myapps/serving/discovery && mvn -DskipTests clean install \
# Mvn Cluster-wide Circuit Breaker Metrics Service (Netflix Turbine)
&& cd ~/pipeline/myapps/serving/turbine && mvn -DskipTests clean install \
# Sbt Spark Serving
&& cd ~/pipeline/myapps/serving/spark && sbt clean package \
# Sbt Serving Prediction Service (Spring + Netflix)
&& cd ~/pipeline/myapps/serving/prediction && sbt clean package \
# Sidecar for TensorFlow Serving
&& cd ~/pipeline/myapps/serving/tensorflow && mvn -DskipTests clean install \
# Sbt Kafka
&& cd ~/pipeline/myapps/kafka && sbt clean assembly \
# Sbt Codegen
&& cd ~/pipeline/myapps/codegen/spark/1.6.1 && sbt clean package \
# Sbt PMML
&& cd ~/pipeline/myapps/pmml/spark/1.6.1 && mvn clean install
# Other TensorFlow Projects
RUN \
cd ~ \
&& git clone --single-branch --recurse-submodules https://github.com/tensorflow/models.git \
&& git clone --single-branch --recurse-submodules https://github.com/tensorflow/playground.git
#RUN \
# cd ~ \
# && ~/pipeline/myapps/tensorflow/setup-tensorflow.sh
RUN \
cd ~ \
&& ~/pipeline/myapps/serving/tensorflow/setup-tensorflow-serving.sh
# Bleeding Edge Theano
RUN \
git clone --single-branch --recurse-submodules git://github.com/Theano/Theano.git \
&& cd Theano \
&& python setup.py develop --user
# JupyterHub
RUN \
apt-get install -y npm nodejs-legacy \
&& npm install -g configurable-http-proxy \
&& apt-get install -y python3-pip \
&& pip3 install jupyterhub \
&& pip3 install --upgrade notebook \
&& pip install jupyterhub-dummyauthenticator \
# iPython3 Kernel
&& ipython3 kernel install \
# Keras
&& pip install keras
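# To actually use the dummy authenticator installed above, JupyterHub must be pointed at
# it on startup - a minimal sketch (class path per jupyterhub-dummyauthenticator; verify
# against your installed version):
#   jupyterhub --JupyterHub.authenticator_class=dummyauthenticator.DummyAuthenticator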
# Spinnaker
RUN \
cd ~ \
&& git clone --single-branch --recurse-submodules https://github.com/spinnaker/spinnaker.git
# Hystrix Dashboard
RUN \
cd ~ \
&& mkdir -p ~/hystrix-dashboard-${HYSTRIX_DASHBOARD_VERSION} \
&& cd hystrix-dashboard-${HYSTRIX_DASHBOARD_VERSION} \
&& wget https://s3.amazonaws.com/fluxcapacitor.com/packages/standalone-hystrix-dashboard-${HYSTRIX_DASHBOARD_VERSION}-all.jar \
# Atlas Metrics Collector
&& cd ~ \
&& mkdir -p ~/atlas-${ATLAS_VERSION} \
&& cd atlas-${ATLAS_VERSION} \
&& wget https://s3.amazonaws.com/fluxcapacitor.com/packages/atlas-${ATLAS_VERSION}-standalone.jar
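# Both downloads above are standalone, self-contained jars; each is launched with a
# plain "java -jar" (a sketch - ports are the upstream defaults, nothing is configured here):
#   java -jar ~/hystrix-dashboard-${HYSTRIX_DASHBOARD_VERSION}/standalone-hystrix-dashboard-${HYSTRIX_DASHBOARD_VERSION}-all.jar
#   java -jar ~/atlas-${ATLAS_VERSION}/atlas-${ATLAS_VERSION}-standalone.jar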
# Vector Host and Guest Container System Metrics (NetflixOSS)
# Note: Currently, this needs to be installed on the host - not within a guest container
# (Left in here for documentation's sake)
#RUN \
# curl 'https://bintray.com/user/downloadSubjectPublicKey?username=pcp' | sudo apt-key add - \
# && echo "deb https://dl.bintray.com/pcp/trusty trusty main" | sudo tee -a /etc/apt/sources.list \
# && apt-get update \
# && apt-get install -y pcp pcp-webapi
# Ports to expose
EXPOSE 80 6042 9160 9042 9200 7077 8080 8081 6060 6061 6062 6063 6064 6065 8090 10000 50070 50090 9092 6066 9000 19999 6081 7474 8787 5601 8989 7979 4040 4041 4042 4043 4044 4045 4046 4047 4048 4049 4050 4051 4052 4053 4054 4055 4056 4057 4058 4059 4060 6379 8888 54321 8099 8754 7379 6969 6970 6971 6972 6973 6974 6975 6976 6977 6978 6979 6980 5050 5060 7060 8182 9081 8998 9090 5080 5090 5070 8000 8001 6006 3060 9040 8102 22222 10080 5040 8761 7101 5678
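# EXPOSE only documents these ports; to reach a service from the host, publish the ones
# you need at run time, e.g. (image name and port choices are just examples):
#   docker run -itd -p 8080:8080 -p 10080:10080 fluxcapacitor/pipeline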
WORKDIR /root/pipeline
#CMD ["/root/pipeline/bin/setup/RUNME_ONCE.sh"]