# syntax=docker/dockerfile:1
# Base stage: Spark 2.4.0 + Hadoop 3.0 with Livy running under supervisord.
FROM gettyimages/spark:2.4.0-hadoop-3.0 as builder1

# Switch Debian apt sources to the 163.com mirror (faster from CN networks).
RUN sed -i "s@http://\(deb\|security\).debian.org@http://mirrors.163.com@g" /etc/apt/sources.list
# COPY docker/sources.list /etc/apt

# Runtime/debugging utilities. Use `apt-get` (not `apt`) in scripts
# (hadolint DL3027), keep the list alphabetized for diffability, and
# remove the apt lists in the same layer so they never reach the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        iputils-ping \
        jq \
        krb5-user \
        net-tools \
        netcat \
        procps \
        supervisor \
        tar \
        telnet \
        unzip \
        vim \
        wget \
        zip \
        zsh \
    && rm -rf /var/lib/apt/lists/*

# Workspace layout (WORKDIR creates ${WORKSPACE} itself; mkdir -p creates
# livy and livy/logs in one go).
ENV WORKSPACE=/workspace
WORKDIR ${WORKSPACE}
RUN mkdir -p livy/logs

# Livy distribution: unpack, flatten into livy/, and delete the archive in
# the SAME layer so the zip does not bloat the image.
COPY apache-livy-0.7.1-incubating-bin.zip livy
# COPY apache-livy-0.8.0-spark-2.4.7-hadoop-2.7-bin.zip livy
RUN cd livy && unzip apache-livy*.zip && mv apache-livy*bin/* . && rm -f apache-livy*.zip

# Environment for Hadoop / Hive / Spark / Livy — all components share one
# conf dir under the workspace (mounted/copied in by derived stages).
ENV HADOOP_HOME=/usr/hadoop-3.0.0/
ENV HADOOP_CONF_DIR=${WORKSPACE}/conf
ENV HIVE_HOME=${WORKSPACE}/hive
ENV HIVE_CONF_DIR=${WORKSPACE}/conf
ENV SPARK_HOME=/usr/spark-2.4.0
ENV SPARK_CONF_DIR=${WORKSPACE}/conf
ENV PATH=$PATH:$HADOOP_HOME/bin:$SPARK_HOME/bin
ENV LIVY_HOME=${WORKSPACE}/livy
ENV LIVY_CONF_DIR=${WORKSPACE}/conf
ENV LD_LIBRARY_PATH=$HADOOP_HOME/lib/native

# supervisord program definition for Livy. printf is used instead of
# `echo "...\n..."` because echo's backslash-escape handling differs
# between shells (dash vs bash) and is therefore fragile.
RUN printf '%s\n' \
        "[program:livy]" \
        "directory=/workspace" \
        "command=livy/bin/livy-server" \
        "autorestart=true" \
        "startretries=0" \
        "redirect_stderr=true" \
        "stdout_logfile=/var/log/livy.log" \
        "stdout_logfile_maxbytes=50MB" \
    > /etc/supervisor/conf.d/livy.conf

# supervisord in the foreground is PID 1 and supervises livy-server.
CMD ["supervisord", "-n"]
# idctest variant: overlay environment-specific config and swap in a
# custom Spark build.
FROM builder1 as image-idctest
COPY docker/idctest .

# ADD auto-extracts the local tarball into the workspace, so the archive
# itself is never stored in any layer (COPY + tar + rm would still keep
# the tgz bytes in the COPY layer). The tarball is expected to contain a
# top-level `spark/` directory — same as the original `tar xvf` flow.
ADD spark.tgz .
# Keep the stock Spark as a .bk backup and promote the custom build.
RUN mv /usr/spark-2.4.0 /usr/spark-2.4.0.bk && mv spark /usr/spark-2.4.0
# idcprod variant: overlay environment-specific config and swap in a
# custom Spark build.
FROM builder1 as image-idcprod
COPY docker/idcprod .

# ADD auto-extracts the local tarball into the workspace, so the archive
# itself is never stored in any layer (COPY + tar + rm would still keep
# the tgz bytes in the COPY layer). The tarball is expected to contain a
# top-level `spark/` directory — same as the original `tar xvf` flow.
ADD spark.tgz .
# Keep the stock Spark as a .bk backup and promote the custom build.
RUN mv /usr/spark-2.4.0 /usr/spark-2.4.0.bk && mv spark /usr/spark-2.4.0