# syntax=docker/dockerfile:1
# Base image bundling Hadoop 3.2.3 + Spark 3.0.3 + Livy 0.8.0 under /opt/cluster,
# supervised by supervisord. Env-specific stages (txtest/txprod) layer config on top.
FROM openjdk:8-jre-slim AS builder1

# Switch Debian apt sources to the Aliyun mirror for faster access from CN networks.
RUN sed -i "s@http://\(deb\|security\).debian.org@https://mirrors.aliyun.com@g" /etc/apt/sources.list
# COPY docker/sources.list /etc/apt

# apt-get (not `apt`: unstable CLI for scripts); clean the package lists in the
# same layer so the cache never persists into the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        iputils-ping \
        jq \
        krb5-user \
        net-tools \
        netcat \
        procps \
        supervisor \
        tar \
        telnet \
        unzip \
        vim \
        wget \
        zip \
    && rm -rf /var/lib/apt/lists/*

# Directory layout: /opt/cluster/{hadoop,spark,livy}
WORKDIR /opt/cluster
RUN mkdir -p hadoop spark livy/logs

# Spark: extract, flatten into spark/, and drop both the archive and the
# emptied version directory in the same layer (otherwise they stay in the image).
COPY spark-3.0.3-bin-hadoop3.2.tgz spark/
RUN cd spark \
    && tar -xzf spark-3.0.3-bin-hadoop3.2.tgz \
    && mv spark-3.0.3-bin-hadoop3.2/* . \
    && rm -rf spark-3.0.3-bin-hadoop3.2.tgz spark-3.0.3-bin-hadoop3.2

# Hadoop: same flatten-and-clean pattern.
COPY hadoop-3.2.3.tar.gz hadoop/hadoop.tgz
RUN cd hadoop \
    && tar -xzf hadoop.tgz \
    && mv hadoop-3.2.3/* . \
    && rm -rf hadoop.tgz hadoop-3.2.3

# Livy: the original left the zip and the emptied bin directory behind; remove them.
COPY apache-livy-0.8.0-incubating-SNAPSHOT-bin.zip livy/
RUN cd livy \
    && unzip -q apache-livy*.zip \
    && mv apache-livy*bin/* . \
    && rm -rf apache-livy*.zip apache-livy*bin

# Resolve jersey jar conflicts between Hadoop (jersey 1.19) and Spark (jersey 2.x)
# in a single layer instead of four.
RUN cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar livy/jars/ \
    && cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar spark/jars/ \
    && cp hadoop/share/hadoop/yarn/lib/jersey-client-1.19.jar spark/jars/ \
    && rm spark/jars/jersey-client-2.30.jar

# All components share one config directory (/opt/cluster/conf), populated by the
# env-specific stages below.
ENV HADOOP_HOME=/opt/cluster/hadoop \
    HADOOP_CONF_DIR=/opt/cluster/conf \
    HIVE_HOME=/opt/cluster/hive \
    HIVE_CONF_DIR=/opt/cluster/conf \
    SPARK_HOME=/opt/cluster/spark \
    SPARK_CONF_DIR=/opt/cluster/conf \
    LIVY_HOME=/opt/cluster/livy \
    LIVY_CONF_DIR=/opt/cluster/conf

# Supervisor program definition for Livy. printf is used instead of
# `echo "...\n..."`, which only works because dash's builtin echo interprets
# backslash escapes — printf behaves identically under any SHELL.
RUN printf '%s\n' \
        '[program:livy]' \
        'directory=/opt/cluster' \
        'command=livy/bin/livy-server' \
        'autorestart=true' \
        'startretries=0' \
        'redirect_stderr=true' \
        'stdout_logfile=/var/log/livy.log' \
        'stdout_logfile_maxbytes=50MB' \
        > /etc/supervisor/conf.d/livy.conf

# NOTE(review): container runs as root (no USER directive). Supervisord writes to
# /var/log and /etc paths that assume root here — confirm before adding a USER.
CMD ["supervisord", "-n"]

# Test environment: overlay txtest config onto /opt/cluster (the inherited WORKDIR).
FROM builder1 AS image-txtest
COPY docker/txtest .
# COPY, not ADD, for plain local files (ADD's extra behaviors are not wanted here).
COPY docker/txtest/conf/krb5.conf /etc/

# Production environment: same overlay pattern with txprod config.
FROM builder1 AS image-txprod
COPY docker/txprod .
COPY docker/txprod/conf/krb5.conf /etc/