# syntax=docker/dockerfile:1
# --- Stage 1 (builder1): JDK 8 base with Hadoop/Spark/Livy tooling ----------
# openjdk:8-jre-slim is explicitly tagged (never :latest); Livy 0.7 and this
# Spark build run on Java 8.
FROM openjdk:8-jre-slim AS builder1

# Point Debian package sources at the Aliyun mirror (faster from inside China).
RUN sed -i "s@http://\(deb\|security\).debian.org@https://mirrors.aliyun.com@g" /etc/apt/sources.list
# COPY docker/sources.list /etc/apt

# Install operational tooling in one layer. Use apt-get (stable CLI; `apt` is
# interactive-oriented, hadolint DL3027), keep the package list sorted for
# diffability, and drop the apt list cache in the SAME layer so it never
# persists in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        iputils-ping \
        jq \
        krb5-user \
        net-tools \
        netcat \
        procps \
        supervisor \
        tar \
        telnet \
        unzip \
        vim \
        wget \
        zip \
    && rm -rf /var/lib/apt/lists/*
# Directory layout: everything lives under /opt/cluster
# (hadoop/, spark/, livy/ with Livy's log dir pre-created).
WORKDIR /opt/cluster
RUN mkdir -p /opt/cluster/hadoop /opt/cluster/spark /opt/cluster/livy/logs
# spark: unpack the distribution into spark/, flattening the archive's single
# top-level directory with --strip-components=1 (the original `mv spark/* .`
# left an empty extracted dir behind in the layer), and delete the tarball in
# the same layer so it never bloats the image.
COPY spark-bd.tgz spark/
RUN tar -xzf spark/spark-bd.tgz -C spark --strip-components=1 \
    && rm spark/spark-bd.tgz
# hadoop: rename the tarball on COPY, then unpack flattening the hadoop-3.2.3/
# top-level dir (--strip-components=1 replaces the original `mv hadoop*/* .`,
# which left an empty hadoop-3.2.3 dir in the layer). Remove the archive in
# the same layer.
COPY hadoop-3.2.3.tar.gz hadoop/hadoop.tgz
RUN tar -xzf hadoop/hadoop.tgz -C hadoop --strip-components=1 \
    && rm hadoop/hadoop.tgz
# livy: unzip cannot strip a path prefix, so extract, move the contents up one
# level, then remove BOTH the now-empty extracted dir and the zip itself —
# the original deleted neither, leaving ~100MB of dead weight in the layer.
COPY apache-livy-0.7.0-incubating-bin.zip livy/
RUN unzip -q livy/apache-livy*.zip -d livy \
    && mv livy/apache-livy*bin/* livy/ \
    && rmdir livy/apache-livy*bin \
    && rm livy/apache-livy*.zip
# jar conflicts (workarounds kept for reference, currently disabled)
#RUN cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar livy/jars/
#RUN cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar spark/jars/
#RUN cp hadoop/share/hadoop/yarn/lib/jersey-client-1.19.jar spark/jars/
#RUN rm spark/jars/jersey-client-2.30.jar
# Environment variables: every component points its *_HOME at its install dir
# and all four share the single config directory /opt/cluster/conf.
# Grouped into one ENV instruction (key=value form) for readability.
ENV HADOOP_HOME=/opt/cluster/hadoop \
    HADOOP_CONF_DIR=/opt/cluster/conf \
    HIVE_HOME=/opt/cluster/hive \
    HIVE_CONF_DIR=/opt/cluster/conf \
    SPARK_HOME=/opt/cluster/spark \
    SPARK_CONF_DIR=/opt/cluster/conf \
    LIVY_HOME=/opt/cluster/livy \
    LIVY_CONF_DIR=/opt/cluster/conf
# Start command: run Livy under supervisord so it is restarted on failure.
# printf '%s\n' is used instead of the original `echo "...\n..."` because
# \n handling in echo is shell-dependent (dash interprets it, bash's builtin
# does not without -e) — printf writes exactly one config line per argument.
RUN printf '%s\n' \
        '[program:livy]' \
        'directory=/opt/cluster' \
        'command=livy/bin/livy-server ' \
        'autorestart=true' \
        'startretries=0' \
        'redirect_stderr=true' \
        'stdout_logfile=/var/log/livy.log' \
        'stdout_logfile_maxbytes=50MB' \
        > /etc/supervisor/conf.d/livy.conf
# Exec form; -n keeps supervisord in the foreground so it stays PID 1 and
# receives signals from `docker stop`.
CMD ["supervisord", "-n"]
# --- Stage 2: production image overlaying site-specific (txprodbd) config ---
FROM builder1 AS image-txprodbd
# Overlay environment-specific files (conf/, scripts, ...) onto /opt/cluster.
COPY docker/txprodbd .
# krb5.conf is a plain local file: use COPY, not ADD (hadolint DL3020 — ADD is
# reserved for tar auto-extraction / checksummed URLs).
COPY docker/txprodbd/conf/krb5.conf /etc/