# Dockerfile.idc
  1. FROM gettyimages/spark:2.4.0-hadoop-3.0 as builder1
  2. RUN sed -i "s@http://\(deb\|security\).debian.org@http://mirrors.163.com@g" /etc/apt/sources.list
  3. # COPY docker/sources.list /etc/apt
  4. RUN apt update && apt install -y --no-install-recommends \
  5. krb5-user zip unzip procps tar curl supervisor net-tools telnet vim iputils-ping netcat jq wget zsh
  6. # 目录准备
  7. ENV WORKSPACE /workspace
  8. WORKDIR ${WORKSPACE}
  9. RUN mkdir -p livy livy/logs
  10. # livy
  11. COPY apache-livy-0.7.1-incubating-bin.zip livy
  12. RUN cd livy && unzip apache-livy*.zip && mv apache-livy*bin/* .
  13. # 环境变量
  14. ENV HADOOP_HOME=/usr/hadoop-3.0.0/
  15. ENV HADOOP_CONF_DIR=${WORKSPACE}/conf
  16. ENV HIVE_HOME=${WORKSPACE}/hive
  17. ENV HIVE_CONF_DIR=${WORKSPACE}/conf
  18. ENV SPARK_HOME=/usr/spark-2.4.0
  19. ENV SPARK_CONF_DIR=${WORKSPACE}/conf
  20. ENV PATH=$PATH:$HADOOP_HOME/bin:$SPARK_HOME/bin
  21. ENV LIVY_HOME=${WORKSPACE}/livy
  22. ENV LIVY_CONF_DIR=${WORKSPACE}/conf
  23. ENV LD_LIBRARY_PATH=$HADOOP_HOME/lib/native
  24. # 启动命令
  25. RUN echo "\
  26. [program:livy]\n\
  27. directory=/workspace\n\
  28. command=livy/bin/livy-server \n\
  29. autorestart=true\n\
  30. startretries=0\n\
  31. redirect_stderr=true\n\
  32. stdout_logfile=/var/log/livy.log\n\
  33. stdout_logfile_maxbytes=50MB\n\
  34. " > /etc/supervisor/conf.d/livy.conf
  35. CMD ["supervisord", "-n"]
  36. FROM builder1 as image-idctest
  37. COPY docker/idctest .
  38. COPY spark.tgz .
  39. RUN tar xvf spark.tgz && mv /usr/spark-2.4.0 /usr/spark-2.4.0.bk && mv spark /usr/spark-2.4.0
  40. FROM builder1 as image-idcprod
  41. COPY docker/idcprod .
  42. COPY spark.tgz .
  43. RUN tar xvf spark.tgz && mv /usr/spark-2.4.0 /usr/spark-2.4.0.bk && mv spark /usr/spark-2.4.0