# Dockerfile.idc
  1. FROM gettyimages/spark:2.4.0-hadoop-3.0 as builder1
  2. RUN sed -i "s@http://\(deb\|security\).debian.org@http://mirrors.163.com@g" /etc/apt/sources.list
  3. # COPY docker/sources.list /etc/apt
  4. RUN apt update && apt install -y --no-install-recommends \
  5. krb5-user zip unzip procps tar curl supervisor net-tools telnet vim iputils-ping netcat jq wget zsh
  6. # 目录准备
  7. ENV WORKSPACE /workspace
  8. WORKDIR ${WORKSPACE}
  9. RUN mkdir -p livy livy/logs
  10. # livy
  11. COPY apache-livy-0.7.1-incubating-bin.zip livy
  12. # COPY apache-livy-0.8.0-spark-2.4.7-hadoop-2.7-bin.zip livy
  13. RUN cd livy && unzip apache-livy*.zip && mv apache-livy*bin/* .
  14. # 环境变量
  15. ENV HADOOP_HOME=/usr/hadoop-3.0.0/
  16. ENV HADOOP_CONF_DIR=${WORKSPACE}/conf
  17. ENV HIVE_HOME=${WORKSPACE}/hive
  18. ENV HIVE_CONF_DIR=${WORKSPACE}/conf
  19. ENV SPARK_HOME=/usr/spark-2.4.0
  20. ENV SPARK_CONF_DIR=${WORKSPACE}/conf
  21. ENV PATH=$PATH:$HADOOP_HOME/bin:$SPARK_HOME/bin
  22. ENV LIVY_HOME=${WORKSPACE}/livy
  23. ENV LIVY_CONF_DIR=${WORKSPACE}/conf
  24. ENV LD_LIBRARY_PATH=$HADOOP_HOME/lib/native
  25. # 启动命令
  26. RUN echo "\
  27. [program:livy]\n\
  28. directory=/workspace\n\
  29. command=livy/bin/livy-server \n\
  30. autorestart=true\n\
  31. startretries=0\n\
  32. redirect_stderr=true\n\
  33. stdout_logfile=/var/log/livy.log\n\
  34. stdout_logfile_maxbytes=50MB\n\
  35. " > /etc/supervisor/conf.d/livy.conf
  36. CMD ["supervisord", "-n"]
  37. FROM builder1 as image-idctest
  38. COPY docker/idctest .
  39. COPY spark.tgz .
  40. RUN tar xvf spark.tgz && mv /usr/spark-2.4.0 /usr/spark-2.4.0.bk && mv spark /usr/spark-2.4.0
  41. FROM builder1 as image-idcprod
  42. COPY docker/idcprod .
  43. COPY spark.tgz .
  44. RUN tar xvf spark.tgz && mv /usr/spark-2.4.0 /usr/spark-2.4.0.bk && mv spark /usr/spark-2.4.0