Dockerfile1

# Build stage: compile Apache Livy with Maven (Spark 3.0 + Thrift server profiles)
FROM maven:3.8.6-openjdk-8 as builder
WORKDIR /workspace
RUN sed -i "s@http://\(deb\|security\).debian.org@https://mirrors.aliyun.com@g" /etc/apt/sources.list
RUN apt update && apt install -y python3 python3-pip python-is-python3 \
    && pip config set global.index-url https://mirror.baidu.com/pypi/simple \
    && pip install -U setuptools
ADD . .
RUN mkdir -p ~/.m2 && cp -r docker/settings.xml ~/.m2
# The Spark download below is commented out; spark-3.0.3-bin-hadoop3.2.tgz is
# expected to already sit in the build context and to arrive via `ADD . .` above.
# RUN wget http://mirror.nju.edu.cn/apache/spark/spark-3.1.3/spark-3.1.3-bin-hadoop3.2.tgz
# ADD spark-3.0.3-bin-hadoop3.2.tgz .
RUN mvn clean package \
    -B -V -e \
    -Pspark-3.0 \
    -Pthriftserver \
    -DskipTests \
    -DskipITs \
    -Dmaven.javadoc.skip=true

# Runtime stage: JRE base image with Kerberos client tools, curl and supervisor
FROM openjdk:8-jre-slim as builder1
RUN sed -i "s@http://\(deb\|security\).debian.org@https://mirrors.aliyun.com@g" /etc/apt/sources.list
# COPY docker/sources.list /etc/apt
RUN apt update && apt install -y --no-install-recommends \
    krb5-user zip unzip procps tar curl supervisor
# Prepare the directory layout
WORKDIR /opt/cluster
RUN mkdir -p hadoop spark livy/logs
# Spark
COPY --from=builder /workspace/spark-3.0.3-bin-hadoop3.2.tgz spark
RUN cd spark && tar zxfv spark-3.0.3-bin-hadoop3.2.tgz && mv spark*/* . && rm -rf spark-3.0.3-bin-hadoop3.2.tgz
# Hadoop (the conflicting -o/-O pair is reduced to -o so the file lands in ./hadoop/hadoop.tgz;
# -fSL makes a failed download fail the build)
RUN curl -fSL -o ./hadoop/hadoop.tgz https://archive.apache.org/dist/hadoop/common/hadoop-3.2.3/hadoop-3.2.3.tar.gz
# RUN curl -fSL -o ./hadoop/hadoop.tgz http://mirror.nju.edu.cn/apache/hadoop/common/hadoop-3.2.3/hadoop-3.2.3.tar.gz
RUN cd hadoop && tar zxfv hadoop.tgz && mv hadoop*/* . && rm -rf hadoop.tgz
# Livy
COPY --from=builder /workspace/assembly/target/apache-livy*.zip livy
RUN cd /opt/cluster/livy/ && unzip apache-livy*.zip && mv apache-livy*bin/* .
# Resolve jar conflicts: use the jersey 1.19 jars shipped with Hadoop and drop
# the jersey 2.x client bundled with Spark
RUN cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar livy/jars/
RUN cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar spark/jars/
RUN cp hadoop/share/hadoop/yarn/lib/jersey-client-1.19.jar spark/jars/
RUN rm spark/jars/jersey-client-2.30.jar
# Environment variables
ENV HADOOP_HOME=/opt/cluster/hadoop
ENV HADOOP_CONF_DIR=/opt/cluster/conf
ENV HIVE_HOME=/opt/cluster/hive
ENV HIVE_CONF_DIR=/opt/cluster/conf
ENV SPARK_HOME=/opt/cluster/spark
ENV SPARK_CONF_DIR=/opt/cluster/conf
ENV LIVY_HOME=/opt/cluster/livy
ENV LIVY_CONF_DIR=/opt/cluster/conf
# Startup command: run the Livy server under supervisord
RUN echo "\
[program:livy]\n\
directory=/opt/cluster\n\
command=livy/bin/livy-server\n\
autorestart=true\n\
startretries=0\n\
redirect_stderr=true\n\
stdout_logfile=/var/log/livy.log\n\
stdout_logfile_maxbytes=50MB\n\
" > /etc/supervisor/conf.d/livy.conf
CMD ["supervisord", "-n"]

# The dev and prod images reuse builder1 (its ENV, WORKDIR and CMD are inherited)
# and differ only in the configuration files they copy in
FROM builder1 as image-test
COPY docker/dev .
ADD docker/dev/conf/krb5.conf /etc

FROM builder1 as image-prod
COPY docker/prod .
ADD docker/prod/conf/krb5.conf /etc
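
# Usage sketch (assumed invocation; the Dockerfile path and image tags are illustrative):
# select a stage with --target when building, e.g.
#   docker build -f docker/Dockerfile1 --target image-test -t livy-cluster:test .
#   docker build -f docker/Dockerfile1 --target image-prod -t livy-cluster:prod .
# then run the result, publishing Livy's default port 8998:
#   docker run -d -p 8998:8998 livy-cluster:prod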