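- # Build stage: compile the Livy assembly with Maven (Spark 3 and Thrift server profiles, tests skipped)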
- FROM maven:3.8.6-openjdk-8 AS builder
- WORKDIR /workspace
- RUN sed -i "s@http://\(deb\|security\).debian.org@https://mirrors.aliyun.com@g" /etc/apt/sources.list
- RUN apt update && apt install -y python3 python3-pip python-is-python3 \
- && pip config set global.index-url https://mirror.nju.edu.cn/pypi/web/simple
- ADD . .
- RUN mkdir -p ~/.m2 && cp -r docker/settings.xml ~/.m2
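- # Pre-fetch the Spark 3.1.3 distribution tarball from the NJU mirror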
- RUN wget http://mirror.nju.edu.cn/apache/spark/spark-3.1.3/spark-3.1.3-bin-hadoop3.2.tgz
- RUN mvn clean package \
- -B -V -e \
- -Pspark-3.0 \
- -Pthriftserver \
- -DskipTests \
- -DskipITs \
- -Dmaven.javadoc.skip=true
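- # Runtime stage: a slim JRE image hosting Hadoop, Spark, and Livy under /opt/cluster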
- FROM openjdk:8-jre-slim
- RUN sed -i "s@http://\(deb\|security\).debian.org@https://mirrors.aliyun.com@g" /etc/apt/sources.list
- # COPY docker/sources.list /etc/apt
- RUN apt update && apt install -y krb5-user zip unzip procps tar curl
- WORKDIR /opt/cluster
- COPY docker/cluster .
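- # Component home directories; every component reads its configuration from /opt/cluster/conf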
- ENV HADOOP_HOME=/opt/cluster/hadoop
- ENV HADOOP_CONF_DIR=/opt/cluster/conf
- ENV HIVE_HOME=/opt/cluster/hive
- ENV HIVE_CONF_DIR=/opt/cluster/conf
- ENV SPARK_HOME=/opt/cluster/spark
- ENV SPARK_CONF_DIR=/opt/cluster/conf
- ENV LIVY_HOME=/opt/cluster/livy
- ENV LIVY_CONF_DIR=/opt/cluster/conf
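- # Clear out any Livy files shipped with docker/cluster; the freshly built assembly is installed below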
- RUN rm -rf /opt/cluster/livy/*
- # COPY docker/livy /opt/cluster/livy/
- RUN mkdir -p hadoop spark livy/logs
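- # Download Hadoop 3.2.3 and Spark 3.1.3 (built for Hadoop 3.2) from the NJU mirror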
- RUN curl -o ./hadoop/hadoop.tgz http://mirror.nju.edu.cn/apache/hadoop/common/hadoop-3.2.3/hadoop-3.2.3.tar.gz && \
- curl -o ./spark/spark.tgz http://mirror.nju.edu.cn/apache/spark/spark-3.1.3/spark-3.1.3-bin-hadoop3.2.tgz
- RUN cd hadoop && tar zxvf hadoop.tgz && mv hadoop*/* . && rm -f hadoop.tgz
- RUN cd spark && tar zxvf spark.tgz && mv spark*/* . && rm -f spark.tgz
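- # Kerberos client configuration for the cluster (krb5-user is installed above)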
- ADD docker/krb5.conf /etc
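- # Install the Livy assembly produced by the build stage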
- COPY --from=builder /workspace/assembly/target/apache-livy*.zip livy
- # ADD assembly/target/apache-livy*.zip /opt/cluster/livy/
- RUN cd /opt/cluster/livy/ && unzip apache-livy*.zip && mv apache-livy*bin/* .
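- # Align Jersey versions: reuse Hadoop's Jersey 1.19 jars in Livy and Spark and drop Spark's bundled Jersey 2.30 client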
- RUN cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar livy/jars/
- RUN cp hadoop/share/hadoop/common/lib/jersey-core-1.19.jar spark/jars/
- RUN cp hadoop/share/hadoop/yarn/lib/jersey-client-1.19.jar spark/jars/
- RUN rm spark/jars/jersey-client-2.30.jar
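- # Run the Livy server as the container's main process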
- CMD ["livy/bin/livy-server"]