#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a fake version of "spark-submit" to be used during Livy tests that run Spark as
# a child process. It does some basic parsing of Spark options to figure out the classpath
# to be used, and then just runs the SparkSubmit class directly.
#
  22. PROP_FILE=
  23. DRIVER_CP=
  24. DRIVER_OPTS=
  25. CP_KEY="spark.driver.extraClassPath"
  26. OPTS_KEY="spark.driver.extraJavaOptions"
  27. INDEX=1
  28. ARGS=($@)
  29. if [ -n "$SPARK_CONF_DIR" ]; then
  30. PROP_FILE="$SPARK_CONF_DIR/spark-defaults.conf"
  31. fi
  32. for IDX in $(seq 0 ${#ARGS[@]}); do
  33. ARG=${ARGS[$IDX]}
  34. NEXT=$((IDX + 1))
  35. case $ARG in
  36. --conf)
  37. CONF="${ARGS[$NEXT]}"
  38. IFS='=' read KEY VALUE <<< "$CONF"
  39. if [ "$KEY" = "$CP_KEY" ]; then
  40. DRIVER_CP="$VALUE"
  41. elif [ "$KEY" = "$OPTS_KEY" ]; then
  42. DRIVER_OPTS="$VALUE"
  43. fi
  44. ;;
  45. --driver-class-path)
  46. DRIVER_CP="${ARGS[$NEXT]}"
  47. ;;
  48. --properties-file)
  49. PROP_FILE="${ARGS[$NEXT]}"
  50. ;;
  51. esac
  52. done
  53. read_opt() {
  54. local FILE="$1"
  55. local KEY="$2"
  56. CONF=$(grep -s "^$KEY=" "$PROP_FILE" | tail -n 1)
  57. if [ -n "$CONF" ]; then
  58. IFS='=' read KEY VALUE <<< "$CONF"
  59. echo "$VALUE"
  60. fi
  61. }
  62. if [ -n "$PROP_FILE" ]; then
  63. if [ -z "$DRIVER_CP" ]; then
  64. DRIVER_CP=$(read_opt "$PROP_FILE" "$CP_KEY")
  65. fi
  66. if [ -z "$DRIVER_OPTS" ]; then
  67. DRIVER_OPTS=$(read_opt "$PROP_FILE" "$OPTS_KEY")
  68. fi
  69. fi
  70. if [ -n "$LIVY_TEST_CLASSPATH" ]; then
  71. DRIVER_CP="$DRIVER_CP:$LIVY_TEST_CLASSPATH"
  72. fi
  73. if [ -n "$HADOOP_CONF_DIR" ]; then
  74. DRIVER_CP="$HADOOP_CONF_DIR:$DRIVER_CP"
  75. fi
  76. echo "Running Spark: " $JAVA_HOME/bin/java $DRIVER_OPTS org.apache.spark.deploy.SparkSubmit "$@" >&2
  77. exec $JAVA_HOME/bin/java $DRIVER_OPTS -cp "$DRIVER_CP" org.apache.spark.deploy.SparkSubmit "$@"