sql-submit_1009.sh

outputs='["results_tmp_10_13_0"]'  # results_tmp_{task_id}_{sub_node_id}_{output_pin_id}
sourceDIR=/home/sxkj/bigdata

# $inputs is not set in this script; it is expected to be defined/exported by the caller.
${SPARK_HOME}/bin/spark-submit \
  --master yarn \
  --name "sql_demo2_1009" \
  --deploy-mode cluster \
  --driver-memory 2g \
  --driver-cores 4 \
  --executor-memory 2g \
  --executor-cores 2 \
  --num-executors 1 \
  --conf spark.default.parallelism=2 \
  --conf spark.executor.memoryOverhead=4g \
  --conf spark.driver.memoryOverhead=2g \
  --conf spark.yarn.maxAppAttempts=3 \
  --conf spark.yarn.submit.waitAppCompletion=true \
  --conf spark.pyspark.driver.python=${sourceDIR}/py37/bin/python \
  --conf spark.yarn.appMasterEnv.PYSPARK_PYTHON=python3env/py37/bin/python \
  --conf spark.pyspark.python=python3env/py37/bin/python \
  ./sql_script_demo_1009.py "$inputs" "$outputs"
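The two trailing arguments, "$inputs" and "$outputs", are JSON-encoded lists of pin/table names (following the results_tmp_{task_id}_{sub_node_id}_{output_pin_id} convention) that sql_script_demo_1009.py is expected to parse. Below is a minimal sketch of how such a driver script might consume them; the argument parsing, the placeholder query, and the write target are assumptions for illustration, not the actual contents of sql_script_demo_1009.py.

# sketch only, not the real sql_script_demo_1009.py
import json
import sys

from pyspark.sql import SparkSession

def main():
    # argv[1] = inputs, argv[2] = outputs; both are JSON arrays such as
    # '["results_tmp_10_13_0"]'
    inputs = json.loads(sys.argv[1]) if len(sys.argv) > 1 and sys.argv[1] else []
    outputs = json.loads(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2] else []

    spark = (SparkSession.builder
             .appName("sql_demo2_1009")
             .enableHiveSupport()
             .getOrCreate())

    # Hypothetical flow: register each input table as a temp view,
    # run some SQL, and persist the result under the first output pin name.
    for table in inputs:
        spark.table(table).createOrReplaceTempView(table)

    result = spark.sql("SELECT 1 AS placeholder")  # placeholder query
    if outputs:
        result.write.mode("overwrite").saveAsTable(outputs[0])

    spark.stop()

if __name__ == "__main__":
    main()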