# sql_script_demo_1009.py
import json
import sys

from pyspark.sql import SparkSession
  4. def run(outputs: list):
  5. script = 'select * from test'
  6. ss = SparkSession.builder.config('hive.metastore.uris',
  7. 'thrift://192.168.199.27:9083').enableHiveSupport().getOrCreate()
  8. ret_df = ss.sql(script)
  9. ret_df.write.mode("overwrite").saveAsTable(outputs[0])
  10. if __name__ == '__main__':
  11. outputs_str = sys.argv[2]
  12. run(outputs=json.loads(outputs_str))