- import json
- import sys
- from pyspark.sql import SparkSession
def run(outputs: list,
        script: str = 'select * from test',
        metastore_uri: str = 'thrift://192.168.199.27:9083') -> None:
    """Run a Hive SQL query through Spark and save the result as a table.

    Parameters
    ----------
    outputs : list
        Output table names; the query result overwrites the table named
        by ``outputs[0]``. Must be non-empty.
    script : str, optional
        SQL statement to execute. Defaults to the previously hard-coded
        query, so existing callers are unaffected.
    metastore_uri : str, optional
        Thrift URI of the Hive metastore. Defaults to the previously
        hard-coded address, so existing callers are unaffected.
    """
    # Build (or reuse) a Hive-enabled Spark session pointed at the metastore.
    ss = (SparkSession.builder
          .config('hive.metastore.uris', metastore_uri)
          .enableHiveSupport()
          .getOrCreate())
    try:
        ret_df = ss.sql(script)
        # Overwrite semantics: any existing table at outputs[0] is replaced.
        ret_df.write.mode("overwrite").saveAsTable(outputs[0])
    finally:
        # Release cluster resources even if the query or write fails.
        # NOTE(review): assumes this script owns the session — confirm no
        # caller relies on the session surviving this call.
        ss.stop()
if __name__ == '__main__':
    # CLI contract: argv[2] is a JSON-encoded list of output table names.
    # (argv[1] is skipped — presumably reserved by the caller's convention;
    # TODO confirm against the invoking scheduler.)
    if len(sys.argv) < 3:
        # Fail with a clear usage message instead of a bare IndexError.
        raise SystemExit(
            'usage: expected a JSON list of output tables as the second argument'
        )
    run(outputs=json.loads(sys.argv[2]))
|