# (extraction artifact: fused line numbers 1-15 from the original paste)
- import json
- import sys
- from pyspark.sql import SparkSession
def run(outputs: list):
    """Run a fixed Hive query through Spark and return the result DataFrame.

    Parameters
    ----------
    outputs : list
        Target table names; ``outputs[0]`` is the table the result would be
        written to. The write is currently disabled (see comment below), so
        an empty list is accepted.

    Returns
    -------
    pyspark.sql.DataFrame
        The result of the query. (The original discarded it; returning it is
        backward compatible and lets callers inspect or persist the data.)
    """
    script = 'select * from test'
    # NOTE(review): the metastore endpoint is hard-coded — consider moving it
    # to configuration before reuse outside this environment.
    ss = (
        SparkSession.builder
        .config('hive.metastore.uris', 'thrift://192.168.199.27:9083')
        .enableHiveSupport()
        .getOrCreate()
    )
    ret_df = ss.sql(script)
    # Persisting the result is intentionally disabled. When re-enabling, keep
    # the guard: the __main__ entry point passes outputs=[], and an unguarded
    # outputs[0] would raise IndexError.
    # if outputs:
    #     ret_df.write.mode("overwrite").saveAsTable(outputs[0])
    return ret_df
if __name__ == '__main__':
    # Script entry point: run the query with no output tables configured.
    run(outputs=[])
|