sparksql_simple.py

import json
import sys

from pyspark.sql import SparkSession


def run(outputs: list):
    # Query to run against the Hive-backed catalog.
    script = 'select * from test'
    # Build a Hive-enabled session pointed at the remote metastore.
    ss = SparkSession.builder \
        .config('hive.metastore.uris', 'thrift://192.168.199.27:9083') \
        .enableHiveSupport() \
        .getOrCreate()
    ret_df = ss.sql(script)
    # Persisting the result needs a target table name in outputs; with the
    # empty list passed below, this line must stay disabled.
    # ret_df.write.mode("overwrite").saveAsTable(outputs[0])


if __name__ == '__main__':
    run(outputs=[])
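
For reference, a minimal standalone sketch of the write path that the commented-out line hints at, assuming the same metastore URI is reachable; the target table name default.test_copy is an assumption for illustration, not part of the original script:

from pyspark.sql import SparkSession

spark = (
    SparkSession.builder
    .config('hive.metastore.uris', 'thrift://192.168.199.27:9083')
    .enableHiveSupport()
    .getOrCreate()
)

# Run the same query and overwrite a hypothetical output table.
spark.sql('select * from test') \
    .write.mode('overwrite') \
    .saveAsTable('default.test_copy')  # assumed table name, illustration only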