@@ -57,6 +57,7 @@ class PythonTaskCompiler(TaskCompiler):
         self.default_cmd = "echo \"$SCRIPT\" > run.py && python run.py"
 
+
 class DataXTaskCompiler(TaskCompiler):
     def __init__(self, item: AirflowTask):
         super(DataXTaskCompiler, self).__init__(item)
 
@@ -117,7 +118,12 @@ class SparksTaskCompiler(TaskCompiler):
         }
         param_str = ' '.join([f'--{k} {v}' for k, v in parameters.items()])
         param_str += ''.join([f' --conf {k}={v}' for k, v in spark_config.items()])
-        basic_cmds = "kinit -kt /workspace/conf/user.keytab ailab && cd /workspace && echo \"$SCRIPT\" > run.py && ${SPARK_HOME}/bin/spark-submit"
+
+        basic_cmds = "cd /workspace && echo \"$SCRIPT\" > run.py && ${SPARK_HOME}/bin/spark-submit"
+
+        if config.get('K8S', 'enable_kerberos', fallback=None) in ['true', "True", True]:
+            basic_cmds = f"kinit -kt /workspace/conf/user.keytab ailab && {basic_cmds}"
+
         self.cmd_str = lambda name: f"{basic_cmds} --name {name} {param_str} run.py"
 
     def translate(self, job_id, task_mode=1):