"""Airflow DAG `single_task`: run one small Python script inside a Kubernetes pod.

The script text is injected into the pod through the SCRIPT environment
variable, written to run.py by a bash wrapper, and executed with
`python run.py`.
"""
from airflow import DAG
from datetime import datetime
from airflow.operators.bash import BashOperator
from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
from airflow.configuration import conf

# Kubernetes namespace for the worker pod, taken from the Airflow config
# section [kubernetes], key NAMESPACE.
namespace = conf.get("kubernetes", "NAMESPACE")

# set the name that will be printed
# NOTE(review): `name` is never referenced below — presumably the embedded
# script was meant to print it; confirm before removing.
name = "luoyulong"

# instantiate the DAG
with DAG(
    start_date=datetime(2022, 6, 1),
    catchup=False,                    # no backfill of missed runs
    schedule_interval='@daily',
    dag_id="single_task"
) as dag:
    op_6aeed233_0951_4637_99e8_4736262edc94 = KubernetesPodOperator(
        task_id="6aeed233-0951-4637-99e8-4736262edc94",
        image="SXKJ:32775/pod_python:1.1",
        in_cluster=True,
        namespace=namespace,
        name="PythonNode1",
        random_name_suffix=True,
        labels={'app': 'backend', 'env': 'dev'},
        reattach_on_restart=True,
        is_delete_operator_pod=False,  # keep the finished pod for debugging
        get_logs=True,
        log_events_on_failure=True,
        # Bash wrapper: dump $SCRIPT into run.py, then execute it.
        cmds=['/bin/bash', '-c', 'echo -e "$SCRIPT" > run.py && python run.py '],
        # BUGFIX: the embedded script previously used single-space, flat
        # indentation ("def hello():\n for i...\n print(...)"), which raises
        # IndentationError inside the pod before any work happens.
        # Re-indented with standard 4-space nesting so run.py is valid Python.
        env_vars={'SCRIPT': (
            "import numpy as np\n"
            "import os\n"
            "\n"
            "\n"
            "def hello():\n"
            "    for i in range(10):\n"
            "        print(f'{i}: ok!')\n"
            "    print(os.environ)\n"
            "\n"
            "\n"
            "if __name__ == '__main__':\n"
            "    hello()\n"
        )}
    )