Python: create tasks in a loop to run them multiple times

I am new to Airflow and want to run a set of tasks in a loop, but I am running into cycle errors.

from airflow import DAG
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.operators.dummy import DummyOperator
from airflow.operators.python_operator import PythonOperator
from airflow.contrib.hooks.ssh_hook import SSHHook
from datetime import timedelta
from datetime import datetime
import time  # needed by the time.sleep(30) callables below

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime(2021, 4, 13),
    'email': ['raff@abc.com', 'raffg@abc.com'],
    'email_on_failure': True,
    'email_on_retry': False,
    'retries': 0,
    'retry_delay': timedelta(minutes=5),
}

dag = DAG('sparktestingforstandalone',
          schedule_interval='@yearly',
          default_args=default_args,
          catchup=False
          )

sshHook = SSHHook('conn_ssh_sparkstandalone')
linux_command_1 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task1.py '
linux_command_2 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task2.py '
linux_command_3 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task3.py '
linux_command_4 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task4.py '
linux_command_5 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task5.py '
linux_command_6 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task6.py '
linux_command_7 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task7.py '

start_op = DummyOperator(task_id='start_spark_runs',dag=dag)

t1 = SSHOperator(
    ssh_hook=sshHook,
    task_id='File_Extract_useCase',
    command=linux_command_1,
    dag=dag)

t1_1 = SSHOperator(
    ssh_hook=sshHook,
    task_id='File_Extract_useCase_1',
    command=linux_command_1,
    dag=dag)

t2 = SSHOperator(
    ssh_hook=sshHook,
    task_id='File_Extract_useCase_3',
    command=linux_command_2,
    dag=dag)

t2_1 = SSHOperator(
    ssh_hook=sshHook,
    task_id='File_Extract_useCase_12',
    command=linux_command_2,
    dag=dag)

t3 = SSHOperator(
    ssh_hook=sshHook,
    task_id='Join_useCase',
    command=linux_command_3,
    dag=dag)

t3_1 = SSHOperator(
    ssh_hook=sshHook,
    task_id='Join_useCase_1',
    command=linux_command_3,
    dag=dag)

t4 = SSHOperator(
    ssh_hook=sshHook,
    task_id='Denoramlize_usecase',
    command=linux_command_4,
    dag=dag)

t5 = SSHOperator(
    ssh_hook=sshHook,
    task_id='1798_useCase',
    command=linux_command_5,
    dag=dag)

t6 = SSHOperator(
    ssh_hook=sshHook,
    task_id='Json_Complex_Creation',
    command=linux_command_6,
    dag=dag)

t7 = SSHOperator(
    ssh_hook=sshHook,
    task_id='DB_to_DB_Creation',
    command=linux_command_7,
    dag=dag)

s1 = PythonOperator(task_id="delay_sleep_task_30sec",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))

s1 = PythonOperator(task_id="delay_sleep_task_30sec_1",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s2 = PythonOperator(task_id="delay_sleep_task_30sec_2",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s3 = PythonOperator(task_id="delay_sleep_task_30sec_3",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s4 = PythonOperator(task_id="delay_sleep_task_30sec_4",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s5 = PythonOperator(task_id="delay_sleep_task_30sec_5",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s6 = PythonOperator(task_id="delay_sleep_task_30sec_6",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s7 = PythonOperator(task_id="delay_sleep_task_30sec_7",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s8 = PythonOperator(task_id="delay_sleep_task_30sec_8",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s9 = PythonOperator(task_id="delay_sleep_task_30sec_9",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s10 = PythonOperator(task_id="delay_sleep_task_30sec_10",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s11 = PythonOperator(task_id="delay_sleep_task_30sec_11",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))
s12 = PythonOperator(task_id="delay_sleep_task_30sec_12",
                                dag=dag,
                                python_callable=lambda: time.sleep(30))


end_op = DummyOperator(task_id='end_spark_runs', dag=dag)

start_op >> t1 >> t1_1 >> end_op
start_op >> t2 >> t2_1 >> end_op
start_op >> t5 >> end_op
start_op >> t7 >> s1 >> t7 >> s2 >> t7 >> s3 >> end_op  # re-using t7 downstream of its own downstream creates a cycle
start_op >> [t3,t4,t6] >> s4 >> [t3,t4,t6] >> s5 >> [t3,t4,t6] >> s6 >> [t3,t4,t6] >> s7 >> [t3,t4,t6] >> s8 >> [t3,t4,t6] >> s9 >> [t3,t4,t6] >> s10 >> [t3,t4,t6] >> s11 >> [t3,t4,t6] >> s12 >> end_op  # same problem: t3/t4/t6 are re-used, so the graph is no longer acyclic
I know this is messy. Is there an elegant way to achieve the same thing?

I want to run tasks t3, t4 and t6 in parallel in a loop, n times, sleeping 30 seconds between runs. Several other tasks (such as t7) will also be triggered.


Within a single DAG, I want to trigger some tasks once and other tasks several times. I don't want to create as many operator instances as I have done here; it needs to be done in an elegant way, as mentioned above.

You cannot create loops inside an Airflow DAG; by definition, a DAG is acyclic.

But you can use the TriggerDagRunOperator. It will trigger a DAG run of the DAG you specify:

def dag_run_payload(context, dag_run_obj):
    # Here you can add data to dag_run.conf,
    # using the context information and adding it to
    # dag_run_obj.payload
    dag_run_obj.payload = {}

trigger_next_iter = TriggerDagRunOperator(
    dag=dag,
    task_id='trigger_next_iter',
    trigger_dag_id='sparktestingforstandalone',  # or any other DAG
    execution_date="{{ ti.xcom_pull(...) }}",  # it is templated
    python_callable=dag_run_payload
)

end_op >> trigger_next_iter

You can attach the trigger at the end of your DAG.

Note: this does not work in Airflow 2. TriggerDagRunOperator changed in later versions; it no longer provides a python_callable, but you can still specify the dag_run conf.
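For Airflow 2, the equivalent would look roughly like the sketch below. This is an assumption on my part, not from the original answer: in Airflow 2 the operator lives in airflow.operators.trigger_dagrun and takes a conf dict instead of a python_callable, and the payload key here is made up for illustration.

# Minimal Airflow 2 sketch (assumption: Airflow >= 2.0); the conf payload
# is a hypothetical example, not part of the original DAG.
from airflow.operators.trigger_dagrun import TriggerDagRunOperator

trigger_next_iter = TriggerDagRunOperator(
    task_id='trigger_next_iter',
    trigger_dag_id='sparktestingforstandalone',  # or any other DAG id
    conf={'iteration': 1},  # data that used to go through the python_callable payload
    dag=dag,
)

end_op >> trigger_next_iter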

Another consideration: maybe you need to look at other Airflow features to achieve your goal:

  • Writing your own operator will make your DAG less messy. You could move your
    lambda: time.sleep(30) into it.
  • Or creating SubDAGs. You can group tasks and add an on_success_callback with
    the wait time:

def subdag(parent_dag_name, child_dag_name, args):
    # Your subDAG definition goes here.
    pass

section_1 = SubDagOperator(
    task_id='section-1',
    subdag=subdag(DAG_NAME, 'section-1', args),
    dag=dag,
    on_success_callback=lambda: time.sleep(30)
)

  • Splitting the current DAG logic across multiple DAGs and taking advantage of
    Airflow features such as pools to limit how much runs at once (see the sketch
    after this list).
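As a rough illustration of the pool idea (a sketch, not from the original answer: the pool name spark_standalone_pool is an assumption and would first have to be created via Admin > Pools in the UI or the airflow pools CLI), any operator can be throttled by passing pool=:

# Sketch: 'spark_standalone_pool' is a hypothetical pool created beforehand;
# only as many of these tasks run concurrently as the pool has slots.
pooled_spark_task = SSHOperator(
    ssh_hook=sshHook,
    task_id='pooled_spark_task',
    command=linux_command_3,
    pool='spark_standalone_pool',
    dag=dag,
)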

I used a for loop to generate the task names and appended them to lists that run sequentially, with the different tasks running n, n+3, n+2 and n+10 times. The solution is just the same idea expanded, as shown below:

from airflow import DAG
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.operators.dummy import DummyOperator
from airflow.operators.python_operator import PythonOperator
from airflow.contrib.hooks.ssh_hook import SSHHook
from datetime import timedelta
from datetime import datetime

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime(2021, 4, 13),
    'email': ['raff@abc.com', 'raffg@abc.com'],
    'email_on_failure': True,
    'email_on_retry': False,
    'retries': 0,
    'retry_delay': timedelta(minutes=5),
}

dag = DAG('sparktestingforstandalone',
          schedule_interval='@yearly',
          default_args=default_args,
          catchup=False
          )

sshHook = SSHHook('conn_ssh_sparkstandalone')
linux_command_1 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task1.py '
linux_command_2 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task2.py '
linux_command_3 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task3.py '
linux_command_4 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task4.py '
linux_command_5 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task5.py '
linux_command_6 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task6.py '
linux_command_7 = 'spark-submit --conf "spark.cores.max=20" --conf "spark.executor.cores=2" --executor-memory 1G --driver-memory 2G /hadoopData/bdipoc/poc/python/task7.py '

start_op = DummyOperator(task_id='start_spark_runs',dag=dag)

t5 = SSHOperator(
    ssh_hook=sshHook,
    task_id='nonloop_usecase',
    command=linux_command_5,
    dag=dag)

# One operator list per sequential chain; every chain starts at start_op
chain_operators = []
chain_operators.append(start_op)
chain_operators_1 = []
chain_operators_1.append(start_op)
chain_operators_2 = []
chain_operators_2.append(start_op)
chain_operators_3 = []
chain_operators_3.append(start_op)
chain_operators_4 = []
chain_operators_4.append(start_op)
chain_operators_5 = []
chain_operators_5.append(start_op)
# Three use cases that each run 10 times, one run after another
max_attempt = 10
for attempt in range(max_attempt):
    data_pull = SSHOperator(
        ssh_hook=sshHook,
        task_id='Usecase_run10_task_2_{}'.format(attempt),
        command=linux_command_3,
        dag=dag
    )
    data_pull_2 = SSHOperator(
        ssh_hook=sshHook,
        task_id='Usecase_run10_task_1_{}'.format(attempt),
        command=linux_command_4,
        dag=dag
    )
    data_pull_3 = SSHOperator(
        ssh_hook=sshHook,
        task_id='Usecase_run10_task_1{}'.format(attempt),
        command=linux_command_6,
        dag=dag
    )
    chain_operators.append(data_pull)
    chain_operators_1.append(data_pull_2)
    chain_operators_2.append(data_pull_3)
    

# Two use cases that each run twice
max_attempt_1 = 2
for attempt in range(max_attempt_1):
    data_pull_4 = SSHOperator(
        ssh_hook=sshHook,
        task_id='Usecase_runtwice_task_2_{}'.format(attempt),
        command=linux_command_1,
        dag=dag
    )
    data_pull_5 = SSHOperator(
        ssh_hook=sshHook,
        task_id='Usecase_runtwice_task_1_{}'.format(attempt),
        command=linux_command_2,
        dag=dag
    )
    chain_operators_3.append(data_pull_4)
    chain_operators_4.append(data_pull_5)

# One use case that runs three times
max_attempt_2 = 3
for attempt in range(max_attempt_2):
    data_pull_6 = SSHOperator(
        ssh_hook=sshHook,
        task_id='Usecase_runthrice_{}'.format(attempt),
        command=linux_command_7,
        dag=dag
    )
    chain_operators_5.append(data_pull_6)


end_op = DummyOperator(task_id='end_spark_runs', dag=dag)
chain_operators_1.append(end_op)
chain_operators_2.append(end_op)
chain_operators_3.append(end_op)
chain_operators_4.append(end_op)
chain_operators_5.append(end_op)
chain_operators.append(end_op)

# Wire each list into a linear chain: start_op >> task_0 >> ... >> end_op
for i,val in enumerate(chain_operators[:-1]):
    val.set_downstream(chain_operators[i+1])
for j,val in enumerate(chain_operators_1[:-1]):
    val.set_downstream(chain_operators_1[j+1])
for k,val in enumerate(chain_operators_2[:-1]):
    val.set_downstream(chain_operators_2[k+1])

start_op >> t5 >> end_op

for l,val in enumerate(chain_operators_3[:-1]):
    val.set_downstream(chain_operators_3[l+1])
for m,val in enumerate(chain_operators_4[:-1]):
    val.set_downstream(chain_operators_4[m+1])
for n,val in enumerate(chain_operators_5[:-1]):
    val.set_downstream(chain_operators_5[n+1])
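
As a side note (my suggestion, not part of the original solution), the six enumerate/set_downstream loops could be collapsed with Airflow's chain helper; a minimal sketch, assuming the import path that matches your version (airflow.utils.helpers.chain on 1.10, airflow.models.baseoperator.chain on 2.x):

# Sketch: chain(a, b, c) sets a >> b >> c, so each list becomes one call.
from airflow.models.baseoperator import chain  # airflow.utils.helpers.chain on Airflow 1.10

for op_list in (chain_operators, chain_operators_1, chain_operators_2,
                chain_operators_3, chain_operators_4, chain_operators_5):
    chain(*op_list)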