from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.utils.dates import days_ago
from datetime import datetime, timedelta

# Default arguments applied to every task in this DAG;
# see the Airflow documentation for the full list of options.
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': days_ago(1),
    'email': ['airflow@example.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5),
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}
# Run daily at 11:00 according to the cron schedule below.
dag = DAG('medium_snap_daily', default_args=default_args, schedule_interval='0 11 * * *')
# Bash command template; {{ params.file_name }} is rendered by Jinja
# from each task's params at runtime.
run_python_file = """
cd `pwd`;
cd snapshot;
python {{ params.file_name }};
"""
task1 = BashOperator(
    task_id='take_snapshot',
    bash_command=run_python_file,
    params={"file_name": "take_snapshot.py"},
    dag=dag,
)
task2 = BashOperator(
    task_id='upload_to_dropbox',
    bash_command=run_python_file,
    params={"file_name": "upload_to_dropbox.py"},
    dag=dag,
)
# task2.set_upstream(task1) and task1 >> task2 declare the same dependency,
# so only one form is needed.
task1 >> task2
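# A minimal way to exercise these tasks locally (a sketch, assuming an
# Airflow 1.x install to match the bash_operator import above) is the CLI
# "test" command, which runs a single task instance without the scheduler:
#
#   airflow test medium_snap_daily take_snapshot 2020-01-01
#   airflow test medium_snap_daily upload_to_dropbox 2020-01-01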