from airflow import DAG
from airflow.operators.dummy import DummyOperator
from airflow.operators.python import PythonOperator
from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor
from datetime import datetime, timedelta
from dag_functions import (
    file_operation,
    make_http_request,
    process_file,
    read_and_serialize_return,
    log_file_sensor_output,
    final_task,
)
import logging

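# `dag_functions` is a local helper module whose source is not shown here.
# A minimal sketch of one of its helpers, under assumed names and signatures
# (illustrative only, not the real implementation):
#
#   def read_and_serialize_return(file_path):
#       import pandas as pd  # assumed dependency
#       df = pd.read_csv(f"gs://{file_path}")
#       return df.to_json()  # the return value is pushed to XCom
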
AIRFLOW_TASK = "airflow.task"
OUTPUT_PATH = "us-east1-composer-airflow-1c67778d-bucket/data/dag_processed_file.csv"
logger = logging.getLogger(AIRFLOW_TASK)

default_args = {
    'owner': 'Aadit',
    'start_date': datetime(2023, 9, 17),
    'retries': 0,  # Number of retries in case of task failure
    'retry_delay': timedelta(minutes=5),  # Delay before retries
}
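# Note: with retries=0 a failed task is never retried, so retry_delay only
# takes effect once retries is raised above zero.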

dag_1 = DAG(
    'dag_1_parameterize',
    default_args=default_args,
    description='DAG to parameterize file path, process file, and use a GCS object sensor',
    schedule_interval=None,
    catchup=False,
)

read_serialize_task = PythonOperator(
    task_id='read_and_serialize',
    python_callable=read_and_serialize_return,
    op_kwargs={
        'file_path': 'us-east1-composer-airflow-1c67778d-bucket/data/dag_processing_file.csv'
    },
    dag=dag_1,
)

process_task = PythonOperator(
    task_id='process_file',
    python_callable=process_file,
    op_kwargs={
        'output_path': OUTPUT_PATH,
    },
    # provide_context is deprecated in Airflow 2.x; the task context is
    # passed to the callable automatically.
    dag=dag_1,
)
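
# How data flows between the two tasks above: Airflow pushes the return
# value of read_and_serialize_return to XCom under the key 'return_value';
# process_file presumably pulls it from the task context, e.g. with
# context['ti'].xcom_pull(task_ids='read_and_serialize').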

# File sensor task to check for the processed file's existence
file_sensor_task = GCSObjectExistenceSensor(
    task_id='file_sensor_task',
    bucket='us-east1-composer-airflow-1c67778d-bucket',
    object='data/dag_processed_file.csv',
    poke_interval=10,
    timeout=300,
    dag=dag_1,
    on_success_callback=log_file_sensor_output,
    on_failure_callback=log_file_sensor_output,
)
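
# The sensor checks GCS every poke_interval (10 s) and, if the object has
# not appeared within timeout (300 s), fails with AirflowSensorTimeout,
# which fires on_failure_callback.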

# Final task to execute after the file sensor task
final_processing_task = PythonOperator(
    task_id='final_processing_task',
    python_callable=final_task,
    op_kwargs={
        'output_path': OUTPUT_PATH,
    },
    dag=dag_1,
)

read_serialize_task >> process_task >> file_sensor_task >> final_processing_task

dag_2 = DAG(
    'dag_file_and_http',
    default_args=default_args,
    description='DAG for file operations and HTTP request',
    schedule_interval=None,
    catchup=False,
)

file_op_task = PythonOperator(
    task_id='file_operation',
    python_callable=file_operation,
    op_kwargs={'file_path': OUTPUT_PATH},
    dag=dag_2,
)

http_request_task = PythonOperator(
    task_id='http_request',
    python_callable=make_http_request,
    op_kwargs={'url': 'https://jsonplaceholder.typicode.com/todos/1'},
    dag=dag_2,
)
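
# make_http_request is defined in dag_functions; a minimal sketch under
# assumed behavior (a plain GET against the public JSONPlaceholder test API,
# illustrative only):
#
#   import requests
#
#   def make_http_request(url):
#       response = requests.get(url, timeout=10)
#       response.raise_for_status()
#       logger.info("Response: %s", response.json())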

file_op_task >> http_request_task

# DAG 3: Task Dependencies

dag_3 = DAG(
    'dag_3_dependencies',
    default_args=default_args,
    description='DAG to demonstrate task dependencies',
    schedule_interval=None,
    catchup=False,
)

# DummyOperator: a no-op placeholder marking the start of the workflow
start_task = DummyOperator(
    task_id='start_task',
    dag=dag_3,
)

# BashOperator: Runs a simple bash command
bash_task = BashOperator(
    task_id='bash_task',
    bash_command='echo "This is a bash command"',
    dag=dag_3,
)

# PythonOperator: Runs a Python callable
middle_task = PythonOperator(
    task_id='middle_task',
    python_callable=lambda: logger.info("Middle Task"),
    dag=dag_3,
    trigger_rule='all_done',  # Execute regardless of the upstream task's status
)
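
# trigger_rule='all_done' makes middle_task run once bash_task has finished
# in any state (success, failed, or skipped); with the default
# 'all_success', middle_task would be marked upstream_failed if bash_task
# failed.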

# DummyOperator: a no-op placeholder representing an independent parallel branch
branch_task = DummyOperator(
    task_id='branch_task',
    dag=dag_3,
)

# PythonOperator: Runs a Python callable
end_task = PythonOperator(
    task_id='end_task',
    python_callable=lambda: logger.info("End Task"),
    dag=dag_3,
)

# Set task dependencies

"""
The tasks execute with the following dependencies:

- `start_task`: Initiates the workflow and fans out to two parallel tasks.
- `bash_task`: Executes a bash command; `middle_task` runs once it finishes.
- `branch_task`: Runs an independent branch in parallel with `bash_task`.
- `middle_task`: Runs after `bash_task` completes (with trigger_rule='all_done').
- `end_task`: Finalizes the workflow once both `middle_task` and `branch_task` are done.

The dependency structure ensures that:
  - `bash_task` and `branch_task` run concurrently.
  - `middle_task` depends on the completion of `bash_task`.
  - `end_task` waits for both `middle_task` and `branch_task` to finish.

This parallel execution can reduce overall runtime, especially when tasks are I/O-bound or computationally independent.
"""

start_task >> [bash_task, branch_task]
bash_task >> middle_task >> end_task
branch_task >> end_task

# If this script is run directly, allow command-line interaction with the DAGs.
# Note: each DAG.cli() call parses sys.argv itself, so the three calls below
# all act on the same command-line arguments.
if __name__ == "__main__":
    dag_1.cli()
    dag_2.cli()
    dag_3.cli()