# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

| 15 | +"""Example Airflow DAG that performs an export from BQ tables listed in |
| 16 | +config file to GCS, copies GCS objects across locations (e.g., from US to |
| 17 | +EU) then imports from GCS to BQ. The DAG imports the gcs_to_gcs operator |
| 18 | +from plugins and dynamically builds the tasks based on the list of tables. |
| 19 | +Lastly, the DAG defines a specific application logger to generate logs. |
| 20 | +
|
| 21 | +This DAG relies on three Airflow variables |
| 22 | +(https://airflow.apache.org/concepts.html#variables): |
| 23 | +* table_list_file_path - CSV file listing source and target tables, including |
| 24 | +Datasets. |
| 25 | +* gcs_source_bucket - Google Cloud Storage bucket to use for exporting |
| 26 | +BigQuery tables in source. |
| 27 | +* gcs_dest_bucket - Google Cloud Storage bucket to use for importing |
| 28 | +BigQuery tables in destination. |
| 29 | +See https://cloud.google.com/storage/docs/creating-buckets for creating a |
| 30 | +bucket. |
| 31 | +""" |

# --------------------------------------------------------------------------------
# Load The Dependencies
# --------------------------------------------------------------------------------

import csv
import datetime
import io
import logging

from airflow import models
from airflow.contrib.operators import bigquery_to_gcs
from airflow.contrib.operators import gcs_to_bq
from airflow.operators import dummy_operator
# Import operator from plugins
from gcs_plugin.operators import gcs_to_gcs


# --------------------------------------------------------------------------------
# Set default arguments
# --------------------------------------------------------------------------------

default_args = {
    'owner': 'airflow',
    'start_date': datetime.datetime.today(),
    'depends_on_past': False,
    'email': [''],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': datetime.timedelta(minutes=5),
}

# --------------------------------------------------------------------------------
# Set variables
# --------------------------------------------------------------------------------

# 'table_list_file_path': This variable will contain the location of the master
# file.
table_list_file_path = models.Variable.get('table_list_file_path')

# Source Bucket
source_bucket = models.Variable.get('gcs_source_bucket')

# Destination Bucket
dest_bucket = models.Variable.get('gcs_dest_bucket')

# --------------------------------------------------------------------------------
# Set GCP logging
# --------------------------------------------------------------------------------

logger = logging.getLogger('bq_copy_us_to_eu_01')

# --------------------------------------------------------------------------------
# Functions
# --------------------------------------------------------------------------------


def read_table_list(table_list_file):
| 91 | + """ |
| 92 | + Reads the table list file that will help in creating Airflow tasks in |
| 93 | + the DAG dynamically. |
| 94 | + :param table_list_file: (String) The file location of the table list file, |
| 95 | + e.g. '/home/airflow/framework/table_list.csv' |
| 96 | + :return table_list: (List) List of tuples containing the source and |
| 97 | + target tables. |
| 98 | + """ |
| 99 | + table_list = [] |
| 100 | + logger.info('Reading table_list_file from : %s' % str(table_list_file)) |
    try:
        with io.open(table_list_file, 'rt', encoding='utf-8') as csv_file:
            csv_reader = csv.reader(csv_file)
            next(csv_reader)  # skip the header row
            for row in csv_reader:
                logger.info(row)
                table_dict = {
                    'table_source': row[0],
                    'table_dest': row[1]
                }
                table_list.append(table_dict)
            return table_list
    except IOError as e:
        logger.error('Error opening table_list_file %s: %s',
                     table_list_file, e)
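
# For illustration only: given a hypothetical two-row table list file like the
# one sketched near the top of this module, read_table_list() returns a list
# of dicts such as
#   [{'table_source': 'my-project:us_dataset.table_1',
#     'table_dest': 'my-project:eu_dataset.table_1'}, ...]
# (the names are placeholders). On an IOError it logs the error and returns
# None, so a missing or unreadable file will surface as a failure when the
# result is iterated below.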


# --------------------------------------------------------------------------------
# Main DAG
# --------------------------------------------------------------------------------

# Define a DAG (directed acyclic graph) of tasks.
# Any task you create within the context manager is automatically added to the
# DAG object.
with models.DAG('bq_copy_us_to_eu_01',
                default_args=default_args,
                schedule_interval=None) as dag:
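    # Note: schedule_interval=None above means Airflow will not schedule this
    # DAG on its own; runs have to be triggered manually (for example with
    # `airflow trigger_dag bq_copy_us_to_eu_01` in Airflow 1.x).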
    start = dummy_operator.DummyOperator(
        task_id='start',
        trigger_rule='all_success'
    )

    end = dummy_operator.DummyOperator(
        task_id='end',
        trigger_rule='all_success'
    )

    # Get the table list from master file
    all_records = read_table_list(table_list_file_path)

    # Loop over each record in the 'all_records' python list to build up
    # Airflow tasks
    for record in all_records:
        logger.info('Generating tasks to transfer table: {}'.format(record))

        table_source = record['table_source']
        table_dest = record['table_dest']

        BQ_to_GCS = bigquery_to_gcs.BigQueryToCloudStorageOperator(
            # Replace ":" with valid character for Airflow task
            task_id='{}_BQ_to_GCS'.format(table_source.replace(":", "_")),
            source_project_dataset_table=table_source,
            destination_cloud_storage_uris=['{}-*.avro'.format(
                'gs://' + source_bucket + '/' + table_source)],
            export_format='AVRO'
        )

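        # For a source table like 'project:dataset.table' (a placeholder
        # name), the export above writes Avro shards matching
        # gs://<gcs_source_bucket>/project:dataset.table-*.avro; the copy and
        # load tasks below refer to the same '<table_source>-*.avro' pattern.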
        GCS_to_GCS = gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator(
            # Replace ":" with valid character for Airflow task
            task_id='{}_GCS_to_GCS'.format(table_source.replace(":", "_")),
            source_bucket=source_bucket,
            source_object='{}-*.avro'.format(table_source),
            destination_bucket=dest_bucket,
            # destination_object='{}-*.avro'.format(table_dest)
        )

        GCS_to_BQ = gcs_to_bq.GoogleCloudStorageToBigQueryOperator(
            # Replace ":" with valid character for Airflow task
            task_id='{}_GCS_to_BQ'.format(table_dest.replace(":", "_")),
            bucket=dest_bucket,
            source_objects=['{}-*.avro'.format(table_source)],
            destination_project_dataset_table=table_dest,
            source_format='AVRO',
            write_disposition='WRITE_TRUNCATE'
        )

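        # Chain the three transfer steps for this table between the shared
        # start and end markers; each entry in the table list gets its own
        # independent BQ_to_GCS >> GCS_to_GCS >> GCS_to_BQ branch.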
        start >> BQ_to_GCS >> GCS_to_GCS >> GCS_to_BQ >> end