diff --git a/airflow/example_dags/example_kubernetes_operator.py b/airflow/example_dags/example_kubernetes_operator.py
index 9b8632145c8d9..a7013ec9e28d1 100644
--- a/airflow/example_dags/example_kubernetes_operator.py
+++ b/airflow/example_dags/example_kubernetes_operator.py
@@ -16,9 +16,18 @@
 # under the License.
 
 import airflow
-from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
+import logging
 from airflow.models import DAG
 
+try:
+    # Kubernetes is optional, so not available in vanilla Airflow
+    # pip install airflow[kubernetes]
+    from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
+except ImportError:
+    # Just import the BaseOperator as the KubernetesPodOperator
+    logging.warning("Could not import KubernetesPodOperator")
+    from airflow.models import BaseOperator as KubernetesPodOperator
+
 args = {
     'owner': 'airflow',
     'start_date': airflow.utils.dates.days_ago(2)
@@ -29,14 +38,14 @@
     default_args=args,
     schedule_interval=None)
 
-k = KubernetesPodOperator(namespace='default',
-                          image="ubuntu:16.04",
-                          cmds=["bash", "-cx"],
-                          arguments=["echo", "10"],
-                          labels={"foo": "bar"},
-                          name="airflow-test-pod",
-                          in_cluster=False,
-                          task_id="task",
-                          get_logs=True,
-                          dag=dag
-                          )
+k = KubernetesPodOperator(
+    namespace='default',
+    image="ubuntu:16.04",
+    cmds=["bash", "-cx"],
+    arguments=["echo", "10"],
+    labels={"foo": "bar"},
+    name="airflow-test-pod",
+    in_cluster=False,
+    task_id="task",
+    get_logs=True,
+    dag=dag)