diff --git a/airflow/www/views.py b/airflow/www/views.py
deleted file mode 100644
index 6331805a5b458..0000000000000
--- a/airflow/www/views.py
+++ /dev/null
@@ -1,2347 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import sys
-
-import os
-import socket
-import importlib
-
-from functools import wraps
-from datetime import datetime, timedelta
-import dateutil.parser
-import copy
-from itertools import chain, product
-
-from past.utils import old_div
-from past.builtins import basestring
-
-import inspect
-import traceback
-
-import sqlalchemy as sqla
-from sqlalchemy import or_, desc, and_
-
-
-from flask import (
-    redirect, url_for, request, Markup, Response, current_app, flash,
-    render_template)
-from flask_admin import BaseView, expose, AdminIndexView
-from flask_admin.contrib.sqla import ModelView
-from flask_admin.actions import action
-from flask._compat import PY2
-
-import jinja2
-import markdown
-import json
-
-from wtforms import (
- Form, SelectField, TextAreaField, PasswordField, StringField)
-
-from pygments import highlight, lexers
-from pygments.formatters import HtmlFormatter
-
-import airflow
-from airflow import configuration as conf
-from airflow import models
-from airflow import settings
-from airflow.exceptions import AirflowException
-from airflow.settings import Session
-from airflow.models import XCom
-
-from airflow.utils.json import json_ser
-from airflow.utils.state import State
-from airflow.utils.db import provide_session
-from airflow.utils.helpers import alchemy_to_dict
-from airflow.utils import logging as log_utils
-from airflow.www import utils as wwwutils
-from airflow.www.forms import DateTimeForm, DateTimeWithNumRunsForm
-
-QUERY_LIMIT = 100000
-CHART_LIMIT = 200000
-
-dagbag = models.DagBag(os.path.expanduser(conf.get('core', 'DAGS_FOLDER')))
-
-login_required = airflow.login.login_required
-current_user = airflow.login.current_user
-logout_user = airflow.login.logout_user
-
-FILTER_BY_OWNER = False
-if conf.getboolean('webserver', 'FILTER_BY_OWNER'):
-    # only filter by owner when authentication is enabled and the option is set
- FILTER_BY_OWNER = not current_app.config['LOGIN_DISABLED']
-
-
-def dag_link(v, c, m, p):
- url = url_for(
- 'airflow.graph',
- dag_id=m.dag_id)
-    return Markup(
-        '<a href="{url}">{m.dag_id}</a>'.format(**locals()))
-
-
-def log_link(v, c, m, p):
- url = url_for(
- 'airflow.log',
- dag_id=m.dag_id,
- task_id=m.task_id,
- execution_date=m.execution_date.isoformat())
-    return Markup(
-        '<a href="{url}">'
-        '    <span class="glyphicon glyphicon-book" aria-hidden="true">'
-        '</span></a>').format(**locals())
-
-
-def task_instance_link(v, c, m, p):
- url = url_for(
- 'airflow.task',
- dag_id=m.dag_id,
- task_id=m.task_id,
- execution_date=m.execution_date.isoformat())
- url_root = url_for(
- 'airflow.graph',
- dag_id=m.dag_id,
- root=m.task_id,
- execution_date=m.execution_date.isoformat())
-    return Markup(
-        """
-        <span style="white-space: nowrap;">
-        <a href="{url}">{m.task_id}</a>
-        <a href="{url_root}" title="Filter on this task and upstream">
-            <span class="glyphicon glyphicon-filter" aria-hidden="true"></span>
-        </a>
-        </span>
-        """.format(**locals()))
-
-
-def state_token(state):
- color = State.color(state)
-    return Markup(
-        '<span class="label" style="background-color:{color};">'
-        '{state}</span>'.format(**locals()))
-
-
-def state_f(v, c, m, p):
- return state_token(m.state)
-
-
-def duration_f(v, c, m, p):
- if m.end_date and m.duration:
- return timedelta(seconds=m.duration)
-
-
-def datetime_f(v, c, m, p):
- attr = getattr(m, p)
- dttm = attr.isoformat() if attr else ''
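-    # within the current year, drop the "YYYY-" prefix to keep the column short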
- if datetime.now().isoformat()[:4] == dttm[:4]:
- dttm = dttm[5:]
- return Markup("
" + str(content) + "
")
-
- return self.render(
- 'airflow/ti_code.html',
- html_dict=html_dict,
- dag=dag,
- task_id=task_id,
- execution_date=execution_date,
- form=form,
- title=title,)
-
- @expose('/log')
- @login_required
- @wwwutils.action_logging
- def log(self):
- BASE_LOG_FOLDER = os.path.expanduser(
- conf.get('core', 'BASE_LOG_FOLDER'))
- dag_id = request.args.get('dag_id')
- task_id = request.args.get('task_id')
- execution_date = request.args.get('execution_date')
- dag = dagbag.get_dag(dag_id)
- log_relative = "{dag_id}/{task_id}/{execution_date}".format(
- **locals())
- loc = os.path.join(BASE_LOG_FOLDER, log_relative)
- loc = loc.format(**locals())
- log = ""
- TI = models.TaskInstance
- session = Session()
- dttm = dateutil.parser.parse(execution_date)
- ti = session.query(TI).filter(
- TI.dag_id == dag_id, TI.task_id == task_id,
- TI.execution_date == dttm).first()
- form = DateTimeForm(data={'execution_date': dttm})
-
- if ti:
- host = ti.hostname
- log_loaded = False
-
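-            # the task ran on this host, so read its log straight from disk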
- if socket.gethostname() == host:
-                try:
-                    with open(loc) as f:
-                        log += "".join(f.readlines())
-                    log_loaded = True
-                except IOError:
-                    log = "*** Local log file {0} not found.\n".format(loc)
- else:
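-                # otherwise fetch the log over HTTP from the worker's log server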
- WORKER_LOG_SERVER_PORT = \
- conf.get('celery', 'WORKER_LOG_SERVER_PORT')
- url = os.path.join(
- "http://{host}:{WORKER_LOG_SERVER_PORT}/log", log_relative
- ).format(**locals())
- log += "*** Log file isn't local.\n"
- log += "*** Fetching here: {url}\n".format(**locals())
- try:
- import requests
- log += '\n' + requests.get(url).text
- log_loaded = True
-                except Exception:
-                    log += "*** Failed to fetch log file from worker.\n"
-
- if not log_loaded:
- # load remote logs
- remote_log_base = conf.get('core', 'REMOTE_BASE_LOG_FOLDER')
- remote_log = os.path.join(remote_log_base, log_relative)
- log += '\n*** Reading remote logs...\n'
-
- # S3
- if remote_log.startswith('s3:/'):
- log += log_utils.S3Log().read(remote_log, return_error=True)
-
- # GCS
- elif remote_log.startswith('gs:/'):
- log += log_utils.GCSLog().read(remote_log, return_error=True)
-
- # unsupported
- elif remote_log:
- log += '*** Unsupported remote log location.'
-
- session.commit()
- session.close()
-
- if PY2 and not isinstance(log, unicode):
- log = log.decode('utf-8')
-
- title = "Log"
-
- return self.render(
- 'airflow/ti_code.html',
- code=log, dag=dag, title=title, task_id=task_id,
- execution_date=execution_date, form=form)
-
- @expose('/task')
- @login_required
- @wwwutils.action_logging
- def task(self):
- dag_id = request.args.get('dag_id')
- task_id = request.args.get('task_id')
- # Carrying execution_date through, even though it's irrelevant for
- # this context
- execution_date = request.args.get('execution_date')
- dttm = dateutil.parser.parse(execution_date)
- form = DateTimeForm(data={'execution_date': dttm})
- dag = dagbag.get_dag(dag_id)
- if not dag or task_id not in dag.task_ids:
- flash(
- "Task [{}.{}] doesn't seem to exist"
- " at the moment".format(dag_id, task_id),
- "error")
- return redirect('/admin/')
- task = dag.get_task(task_id)
- task = copy.copy(task)
- task.resolve_template_files()
-
- attributes = []
- for attr_name in dir(task):
- if not attr_name.startswith('_'):
- attr = getattr(task, attr_name)
-                # comparing against the type of a bound method filters out
-                # the task's methods, keeping only plain data attributes
-                if type(attr) != type(self.task) and \
-                        attr_name not in attr_renderer:
-                    attributes.append((attr_name, str(attr)))
-
- title = "Task Details"
- # Color coding the special attributes that are code
- special_attrs_rendered = {}
- for attr_name in attr_renderer:
- if hasattr(task, attr_name):
- source = getattr(task, attr_name)
- special_attrs_rendered[attr_name] = attr_renderer[attr_name](source)
-
- return self.render(
- 'airflow/task.html',
- attributes=attributes,
- task_id=task_id,
- execution_date=execution_date,
- special_attrs_rendered=special_attrs_rendered,
- form=form,
- dag=dag, title=title)
-
- @expose('/xcom')
- @login_required
- @wwwutils.action_logging
- def xcom(self):
- dag_id = request.args.get('dag_id')
- task_id = request.args.get('task_id')
- # Carrying execution_date through, even though it's irrelevant for
- # this context
- execution_date = request.args.get('execution_date')
- dttm = dateutil.parser.parse(execution_date)
- form = DateTimeForm(data={'execution_date': dttm})
- dag = dagbag.get_dag(dag_id)
- if not dag or task_id not in dag.task_ids:
- flash(
- "Task [{}.{}] doesn't seem to exist"
- " at the moment".format(dag_id, task_id),
- "error")
- return redirect('/admin/')
-
- session = Session()
- xcomlist = session.query(XCom).filter(
- XCom.dag_id == dag_id, XCom.task_id == task_id,
- XCom.execution_date == dttm).all()
-
- attributes = []
- for xcom in xcomlist:
- if not xcom.key.startswith('_'):
- attributes.append((xcom.key, xcom.value))
-
- title = "XCom"
- return self.render(
- 'airflow/xcom.html',
- attributes=attributes,
- task_id=task_id,
- execution_date=execution_date,
- form=form,
-            dag=dag, title=title)
-
- @expose('/run')
- @login_required
- @wwwutils.action_logging
- @wwwutils.notify_owner
- def run(self):
- dag_id = request.args.get('dag_id')
- task_id = request.args.get('task_id')
- origin = request.args.get('origin')
- dag = dagbag.get_dag(dag_id)
- task = dag.get_task(task_id)
-
- execution_date = request.args.get('execution_date')
- execution_date = dateutil.parser.parse(execution_date)
- force = request.args.get('force') == "true"
- deps = request.args.get('deps') == "true"
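-        # 'deps' maps onto the executor's ignore_dependencies flag below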
-
- try:
- from airflow.executors import DEFAULT_EXECUTOR as executor
- from airflow.executors import CeleryExecutor
- if not isinstance(executor, CeleryExecutor):
- flash("Only works with the CeleryExecutor, sorry", "error")
- return redirect(origin)
- except ImportError:
- # in case CeleryExecutor cannot be imported it is not active either
- flash("Only works with the CeleryExecutor, sorry", "error")
- return redirect(origin)
-
- ti = models.TaskInstance(task=task, execution_date=execution_date)
- executor.start()
- executor.queue_task_instance(
- ti, force=force, ignore_dependencies=deps)
- executor.heartbeat()
- flash(
- "Sent {} to the message queue, "
- "it should start any moment now.".format(ti))
- return redirect(origin)
-
- @expose('/clear')
- @login_required
- @wwwutils.action_logging
- @wwwutils.notify_owner
- def clear(self):
- dag_id = request.args.get('dag_id')
- task_id = request.args.get('task_id')
- origin = request.args.get('origin')
- dag = dagbag.get_dag(dag_id)
- task = dag.get_task(task_id)
-
- execution_date = request.args.get('execution_date')
- execution_date = dateutil.parser.parse(execution_date)
- confirmed = request.args.get('confirmed') == "true"
- upstream = request.args.get('upstream') == "true"
- downstream = request.args.get('downstream') == "true"
- future = request.args.get('future') == "true"
- past = request.args.get('past') == "true"
-
- dag = dag.sub_dag(
- task_regex=r"^{0}$".format(task_id),
- include_downstream=downstream,
- include_upstream=upstream)
-
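-        # a None bound tells dag.clear() to leave that side of the range open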
- end_date = execution_date if not future else None
- start_date = execution_date if not past else None
- if confirmed:
- count = dag.clear(
- start_date=start_date,
- end_date=end_date)
-
- flash("{0} task instances have been cleared".format(count))
- return redirect(origin)
- else:
- tis = dag.clear(
- start_date=start_date,
- end_date=end_date,
- dry_run=True)
- if not tis:
- flash("No task instances to clear", 'error')
- response = redirect(origin)
- else:
- details = "\n".join([str(t) for t in tis])
-
- response = self.render(
- 'airflow/confirm.html',
- message=(
- "Here's the list of task instances you are about "
- "to clear:"),
- details=details,)
-
- return response
-
- @expose('/blocked')
- @login_required
- def blocked(self):
- session = settings.Session()
- DR = models.DagRun
- dags = (
- session.query(DR.dag_id, sqla.func.count(DR.id))
- .filter(DR.state == State.RUNNING)
- .group_by(DR.dag_id)
- .all()
- )
- payload = []
- for dag_id, active_dag_runs in dags:
- max_active_runs = 0
- if dag_id in dagbag.dags:
- max_active_runs = dagbag.dags[dag_id].max_active_runs
- payload.append({
- 'dag_id': dag_id,
- 'active_dag_run': active_dag_runs,
- 'max_active_runs': max_active_runs,
- })
- return wwwutils.json_response(payload)
-
- @expose('/success')
- @login_required
- @wwwutils.action_logging
- @wwwutils.notify_owner
- def success(self):
- dag_id = request.args.get('dag_id')
- task_id = request.args.get('task_id')
- origin = request.args.get('origin')
- dag = dagbag.get_dag(dag_id)
- task = dag.get_task(task_id)
-
- execution_date = request.args.get('execution_date')
- execution_date = dateutil.parser.parse(execution_date)
- confirmed = request.args.get('confirmed') == "true"
- upstream = request.args.get('upstream') == "true"
- downstream = request.args.get('downstream') == "true"
- future = request.args.get('future') == "true"
- past = request.args.get('past') == "true"
- MAX_PERIODS = 1000
-
- # Flagging tasks as successful
- session = settings.Session()
- task_ids = [task_id]
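-        # "future" pushes end_date out to the latest run; "past" pulls
-        # start_date back to the DAG's start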
- end_date = ((dag.latest_execution_date or datetime.now())
- if future else execution_date)
-
- if 'start_date' in dag.default_args:
- start_date = dag.default_args['start_date']
- elif dag.start_date:
- start_date = dag.start_date
- else:
- start_date = execution_date
-
- start_date = execution_date if not past else start_date
-
- if downstream:
- task_ids += [
- t.task_id
- for t in task.get_flat_relatives(upstream=False)]
- if upstream:
- task_ids += [
- t.task_id
- for t in task.get_flat_relatives(upstream=True)]
- TI = models.TaskInstance
-
- if dag.schedule_interval == '@once':
- dates = [start_date]
- else:
- dates = dag.date_range(start_date, end_date=end_date)
-
- tis = session.query(TI).filter(
- TI.dag_id == dag_id,
- TI.execution_date.in_(dates),
- TI.task_id.in_(task_ids)).all()
- tis_to_change = session.query(TI).filter(
- TI.dag_id == dag_id,
- TI.execution_date.in_(dates),
- TI.task_id.in_(task_ids),
- TI.state != State.SUCCESS).all()
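-        # cartesian product: every (task_id, execution_date) pair in range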
- tasks = list(product(task_ids, dates))
- tis_to_create = list(
- set(tasks) -
- set([(ti.task_id, ti.execution_date) for ti in tis]))
-
- tis_all_altered = list(chain(
- [(ti.task_id, ti.execution_date) for ti in tis_to_change],
- tis_to_create))
-
- if len(tis_all_altered) > MAX_PERIODS:
- flash("Too many tasks at once (>{0})".format(
- MAX_PERIODS), 'error')
- return redirect(origin)
-
- if confirmed:
- for ti in tis_to_change:
- ti.state = State.SUCCESS
- session.commit()
-
- for task_id, task_execution_date in tis_to_create:
- ti = TI(
- task=dag.get_task(task_id),
- execution_date=task_execution_date,
- state=State.SUCCESS)
- session.add(ti)
- session.commit()
-
- session.commit()
- session.close()
- flash("Marked success on {} task instances".format(
- len(tis_all_altered)))
-
- return redirect(origin)
- else:
- if not tis_all_altered:
- flash("No task instances to mark as successful", 'error')
- response = redirect(origin)
- else:
- tis = []
- for task_id, task_execution_date in tis_all_altered:
- tis.append(TI(
- task=dag.get_task(task_id),
- execution_date=task_execution_date,
- state=State.SUCCESS))
- details = "\n".join([str(t) for t in tis])
-
- response = self.render(
- 'airflow/confirm.html',
- message=(
- "Here's the list of task instances you are about "
- "to mark as successful:"),
- details=details,)
- return response
-
- @expose('/tree')
- @login_required
- @wwwutils.gzipped
- @wwwutils.action_logging
- def tree(self):
- dag_id = request.args.get('dag_id')
- blur = conf.getboolean('webserver', 'demo_mode')
- dag = dagbag.get_dag(dag_id)
- root = request.args.get('root')
- if root:
- dag = dag.sub_dag(
- task_regex=root,
- include_downstream=False,
- include_upstream=True)
-
- session = settings.Session()
-
- base_date = request.args.get('base_date')
- num_runs = request.args.get('num_runs')
- num_runs = int(num_runs) if num_runs else 25
-
- if base_date:
- base_date = dateutil.parser.parse(base_date)
- else:
- base_date = dag.latest_execution_date or datetime.now()
-
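-        # a negative num makes date_range walk backwards from base_date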
- dates = dag.date_range(base_date, num=-abs(num_runs))
- min_date = dates[0] if dates else datetime(2000, 1, 1)
-
- DR = models.DagRun
- dag_runs = (
- session.query(DR)
- .filter(
-                DR.dag_id == dag.dag_id,
-                DR.execution_date <= base_date,
-                DR.execution_date >= min_date)
- .all()
- )
- dag_runs = {
- dr.execution_date: alchemy_to_dict(dr) for dr in dag_runs}
-
- tis = dag.get_task_instances(
- session, start_date=min_date, end_date=base_date)
- dates = sorted(list({ti.execution_date for ti in tis}))
- max_date = max([ti.execution_date for ti in tis]) if dates else None
- task_instances = {}
- for ti in tis:
- tid = alchemy_to_dict(ti)
- dr = dag_runs.get(ti.execution_date)
- tid['external_trigger'] = dr['external_trigger'] if dr else False
- task_instances[(ti.task_id, ti.execution_date)] = tid
-
- expanded = []
- # The default recursion traces every path so that tree view has full
- # expand/collapse functionality. After 5,000 nodes we stop and fall
- # back on a quick DFS search for performance. See PR #320.
- node_count = [0]
- node_limit = 5000 / max(1, len(dag.roots))
-
- def recurse_nodes(task, visited):
- visited.add(task)
- node_count[0] += 1
-
- children = [
- recurse_nodes(t, visited) for t in task.upstream_list
- if node_count[0] < node_limit or t not in visited]
-
- # D3 tree uses children vs _children to define what is
- # expanded or not. The following block makes it such that
- # repeated nodes are collapsed by default.
- children_key = 'children'
- if task.task_id not in expanded:
- expanded.append(task.task_id)
- elif children:
- children_key = "_children"
-
- return {
- 'name': task.task_id,
- 'instances': [
- task_instances.get((task.task_id, d)) or {
- 'execution_date': d.isoformat(),
- 'task_id': task.task_id
- }
- for d in dates],
- children_key: children,
- 'num_dep': len(task.upstream_list),
- 'operator': task.task_type,
- 'retries': task.retries,
- 'owner': task.owner,
- 'start_date': task.start_date,
- 'end_date': task.end_date,
- 'depends_on_past': task.depends_on_past,
- 'ui_color': task.ui_color,
- }
- data = {
- 'name': '[DAG]',
- 'children': [recurse_nodes(t, set()) for t in dag.roots],
- 'instances': [
- dag_runs.get(d) or {'execution_date': d.isoformat()}
- for d in dates],
- }
-
- data = json.dumps(data, indent=4, default=json_ser)
- session.commit()
- session.close()
-
- form = DateTimeWithNumRunsForm(data={'base_date': max_date,
- 'num_runs': num_runs})
- return self.render(
- 'airflow/tree.html',
- operators=sorted(
- list(set([op.__class__ for op in dag.tasks])),
- key=lambda x: x.__name__
- ),
- root=root,
- form=form,
- dag=dag, data=data, blur=blur)
-
- @expose('/graph')
- @login_required
- @wwwutils.gzipped
- @wwwutils.action_logging
- def graph(self):
- session = settings.Session()
- dag_id = request.args.get('dag_id')
- blur = conf.getboolean('webserver', 'demo_mode')
- arrange = request.args.get('arrange', "LR")
- dag = dagbag.get_dag(dag_id)
- if dag_id not in dagbag.dags:
- flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
- return redirect('/admin/')
-
- root = request.args.get('root')
- if root:
- dag = dag.sub_dag(
- task_regex=root,
- include_upstream=True,
- include_downstream=False)
-
- nodes = []
- edges = []
- for task in dag.tasks:
- nodes.append({
- 'id': task.task_id,
- 'value': {
- 'label': task.task_id,
- 'labelStyle': "fill:{0};".format(task.ui_fgcolor),
- 'style': "fill:{0};".format(task.ui_color),
- }
- })
-
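-        # recursively walk upstream from the roots, recording each edge once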
- def get_upstream(task):
- for t in task.upstream_list:
- edge = {
- 'u': t.task_id,
- 'v': task.task_id,
- }
- if edge not in edges:
- edges.append(edge)
- get_upstream(t)
-
- for t in dag.roots:
- get_upstream(t)
-
- dttm = request.args.get('execution_date')
- if dttm:
- dttm = dateutil.parser.parse(dttm)
- else:
- dttm = dag.latest_execution_date or datetime.now().date()
-
- DR = models.DagRun
- drs = (
- session.query(DR)
- .filter_by(dag_id=dag_id)
- .order_by(desc(DR.execution_date)).all()
- )
- dr_choices = []
- dr_state = None
- for dr in drs:
- dr_choices.append((dr.execution_date.isoformat(), dr.run_id))
- if dttm == dr.execution_date:
- dr_state = dr.state
-
- class GraphForm(Form):
- execution_date = SelectField("DAG run", choices=dr_choices)
- arrange = SelectField("Layout", choices=(
- ('LR', "Left->Right"),
- ('RL', "Right->Left"),
- ('TB', "Top->Bottom"),
- ('BT', "Bottom->Top"),
- ))
- form = GraphForm(
- data={'execution_date': dttm.isoformat(), 'arrange': arrange})
-
- task_instances = {
- ti.task_id: alchemy_to_dict(ti)
- for ti in dag.get_task_instances(session, dttm, dttm)}
- tasks = {
- t.task_id: {
- 'dag_id': t.dag_id,
- 'task_type': t.task_type,
- }
- for t in dag.tasks}
- if not tasks:
- flash("No tasks found", "error")
- session.commit()
- session.close()
-        doc_md = (markdown.markdown(dag.doc_md)
-                  if hasattr(dag, 'doc_md') and dag.doc_md else '')
-
- return self.render(
- 'airflow/graph.html',
- dag=dag,
- form=form,
- width=request.args.get('width', "100%"),
- height=request.args.get('height', "800"),
- execution_date=dttm.isoformat(),
- state_token=state_token(dr_state),
- doc_md=doc_md,
- arrange=arrange,
- operators=sorted(
- list(set([op.__class__ for op in dag.tasks])),
- key=lambda x: x.__name__
- ),
- blur=blur,
- root=root or '',
- task_instances=json.dumps(task_instances, indent=2),
- tasks=json.dumps(tasks, indent=2),
- nodes=json.dumps(nodes, indent=2),
- edges=json.dumps(edges, indent=2),)
-
- @expose('/duration')
- @login_required
- @wwwutils.action_logging
- def duration(self):
- from nvd3 import lineChart
- import time
- session = settings.Session()
- dag_id = request.args.get('dag_id')
- dag = dagbag.get_dag(dag_id)
- base_date = request.args.get('base_date')
- num_runs = request.args.get('num_runs')
- num_runs = int(num_runs) if num_runs else 25
-
- if base_date:
- base_date = dateutil.parser.parse(base_date)
- else:
- base_date = dag.latest_execution_date or datetime.now()
-
- dates = dag.date_range(base_date, num=-abs(num_runs))
- min_date = dates[0] if dates else datetime(2000, 1, 1)
-
- root = request.args.get('root')
- if root:
- dag = dag.sub_dag(
- task_regex=root,
- include_upstream=True,
- include_downstream=False)
-
- chart = lineChart(name="lineChart", x_is_date=True, height=750, width=600)
- for task in dag.tasks:
- y = []
- x = []
- for ti in task.get_task_instances(session, start_date=min_date,
- end_date=base_date):
- if ti.duration:
- dttm = int(time.mktime(ti.execution_date.timetuple())) * 1000
- x.append(dttm)
- y.append(float(ti.duration) / (60*60))
- if x:
- chart.add_serie(name=task.task_id, x=x, y=y)
-
- tis = dag.get_task_instances(
- session, start_date=min_date, end_date=base_date)
- dates = sorted(list({ti.execution_date for ti in tis}))
- max_date = max([ti.execution_date for ti in tis]) if dates else None
-
- session.commit()
- session.close()
-
- form = DateTimeWithNumRunsForm(data={'base_date': max_date,
- 'num_runs': num_runs})
- chart.buildhtml()
- return self.render(
- 'airflow/chart.html',
- dag=dag,
- demo_mode=conf.getboolean('webserver', 'demo_mode'),
- root=root,
- form=form,
- chart=chart,
- )
-
- @expose('/landing_times')
- @login_required
- @wwwutils.action_logging
- def landing_times(self):
- session = settings.Session()
- dag_id = request.args.get('dag_id')
- dag = dagbag.get_dag(dag_id)
- base_date = request.args.get('base_date')
- num_runs = request.args.get('num_runs')
- num_runs = int(num_runs) if num_runs else 25
-
- if base_date:
- base_date = dateutil.parser.parse(base_date)
- else:
- base_date = dag.latest_execution_date or datetime.now()
-
- dates = dag.date_range(base_date, num=-abs(num_runs))
- min_date = dates[0] if dates else datetime(2000, 1, 1)
-
- root = request.args.get('root')
- if root:
- dag = dag.sub_dag(
- task_regex=root,
- include_upstream=True,
- include_downstream=False)
-
- all_data = []
- for task in dag.tasks:
- data = []
- for ti in task.get_task_instances(session, start_date=min_date,
- end_date=base_date):
- if ti.end_date:
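-                    # landing time: the lag between the close of the schedule
-                    # period and the moment the task actually finished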
- ts = ti.execution_date
- if dag.schedule_interval:
- ts = dag.following_schedule(ts)
-                    hours = old_div((ti.end_date - ts).total_seconds(), 60*60)
-                    data.append([ti.execution_date.isoformat(), hours])
- all_data.append({'data': data, 'name': task.task_id})
-
- tis = dag.get_task_instances(
- session, start_date=min_date, end_date=base_date)
- dates = sorted(list({ti.execution_date for ti in tis}))
- max_date = max([ti.execution_date for ti in tis]) if dates else None
-
- session.commit()
- session.close()
-
- form = DateTimeWithNumRunsForm(data={'base_date': max_date,
- 'num_runs': num_runs})
- return self.render(
- 'airflow/chart.html',
- dag=dag,
- data=json.dumps(all_data),
- height="700px",
- chart_options={'yAxis': {'title': {'text': 'hours after 00:00'}}},
- demo_mode=conf.getboolean('webserver', 'demo_mode'),
- root=root,
- form=form,
- )
-
- @expose('/paused')
- @login_required
- @wwwutils.action_logging
- def paused(self):
- DagModel = models.DagModel
- dag_id = request.args.get('dag_id')
- session = settings.Session()
- orm_dag = session.query(
- DagModel).filter(DagModel.dag_id == dag_id).first()
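-        # 'is_paused' carries the *current* state from the UI, so flip it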
- if request.args.get('is_paused') == 'false':
- orm_dag.is_paused = True
- else:
- orm_dag.is_paused = False
- session.merge(orm_dag)
- session.commit()
- session.close()
-
- dagbag.get_dag(dag_id)
- return "OK"
-
- @expose('/refresh')
- @login_required
- @wwwutils.action_logging
- def refresh(self):
- DagModel = models.DagModel
- dag_id = request.args.get('dag_id')
- session = settings.Session()
- orm_dag = session.query(
- DagModel).filter(DagModel.dag_id == dag_id).first()
-
- if orm_dag:
- orm_dag.last_expired = datetime.now()
- session.merge(orm_dag)
- session.commit()
- session.close()
-
- dagbag.get_dag(dag_id)
- flash("DAG [{}] is now fresh as a daisy".format(dag_id))
- return redirect('/')
-
- @expose('/refresh_all')
- @login_required
- @wwwutils.action_logging
- def refresh_all(self):
- dagbag.collect_dags(only_if_updated=False)
- flash("All DAGs are now up to date")
- return redirect('/')
-
- @expose('/gantt')
- @login_required
- @wwwutils.action_logging
- def gantt(self):
-
- session = settings.Session()
- dag_id = request.args.get('dag_id')
- dag = dagbag.get_dag(dag_id)
- demo_mode = conf.getboolean('webserver', 'demo_mode')
-
- root = request.args.get('root')
- if root:
- dag = dag.sub_dag(
- task_regex=root,
- include_upstream=True,
- include_downstream=False)
-
- dttm = request.args.get('execution_date')
- if dttm:
- dttm = dateutil.parser.parse(dttm)
- else:
- dttm = dag.latest_execution_date or datetime.now().date()
-
- form = DateTimeForm(data={'execution_date': dttm})
-
- tis = [
- ti
- for ti in dag.get_task_instances(session, dttm, dttm)
- if ti.start_date]
- tis = sorted(tis, key=lambda ti: ti.start_date)
- tasks = []
- data = []
- for i, ti in enumerate(tis):
- end_date = ti.end_date or datetime.now()
- tasks += [ti.task_id]
- color = State.color(ti.state)
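-            # Highcharts datetime axes expect epoch milliseconds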
- data.append({
- 'x': i,
- 'low': int(ti.start_date.strftime('%s')) * 1000,
- 'high': int(end_date.strftime('%s')) * 1000,
- 'color': color,
- })
- height = (len(tis) * 25) + 50
- session.commit()
- session.close()
-
- hc = {
- 'chart': {
- 'type': 'columnrange',
- 'inverted': True,
- 'height': height,
- },
- 'xAxis': {'categories': tasks, 'alternateGridColor': '#FAFAFA'},
- 'yAxis': {'type': 'datetime'},
- 'title': {
- 'text': None
- },
- 'plotOptions': {
- 'series': {
- 'cursor': 'pointer',
- 'minPointLength': 4,
- },
- },
- 'legend': {
- 'enabled': False
- },
- 'series': [{
- 'data': data
- }]
- }
- return self.render(
- 'airflow/gantt.html',
- dag=dag,
- execution_date=dttm.isoformat(),
- form=form,
- hc=json.dumps(hc, indent=4),
- height=height,
- demo_mode=demo_mode,
- root=root,
- )
-
- @expose('/object/task_instances')
- @login_required
- @wwwutils.action_logging
- def task_instances(self):
- session = settings.Session()
- dag_id = request.args.get('dag_id')
- dag = dagbag.get_dag(dag_id)
-
- dttm = request.args.get('execution_date')
- if dttm:
- dttm = dateutil.parser.parse(dttm)
- else:
-            return "Error: Invalid execution_date"
-
- task_instances = {
- ti.task_id: alchemy_to_dict(ti)
- for ti in dag.get_task_instances(session, dttm, dttm)}
-
- return json.dumps(task_instances)
-
- @expose('/variables/