Make an aware datetime.datetime naive in a given time zone. :param value: datetime :param timezone: timezone :return: naive datetime
def make_naive(value, timezone=None): """ Make an aware datetime.datetime naive in a given time zone. :param value: datetime :param timezone: timezone :return: naive datetime """ if timezone is None: from airflow.settings import TIMEZONE timezone = TIMEZONE # Emulate the behavior of astimezone() on Python < 3.6. if is_naive(value): raise ValueError("make_naive() cannot be applied to a naive datetime") date = value.astimezone(timezone) # cross library compatibility naive = dt.datetime( date.year, date.month, date.day, date.hour, date.minute, date.second, date.microsecond ) return naive
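A minimal usage sketch (not from the source), assuming pendulum is installed: an aware UTC datetime converted to Europe/Paris wall-clock time loses its tzinfo.

import datetime as dt
import pendulum

aware = dt.datetime(2024, 1, 1, 12, 0, tzinfo=dt.timezone.utc)
naive = make_naive(aware, pendulum.timezone("Europe/Paris"))
assert naive == dt.datetime(2024, 1, 1, 13, 0)  # Paris is UTC+1 in January
assert naive.tzinfo is None
# A naive input raises: make_naive(dt.datetime(2024, 1, 1)) -> ValueError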
Wrap around datetime.datetime to add settings.TIMEZONE if tzinfo not specified. :return: datetime.datetime
def datetime(*args, **kwargs): """ Wrap around datetime.datetime to add settings.TIMEZONE if tzinfo not specified. :return: datetime.datetime """ if "tzinfo" not in kwargs: from airflow.settings import TIMEZONE kwargs["tzinfo"] = TIMEZONE return dt.datetime(*args, **kwargs)
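For illustration, a hypothetical call in a configured Airflow environment; when ``tzinfo`` is omitted, ``settings.TIMEZONE`` (UTC unless overridden) is filled in:

import datetime as dt

d = datetime(2024, 1, 1)                          # tzinfo injected from settings.TIMEZONE
assert d.tzinfo is not None
e = datetime(2024, 1, 1, tzinfo=dt.timezone.utc)  # an explicit tzinfo is left untouched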
Parse a time string and return an aware datetime. :param string: time string :param timezone: the timezone :param strict: if False, it will fall back on the dateutil parser if unable to parse with pendulum
def parse(string: str, timezone=None, *, strict=False) -> DateTime: """ Parse a time string and return an aware datetime. :param string: time string :param timezone: the timezone :param strict: if False, it will fall back on the dateutil parser if unable to parse with pendulum """ from airflow.settings import TIMEZONE return pendulum.parse(string, tz=timezone or TIMEZONE, strict=strict)
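A short sketch of both modes (example strings are hypothetical): strict parsing accepts ISO-8601, while ``strict=False`` lets pendulum fall back to the dateutil parser for looser formats.

aware = parse("2024-01-01T08:00:00")        # ISO-8601, parsed by pendulum, aware in the default zone
fuzzy = parse("31 Dec 2024", strict=False)  # non-ISO input handled via the dateutil fallback
assert aware.tzinfo is not None and fuzzy.tzinfo is not None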
Convert ``v`` into a timezone-aware ``pendulum.DateTime``. * If ``v`` is *None*, *None* is returned. * If ``v`` is a naive datetime, it is converted to an aware Pendulum DateTime. * If ``v`` is an aware datetime, it is converted to a Pendulum DateTime. Note that ``tz`` is **not** taken into account in this case; the datetime will maintain its original tzinfo!
def coerce_datetime(v: dt.datetime | None, tz: dt.tzinfo | None = None) -> DateTime | None: """Convert ``v`` into a timezone-aware ``pendulum.DateTime``. * If ``v`` is *None*, *None* is returned. * If ``v`` is a naive datetime, it is converted to an aware Pendulum DateTime. * If ``v`` is an aware datetime, it is converted to a Pendulum DateTime. Note that ``tz`` is **not** taken into account in this case; the datetime will maintain its original tzinfo! """ if v is None: return None if isinstance(v, DateTime): return v if v.tzinfo else make_aware(v, tz) # Only dt.datetime is left here. return pendulum.instance(v if v.tzinfo else make_aware(v, tz))
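A brief sketch of the three branches, assuming this module's ``make_aware`` (not shown above) applies the given ``tz`` or the configured default to naive values:

import datetime as dt
import pendulum

assert coerce_datetime(None) is None
naive = dt.datetime(2024, 1, 1)
aware = coerce_datetime(naive, tz=pendulum.timezone("UTC"))  # naive -> aware pendulum DateTime
assert aware.tzinfo is not None
already = dt.datetime(2024, 1, 1, tzinfo=dt.timezone.utc)
assert coerce_datetime(already).tzinfo is not None           # tz argument ignored for aware input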
Format a timedelta object or float/int into a readable string for time duration. For example timedelta(seconds=3752) would become `1h:2M:32s`. If the time is less than a second, the return will be `<1s`.
def td_format(td_object: None | dt.timedelta | float | int) -> str | None: """ Format a timedelta object or float/int into a readable string for time duration. For example timedelta(seconds=3752) would become `1h:2M:32s`. If the time is less than a second, the return will be `<1s`. """ if not td_object: return None if isinstance(td_object, dt.timedelta): delta = relativedelta() + td_object else: delta = relativedelta(seconds=int(td_object)) # relativedelta for timedelta cannot convert days to months # so calculate months by assuming 30 day months and normalize months, delta.days = divmod(delta.days, 30) delta = delta.normalized() + relativedelta(months=months) def _format_part(key: str) -> str: value = int(getattr(delta, key)) if value < 1: return "" # distinguish between month/minute following strftime format # and take first char of each unit, i.e. years='y', days='d' if key == "minutes": key = key.upper() key = key[0] return f"{value}{key}" parts = map(_format_part, ("years", "months", "days", "hours", "minutes", "seconds")) joined = ":".join(part for part in parts if part) if not joined: return "<1s" return joined
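The docstring's example, checked end to end (a sketch; the function itself relies on python-dateutil's relativedelta):

import datetime as dt

assert td_format(dt.timedelta(seconds=3752)) == "1h:2M:32s"
assert td_format(0.5) == "<1s"   # sub-second durations collapse to "<1s"
assert td_format(None) is None   # falsy input returns None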
Parse a timezone and return a pendulum Timezone. Provide the same interface as ``pendulum.timezone(name)`` :param name: Either an IANA timezone name or an offset to UTC in seconds. :meta private:
def parse_timezone(name: str | int) -> FixedTimezone | Timezone: """ Parse a timezone and return a pendulum Timezone. Provide the same interface as ``pendulum.timezone(name)`` :param name: Either an IANA timezone name or an offset to UTC in seconds. :meta private: """ if _PENDULUM3: # This call signature only exists in pendulum 3; pendulum 2 never reaches this branch. return pendulum.timezone(name) # type: ignore[operator] # In pendulum 2 this refers to a function; in pendulum 3 it refers to the module. return pendulum.tz.timezone(name)
Return local timezone. Provide the same interface as ``pendulum.tz.local_timezone()`` :meta private:
def local_timezone() -> FixedTimezone | Timezone: """ Return local timezone. Provide the same interface as ``pendulum.tz.local_timezone()`` :meta private: """ return pendulum.tz.local_timezone()
Parse a timestamp and return a DateTime in the given time zone. :param timestamp: epoch time in seconds. :param tz: The timezone in which to return the resulting object. Can be a pendulum timezone, an IANA timezone name, or the literal `local`. :meta private:
def from_timestamp( timestamp: int | float, tz: str | FixedTimezone | Timezone | Literal["local"] = utc ) -> DateTime: """ Parse a timestamp and return a DateTime in the given time zone. :param timestamp: epoch time in seconds. :param tz: The timezone in which to return the resulting object. Can be a pendulum timezone, an IANA timezone name, or the literal `local`. :meta private: """ result = coerce_datetime(dt.datetime.fromtimestamp(timestamp, tz=utc)) # Skip the conversion when the requested timezone is already UTC. if tz != utc and tz != "UTC": if isinstance(tz, str) and tz.lower() == "local": tz = local_timezone() result = result.in_timezone(tz) return result
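An illustrative call with a hypothetical epoch value (1704067200 is 2024-01-01T00:00:00 UTC):

ts = 1704067200
assert from_timestamp(ts).isoformat() == "2024-01-01T00:00:00+00:00"
paris = from_timestamp(ts, tz="Europe/Paris")  # converted via in_timezone()
local = from_timestamp(ts, tz="local")         # resolved through local_timezone()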
Capture warnings in context and re-raise them on exit from the context manager.
def capture_with_reraise() -> Generator[list[warnings.WarningMessage], None, None]: """Capture warnings in context and re-raise them on exit from the context manager.""" captured_warnings = [] try: with warnings.catch_warnings(record=True) as captured_warnings: yield captured_warnings finally: if captured_warnings: for cw in captured_warnings: warnings.warn_explicit( message=cw.message, category=cw.category, filename=cw.filename, lineno=cw.lineno, source=cw.source, )
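A usage sketch, assuming the generator is wrapped with ``contextlib.contextmanager`` as its signature suggests:

import warnings

with capture_with_reraise() as caught:
    warnings.warn("demo warning", UserWarning)
    # `caught` holds the recorded WarningMessage objects here...
# ...and on exit each one was re-emitted via warnings.warn_explicit().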
Like yaml.safe_load, but use the C libyaml for speed where we can.
def safe_load(stream: bytes | str | BinaryIO | TextIO) -> Any: """Like yaml.safe_load, but use the C libyaml for speed where we can.""" # delay import until use. from yaml import load as orig try: from yaml import CSafeLoader as SafeLoader except ImportError: from yaml import SafeLoader # type: ignore[assignment, no-redef] return orig(stream, SafeLoader)
Like yaml.safe_dump, but use the C libyaml for speed where we can.
def dump(data: Any, **kwargs) -> str: """Like yaml.safe_dump, but use the C libyaml for speed where we can.""" # delay import until use. from yaml import dump as orig try: from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import SafeDumper # type: ignore[assignment, no-redef] return cast(str, orig(data, Dumper=SafeDumper, **kwargs))
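A round-trip sketch (the exact serialized text depends on PyYAML defaults, so only the round trip is asserted):

doc = {"a": 1, "b": [2, 3]}
assert safe_load(dump(doc)) == doc  # uses libyaml's CSafeLoader/CSafeDumper when available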
Tell task log handler that task exited with deferral. This exists for the sole purpose of telling elasticsearch handler not to emit end_of_log mark after task deferral. Depending on how the task is run, we may need to set this in task command or in local task job. Kubernetes executor requires the local task job invocation; local executor requires the task command invocation. :meta private:
def _set_task_deferred_context_var(): """ Tell task log handler that task exited with deferral. This exists for the sole purpose of telling elasticsearch handler not to emit end_of_log mark after task deferral. Depending on how the task is run, we may need to set this in task command or in local task job. Kubernetes executor requires the local task job invocation; local executor requires the task command invocation. :meta private: """ logger = logging.getLogger() with suppress(StopIteration): h = next(h for h in logger.handlers if hasattr(h, "ctx_task_deferred")) h.ctx_task_deferred = True
Given TI | TIKey, return a TI object. Raises an exception if no TI is found in the database.
def _ensure_ti(ti: TaskInstanceKey | TaskInstance | TaskInstancePydantic, session) -> TaskInstance: """Given TI | TIKey, return a TI object. Raises an exception if no TI is found in the database. """ from airflow.models.taskinstance import TaskInstance, _get_private_try_number from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic if isinstance(ti, TaskInstance): return ti val = ( session.query(TaskInstance) .filter( TaskInstance.task_id == ti.task_id, TaskInstance.dag_id == ti.dag_id, TaskInstance.run_id == ti.run_id, TaskInstance.map_index == ti.map_index, ) .one_or_none() ) if not val: raise AirflowException(f"Could not find TaskInstance for {ti}") if isinstance(ti, TaskInstancePydantic): val.try_number = _get_private_try_number(task_instance=ti) else: # TaskInstanceKey val.try_number = ti.try_number return val
Remove ANSI escape codes from string; used to remove "colors" from log messages.
def remove_escape_codes(text: str) -> str: """Remove ANSI escape codes from string; used to remove "colors" from log messages.""" return ANSI_ESCAPE.sub("", text)
Walk the tree of loggers and try to set the context for each handler. :param logger: logger :param value: value to set
def set_context(logger, value): """ Walk the tree of loggers and try to set the context for each handler. :param logger: logger :param value: value to set """ while logger: orig_propagate = logger.propagate for handler in logger.handlers: # Not all handlers need to have context passed in so we ignore # the error when handlers do not have set_context defined. # Don't use getattr so we have type checking. And we don't care if handler is actually a # FileTaskHandler, it just needs to have a set_context function! if hasattr(handler, "set_context"): from airflow.utils.log.file_task_handler import FileTaskHandler flag = cast(FileTaskHandler, handler).set_context(value) # By default we disable propagate once we have configured the logger, unless that handler # explicitly asks us to keep it on. if flag is not SetContextPropagate.MAINTAIN_PROPAGATE: logger.propagate = False if orig_propagate is True: # If we were set to propagate before we turned it off, then keep passing set_context up logger = logger.parent else: break
Get comma-separated sensitive Variable Fields from airflow.cfg.
def get_sensitive_variables_fields(): """Get comma-separated sensitive Variable Fields from airflow.cfg.""" from airflow.configuration import conf sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy() sensitive_variable_fields = conf.get("core", "sensitive_var_conn_names") if sensitive_variable_fields: sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(",")}) return sensitive_fields
Return whether the value for this given name should be hidden. Name might be a Variable name, or a key in conn.extra_dejson, for example.
def should_hide_value_for_key(name): """ Return whether the value for this given name should be hidden. Name might be a Variable name, or a key in conn.extra_dejson, for example. """ from airflow import settings if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS: name = name.strip().lower() return any(s in name for s in get_sensitive_variables_fields()) return False
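Hypothetical checks, assuming the default configuration where ``HIDE_SENSITIVE_VAR_CONN_FIELDS`` is enabled and "password" is among the sensitive names; matching is a case-insensitive substring test:

assert should_hide_value_for_key("DB_PASSWORD") is True
assert should_hide_value_for_key("hostname") is False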
Mask a secret from appearing in the task logs. If ``name`` is provided, then it will only be masked if the name matches one of the configured "sensitive" names. If ``secret`` is a dict or an iterable (excluding str) then it will be recursively walked and keys with sensitive names will be hidden.
def mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None: """ Mask a secret from appearing in the task logs. If ``name`` is provided, then it will only be masked if the name matches one of the configured "sensitive" names. If ``secret`` is a dict or an iterable (excluding str) then it will be recursively walked and keys with sensitive names will be hidden. """ # Filtering all log messages is not a free process, so we only do it when # running tasks if not secret: return _secrets_masker().add_mask(secret, name)
Redact any secrets found in ``value``.
def redact(value: Redactable, name: str | None = None, max_depth: int | None = None) -> Redacted: """Redact any secrets found in ``value``.""" return _secrets_masker().redact(value, name, max_depth)
Create a new instance of Airflow WWW app.
def create_app(config=None, testing=False): """Create a new instance of Airflow WWW app.""" flask_app = Flask(__name__) flask_app.secret_key = conf.get("webserver", "SECRET_KEY") flask_app.config["PERMANENT_SESSION_LIFETIME"] = timedelta(minutes=settings.get_session_lifetime_config()) flask_app.config["MAX_CONTENT_LENGTH"] = conf.getfloat("webserver", "allowed_payload_size") * 1024 * 1024 webserver_config = conf.get_mandatory_value("webserver", "config_file") # Enable customizations in webserver_config.py to be applied via Flask.current_app. with flask_app.app_context(): flask_app.config.from_pyfile(webserver_config, silent=True) flask_app.config["TESTING"] = testing flask_app.config["SQLALCHEMY_DATABASE_URI"] = conf.get("database", "SQL_ALCHEMY_CONN") instance_name = conf.get(section="webserver", key="instance_name", fallback="Airflow") require_confirmation_dag_change = conf.getboolean( section="webserver", key="require_confirmation_dag_change", fallback=False ) instance_name_has_markup = conf.getboolean( section="webserver", key="instance_name_has_markup", fallback=False ) if instance_name_has_markup: instance_name = Markup(instance_name).striptags() flask_app.config["APP_NAME"] = instance_name flask_app.config["REQUIRE_CONFIRMATION_DAG_CHANGE"] = require_confirmation_dag_change url = make_url(flask_app.config["SQLALCHEMY_DATABASE_URI"]) if url.drivername == "sqlite" and url.database and not url.database.startswith("/"): raise AirflowConfigException( f'Cannot use relative path: `{conf.get("database", "SQL_ALCHEMY_CONN")}` to connect to sqlite. ' "Please use absolute path such as `sqlite:////tmp/airflow.db`." ) flask_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False flask_app.config["SESSION_COOKIE_HTTPONLY"] = True flask_app.config["SESSION_COOKIE_SECURE"] = conf.getboolean("webserver", "COOKIE_SECURE") cookie_samesite_config = conf.get("webserver", "COOKIE_SAMESITE") if cookie_samesite_config == "": warnings.warn( "Old deprecated value found for `cookie_samesite` option in `[webserver]` section. " "Using `Lax` instead. Change the value to `Lax` in airflow.cfg to remove this warning.", RemovedInAirflow3Warning, stacklevel=2, ) cookie_samesite_config = "Lax" flask_app.config["SESSION_COOKIE_SAMESITE"] = cookie_samesite_config if config: flask_app.config.from_mapping(config) if "SQLALCHEMY_ENGINE_OPTIONS" not in flask_app.config: flask_app.config["SQLALCHEMY_ENGINE_OPTIONS"] = settings.prepare_engine_args() # Configure the JSON encoder used by `|tojson` filter from Flask flask_app.json_provider_class = AirflowJsonProvider flask_app.json = AirflowJsonProvider(flask_app) InternalApiConfig.force_database_direct_access() csrf.init_app(flask_app) init_wsgi_middleware(flask_app) db = SQLA() db.session = settings.Session db.init_app(flask_app) init_dagbag(flask_app) init_api_experimental_auth(flask_app) init_robots(flask_app) init_cache(flask_app) init_flash_views(flask_app) configure_logging() configure_manifest_files(flask_app) import_all_models() with flask_app.app_context(): init_appbuilder(flask_app) init_appbuilder_views(flask_app) init_appbuilder_links(flask_app) init_plugins(flask_app) init_error_handlers(flask_app) init_api_connexion(flask_app) if conf.getboolean("webserver", "run_internal_api", fallback=False): if not _ENABLE_AIP_44: raise RuntimeError("The AIP_44 is not enabled so you cannot use it.") init_api_internal(flask_app) init_api_experimental(flask_app) init_api_auth_provider(flask_app) init_api_error_handlers(flask_app) # needs to be after all api inits to let them add their path first get_auth_manager().init() init_jinja_globals(flask_app) init_xframe_protection(flask_app) init_airflow_session_interface(flask_app) init_check_user_active(flask_app) return flask_app
Return cached instance of Airflow WWW app.
def cached_app(config=None, testing=False): """Return cached instance of Airflow WWW app.""" global app if not app: app = create_app(config=config, testing=testing) return app
Remove the cached version of the app in global state.
def purge_cached_app(): """Remove the cached version of the app in global state.""" global app app = None
Check current user's permissions against required permissions. Deprecated. Do not use this decorator; use one of the `has_access_*` decorators defined in airflow/www/auth.py instead. This decorator will only work with FAB authentication and not with other auth providers. This decorator is widely used in user plugins, so do not remove it. See https://github.com/apache/airflow/pull/33213#discussion_r1346287224
def has_access(permissions: Sequence[tuple[str, str]] | None = None) -> Callable[[T], T]: """ Check current user's permissions against required permissions. Deprecated. Do not use this decorator; use one of the `has_access_*` decorators defined in airflow/www/auth.py instead. This decorator will only work with FAB authentication and not with other auth providers. This decorator is widely used in user plugins, so do not remove it. See https://github.com/apache/airflow/pull/33213#discussion_r1346287224 """ warnings.warn( "The 'has_access' decorator is deprecated. Please use one of the `has_access_*` decorators " "defined in airflow/www/auth.py instead.", RemovedInAirflow3Warning, stacklevel=2, ) from airflow.providers.fab.auth_manager.decorators.auth import _has_access_fab return _has_access_fab(permissions)
Check permissions on views. The implementation is very similar to https://github.com/dpgaspar/Flask-AppBuilder/blob/c6fecdc551629e15467fde5d06b4437379d90592/flask_appbuilder/security/decorators.py#L134 The difference is that this decorator will pass the resource ID to check permissions. It allows fine-grained access control using resource IDs.
def has_access_with_pk(f): """ Check permissions on views. The implementation is very similar to https://github.com/dpgaspar/Flask-AppBuilder/blob/c6fecdc551629e15467fde5d06b4437379d90592/flask_appbuilder/security/decorators.py#L134 The difference is that this decorator will pass the resource ID to check permissions. It allows fine-grained access control using resource IDs. """ if hasattr(f, "_permission_name"): permission_str = f._permission_name else: permission_str = f.__name__ def wraps(self, *args, **kwargs): permission_str = f"{PERMISSION_PREFIX}{f._permission_name}" if self.method_permission_name: _permission_name = self.method_permission_name.get(f.__name__) if _permission_name: permission_str = f"{PERMISSION_PREFIX}{_permission_name}" if permission_str in self.base_permissions and self.appbuilder.sm.has_access( action_name=permission_str, resource_name=self.class_permission_name, resource_pk=kwargs.get("pk"), ): return f(self, *args, **kwargs) else: log.warning(LOGMSG_ERR_SEC_ACCESS_DENIED, permission_str, self.__class__.__name__) flash(as_unicode(FLAMSG_ERR_SEC_ACCESS_DENIED), "danger") return redirect(get_auth_manager().get_url_login(next_url=request.url)) f._permission_name = permission_str return functools.update_wrapper(wraps, f)
Check current user's permissions against required permissions. This works only for resources with no details. This function is used in some ``has_access_`` functions below. :param is_authorized_callback: callback executed to determine whether the user is authorized to access the resource
def _has_access_no_details(is_authorized_callback: Callable[[], bool]) -> Callable[[T], T]: """ Check current user's permissions against required permissions. This works only for resources with no details. This function is used in some ``has_access_`` functions below. :param is_authorized_callback: callback executed to determine whether the user is authorized to access the resource """ def has_access_decorator(func: T): @wraps(func) def decorated(*args, **kwargs): return _has_access( is_authorized=is_authorized_callback(), func=func, args=args, kwargs=kwargs, ) return cast(T, decorated) return has_access_decorator
Define the behavior based on whether the user is authorized to access the resource. :param is_authorized: whether the user is authorized to access the resource :param func: the function to call if the user is authorized :param args: the arguments of ``func`` :param kwargs: the keyword arguments of ``func`` :meta private:
def _has_access(*, is_authorized: bool, func: Callable, args, kwargs): """ Define the behavior based on whether the user is authorized to access the resource. :param is_authorized: whether the user is authorized to access the resource :param func: the function to call if the user is authorized :param args: the arguments of ``func`` :param kwargs: the keyword arguments of ``func`` :meta private: """ if is_authorized: return func(*args, **kwargs) elif get_auth_manager().is_logged_in() and not get_auth_manager().is_authorized_view( access_view=AccessView.WEBSITE ): return ( render_template( "airflow/no_roles_permissions.html", hostname=get_hostname() if conf.getboolean("webserver", "EXPOSE_HOSTNAME") else "", logout_url=get_auth_manager().get_url_logout(), ), 403, ) elif not get_auth_manager().is_logged_in(): return redirect(get_auth_manager().get_url_login(next_url=request.url)) else: access_denied = get_access_denied_message() flash(access_denied, "danger") return redirect(url_for("Airflow.index"))
Check current user's permissions against required permissions for datasets.
def has_access_dataset(method: ResourceMethod) -> Callable[[T], T]: """Check current user's permissions against required permissions for datasets.""" return _has_access_no_details(lambda: get_auth_manager().is_authorized_dataset(method=method))
Check current user's permissions to access the website.
def has_access_view(access_view: AccessView = AccessView.WEBSITE) -> Callable[[T], T]: """Check current user's permissions to access the website.""" return _has_access_no_details(lambda: get_auth_manager().is_authorized_view(access_view=access_view))
Return main Airflow page.
def index(): """Return main Airflow page.""" return redirect(url_for("Airflow.index"))
Mask the 'val_content' field if 'key_content' is in the mask list. The variable request values and args come in this form: {'key': 'key_content', 'val': 'val_content', 'description': 'description_content'}
def _mask_variable_fields(extra_fields): """ Mask the 'val_content' field if 'key_content' is in the mask list. The variable request values and args come in this form: {'key': 'key_content', 'val': 'val_content', 'description': 'description_content'} """ result = {} keyname = None for k, v in extra_fields.items(): if k == "key": keyname = v result[k] = v elif keyname and (k == "val" or k == "value"): x = secrets_masker.redact(v, keyname) result[k] = x keyname = None else: result[k] = v return result
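An illustrative input (values hypothetical); if "api_password" matches a configured sensitive name, the paired "val" comes back redacted by the secrets masker:

fields = {"key": "api_password", "val": "s3cr3t", "description": "demo"}
masked = _mask_variable_fields(fields)
# masked["val"] now holds the masker's placeholder (e.g. "***");
# "key" and "description" pass through unchanged.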
Mask connection fields.
def _mask_connection_fields(extra_fields): """Mask connection fields.""" result = {} for k, v in extra_fields.items(): if k == "extra" and v: try: extra = json.loads(v) extra = {k: secrets_masker.redact(v, k) for k, v in extra.items()} result[k] = dict(extra) except json.JSONDecodeError: result[k] = "Encountered non-JSON in `extra` field" else: result[k] = secrets_masker.redact(v, k) return result
Log user actions.
def action_logging(func: T | None = None, event: str | None = None) -> T | Callable: """Log user actions.""" def log_action(f: T) -> T: @functools.wraps(f) def wrapper(*args, **kwargs): __tracebackhide__ = True # Hide from pytest traceback. with create_session() as session: event_name = event or f.__name__ if not get_auth_manager().is_logged_in(): user = "anonymous" user_display = "" else: user = get_auth_manager().get_user_name() user_display = get_auth_manager().get_user_display_name() isAPIRequest = request.blueprint == "/api/v1" hasJsonBody = request.headers.get("content-type") == "application/json" and request.json fields_skip_logging = { "csrf_token", "_csrf_token", "is_paused", "dag_id", "task_id", "dag_run_id", "run_id", "execution_date", } extra_fields = { k: secrets_masker.redact(v, k) for k, v in itertools.chain(request.values.items(multi=True), request.view_args.items()) if k not in fields_skip_logging } if event and event.startswith("variable."): extra_fields = _mask_variable_fields( request.json if isAPIRequest and hasJsonBody else extra_fields ) elif event and event.startswith("connection."): extra_fields = _mask_connection_fields( request.json if isAPIRequest and hasJsonBody else extra_fields ) elif hasJsonBody: masked_json = {k: secrets_masker.redact(v, k) for k, v in request.json.items()} extra_fields = {**extra_fields, **masked_json} params = {**request.values, **request.view_args} if params and "is_paused" in params: extra_fields["is_paused"] = params["is_paused"] == "false" if isAPIRequest: if f"{request.origin}/" == request.root_url: event_name = f"ui.{event_name}" else: event_name = f"api.{event_name}" log = Log( event=event_name, task_instance=None, owner=user, owner_display_name=user_display, extra=json.dumps(extra_fields), task_id=params.get("task_id"), dag_id=params.get("dag_id"), run_id=params.get("run_id") or params.get("dag_run_id"), ) if "execution_date" in request.values: execution_date_value = request.values.get("execution_date") try: log.execution_date = pendulum.parse(execution_date_value, strict=False) except ParserError: logger.exception( "Failed to parse execution_date from the request: %s", execution_date_value ) session.add(log) return f(*args, **kwargs) return cast(T, wrapper) if func: return log_action(func) return log_action
Make a view compressed.
def gzipped(f: T) -> T: """Make a view compressed.""" @functools.wraps(f) def view_func(*args, **kwargs): @after_this_request def zipper(response): accept_encoding = request.headers.get("Accept-Encoding", "") if "gzip" not in accept_encoding.lower(): return response response.direct_passthrough = False if ( response.status_code < 200 or response.status_code >= 300 or "Content-Encoding" in response.headers ): return response with BytesIO() as gzip_buffer: with gzip.GzipFile(mode="wb", fileobj=gzip_buffer) as gzip_file: gzip_file.write(response.data) response.data = gzip_buffer.getvalue() response.headers["Content-Encoding"] = "gzip" response.headers["Vary"] = "Accept-Encoding" response.headers["Content-Length"] = len(response.data) return response return f(*args, **kwargs) return cast(T, view_func)
Create a form class for editing and adding Connection. This class is created dynamically because it relies heavily on run-time provider discovery, which slows down webserver startup a lot. By creating the class at runtime, we can delay loading the providers until when the connection form is first used, which may as well be never for a short-lived server.
def create_connection_form_class() -> type[DynamicForm]: """Create a form class for editing and adding Connection. This class is created dynamically because it relies heavily on run-time provider discovery, which slows down webserver startup a lot. By creating the class at runtime, we can delay loading the providers until when the connection form is first used, which may as well be never for a short-lived server. """ providers_manager = ProvidersManager() def _iter_connection_types() -> Iterator[tuple[str, str]]: """List available connection types.""" for connection_type, provider_info in providers_manager.hooks.items(): if provider_info: yield (connection_type, provider_info.hook_name) class ConnectionForm(DynamicForm): def process(self, formdata=None, obj=None, **kwargs): super().process(formdata=formdata, obj=obj, **kwargs) for field in self._fields.values(): if isinstance(getattr(field, "data", None), str): field.data = field.data.strip() conn_id = StringField( lazy_gettext("Connection Id"), validators=[InputRequired(), ValidConnID()], widget=BS3TextFieldWidget(), ) conn_type = SelectField( lazy_gettext("Connection Type"), choices=sorted(_iter_connection_types(), key=operator.itemgetter(1)), widget=Select2Widget(), validators=[InputRequired()], description=( "Connection Type missing? Make sure you've installed the " "corresponding Airflow Provider Package." ), ) description = StringField(lazy_gettext("Description"), widget=BS3TextAreaFieldWidget()) host = StringField(lazy_gettext("Host"), widget=BS3TextFieldWidget()) schema = StringField(lazy_gettext("Schema"), widget=BS3TextFieldWidget()) login = StringField(lazy_gettext("Login"), widget=BS3TextFieldWidget()) password = PasswordField(lazy_gettext("Password"), widget=BS3PasswordFieldWidget()) port = IntegerField(lazy_gettext("Port"), validators=[Optional()], widget=BS3TextFieldWidget()) extra = TextAreaField(lazy_gettext("Extra"), widget=BS3TextAreaFieldWidget()) for key, value in providers_manager.connection_form_widgets.items(): setattr(ConnectionForm, key, value.field) return ConnectionForm
Set process title. This is used by airflow.cli.commands.webserver_command to track the status of the worker.
def post_worker_init(_): """ Set process title. This is used by airflow.cli.commands.webserver_command to track the status of the worker. """ old_title = setproctitle.getproctitle() setproctitle.setproctitle(settings.GUNICORN_WORKER_READY_PREFIX + old_title)
Return URL-encoded params.
def get_params(**kwargs): """Return URL-encoded params.""" return urlencode({d: v for d, v in kwargs.items() if v is not None}, True)
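A quick sketch; ``None`` values are dropped before URL-encoding:

assert get_params(page=0, search=None, status="active") == "page=0&status=active"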
Generate the HTML for a paging component. Uses logic similar to the paging auto-generated by Flask managed views. The paging component defines the number of pages visible in the pager (window); once the user goes past the largest visible page, the page numbers scroll to the right, keeping the current page in the middle of the pager component. On the final pages, the window stops scrolling and the selection simply advances to the last page. The pager also contains <first, previous, ..., next, last> pages. This component takes into account custom parameters such as search, status, and tags, which can be added to the page links in order to maintain the state between client and server. It also allows bookmarking a specific paging state. :param current_page: the current page number, 0-indexed :param num_of_pages: the total number of pages :param search: the search query string, if any :param status: 'all', 'active', or 'paused' :param tags: array of strings of the current filtered tags :param window: the number of pages to be shown in the paging component (7 default) :param sorting_key: the sorting key selected for dags, None indicates that sorting is not needed/provided :param sorting_direction: direction of sorting, 'asc' or 'desc', None indicates that sorting is not needed/provided :return: the HTML string of the paging component
def generate_pages( current_page, num_of_pages, search=None, status=None, tags=None, window=7, sorting_key=None, sorting_direction=None, ): """ Generate the HTML for a paging component. Uses logic similar to the paging auto-generated by Flask managed views. The paging component defines the number of pages visible in the pager (window); once the user goes past the largest visible page, the page numbers scroll to the right, keeping the current page in the middle of the pager component. On the final pages, the window stops scrolling and the selection simply advances to the last page. The pager also contains <first, previous, ..., next, last> pages. This component takes into account custom parameters such as search, status, and tags, which can be added to the page links in order to maintain the state between client and server. It also allows bookmarking a specific paging state. :param current_page: the current page number, 0-indexed :param num_of_pages: the total number of pages :param search: the search query string, if any :param status: 'all', 'active', or 'paused' :param tags: array of strings of the current filtered tags :param window: the number of pages to be shown in the paging component (7 default) :param sorting_key: the sorting key selected for dags, None indicates that sorting is not needed/provided :param sorting_direction: direction of sorting, 'asc' or 'desc', None indicates that sorting is not needed/provided :return: the HTML string of the paging component """ void_link = "javascript:void(0)" first_node = Markup( """<li class="paginate_button {disabled}" id="dags_first"> <a href="{href_link}" aria-controls="dags" data-dt-idx="0" tabindex="0">&laquo;</a> </li>""" ) previous_node = Markup( """<li class="paginate_button previous {disabled}" id="dags_previous"> <a href="{href_link}" aria-controls="dags" data-dt-idx="0" tabindex="0">&lsaquo;</a> </li>""" ) next_node = Markup( """<li class="paginate_button next {disabled}" id="dags_next"> <a href="{href_link}" aria-controls="dags" data-dt-idx="3" tabindex="0">&rsaquo;</a> </li>""" ) last_node = Markup( """<li class="paginate_button {disabled}" id="dags_last"> <a href="{href_link}" aria-controls="dags" data-dt-idx="3" tabindex="0">&raquo;</a> </li>""" ) page_node = Markup( """<li class="paginate_button {is_active}"> <a href="{href_link}" aria-controls="dags" data-dt-idx="2" tabindex="0">{page_num}</a> </li>""" ) output = [Markup('<ul class="pagination" style="margin-top:0;">')] is_disabled = "disabled" if current_page <= 0 else "" qs = get_params( page=0, search=search, status=status, tags=tags, sorting_key=sorting_key, sorting_direction=sorting_direction, ) first_node_link = void_link if is_disabled else f"?{qs}" output.append( first_node.format( href_link=first_node_link, disabled=is_disabled, ) ) page_link = void_link if current_page > 0: qs = get_params( page=current_page - 1, search=search, status=status, tags=tags, sorting_key=sorting_key, sorting_direction=sorting_direction, ) page_link = f"?{qs}" output.append(previous_node.format(href_link=page_link, disabled=is_disabled)) mid = window // 2 last_page = num_of_pages - 1 if current_page <= mid or num_of_pages < window: pages = list(range(0, min(num_of_pages, window))) elif mid < current_page < last_page - mid: pages = list(range(current_page - mid, current_page + mid + 1)) else: pages = list(range(num_of_pages - window, last_page + 1)) def is_current(current, page): return page == current for page in pages: qs = get_params( page=page, search=search, status=status, tags=tags, sorting_key=sorting_key, sorting_direction=sorting_direction, ) vals = { "is_active": "active" if is_current(current_page, page) else "", "href_link": void_link if is_current(current_page, page) else f"?{qs}", "page_num": page + 1, } output.append(page_node.format(**vals)) is_disabled = "disabled" if current_page >= num_of_pages - 1 else "" qs = get_params( page=current_page + 1, search=search, status=status, tags=tags, sorting_key=sorting_key, sorting_direction=sorting_direction, ) page_link = void_link if current_page >= num_of_pages - 1 else f"?{qs}" output.append(next_node.format(href_link=page_link, disabled=is_disabled)) qs = get_params( page=last_page, search=search, status=status, tags=tags, sorting_key=sorting_key, sorting_direction=sorting_direction, ) last_node_link = void_link if is_disabled else f"?{qs}" output.append( last_node.format( href_link=last_node_link, disabled=is_disabled, ) ) output.append(Markup("</ul>")) return Markup("\n".join(output))
Return an epoch-type date (tuple with no timezone).
def epoch(dttm): """Return an epoch-type date (tuple with no timezone).""" return (int(time.mktime(dttm.timetuple())) * 1000,)
Get a unique key per URL; used by cache.
def make_cache_key(*args, **kwargs): """Get a unique key per URL; used by cache.""" path = request.path args = str(hash(frozenset(request.args.items()))) return (path + args).encode("ascii", "ignore")
Generate a URL to the Graph view for a TaskInstance.
def task_instance_link(attr): """Generate a URL to the Graph view for a TaskInstance.""" dag_id = attr.get("dag_id") task_id = attr.get("task_id") run_id = attr.get("run_id") map_index = attr.get("map_index", None) if map_index == -1: map_index = None url = url_for( "Airflow.grid", dag_id=dag_id, task_id=task_id, dag_run_id=run_id, map_index=map_index, tab="graph", ) url_root = url_for( "Airflow.grid", dag_id=dag_id, task_id=task_id, root=task_id, dag_run_id=run_id, map_index=map_index, tab="graph", ) return Markup( """ <span style="white-space: nowrap;"> <a href="{url}">{task_id}</a> <a href="{url_root}" title="Filter on this task"> <span class="material-icons" style="margin-left:0;" aria-hidden="true">filter_alt</span> </a> </span> """ ).format(url=url, task_id=task_id, url_root=url_root)
Return a formatted string with HTML for a given State.
def state_token(state): """Return a formatted string with HTML for a given State.""" color = State.color(state) fg_color = State.color_fg(state) return Markup( """ <span class="label" style="color:{fg_color}; background-color:{color};" title="Current State: {state}">{state}</span> """ ).format(color=color, state=state, fg_color=fg_color)
Get 'state' & return a formatted string with HTML for a given State.
def state_f(attr): """Get 'state' & return a formatted string with HTML for a given State.""" state = attr.get("state") return state_token(state)
Return an HTML-formatted string wrapping the value in a non-breaking (<nobr>) element.
def nobr_f(attr_name): """Return an HTML-formatted string wrapping the value in a non-breaking (<nobr>) element.""" def nobr(attr): f = attr.get(attr_name) return Markup("<nobr>{}</nobr>").format(f) return nobr
Return a formatted string with HTML for given DateTime.
def datetime_f(attr_name): """Return a formatted string with HTML for given DateTime.""" def dt(attr): f = attr.get(attr_name) return datetime_html(f) return dt
Return an HTML formatted string with time element to support timezone changes in UI.
def datetime_html(dttm: DateTime | None) -> str: """Return an HTML formatted string with time element to support timezone changes in UI.""" as_iso = dttm.isoformat() if dttm else "" if not as_iso: return Markup("") as_iso_short = as_iso if timezone.utcnow().isoformat()[:4] == as_iso[:4]: as_iso_short = as_iso[5:] # The empty title will be replaced in JS code when non-UTC dates are displayed return Markup('<nobr><time title="" datetime="{}">{}</time></nobr>').format(as_iso, as_iso_short)
Return a formatted string with HTML for given JSON serializable.
def json_f(attr_name): """Return a formatted string with HTML for given JSON serializable.""" def json_(attr): f = attr.get(attr_name) serialized = json.dumps(f, cls=WebEncoder) return Markup("<nobr>{}</nobr>").format(serialized) return json_
Generate a URL to the Graph view for a Dag.
def dag_link(attr): """Generate a URL to the Graph view for a Dag.""" dag_id = attr.get("dag_id") execution_date = attr.get("execution_date") if not dag_id: return Markup("None") url = url_for("Airflow.graph", dag_id=dag_id, execution_date=execution_date) return Markup('<a href="{}">{}</a>').format(url, dag_id)
Generate a URL to the Graph view for a DagRun.
def dag_run_link(attr): """Generate a URL to the Graph view for a DagRun.""" dag_id = attr.get("dag_id") run_id = attr.get("run_id") url = url_for("Airflow.graph", dag_id=dag_id, dag_run_id=run_id) return Markup('<a href="{url}">{run_id}</a>').format(url=url, run_id=run_id)
Produce DAG runs sorted by specified columns. :param query: An ORM select object against *DagRun*. :param ordering: Column names to sort the runs by; these should generally come from a timetable's ``run_ordering``. :param limit: Number of runs to limit to. :param session: SQLAlchemy ORM session object :return: A list of DagRun objects ordered by the specified columns. The list contains only the *last* objects, but in *ascending* order.
def sorted_dag_runs( query: Select, *, ordering: Sequence[str], limit: int, session: Session ) -> Sequence[DagRun]: """Produce DAG runs sorted by specified columns. :param query: An ORM select object against *DagRun*. :param ordering: Column names to sort the runs by; these should generally come from a timetable's ``run_ordering``. :param limit: Number of runs to limit to. :param session: SQLAlchemy ORM session object :return: A list of DagRun objects ordered by the specified columns. The list contains only the *last* objects, but in *ascending* order. """ ordering_exprs = (_get_run_ordering_expr(name) for name in ordering) runs = session.scalars(query.order_by(*ordering_exprs, DagRun.id.desc()).limit(limit)).all() runs.reverse() return runs
Format map index for list columns in model view.
def format_map_index(attr: dict) -> str: """Format map index for list columns in model view.""" value = attr["map_index"] if value < 0: return Markup("&nbsp;") return str(value)
Highlight text using a given Lexer.
def pygment_html_render(s, lexer=lexers.TextLexer): """Highlight text using a given Lexer.""" return highlight(s, lexer(), HtmlFormatter(linenos=True))
Render a given Python object with a given Pygments lexer.
def render(obj: Any, lexer: Lexer, handler: Callable[[Any], str] | None = None): """Render a given Python object with a given Pygments lexer.""" if isinstance(obj, str): return Markup(pygment_html_render(obj, lexer)) elif isinstance(obj, (tuple, list)): out = "" for i, text_to_render in enumerate(obj): if lexer is lexers.PythonLexer: text_to_render = repr(text_to_render) out += Markup("<div>List item #{}</div>").format(i) out += Markup("<div>" + pygment_html_render(text_to_render, lexer) + "</div>") return out elif isinstance(obj, dict): out = "" for k, v in obj.items(): if lexer is lexers.PythonLexer: v = repr(v) out += Markup('<div>Dict item "{}"</div>').format(k) out += Markup("<div>" + pygment_html_render(v, lexer) + "</div>") return out elif handler is not None and obj is not None: return Markup(pygment_html_render(handler(obj), lexer)) else: # Return empty string otherwise return ""
Render a given Python object with json lexer.
def json_render(obj, lexer): """Render a given Python object with json lexer.""" out = "" if isinstance(obj, str): out = Markup(pygment_html_render(obj, lexer)) elif isinstance(obj, (dict, list)): content = json.dumps(obj, sort_keys=True, indent=4) out = Markup(pygment_html_render(content, lexer)) return out
Convert a Markdown string to HTML.
def wrapped_markdown(s, css_class="rich_doc"): """Convert a Markdown string to HTML.""" md = MarkdownIt("gfm-like", {"html": conf.getboolean("webserver", "allow_raw_html_descriptions")}) if s is None: return None s = textwrap.dedent(s) return Markup(f'<div class="{css_class}" >{md.render(s)}</div>')
Return Dictionary containing different Pygments Lexers for Rendering & Highlighting.
def get_attr_renderer(): """Return Dictionary containing different Pygments Lexers for Rendering & Highlighting.""" return { "bash": lambda x: render(x, lexers.BashLexer), "bash_command": lambda x: render(x, lexers.BashLexer), "doc": lambda x: render(x, lexers.TextLexer), "doc_json": lambda x: render(x, lexers.JsonLexer), "doc_md": wrapped_markdown, "doc_rst": lambda x: render(x, lexers.RstLexer), "doc_yaml": lambda x: render(x, lexers.YamlLexer), "hql": lambda x: render(x, lexers.SqlLexer), "html": lambda x: render(x, lexers.HtmlLexer), "jinja": lambda x: render(x, lexers.DjangoLexer), "json": lambda x: json_render(x, lexers.JsonLexer), "md": wrapped_markdown, "mysql": lambda x: render(x, lexers.MySqlLexer), "postgresql": lambda x: render(x, lexers.PostgresLexer), "powershell": lambda x: render(x, lexers.PowerShellLexer), "py": lambda x: render(x, lexers.PythonLexer, get_python_source), "python_callable": lambda x: render(x, lexers.PythonLexer, get_python_source), "rst": lambda x: render(x, lexers.RstLexer), "sql": lambda x: render(x, lexers.SqlLexer), "tsql": lambda x: render(x, lexers.TransactSqlLexer), "yaml": lambda x: render(x, lexers.YamlLexer), }
Remove all parameters starting with `_`. :param args: arguments of request :return: copy of the dictionary passed as input with args starting with `_` removed.
def sanitize_args(args: dict[str, Any]) -> dict[str, Any]: """ Remove all parameters starting with `_`. :param args: arguments of request :return: copy of the dictionary passed as input with args starting with `_` removed. """ return {key: value for key, value in args.items() if not key.startswith("_")}
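For example:

assert sanitize_args({"dag_id": "demo", "_csrf_token": "x"}) == {"dag_id": "demo"}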
Given a user-supplied URL, ensure it points to our web server.
def get_safe_url(url): """Given a user-supplied URL, ensure it points to our web server.""" if not url: return url_for("Airflow.index") # If the url contains semicolon, redirect it to homepage to avoid # potential XSS. (Similar to https://github.com/python/cpython/pull/24297/files (bpo-42967)) if ";" in unquote(url): return url_for("Airflow.index") url = url.lstrip(_WHATWG_C0_CONTROL_OR_SPACE) host_url = urlsplit(request.host_url) redirect_url = urlsplit(urljoin(request.host_url, url)) if not (redirect_url.scheme in ("http", "https") and host_url.netloc == redirect_url.netloc): return url_for("Airflow.index") # This will ensure we only redirect to the right scheme/netloc return redirect_url.geturl()
Get Execution Date, Base Date & Number of Runs from a Request.
def get_date_time_num_runs_dag_runs_form_data(www_request, session, dag): """Get Execution Date, Base Date & Number of Runs from a Request.""" date_time = www_request.args.get("execution_date") run_id = www_request.args.get("run_id") # First check run id, then check execution date, if not fall back on the latest dagrun if run_id: dagrun = dag.get_dagrun(run_id=run_id, session=session) date_time = dagrun.execution_date elif date_time: date_time = _safe_parse_datetime(date_time) else: date_time = dag.get_latest_execution_date(session=session) or timezone.utcnow() base_date = www_request.args.get("base_date") if base_date: base_date = _safe_parse_datetime(base_date) else: # The DateTimeField widget truncates milliseconds and would lose # the first dag run. Round to next second. base_date = (date_time + datetime.timedelta(seconds=1)).replace(microsecond=0) default_dag_run = conf.getint("webserver", "default_dag_run_display_number") num_runs = www_request.args.get("num_runs", default=default_dag_run, type=int) # When base_date has been rounded up because of the DateTimeField widget, we want # to use the execution_date as the starting point for our query just to ensure a # link targeting a specific dag run actually loads that dag run. If there are # more than num_runs dag runs in the "rounded period" then those dagruns would get # loaded and the actual requested run would be excluded by the limit(). Once # the user has changed base date to be anything else we want to use that instead. query_date = base_date if date_time < base_date <= date_time + datetime.timedelta(seconds=1): query_date = date_time drs = session.scalars( select(DagRun) .where(DagRun.dag_id == dag.dag_id, DagRun.execution_date <= query_date) .order_by(desc(DagRun.execution_date)) .limit(num_runs) ).all() dr_choices = [] dr_state = None for dr in drs: dr_choices.append((dr.execution_date.isoformat(), dr.run_id)) if date_time == dr.execution_date: dr_state = dr.state # Happens if base_date was changed and the selected dag run is not in result if not dr_state and drs: dr = drs[0] date_time = dr.execution_date dr_state = dr.state return { "dttm": date_time, "base_date": base_date, "num_runs": num_runs, "execution_date": date_time.isoformat(), "dr_choices": dr_choices, "dr_state": dr_state, }
Parse a datetime string, aborting with an error message for invalid dates. :param v: the string value to be parsed :param allow_empty: Set True to return None if the input is an empty string or None :param strict: if False, it will fall back on the dateutil parser if unable to parse with pendulum
def _safe_parse_datetime(v, *, allow_empty=False, strict=True) -> datetime.datetime | None: """ Parse a datetime string, aborting with an error message for invalid dates. :param v: the string value to be parsed :param allow_empty: Set True to return None if the input is an empty string or None :param strict: if False, it will fall back on the dateutil parser if unable to parse with pendulum """ if allow_empty is True and not v: return None try: return timezone.parse(v, strict=strict) except (TypeError, ParserError): abort(400, f"Invalid datetime: {v!r}")
Create a nested dict representation of the DAG's TaskGroup and its children. Used to construct the Graph and Grid views.
def dag_to_grid(dag: DagModel, dag_runs: Sequence[DagRun], session: Session) -> dict[str, Any]: """ Create a nested dict representation of the DAG's TaskGroup and its children. Used to construct the Graph and Grid views. """ query = session.execute( select( TaskInstance.task_id, TaskInstance.run_id, TaskInstance.state, TaskInstance._try_number, func.min(TaskInstanceNote.content).label("note"), func.count(func.coalesce(TaskInstance.state, sqla.literal("no_status"))).label("state_count"), func.min(TaskInstance.queued_dttm).label("queued_dttm"), func.min(TaskInstance.start_date).label("start_date"), func.max(TaskInstance.end_date).label("end_date"), ) .join(TaskInstance.task_instance_note, isouter=True) .where( TaskInstance.dag_id == dag.dag_id, TaskInstance.run_id.in_([dag_run.run_id for dag_run in dag_runs]), ) .group_by(TaskInstance.task_id, TaskInstance.run_id, TaskInstance.state, TaskInstance._try_number) .order_by(TaskInstance.task_id, TaskInstance.run_id) ) grouped_tis: dict[str, list[TaskInstance]] = collections.defaultdict( list, ((task_id, list(tis)) for task_id, tis in itertools.groupby(query, key=lambda ti: ti.task_id)), ) @cache def get_task_group_children_getter() -> operator.methodcaller: sort_order = conf.get("webserver", "grid_view_sorting_order", fallback="topological") if sort_order == "topological": return operator.methodcaller("topological_sort") if sort_order == "hierarchical_alphabetical": return operator.methodcaller("hierarchical_alphabetical_sort") raise AirflowConfigException(f"Unsupported grid_view_sorting_order: {sort_order}") def task_group_to_grid(item: Operator | TaskGroup) -> dict[str, Any]: if not isinstance(item, TaskGroup): def _mapped_summary(ti_summaries: list[TaskInstance]) -> Iterator[dict[str, Any]]: run_id = "" record: dict[str, Any] = {} def set_overall_state(record): for state in wwwutils.priority: if state in record["mapped_states"]: record["state"] = state break # When turning the dict into JSON we can't have None as a key, # so use the string that the UI does. with contextlib.suppress(KeyError): record["mapped_states"]["no_status"] = record["mapped_states"].pop(None) for ti_summary in ti_summaries: if run_id != ti_summary.run_id: run_id = ti_summary.run_id if record: set_overall_state(record) yield record record = { "task_id": ti_summary.task_id, "run_id": run_id, "queued_dttm": ti_summary.queued_dttm, "start_date": ti_summary.start_date, "end_date": ti_summary.end_date, "mapped_states": {ti_summary.state: ti_summary.state_count}, "state": None, # We change this before yielding } continue record["queued_dttm"] = min( filter(None, [record["queued_dttm"], ti_summary.queued_dttm]), default=None ) record["start_date"] = min( filter(None, [record["start_date"], ti_summary.start_date]), default=None ) # Sometimes the start date of a group might be before the queued date of the group if ( record["queued_dttm"] and record["start_date"] and record["queued_dttm"] > record["start_date"] ): record["queued_dttm"] = None record["end_date"] = max( filter(None, [record["end_date"], ti_summary.end_date]), default=None ) record["mapped_states"][ti_summary.state] = ti_summary.state_count if record: set_overall_state(record) yield record if item_is_mapped := needs_expansion(item): instances = list(_mapped_summary(grouped_tis[item.task_id])) else: instances = [ { "task_id": task_instance.task_id, "run_id": task_instance.run_id, "state": task_instance.state, "queued_dttm": task_instance.queued_dttm, "start_date": task_instance.start_date, "end_date": task_instance.end_date, "try_number": wwwutils.get_try_count(task_instance._try_number, task_instance.state), "note": task_instance.note, } for task_instance in grouped_tis[item.task_id] ] setup_teardown_type = {} if item.is_setup is True: setup_teardown_type["setupTeardownType"] = "setup" elif item.is_teardown is True: setup_teardown_type["setupTeardownType"] = "teardown" return { "id": item.task_id, "instances": instances, "label": item.label, "extra_links": item.extra_links, "is_mapped": item_is_mapped, "has_outlet_datasets": any(isinstance(i, Dataset) for i in (item.outlets or [])), "operator": item.operator_name, "trigger_rule": item.trigger_rule, **setup_teardown_type, } # Task Group task_group = item children = [task_group_to_grid(child) for child in get_task_group_children_getter()(item)] def get_summary(dag_run: DagRun): child_instances = [ item for sublist in (child["instances"] for child in children if "instances" in child) for item in sublist if item["run_id"] == dag_run.run_id if item ] children_queued_dttms = (item["queued_dttm"] for item in child_instances) children_start_dates = (item["start_date"] for item in child_instances) children_end_dates = (item["end_date"] for item in child_instances) children_states = {item["state"] for item in child_instances} group_state = next((state for state in wwwutils.priority if state in children_states), None) group_queued_dttm = min(filter(None, children_queued_dttms), default=None) group_start_date = min(filter(None, children_start_dates), default=None) group_end_date = max(filter(None, children_end_dates), default=None) # Sometimes the start date of a group might be before the queued date of the group if group_queued_dttm and group_start_date and group_queued_dttm > group_start_date: group_queued_dttm = None return { "task_id": task_group.group_id, "run_id": dag_run.run_id, "state": group_state, "queued_dttm": group_queued_dttm, "start_date": group_start_date, "end_date": group_end_date, } def get_mapped_group_summaries(): mapped_ti_query = session.execute( select(TaskInstance.task_id, TaskInstance.state, TaskInstance.run_id, TaskInstance.map_index) .where( TaskInstance.dag_id == dag.dag_id, TaskInstance.task_id.in_(child["id"] for child in children), TaskInstance.run_id.in_(r.run_id for r in dag_runs), ) .order_by(TaskInstance.task_id, TaskInstance.run_id) ) # Group tis by run_id, and then map_index. mapped_tis: Mapping[str, Mapping[int, list[TaskInstance]]] = defaultdict( lambda: defaultdict(list) ) for ti in mapped_ti_query: mapped_tis[ti.run_id][ti.map_index].append(ti) def get_mapped_group_summary(run_id: str, mapped_instances: Mapping[int, list[TaskInstance]]): child_instances = [ item for sublist in (child["instances"] for child in children if "instances" in child) for item in sublist if item and item["run_id"] == run_id ] children_queued_dttms = (item["queued_dttm"] for item in child_instances) children_start_dates = (item["start_date"] for item in child_instances) children_end_dates = (item["end_date"] for item in child_instances) children_states = {item["state"] for item in child_instances} # TODO: This assumes TI map index has a one-to-one mapping to # its parent mapped task group, which will not be true when we # allow nested mapping in the future. mapped_states: MutableMapping[str, int] = defaultdict(int) for mi_values in mapped_instances.values(): child_states = {mi.state for mi in mi_values} state = next(s for s in wwwutils.priority if s in child_states) value = state.value if state is not None else "no_status" mapped_states[value] += 1 group_state = next((state for state in wwwutils.priority if state in children_states), None) group_queued_dttm = min(filter(None, children_queued_dttms), default=None) group_start_date = min(filter(None, children_start_dates), default=None) group_end_date = max(filter(None, children_end_dates), default=None) return { "task_id": task_group.group_id, "run_id": run_id, "state": group_state, "queued_dttm": group_queued_dttm, "start_date": group_start_date, "end_date": group_end_date, "mapped_states": mapped_states, } return [get_mapped_group_summary(run_id, tis) for run_id, tis in mapped_tis.items()] # We don't need to calculate summaries for the root if task_group.group_id is None: return { "id": task_group.group_id, "label": task_group.label, "children": children, "instances": [], } if next(task_group.iter_mapped_task_groups(), None) is not None: return { "id": task_group.group_id, "label": task_group.label, "children": children, "tooltip": task_group.tooltip, "instances": get_mapped_group_summaries(), "is_mapped": True, } group_summaries = [get_summary(dr) for dr in dag_runs] return { "id": task_group.group_id, "label": task_group.label, "children": children, "tooltip": task_group.tooltip, "instances": group_summaries, } return task_group_to_grid(dag.task_group)
Return a list of dot-separated dictionary paths.
def get_key_paths(input_dict): """Return a list of dot-separated dictionary paths.""" for key, value in input_dict.items(): if isinstance(value, dict): for sub_key in get_key_paths(value): yield f"{key}.{sub_key}" else: yield key
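A minimal usage sketch of ``get_key_paths``; the nested dict is illustrative only.

# Enumerate dot-separated paths of a nested dict.
config = {"webserver": {"navbar_color": "#fff", "expose_hostname": True}, "dag_id": "demo"}
print(list(get_key_paths(config)))
# ['webserver.navbar_color', 'webserver.expose_hostname', 'dag_id']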
Return the value from a dictionary based on dot-separated path of keys.
def get_value_from_path(key_path, content): """Return the value from a dictionary based on dot-separated path of keys.""" elem = content for x in key_path.strip(".").split("."): try: x = int(x) elem = elem[x] except ValueError: elem = elem.get(x) return elem
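A minimal usage sketch showing both dict-key and integer-index segments; the data is illustrative only.

content = {"tasks": [{"task_id": "extract"}, {"task_id": "load"}]}
# "1" parses as an int, so it is used as a list index; the other segments
# raise ValueError on int() and fall back to dict lookups via .get().
print(get_value_from_path("tasks.1.task_id", content))  # -> 'load'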
Return a dict of task counts, grouped by dag id and task state. :param qry: The data in the format (<dag id>, <task state>, <is dag running>, <task count>), ordered by <dag id> and <is dag running>
def get_task_stats_from_query(qry):
    """
    Return a dict of task counts, grouped by dag id and task state.

    :param qry: The data in the format (<dag id>, <task state>, <is dag running>, <task count>),
        ordered by <dag id> and <is dag running>
    """
    data = {}
    last_dag_id = None
    has_running_dags = False
    for dag_id, state, is_dag_running, count in qry:
        if last_dag_id != dag_id:
            last_dag_id = dag_id
            has_running_dags = False
        elif not is_dag_running and has_running_dags:
            continue

        if is_dag_running:
            has_running_dags = True
        if dag_id not in data:
            data[dag_id] = {}
        data[dag_id][state] = count
    return data
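A minimal sketch with illustrative rows, ordered as the docstring requires (within each dag id, rows for running runs come first, so once a running run has been seen, stats from non-running runs are skipped).

rows = [
    ("etl", "success", True, 3),
    ("etl", "running", True, 1),
    ("etl", "success", False, 10),  # skipped: a running run was already seen for "etl"
    ("report", "failed", False, 2),
]
print(get_task_stats_from_query(rows))
# {'etl': {'success': 3, 'running': 1}, 'report': {'failed': 2}}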
Return JSON so that side effects can be handled elegantly in-page. Some endpoints are called by JavaScript and expect a JSON response, while browser requests get a flash message and a redirect.
def redirect_or_json(origin, msg, status="", status_code=200):
    """
    Return JSON so that side effects can be handled elegantly in-page.

    Some endpoints are called by JavaScript and expect a JSON response,
    while browser requests get a flash message and a redirect.
    """
    if request.headers.get("Accept") == "application/json":
        if status == "error" and status_code == 200:
            status_code = 500
        return Response(response=msg, status=status_code, mimetype="application/json")
    else:
        if status:
            flash(msg, status)
        else:
            flash(msg)
        return redirect(origin)
Show Not Found on screen for 404 errors in the Webserver.
def not_found(error):
    """Show Not Found on screen for 404 errors in the Webserver."""
    return (
        render_template(
            "airflow/error.html",
            hostname=get_hostname() if conf.getboolean("webserver", "EXPOSE_HOSTNAME") else "",
            status_code=404,
            error_message="Page cannot be found.",
        ),
        404,
    )
Show Method Not Allowed on screen for 405 errors in the Webserver.
def method_not_allowed(error):
    """Show Method Not Allowed on screen for 405 errors in the Webserver."""
    return (
        render_template(
            "airflow/error.html",
            hostname=get_hostname() if conf.getboolean("webserver", "EXPOSE_HOSTNAME") else "",
            status_code=405,
            error_message="Received an invalid request.",
        ),
        405,
    )
Show Traceback for a given error.
def show_traceback(error): """Show Traceback for a given error.""" is_logged_in = get_auth_manager().is_logged_in() return ( render_template( "airflow/traceback.html", python_version=sys.version.split(" ")[0] if is_logged_in else "redacted", airflow_version=version if is_logged_in else "redacted", hostname=get_hostname() if conf.getboolean("webserver", "EXPOSE_HOSTNAME") and is_logged_in else "redacted", info=traceback.format_exc() if conf.getboolean("webserver", "EXPOSE_STACKTRACE") and is_logged_in else "Error! Please contact server admin.", ), 500, )
Add `.can_edit`, `.can_trigger`, and `.can_delete` properties to DAG based on current user's permissions. Located in `views.py` rather than the DAG model to keep permissions logic out of the Airflow core.
def add_user_permissions_to_dag(sender, template, context, **extra): """ Add `.can_edit`, `.can_trigger`, and `.can_delete` properties to DAG based on current user's permissions. Located in `views.py` rather than the DAG model to keep permissions logic out of the Airflow core. """ if "dag" not in context: return dag = context["dag"] can_create_dag_run = get_auth_manager().is_authorized_dag( method="POST", access_entity=DagAccessEntity.RUN ) dag.can_edit = get_auth_manager().is_authorized_dag(method="PUT", details=DagDetails(id=dag.dag_id)) dag.can_trigger = dag.can_edit and can_create_dag_run dag.can_delete = get_auth_manager().is_authorized_dag(method="DELETE", details=DagDetails(id=dag.dag_id)) context["dag"] = dag
Mark a function as requiring authentication.
def requires_authentication(function: T): """Mark a function as requiring authentication.""" @wraps(function) def decorated(*args, **kwargs): auth = current_app.api_auth[0] return auth.requires_authentication(function)(*args, **kwargs) return cast(T, decorated)
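A hypothetical view protected by this decorator; it assumes ``current_app.api_auth`` already holds at least one initialized backend (see ``init_api_experimental_auth`` further below).

from flask import jsonify

@requires_authentication
def protected_endpoint():
    # Only reached if the first configured API auth backend accepts the request.
    return jsonify(status="OK")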
Add Deprecation HTTP Header Field. .. seealso:: IETF proposal for the header field `here <https://datatracker.ietf.org/doc/draft-dalal-deprecation-header/>`_.
def add_deprecation_headers(response: Response): """ Add Deprecation HTTP Header Field. .. seealso:: IETF proposal for the header field `here <https://datatracker.ietf.org/doc/draft-dalal-deprecation-header/>`_. """ response.headers["Deprecation"] = "true" doc_url = get_docs_url("upgrading-to-2.html#migration-guide-from-experimental-api-to-stable-api-v1") deprecation_link = f'<{doc_url}>; rel="deprecation"; type="text/html"' if "link" in response.headers: response.headers["Link"] += f", {deprecation_link}" else: response.headers["Link"] = f"{deprecation_link}" return response
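A minimal sketch of the fields this hook adds, using a bare Flask Response; it assumes an importable Airflow installation so that ``get_docs_url`` resolves.

from flask import Response

resp = add_deprecation_headers(Response())
print(resp.headers["Deprecation"])  # 'true'
print(resp.headers["Link"])  # '<...upgrading-to-2.html#...>; rel="deprecation"; type="text/html"'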
Trigger a new dag run for a Dag with an execution date of now unless specified in the data.
def trigger_dag(dag_id): """Trigger a new dag run for a Dag with an execution date of now unless specified in the data.""" data = request.get_json(force=True) run_id = None if "run_id" in data: run_id = data["run_id"] conf = None if "conf" in data: conf = data["conf"] if not isinstance(conf, dict): error_message = "Dag Run conf must be a dictionary object, other types are not supported" log.error(error_message) response = jsonify({"error": error_message}) response.status_code = 400 return response execution_date = None if "execution_date" in data and data["execution_date"] is not None: execution_date = data["execution_date"] # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: log.error("Given execution date could not be identified as a date.") error_message = ( f"Given execution date, {execution_date}, could not be identified as a date. " f"Example date format: 2015-11-16T14:34:15+00:00" ) response = jsonify({"error": error_message}) response.status_code = 400 return response replace_microseconds = execution_date is None if "replace_microseconds" in data: replace_microseconds = to_boolean(data["replace_microseconds"]) try: dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date, replace_microseconds) except AirflowException as err: log.error(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response if getattr(g, "user", None): log.info("User %s created %s", g.user, dr) response = jsonify( message=f"Created {dr}", execution_date=dr.execution_date.isoformat(), run_id=dr.run_id ) return response
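A hypothetical client call against this endpoint; the base URL, dag id, and payload values are illustrative, and the experimental API must be enabled.

import requests

resp = requests.post(
    "http://localhost:8080/api/experimental/dags/example_dag/dag_runs",
    json={
        "run_id": "manual__2023-01-01",  # optional
        "conf": {"param": "value"},  # must be a JSON object, per the check above
        "execution_date": "2023-01-01T00:00:00+00:00",  # optional; defaults to now
    },
)
print(resp.json())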
Delete all DB records related to the specified Dag.
def delete_dag(dag_id): """Delete all DB records related to the specified Dag.""" try: count = delete.delete_dag(dag_id) except AirflowException as err: log.error(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response return jsonify(message=f"Removed {count} record(s)", count=count)
Return a list of Dag Runs for a specific DAG ID. :query param state: a query string parameter '?state=queued|running|success...' :param dag_id: String identifier of a DAG :return: List of DAG runs of a DAG with requested state, or all runs if the state is not specified
def dag_runs(dag_id): """ Return a list of Dag Runs for a specific DAG ID. :query param state: a query string parameter '?state=queued|running|success...' :param dag_id: String identifier of a DAG :return: List of DAG runs of a DAG with requested state, or all runs if the state is not specified """ try: state = request.args.get("state") dagruns = get_dag_runs(dag_id, state) except AirflowException as err: log.info(err) response = jsonify(error=f"{err}") response.status_code = 400 return response return jsonify(dagruns)
Test endpoint to check authentication.
def test(): """Test endpoint to check authentication.""" return jsonify(status="OK")
Get Airflow Version.
def info(): """Get Airflow Version.""" return jsonify(version=version)
Return python code of a given dag_id.
def get_dag_code(dag_id): """Return python code of a given dag_id.""" try: return get_code(dag_id) except AirflowException as err: log.info(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response
Return a JSON with a task's public instance variables.
def task_info(dag_id, task_id): """Return a JSON with a task's public instance variables.""" try: t_info = get_task(dag_id, task_id) except AirflowException as err: log.info(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response # JSONify and return. fields = {k: str(v) for k, v in vars(t_info).items() if not k.startswith("_")} return jsonify(fields)
(Un)pause a dag.
def dag_paused(dag_id, paused):
    """(Un)pause a dag."""
    is_paused = paused == "true"
    models.DagModel.get_dagmodel(dag_id).set_is_paused(is_paused=is_paused)
    return jsonify({"response": "ok"})
Get paused state of a dag.
def dag_is_paused(dag_id): """Get paused state of a dag.""" is_paused = models.DagModel.get_dagmodel(dag_id).is_paused return jsonify({"is_paused": is_paused})
Return a JSON with a task instance's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". Of course, this must be URL-encoded in the request.
def task_instance_info(dag_id, execution_date, task_id):
    """
    Return a JSON with a task instance's public instance variables.

    The format for the exec_date is expected to be
    "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". Of course,
    this must be URL-encoded in the request.
    """
    # Convert string datetime into actual datetime
    try:
        execution_date = timezone.parse(execution_date)
    except ValueError:
        log.error("Given execution date could not be identified as a date.")
        error_message = (
            f"Given execution date, {execution_date}, could not be identified as a date. "
            f"Example date format: 2015-11-16T14:34:15+00:00"
        )
        response = jsonify({"error": error_message})
        response.status_code = 400
        return response

    try:
        ti_info = get_task_instance(dag_id, task_id, execution_date)
    except AirflowException as err:
        log.info(err)
        response = jsonify(error=f"{err}")
        response.status_code = err.status_code
        return response

    # JSONify and return.
    fields = {k: str(v) for k, v in vars(ti_info).items() if not k.startswith("_")}
    return jsonify(fields)
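As the docstring notes, the execution date must be URL-encoded in the request; for example:

from urllib.parse import quote

print(quote("2016-11-16T11:34:15"))  # -> '2016-11-16T11%3A34%3A15'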
Return a JSON with a dag_run's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". Of course, this must be URL-encoded in the request.
def dag_run_status(dag_id, execution_date):
    """
    Return a JSON with a dag_run's public instance variables.

    The format for the exec_date is expected to be
    "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". Of course,
    this must be URL-encoded in the request.
    """
    # Convert string datetime into actual datetime
    try:
        execution_date = timezone.parse(execution_date)
    except ValueError:
        log.error("Given execution date could not be identified as a date.")
        error_message = (
            f"Given execution date, {execution_date}, could not be identified as a date. "
            f"Example date format: 2015-11-16T14:34:15+00:00"
        )
        response = jsonify({"error": error_message})
        response.status_code = 400
        return response

    try:
        dr_info = get_dag_run_state(dag_id, execution_date)
    except AirflowException as err:
        log.info(err)
        response = jsonify(error=f"{err}")
        response.status_code = err.status_code
        return response

    return jsonify(dr_info)
Return the latest DagRun for each DAG formatted for the UI.
def latest_dag_runs():
    """Return the latest DagRun for each DAG formatted for the UI."""
    from airflow.models import DagRun

    dagruns = DagRun.get_latest_runs()
    payload = []
    for dagrun in dagruns:
        if dagrun.execution_date:
            payload.append(
                {
                    "dag_id": dagrun.dag_id,
                    "execution_date": dagrun.execution_date.isoformat(),
                    "start_date": dagrun.start_date.isoformat() if dagrun.start_date else "",
                    "dag_run_url": url_for(
                        "Airflow.graph", dag_id=dagrun.dag_id, execution_date=dagrun.execution_date
                    ),
                }
            )
    return jsonify(items=payload)
Get pool by a given name.
def get_pool(name): """Get pool by a given name.""" try: pool = pool_api.get_pool(name=name) except AirflowException as err: log.error(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: return jsonify(pool.to_json())
Get all pools.
def get_pools(): """Get all pools.""" try: pools = pool_api.get_pools() except AirflowException as err: log.error(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: return jsonify([p.to_json() for p in pools])
Create a pool.
def create_pool(): """Create a pool.""" params = request.get_json(force=True) try: pool = pool_api.create_pool(**params) except AirflowException as err: log.error(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: return jsonify(pool.to_json())
Delete pool.
def delete_pool(name): """Delete pool.""" try: pool = pool_api.delete_pool(name=name) except AirflowException as err: log.error(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: return jsonify(pool.to_json())
Get Lineage details for a DagRun.
def get_lineage(dag_id: str, execution_date: str): """Get Lineage details for a DagRun.""" # Convert string datetime into actual datetime try: execution_dt = timezone.parse(execution_date) except ValueError: log.error("Given execution date could not be identified as a date.") error_message = ( f"Given execution date, {execution_date}, could not be identified as a date. " f"Example date format: 2015-11-16T14:34:15+00:00" ) response = jsonify({"error": error_message}) response.status_code = 400 return response try: lineage = get_lineage_api(dag_id=dag_id, execution_date=execution_dt) except AirflowException as err: log.error(err) response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: return jsonify(lineage)
Dynamically import a class from a string path. :param class_path: string with class path :return: class
def dynamic_class_import(class_path):
    """
    Dynamically import a class from a string path.

    :param class_path: string with class path
    :return: class
    """
    # Split the path into the leading module path and the trailing attribute chain
    try:
        tmp = class_path.split(".")
        module_path = ".".join(tmp[:-1])
        package = __import__(module_path)
        return reduce(getattr, tmp[1:], package)
    except Exception as e:
        log.exception(e)
        log.error(LOGMSG_ERR_FAB_ADDON_IMPORT, class_path, e)
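A minimal usage sketch with a standard-library path; the path is illustrative only.

cls = dynamic_class_import("collections.OrderedDict")
print(cls)  # <class 'collections.OrderedDict'>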
Init `Flask App Builder <https://flask-appbuilder.readthedocs.io/en/latest/>`__.
def init_appbuilder(app: Flask) -> AirflowAppBuilder: """Init `Flask App Builder <https://flask-appbuilder.readthedocs.io/en/latest/>`__.""" return AirflowAppBuilder( app=app, session=settings.Session, base_template="airflow/main.html", update_perms=conf.getboolean( "fab", "UPDATE_FAB_PERMS", fallback=conf.getboolean("webserver", "UPDATE_FAB_PERMS") ), auth_rate_limited=conf.getboolean( "fab", "AUTH_RATE_LIMITED", fallback=conf.getboolean("webserver", "AUTH_RATE_LIMITED", fallback=True), ), auth_rate_limit=conf.get( "fab", "AUTH_RATE_LIMIT", fallback=conf.get("webserver", "AUTH_RATE_LIMIT", fallback="5 per 40 second"), ), )
Add links to the navbar.
def init_appbuilder_links(app): """Add links to the navbar.""" appbuilder = app.appbuilder appbuilder.add_link(name="DAGs", href="Airflow.index") appbuilder.menu.menu.insert(0, appbuilder.menu.menu.pop()) # Place in the first menu slot appbuilder.add_link(name="Cluster Activity", href="Airflow.cluster_activity") appbuilder.menu.menu.insert(1, appbuilder.menu.menu.pop()) # Place in the second menu slot appbuilder.add_link(name="Datasets", href="Airflow.datasets") appbuilder.menu.menu.insert(2, appbuilder.menu.menu.pop()) # Place in the third menu slot # Docs links appbuilder.add_link( name=RESOURCE_DOCS, label="Documentation", href=get_docs_url(), category=RESOURCE_DOCS_MENU ) appbuilder.add_link( name=RESOURCE_DOCS, label="Airflow Website", href="https://airflow.apache.org", category=RESOURCE_DOCS_MENU, ) appbuilder.add_link( name=RESOURCE_DOCS, label="GitHub Repo", href="https://github.com/apache/airflow", category=RESOURCE_DOCS_MENU, ) if conf.getboolean("webserver", "enable_swagger_ui", fallback=True): appbuilder.add_link( name=RESOURCE_DOCS, label="REST API Reference (Swagger UI)", href="/api/v1./api/v1_swagger_ui_index", category=RESOURCE_DOCS_MENU, ) appbuilder.add_link( name=RESOURCE_DOCS, label="REST API Reference (Redoc)", href="RedocView.redoc", category=RESOURCE_DOCS_MENU, )
Return just the auth manager class without initializing it. Useful to save execution time if only static methods need to be called.
def get_auth_manager_cls() -> type[BaseAuthManager]: """ Return just the auth manager class without initializing it. Useful to save execution time if only static methods need to be called. """ auth_manager_cls = conf.getimport(section="core", key="auth_manager") if not auth_manager_cls: raise AirflowConfigException( "No auth manager defined in the config. " "Please specify one using section/key [core/auth_manager]." ) return auth_manager_cls
Initialize the auth manager. Import the user manager class and instantiate it.
def init_auth_manager(appbuilder: AirflowAppBuilder) -> BaseAuthManager: """ Initialize the auth manager. Import the user manager class and instantiate it. """ global auth_manager auth_manager_cls = get_auth_manager_cls() auth_manager = auth_manager_cls(appbuilder) return auth_manager
Return the auth manager, provided it's been initialized before.
def get_auth_manager() -> BaseAuthManager: """Return the auth manager, provided it's been initialized before.""" if auth_manager is None: raise RuntimeError( "Auth Manager has not been initialized yet. " "The `init_auth_manager` method needs to be called first." ) return auth_manager
Create global DagBag for webserver and API. To access it use ``flask.current_app.dag_bag``.
def init_dagbag(app): """ Create global DagBag for webserver and API. To access it use ``flask.current_app.dag_bag``. """ if os.environ.get("SKIP_DAGS_PARSING") == "True": app.dag_bag = DagBag(os.devnull, include_examples=False) else: app.dag_bag = DagBag(DAGS_FOLDER, read_dags_from_db=True)
Add extra global variables to the Jinja context.
def init_jinja_globals(app):
    """Add extra global variables to the Jinja context."""
    server_timezone = conf.get("core", "default_timezone")
    if server_timezone == "system":
        server_timezone = pendulum.local_timezone().name
    elif server_timezone == "utc":
        server_timezone = "UTC"

    default_ui_timezone = conf.get("webserver", "default_ui_timezone")
    if default_ui_timezone == "system":
        default_ui_timezone = pendulum.local_timezone().name
    elif default_ui_timezone == "utc":
        default_ui_timezone = "UTC"
    if not default_ui_timezone:
        default_ui_timezone = server_timezone

    expose_hostname = conf.getboolean("webserver", "EXPOSE_HOSTNAME")
    hostname = get_hostname() if expose_hostname else "redact"

    try:
        airflow_version = airflow.__version__
    except Exception as e:
        airflow_version = None
        logger.error(e)

    git_version = get_airflow_git_version()

    def prepare_jinja_globals():
        extra_globals = {
            "server_timezone": server_timezone,
            "default_ui_timezone": default_ui_timezone,
            "hostname": hostname,
            "navbar_color": conf.get("webserver", "NAVBAR_COLOR"),
            "navbar_text_color": conf.get("webserver", "NAVBAR_TEXT_COLOR"),
            "navbar_hover_color": conf.get("webserver", "NAVBAR_HOVER_COLOR"),
            "navbar_text_hover_color": conf.get("webserver", "NAVBAR_TEXT_HOVER_COLOR"),
            "navbar_logo_text_color": conf.get("webserver", "NAVBAR_LOGO_TEXT_COLOR"),
            "log_fetch_delay_sec": conf.getint("webserver", "log_fetch_delay_sec", fallback=2),
            "log_auto_tailing_offset": conf.getint("webserver", "log_auto_tailing_offset", fallback=30),
            "log_animation_speed": conf.getint("webserver", "log_animation_speed", fallback=1000),
            "state_color_mapping": STATE_COLORS,
            "airflow_version": airflow_version,
            "git_version": git_version,
            "k8s_or_k8scelery_executor": IS_K8S_OR_K8SCELERY_EXECUTOR,
            "rest_api_enabled": False,
            "config_test_connection": conf.get("core", "test_connection", fallback="Disabled"),
            "included_events_raw": conf.get("webserver", "audit_view_included_events", fallback=""),
            "excluded_events_raw": conf.get("webserver", "audit_view_excluded_events", fallback=""),
        }

        # Extra global specific to auth manager
        extra_globals["auth_manager"] = get_auth_manager()

        backends = conf.get("api", "auth_backends")
        if backends and backends[0] != "airflow.api.auth.backend.deny_all":
            extra_globals["rest_api_enabled"] = True

        if "analytics_tool" in conf.getsection("webserver"):
            extra_globals.update(
                {
                    "analytics_tool": conf.get("webserver", "ANALYTICS_TOOL"),
                    "analytics_id": conf.get("webserver", "ANALYTICS_ID"),
                    "analytics_url": conf.get("webserver", "ANALYTICS_URL"),
                }
            )

        return extra_globals

    app.context_processor(prepare_jinja_globals)
Load the manifest file and register the `url_for_asset_` template tag. :param app: the Flask application
def configure_manifest_files(app):
    """
    Load the manifest file and register the `url_for_asset_` template tag.

    :param app: the Flask application
    """
    manifest = {}

    def parse_manifest_json():
        try:
            manifest_file = os.path.join(os.path.dirname(__file__), os.pardir, "static/dist/manifest.json")
            with open(manifest_file) as file:
                manifest.update(json.load(file))
                for source, target in manifest.copy().items():
                    manifest[source] = os.path.join("dist", target)
        except Exception:
            print("Please make sure to build the frontend in static/ directory and restart the server")

    def get_asset_url(filename):
        if app.debug:
            parse_manifest_json()
        return url_for("static", filename=manifest.get(filename, filename))

    parse_manifest_json()

    @app.context_processor
    def get_url_for_asset():
        """Template tag to return the asset URL.

        WebPack renders the assets after minification and modification
        under the static/dist folder. This template tag reads the asset
        name in ``manifest.json`` and returns the appropriate file.
        """
        return {"url_for_asset": get_asset_url}
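A hypothetical ``manifest.json`` payload, as ``parse_manifest_json`` would load it; the hashed filenames are illustrative only.

manifest_example = {
    "main.css": "main.d41d8cd9.css",
    "main.js": "main.9e107d9d.js",
}
# After loading, each target is prefixed with "dist", so
# url_for_asset("main.css") resolves to /static/dist/main.d41d8cd9.css.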
Add X-Robots-Tag header. Use it to prevent search engines from indexing Airflow. This mitigates some of the risk associated with exposing Airflow to the public internet; however, it does not address the real security risks associated with such a deployment. See also: https://developers.google.com/search/docs/advanced/robots/robots_meta_tag#xrobotstag
def init_robots(app):
    """
    Add X-Robots-Tag header.

    Use it to prevent search engines from indexing Airflow. This mitigates
    some of the risk associated with exposing Airflow to the public internet;
    however, it does not address the real security risks associated with such
    a deployment.

    See also: https://developers.google.com/search/docs/advanced/robots/robots_meta_tag#xrobotstag
    """

    def apply_robot_tag(response):
        response.headers["X-Robots-Tag"] = "noindex, nofollow"
        return response

    app.after_request(apply_robot_tag)
Add X-Frame-Options header. Use it to avoid clickjacking attacks by ensuring that Airflow's content is not embedded into other sites. See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options
def init_xframe_protection(app):
    """
    Add X-Frame-Options header.

    Use it to avoid clickjacking attacks by ensuring that Airflow's content
    is not embedded into other sites.

    See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options
    """
    x_frame_enabled = conf.getboolean("webserver", "X_FRAME_ENABLED", fallback=True)
    # When x_frame_enabled is True, rendering Airflow inside a frame is allowed,
    # so the DENY header is intentionally not added.
    if x_frame_enabled:
        return

    def apply_caching(response):
        response.headers["X-Frame-Options"] = "DENY"
        return response

    app.after_request(apply_caching)
Load authentication backends.
def init_api_experimental_auth(app): """Load authentication backends.""" auth_backends = "airflow.api.auth.backend.default" try: auth_backends = conf.get("api", "auth_backends") except AirflowConfigException: pass app.api_auth = [] try: for backend in auth_backends.split(","): auth = import_module(backend.strip()) auth.init_app(app) app.api_auth.append(auth) except ImportError as err: log.critical("Cannot import %s for API authentication due to: %s", backend, err) raise AirflowException(err)
Set airflow session interface.
def init_airflow_session_interface(app): """Set airflow session interface.""" config = app.config.copy() selected_backend = conf.get("webserver", "SESSION_BACKEND") # A bit of a misnomer - normally cookies expire whenever the browser is closed # or when they hit their expiry datetime, whichever comes first. "Permanent" # cookies only expire when they hit their expiry datetime, and can outlive # the browser being closed. permanent_cookie = config.get("SESSION_PERMANENT", True) if selected_backend == "securecookie": app.session_interface = AirflowSecureCookieSessionInterface() if permanent_cookie: def make_session_permanent(): builtin_flask_session.permanent = True app.before_request(make_session_permanent) elif selected_backend == "database": app.session_interface = AirflowDatabaseSessionInterface( app=app, db=None, permanent=permanent_cookie, # Typically these would be configurable with Flask-Session, # but we will set them explicitly instead as they don't make # sense to have configurable in Airflow's use case table="session", key_prefix="", use_signer=True, ) else: raise AirflowConfigException( "Unrecognized session backend specified in " f"web_server_session_backend: '{selected_backend}'. Please set " "this to either 'database' or 'securecookie'." )
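Hypothetical ``airflow.cfg`` settings selecting each backend; the values are illustrative only.

# [webserver]
# session_backend = database      # server-side sessions stored in the ``session`` table
# # or:
# session_backend = securecookie  # signed, client-side cookie sessions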