repo (string) | file (string) | code (string) | file_length (int64) | avg_line_length (float64) | max_line_length (int64) | extension_type (string) |
---|---|---|---|---|---|---|
airflow | airflow-main/airflow/utils/json.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from datetime import date, datetime
from decimal import Decimal
from typing import Any
from flask.json.provider import JSONProvider
from airflow.serialization.serde import CLASSNAME, DATA, SCHEMA_ID, deserialize, serialize
from airflow.utils.timezone import convert_to_utc, is_naive
class AirflowJsonProvider(JSONProvider):
"""JSON Provider for Flask app to use WebEncoder."""
ensure_ascii: bool = True
sort_keys: bool = True
def dumps(self, obj, **kwargs):
kwargs.setdefault("ensure_ascii", self.ensure_ascii)
kwargs.setdefault("sort_keys", self.sort_keys)
return json.dumps(obj, **kwargs, cls=WebEncoder)
def loads(self, s: str | bytes, **kwargs):
return json.loads(s, **kwargs)
class WebEncoder(json.JSONEncoder):
"""This encodes values into a web understandable format. There is no deserializer.
This parses datetime, dates, Decimal and bytes. In order to parse the custom
classes and the other types, and since it's just to show the result in the UI,
we return repr(object) for everything else.
"""
def default(self, o: Any) -> Any:
if isinstance(o, datetime):
if is_naive(o):
o = convert_to_utc(o)
return o.isoformat()
if isinstance(o, date):
return o.strftime("%Y-%m-%d")
if isinstance(o, Decimal):
data = serialize(o)
if isinstance(data, dict) and DATA in data:
return data[DATA]
if isinstance(o, bytes):
try:
return o.decode("unicode_escape")
except UnicodeDecodeError:
return repr(o)
try:
data = serialize(o)
if isinstance(data, dict) and CLASSNAME in data:
# this is here for backwards compatibility
if (
data[CLASSNAME].startswith("numpy")
or data[CLASSNAME] == "kubernetes.client.models.v1_pod.V1Pod"
):
return data[DATA]
return data
except TypeError:
return repr(o)
class XComEncoder(json.JSONEncoder):
"""This encoder serializes any object that has attr, dataclass or a custom serializer."""
def default(self, o: object) -> Any:
try:
return serialize(o)
except TypeError:
return super().default(o)
def encode(self, o: Any) -> str:
# checked here and in serialize
if isinstance(o, dict) and (CLASSNAME in o or SCHEMA_ID in o):
raise AttributeError(f"reserved key {CLASSNAME} found in dict to serialize")
# tuples are not preserved by std python serializer
if isinstance(o, tuple):
o = self.default(o)
return super().encode(o)
class XComDecoder(json.JSONDecoder):
"""Deserialize dicts to objects if they contain the `__classname__` key, otherwise return the dict."""
def __init__(self, *args, **kwargs) -> None:
if not kwargs.get("object_hook"):
kwargs["object_hook"] = self.object_hook
super().__init__(*args, **kwargs)
def object_hook(self, dct: dict) -> object:
return deserialize(dct)
@staticmethod
def orm_object_hook(dct: dict) -> object:
"""Creates a readable representation of a serialized object."""
return deserialize(dct, False)
# backwards compatibility
AirflowJsonEncoder = WebEncoder
| 4,289 | 32.779528 | 106 | py |
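A minimal usage sketch of the encoders above, assuming an installed Airflow where `airflow.utils.json` is importable; the exact serialized form of the Decimal depends on the serde serializers registered in that installation.

import json
from datetime import date, datetime
from decimal import Decimal

from airflow.utils.json import WebEncoder  # assumes airflow is installed

# WebEncoder is display-only: naive datetimes are converted to UTC and ISO-formatted,
# dates become YYYY-MM-DD strings, and unknown objects fall back to repr().
doc = {
    "when": datetime(2023, 1, 1, 12, 30),
    "day": date(2023, 1, 1),
    "amount": Decimal("1.25"),
    "blob": b"raw bytes",
}
print(json.dumps(doc, cls=WebEncoder, sort_keys=True))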
airflow | airflow-main/airflow/utils/orm_event_handlers.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import os
import time
import traceback
import sqlalchemy.orm.mapper
from sqlalchemy import event, exc
from airflow.configuration import conf
log = logging.getLogger(__name__)
def setup_event_handlers(engine):
"""Setups event handlers."""
from airflow.models import import_all_models
event.listen(sqlalchemy.orm.mapper, "before_configured", import_all_models, once=True)
@event.listens_for(engine, "connect")
def connect(dbapi_connection, connection_record):
connection_record.info["pid"] = os.getpid()
if engine.dialect.name == "sqlite":
@event.listens_for(engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
# this ensures coherence in mysql when storing datetimes (not required for postgres)
if engine.dialect.name == "mysql":
@event.listens_for(engine, "connect")
def set_mysql_timezone(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("SET time_zone = '+00:00'")
cursor.close()
@event.listens_for(engine, "checkout")
def checkout(dbapi_connection, connection_record, connection_proxy):
pid = os.getpid()
if connection_record.info["pid"] != pid:
connection_record.connection = connection_proxy.connection = None
raise exc.DisconnectionError(
f"Connection record belongs to pid {connection_record.info['pid']}, "
f"attempting to check out in pid {pid}"
)
if conf.getboolean("debug", "sqlalchemy_stats", fallback=False):
@event.listens_for(engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
conn.info.setdefault("query_start_time", []).append(time.perf_counter())
@event.listens_for(engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
total = time.perf_counter() - conn.info["query_start_time"].pop()
file_name = [
f"'{f.name}':{f.filename}:{f.lineno}"
for f in traceback.extract_stack()
if "sqlalchemy" not in f.filename
][-1]
stack = [f for f in traceback.extract_stack() if "sqlalchemy" not in f.filename]
stack_info = ">".join([f"{f.filename.rpartition('/')[-1]}:{f.name}" for f in stack][-3:])
conn.info.setdefault("query_start_time", []).append(time.monotonic())
log.info(
"@SQLALCHEMY %s |$ %s |$ %s |$ %s ",
total,
file_name,
stack_info,
statement.replace("\n", " "),
)
| 3,731 | 38.702128 | 101 | py |
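A hedged sketch of attaching these handlers to an engine. It assumes SQLAlchemy is installed and an Airflow configuration is available, since the handlers read `airflow.configuration.conf`; the in-memory SQLite URL is only for illustration.

from sqlalchemy import create_engine

from airflow.utils.orm_event_handlers import setup_event_handlers

engine = create_engine("sqlite:///:memory:")
setup_event_handlers(engine)  # registers connect/checkout listeners (and query stats if enabled)

with engine.connect() as conn:
    # With [debug] sqlalchemy_stats = True, the before/after cursor hooks log query timing.
    conn.exec_driver_sql("SELECT 1")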
airflow | airflow-main/airflow/utils/helpers.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import copy
import re
import signal
import warnings
from datetime import datetime
from functools import reduce
from itertools import filterfalse, tee
from typing import TYPE_CHECKING, Any, Callable, Generator, Iterable, Mapping, MutableMapping, TypeVar, cast
from airflow.configuration import conf
from airflow.exceptions import AirflowException, RemovedInAirflow3Warning
from airflow.utils.context import Context
from airflow.utils.module_loading import import_string
from airflow.utils.types import NOTSET
if TYPE_CHECKING:
import jinja2
from airflow.models.taskinstance import TaskInstance
KEY_REGEX = re.compile(r"^[\w.-]+$")
GROUP_KEY_REGEX = re.compile(r"^[\w-]+$")
CAMELCASE_TO_SNAKE_CASE_REGEX = re.compile(r"(?!^)([A-Z]+)")
T = TypeVar("T")
S = TypeVar("S")
def validate_key(k: str, max_length: int = 250):
"""Validates value used as a key."""
if not isinstance(k, str):
raise TypeError(f"The key has to be a string and is {type(k)}:{k}")
if len(k) > max_length:
raise AirflowException(f"The key has to be less than {max_length} characters")
if not KEY_REGEX.match(k):
raise AirflowException(
f"The key {k!r} has to be made of alphanumeric characters, dashes, "
f"dots and underscores exclusively"
)
def validate_group_key(k: str, max_length: int = 200):
"""Validates value used as a group key."""
if not isinstance(k, str):
raise TypeError(f"The key has to be a string and is {type(k)}:{k}")
if len(k) > max_length:
raise AirflowException(f"The key has to be less than {max_length} characters")
if not GROUP_KEY_REGEX.match(k):
raise AirflowException(
f"The key {k!r} has to be made of alphanumeric characters, dashes and underscores exclusively"
)
def alchemy_to_dict(obj: Any) -> dict | None:
"""Transforms a SQLAlchemy model instance into a dictionary."""
if not obj:
return None
output = {}
for col in obj.__table__.columns:
value = getattr(obj, col.name)
if isinstance(value, datetime):
value = value.isoformat()
output[col.name] = value
return output
def ask_yesno(question: str, default: bool | None = None) -> bool:
"""Helper to get a yes or no answer from the user."""
yes = {"yes", "y"}
no = {"no", "n"}
print(question)
while True:
choice = input().lower()
if choice == "" and default is not None:
return default
if choice in yes:
return True
if choice in no:
return False
print("Please respond with y/yes or n/no.")
def prompt_with_timeout(question: str, timeout: int, default: bool | None = None) -> bool:
"""Ask the user a question and timeout if they don't respond."""
def handler(signum, frame):
raise AirflowException(f"Timeout {timeout}s reached")
signal.signal(signal.SIGALRM, handler)
signal.alarm(timeout)
try:
return ask_yesno(question, default)
finally:
signal.alarm(0)
def is_container(obj: Any) -> bool:
"""Test if an object is a container (iterable) but not a string."""
return hasattr(obj, "__iter__") and not isinstance(obj, str)
def as_tuple(obj: Any) -> tuple:
"""Return obj as a tuple if obj is a container, otherwise return a tuple containing obj."""
if is_container(obj):
return tuple(obj)
else:
return tuple([obj])
def chunks(items: list[T], chunk_size: int) -> Generator[list[T], None, None]:
"""Yield successive chunks of a given size from a list of items."""
if chunk_size <= 0:
raise ValueError("Chunk size must be a positive integer")
for i in range(0, len(items), chunk_size):
yield items[i : i + chunk_size]
def reduce_in_chunks(fn: Callable[[S, list[T]], S], iterable: list[T], initializer: S, chunk_size: int = 0):
"""Split the list of items into chunks of a given size and pass each chunk through the reducer."""
if len(iterable) == 0:
return initializer
if chunk_size == 0:
chunk_size = len(iterable)
return reduce(fn, chunks(iterable, chunk_size), initializer)
def as_flattened_list(iterable: Iterable[Iterable[T]]) -> list[T]:
"""
Return an iterable with one level flattened.
>>> as_flattened_list((('blue', 'red'), ('green', 'yellow', 'pink')))
['blue', 'red', 'green', 'yellow', 'pink']
"""
return [e for i in iterable for e in i]
def parse_template_string(template_string: str) -> tuple[str | None, jinja2.Template | None]:
"""Parses Jinja template string."""
import jinja2
if "{{" in template_string: # jinja mode
return None, jinja2.Template(template_string)
else:
return template_string, None
def render_log_filename(ti: TaskInstance, try_number, filename_template) -> str:
"""
Given task instance, try_number, filename_template, return the rendered log filename.
:param ti: task instance
:param try_number: try_number of the task
:param filename_template: filename template, which can be jinja template or
python string template
"""
filename_template, filename_jinja_template = parse_template_string(filename_template)
if filename_jinja_template:
jinja_context = ti.get_template_context()
jinja_context["try_number"] = try_number
return render_template_to_string(filename_jinja_template, jinja_context)
return filename_template.format(
dag_id=ti.dag_id,
task_id=ti.task_id,
execution_date=ti.execution_date.isoformat(),
try_number=try_number,
)
def convert_camel_to_snake(camel_str: str) -> str:
"""Converts CamelCase to snake_case."""
return CAMELCASE_TO_SNAKE_CASE_REGEX.sub(r"_\1", camel_str).lower()
def merge_dicts(dict1: dict, dict2: dict) -> dict:
"""
Merge two dicts recursively, returning new dict (input dict is not mutated).
Lists are not concatenated. Items in dict2 overwrite those also found in dict1.
"""
merged = dict1.copy()
for k, v in dict2.items():
if k in merged and isinstance(v, dict):
merged[k] = merge_dicts(merged.get(k, {}), v)
else:
merged[k] = v
return merged
def partition(pred: Callable[[T], bool], iterable: Iterable[T]) -> tuple[Iterable[T], Iterable[T]]:
"""Use a predicate to partition entries into false entries and true entries."""
iter_1, iter_2 = tee(iterable)
return filterfalse(pred, iter_1), filter(pred, iter_2)
def chain(*args, **kwargs):
"""This function is deprecated. Please use `airflow.models.baseoperator.chain`."""
warnings.warn(
"This function is deprecated. Please use `airflow.models.baseoperator.chain`.",
RemovedInAirflow3Warning,
stacklevel=2,
)
return import_string("airflow.models.baseoperator.chain")(*args, **kwargs)
def cross_downstream(*args, **kwargs):
"""This function is deprecated. Please use `airflow.models.baseoperator.cross_downstream`."""
warnings.warn(
"This function is deprecated. Please use `airflow.models.baseoperator.cross_downstream`.",
RemovedInAirflow3Warning,
stacklevel=2,
)
return import_string("airflow.models.baseoperator.cross_downstream")(*args, **kwargs)
def build_airflow_url_with_query(query: dict[str, Any]) -> str:
"""
Build airflow url using base_url and default_view and provided query.
For example:
http://0.0.0.0:8000/base/graph?dag_id=my-task&root=&execution_date=2020-10-27T10%3A59%3A25.615587
"""
import flask
view = conf.get_mandatory_value("webserver", "dag_default_view").lower()
return flask.url_for(f"Airflow.{view}", **query)
# The 'template' argument is typed as Any because the jinja2.Template is too
# dynamic to be effectively type-checked.
def render_template(template: Any, context: MutableMapping[str, Any], *, native: bool) -> Any:
"""Render a Jinja2 template with given Airflow context.
The default implementation of ``jinja2.Template.render()`` converts the
input context into dict eagerly many times, which triggers deprecation
messages in our custom context class. This function takes the implementation
apart and retains the context mapping without resolving it eagerly.
:param template: A Jinja2 template to render.
:param context: The Airflow task context to render the template with.
:param native: If set to *True*, render the template into a native type. A
DAG can enable this with ``render_template_as_native_obj=True``.
:returns: The render result.
"""
context = copy.copy(context)
env = template.environment
if template.globals:
context.update((k, v) for k, v in template.globals.items() if k not in context)
try:
nodes = template.root_render_func(env.context_class(env, context, template.name, template.blocks))
except Exception:
env.handle_exception() # Rewrite traceback to point to the template.
if native:
import jinja2.nativetypes
return jinja2.nativetypes.native_concat(nodes)
return "".join(nodes)
def render_template_to_string(template: jinja2.Template, context: Context) -> str:
"""Shorthand to ``render_template(native=False)`` with better typing support."""
return render_template(template, cast(MutableMapping[str, Any], context), native=False)
def render_template_as_native(template: jinja2.Template, context: Context) -> Any:
"""Shorthand to ``render_template(native=True)`` with better typing support."""
return render_template(template, cast(MutableMapping[str, Any], context), native=True)
def exactly_one(*args) -> bool:
"""
Returns True if exactly one of *args is "truthy", and False otherwise.
If user supplies an iterable, we raise ValueError and force them to unpack.
"""
if is_container(args[0]):
raise ValueError(
"Not supported for iterable args. Use `*` to unpack your iterable in the function call."
)
return sum(map(bool, args)) == 1
def at_most_one(*args) -> bool:
"""
Returns True if at most one of *args is "truthy", and False otherwise.
NOTSET is treated the same as None.
If user supplies an iterable, we raise ValueError and force them to unpack.
"""
def is_set(val):
if val is NOTSET:
return False
else:
return bool(val)
return sum(map(is_set, args)) in (0, 1)
def prune_dict(val: Any, mode="strict"):
"""
Given dict ``val``, returns new dict based on ``val`` with all empty elements removed.
What constitutes "empty" is controlled by the ``mode`` parameter. If mode is 'strict'
then only ``None`` elements will be removed. If mode is ``truthy``, then element ``x``
will be removed if ``bool(x) is False``.
"""
def is_empty(x):
if mode == "strict":
return x is None
elif mode == "truthy":
return bool(x) is False
raise ValueError("allowable values for `mode` include 'truthy' and 'strict'")
if isinstance(val, dict):
new_dict = {}
for k, v in val.items():
if is_empty(v):
continue
elif isinstance(v, (list, dict)):
new_val = prune_dict(v, mode=mode)
if new_val:
new_dict[k] = new_val
else:
new_dict[k] = v
return new_dict
elif isinstance(val, list):
new_list = []
for v in val:
if is_empty(v):
continue
elif isinstance(v, (list, dict)):
new_val = prune_dict(v, mode=mode)
if new_val:
new_list.append(new_val)
else:
new_list.append(v)
return new_list
else:
return val
def prevent_duplicates(kwargs1: dict[str, Any], kwargs2: Mapping[str, Any], *, fail_reason: str) -> None:
"""Ensure *kwargs1* and *kwargs2* do not contain common keys.
:raises TypeError: If common keys are found.
"""
duplicated_keys = set(kwargs1).intersection(kwargs2)
if not duplicated_keys:
return
if len(duplicated_keys) == 1:
raise TypeError(f"{fail_reason} argument: {duplicated_keys.pop()}")
duplicated_keys_display = ", ".join(sorted(duplicated_keys))
raise TypeError(f"{fail_reason} arguments: {duplicated_keys_display}")
| 13,304 | 34.291777 | 108 | py |
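A few of the helpers above in isolation; the expected results shown in the comments follow directly from the behaviour documented in the functions themselves.

from airflow.utils.helpers import chunks, exactly_one, merge_dicts, prune_dict

list(chunks([1, 2, 3, 4, 5], 2))                    # [[1, 2], [3, 4], [5]]
merge_dicts({"a": {"x": 1}}, {"a": {"y": 2}})       # {'a': {'x': 1, 'y': 2}}
prune_dict({"a": None, "b": {"c": None, "d": 1}})   # {'b': {'d': 1}} (strict mode drops None)
exactly_one(None, "value", 0)                       # True: exactly one truthy argument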
airflow | airflow-main/airflow/utils/mixins.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import multiprocessing
import typing
from airflow.configuration import conf
from airflow.utils.context import Context
if typing.TYPE_CHECKING:
import multiprocessing.context
from airflow.models.operator import Operator
class MultiprocessingStartMethodMixin:
"""Convenience class to add support for different types of multiprocessing."""
def _get_multiprocessing_start_method(self) -> str:
"""
Determine method of creating new processes.
Checks whether ``mp_start_method`` is set in the configuration; otherwise, uses the OS default.
"""
if conf.has_option("core", "mp_start_method"):
return conf.get_mandatory_value("core", "mp_start_method")
method = multiprocessing.get_start_method()
if not method:
raise ValueError("Failed to determine start method")
return method
def _get_multiprocessing_context(self) -> multiprocessing.context.DefaultContext:
mp_start_method = self._get_multiprocessing_start_method()
return multiprocessing.get_context(mp_start_method) # type: ignore
class ResolveMixin:
"""A runtime-resolved value."""
def iter_references(self) -> typing.Iterable[tuple[Operator, str]]:
"""Find underlying XCom references this contains.
This is used by the DAG parser to recursively find task dependencies.
:meta private:
"""
raise NotImplementedError
def resolve(self, context: Context) -> typing.Any:
"""Resolve this value for runtime.
:meta private:
"""
raise NotImplementedError
| 2,425 | 32.232877 | 86 | py |
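A sketch of how a component might opt into the configurable start method via the mixin; `_Worker` and `work` are hypothetical names used only for illustration.

from airflow.utils.mixins import MultiprocessingStartMethodMixin


def work(n: int) -> None:
    print(f"working on {n}")


class _Worker(MultiprocessingStartMethodMixin):
    def run(self, n: int) -> None:
        # Honours [core] mp_start_method when set, otherwise the OS default.
        ctx = self._get_multiprocessing_context()
        proc = ctx.Process(target=work, args=(n,))
        proc.start()
        proc.join()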
airflow | airflow-main/airflow/utils/docs.py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
try:
import importlib_metadata
except ImportError:
from importlib import metadata as importlib_metadata # type: ignore[no-redef]
def get_docs_url(page: str | None = None) -> str:
"""Prepare link to Airflow documentation."""
from airflow.version import version
if any(suffix in version for suffix in ["dev", "a", "b"]):
result = (
"http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com/docs/apache-airflow/latest/"
)
else:
result = f"https://airflow.apache.org/docs/apache-airflow/{version}/"
if page:
result = result + page
return result
def get_doc_url_for_provider(provider_name: str, provider_version: str) -> str:
"""Prepare link to Airflow Provider documentation."""
try:
metadata_items = importlib_metadata.metadata(provider_name).get_all("Project-URL")
if isinstance(metadata_items, str):
metadata_items = [metadata_items]
if metadata_items:
for item in metadata_items:
if item.lower().startswith("documentation"):
_, _, url = item.partition(",")
if url:
return url.strip()
except importlib_metadata.PackageNotFoundError:
pass
# Fallback if provider is apache one
if provider_name.startswith("apache-airflow"):
return f"https://airflow.apache.org/docs/{provider_name}/{provider_version}/"
return "https://airflow.apache.org/docs/apache-airflow-providers/index.html#creating-your-own-providers"
| 2,383 | 40.103448 | 108 | py |
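Example calls for the two helpers above. The returned URLs depend on the installed Airflow version and on whether the provider package exposes a `Documentation` project URL, so the comments are illustrative rather than exact.

from airflow.utils.docs import get_doc_url_for_provider, get_docs_url

get_docs_url("howto/connection.html")
# e.g. https://airflow.apache.org/docs/apache-airflow/<version>/howto/connection.html

get_doc_url_for_provider("apache-airflow-providers-http", "4.0.0")
# e.g. the provider's Documentation project URL if installed, or
# https://airflow.apache.org/docs/apache-airflow-providers-http/4.0.0/ as a fallback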
airflow | airflow-main/airflow/utils/dag_cycle_tester.py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""DAG Cycle tester."""
from __future__ import annotations
from collections import defaultdict, deque
from typing import TYPE_CHECKING
from airflow.exceptions import AirflowDagCycleException, RemovedInAirflow3Warning
if TYPE_CHECKING:
from airflow.models.dag import DAG
CYCLE_NEW = 0
CYCLE_IN_PROGRESS = 1
CYCLE_DONE = 2
def test_cycle(dag: DAG) -> None:
"""
A wrapper around `check_cycle`, kept for backward compatibility purposes.
New code should use `check_cycle` instead, since this function's name starts
with 'test_' and will be collected as a unit test by pytest, resulting in failure.
"""
from warnings import warn
warn(
"Deprecated, please use `check_cycle` at the same module instead.",
RemovedInAirflow3Warning,
stacklevel=2,
)
return check_cycle(dag)
def check_cycle(dag: DAG) -> None:
"""Check to see if there are any cycles in the DAG.
:raises AirflowDagCycleException: If cycle is found in the DAG.
"""
# default of int is 0 which corresponds to CYCLE_NEW
visited: dict[str, int] = defaultdict(int)
path_stack: deque[str] = deque()
task_dict = dag.task_dict
def _check_adjacent_tasks(task_id, current_task):
"""Returns first untraversed child task, else None if all tasks traversed."""
for adjacent_task in current_task.get_direct_relative_ids():
if visited[adjacent_task] == CYCLE_IN_PROGRESS:
msg = f"Cycle detected in DAG: {dag.dag_id}. Faulty task: {task_id}"
raise AirflowDagCycleException(msg)
elif visited[adjacent_task] == CYCLE_NEW:
return adjacent_task
return None
for dag_task_id in dag.task_dict.keys():
if visited[dag_task_id] == CYCLE_DONE:
continue
path_stack.append(dag_task_id)
while path_stack:
current_task_id = path_stack[-1]
if visited[current_task_id] == CYCLE_NEW:
visited[current_task_id] = CYCLE_IN_PROGRESS
task = task_dict[current_task_id]
child_to_check = _check_adjacent_tasks(current_task_id, task)
if not child_to_check:
visited[current_task_id] = CYCLE_DONE
path_stack.pop()
else:
path_stack.append(child_to_check)
| 3,137 | 35.917647 | 90 | py |
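A minimal sketch of `check_cycle` catching a deliberately cyclic DAG. It assumes `EmptyOperator` is available in the installed Airflow; the DAG id, task ids and start date are made up for the example.

from datetime import datetime

from airflow.models.dag import DAG
from airflow.operators.empty import EmptyOperator
from airflow.utils.dag_cycle_tester import check_cycle

with DAG(dag_id="cycle_demo", start_date=datetime(2023, 1, 1)) as dag:
    a = EmptyOperator(task_id="a")
    b = EmptyOperator(task_id="b")
    a >> b >> a  # introduces a cycle: a -> b -> a

check_cycle(dag)  # raises AirflowDagCycleException naming the faulty task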
airflow | airflow-main/airflow/utils/cli_action_loggers.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
An Action Logger module.
A singleton pattern is applied to this module so that registered
callbacks can be used throughout the same Python process.
"""
from __future__ import annotations
import json
import logging
from typing import Callable
def register_pre_exec_callback(action_logger):
"""Registers more action_logger function callback for pre-execution.
This function callback is expected to be called with keyword args.
For more about the arguments that is being passed to the callback,
refer to airflow.utils.cli.action_logging().
:param action_logger: An action logger function
:return: None
"""
logging.debug("Adding %s to pre execution callback", action_logger)
__pre_exec_callbacks.append(action_logger)
def register_post_exec_callback(action_logger):
"""Registers more action_logger function callback for post-execution.
This function callback is expected to be called with keyword args.
For more about the arguments that is being passed to the callback,
refer to airflow.utils.cli.action_logging().
:param action_logger: An action logger function
:return: None
"""
logging.debug("Adding %s to post execution callback", action_logger)
__post_exec_callbacks.append(action_logger)
def on_pre_execution(**kwargs):
"""Calls callbacks before execution.
Note that any exception from callback will be logged but won't be propagated.
:param kwargs:
:return: None
"""
logging.debug("Calling callbacks: %s", __pre_exec_callbacks)
for callback in __pre_exec_callbacks:
try:
callback(**kwargs)
except Exception:
logging.exception("Failed on pre-execution callback using %s", callback)
def on_post_execution(**kwargs):
"""Calls callbacks after execution.
As it's being called after execution, it can capture status of execution,
duration, etc. Note that any exception from callback will be logged but
won't be propagated.
:param kwargs:
:return: None
"""
logging.debug("Calling callbacks: %s", __post_exec_callbacks)
for callback in __post_exec_callbacks:
try:
callback(**kwargs)
except Exception:
logging.exception("Failed on post-execution callback using %s", callback)
def default_action_log(sub_command, user, task_id, dag_id, execution_date, host_name, full_command, **_):
"""Default action logger callback that behaves similar to ``action_logging``.
The difference is this function uses the global ORM session, and pushes a
``Log`` row into the database instead of actually logging.
"""
from sqlalchemy.exc import OperationalError, ProgrammingError
from airflow.models.log import Log
from airflow.utils import timezone
from airflow.utils.session import create_session
try:
with create_session() as session:
extra = json.dumps({"host_name": host_name, "full_command": full_command})
# Use bulk_insert_mappings here to avoid importing all models (which using the classes does) early
# on in the CLI
session.bulk_insert_mappings(
Log,
[
{
"event": f"cli_{sub_command}",
"task_instance": None,
"owner": user,
"extra": extra,
"task_id": task_id,
"dag_id": dag_id,
"execution_date": execution_date,
"dttm": timezone.utcnow(),
}
],
)
except (OperationalError, ProgrammingError) as e:
expected = [
'"log" does not exist', # postgres
"no such table", # sqlite
"log' doesn't exist", # mysql
"Invalid object name 'log'", # mssql
]
error_is_ok = e.args and any(x in e.args[0] for x in expected)
if not error_is_ok:
logging.warning("Failed to log action %s", e)
except Exception as e:
logging.warning("Failed to log action %s", e)
__pre_exec_callbacks: list[Callable] = []
__post_exec_callbacks: list[Callable] = []
# By default, register default action log into pre-execution callback
register_pre_exec_callback(default_action_log)
| 5,160 | 34.840278 | 110 | py |
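A sketch of registering and firing a custom callback. In normal use the Airflow CLI calls `on_pre_execution` itself; `my_audit_logger` and the keyword arguments below are hypothetical values chosen to match what `default_action_log` expects.

from airflow.utils import cli_action_loggers


def my_audit_logger(**kwargs):
    print("CLI action:", kwargs.get("sub_command"))


cli_action_loggers.register_pre_exec_callback(my_audit_logger)

# Exceptions raised inside callbacks are logged and swallowed, so this is safe to call directly.
cli_action_loggers.on_pre_execution(
    sub_command="dags_list",
    user="alice",
    task_id=None,
    dag_id=None,
    execution_date=None,
    host_name="localhost",
    full_command="airflow dags list",
)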
airflow | airflow-main/airflow/utils/dates.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from datetime import datetime, timedelta
from typing import Collection
from croniter import croniter
from dateutil.relativedelta import relativedelta # for doctest
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.typing_compat import Literal
from airflow.utils import timezone
cron_presets: dict[str, str] = {
"@hourly": "0 * * * *",
"@daily": "0 0 * * *",
"@weekly": "0 0 * * 0",
"@monthly": "0 0 1 * *",
"@quarterly": "0 0 1 */3 *",
"@yearly": "0 0 1 1 *",
}
def date_range(
start_date: datetime,
end_date: datetime | None = None,
num: int | None = None,
delta: str | timedelta | relativedelta | None = None,
) -> list[datetime]:
"""Get a list of dates in the specified range, separated by delta.
.. code-block:: pycon
>>> from airflow.utils.dates import date_range
>>> from datetime import datetime, timedelta
>>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta=timedelta(1))
[datetime.datetime(2016, 1, 1, 0, 0, tzinfo=Timezone('UTC')),
datetime.datetime(2016, 1, 2, 0, 0, tzinfo=Timezone('UTC')),
datetime.datetime(2016, 1, 3, 0, 0, tzinfo=Timezone('UTC'))]
>>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta="0 0 * * *")
[datetime.datetime(2016, 1, 1, 0, 0, tzinfo=Timezone('UTC')),
datetime.datetime(2016, 1, 2, 0, 0, tzinfo=Timezone('UTC')),
datetime.datetime(2016, 1, 3, 0, 0, tzinfo=Timezone('UTC'))]
>>> date_range(datetime(2016, 1, 1), datetime(2016, 3, 3), delta="0 0 0 * *")
[datetime.datetime(2016, 1, 1, 0, 0, tzinfo=Timezone('UTC')),
datetime.datetime(2016, 2, 1, 0, 0, tzinfo=Timezone('UTC')),
datetime.datetime(2016, 3, 1, 0, 0, tzinfo=Timezone('UTC'))]
:param start_date: anchor date to start the series from
:param end_date: right boundary for the date range
:param num: alternatively to end_date, you can specify the number of
entries you want in the range. This number can be negative,
output will always be sorted regardless
:param delta: step length. It can be datetime.timedelta or cron expression as string
"""
warnings.warn(
"`airflow.utils.dates.date_range()` is deprecated. Please use `airflow.timetables`.",
category=RemovedInAirflow3Warning,
stacklevel=2,
)
if not delta:
return []
if end_date:
if start_date > end_date:
raise Exception("Wait. start_date needs to be before end_date")
if num:
raise Exception("Wait. Either specify end_date OR num")
if not end_date and not num:
end_date = timezone.utcnow()
delta_iscron = False
time_zone = start_date.tzinfo
abs_delta: timedelta | relativedelta
if isinstance(delta, str):
delta_iscron = True
if timezone.is_localized(start_date):
start_date = timezone.make_naive(start_date, time_zone)
cron = croniter(cron_presets.get(delta, delta), start_date)
elif isinstance(delta, timedelta):
abs_delta = abs(delta)
elif isinstance(delta, relativedelta):
abs_delta = abs(delta)
else:
raise Exception("Wait. delta must be either datetime.timedelta or cron expression as str")
dates = []
if end_date:
if timezone.is_naive(start_date) and not timezone.is_naive(end_date):
end_date = timezone.make_naive(end_date, time_zone)
while start_date <= end_date: # type: ignore
if timezone.is_naive(start_date):
dates.append(timezone.make_aware(start_date, time_zone))
else:
dates.append(start_date)
if delta_iscron:
start_date = cron.get_next(datetime)
else:
start_date += abs_delta
else:
num_entries: int = num # type: ignore
for _ in range(abs(num_entries)):
if timezone.is_naive(start_date):
dates.append(timezone.make_aware(start_date, time_zone))
else:
dates.append(start_date)
if delta_iscron and num_entries > 0:
start_date = cron.get_next(datetime)
elif delta_iscron:
start_date = cron.get_prev(datetime)
elif num_entries > 0:
start_date += abs_delta
else:
start_date -= abs_delta
return sorted(dates)
def round_time(
dt: datetime,
delta: str | timedelta | relativedelta,
start_date: datetime = timezone.make_aware(datetime.min),
):
"""Returns ``start_date + i * delta`` for given ``i`` where the result is closest to ``dt``.
.. code-block:: pycon
>>> round_time(datetime(2015, 1, 1, 6), timedelta(days=1))
datetime.datetime(2015, 1, 1, 0, 0)
>>> round_time(datetime(2015, 1, 2), relativedelta(months=1))
datetime.datetime(2015, 1, 1, 0, 0)
>>> round_time(datetime(2015, 9, 16, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 16, 0, 0)
>>> round_time(datetime(2015, 9, 15, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 15, 0, 0)
>>> round_time(datetime(2015, 9, 14, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 14, 0, 0)
>>> round_time(datetime(2015, 9, 13, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 14, 0, 0)
"""
if isinstance(delta, str):
# It's cron based, so it's easy
time_zone = start_date.tzinfo
start_date = timezone.make_naive(start_date, time_zone)
cron = croniter(delta, start_date)
prev = cron.get_prev(datetime)
if prev == start_date:
return timezone.make_aware(start_date, time_zone)
else:
return timezone.make_aware(prev, time_zone)
# Ignore the microseconds of dt
dt -= timedelta(microseconds=dt.microsecond)
# We are looking for a datetime in the form start_date + i * delta
# which is as close as possible to dt. Since delta could be a relative
# delta we don't know its exact length in seconds so we cannot rely on
# division to find i. Instead we employ a binary search algorithm, first
# finding an upper and lower limit and then dissecting the interval until
# we have found the closest match.
# We first search an upper limit for i for which start_date + upper * delta
# exceeds dt.
upper = 1
while start_date + upper * delta < dt:
# To speed up finding an upper limit we grow this exponentially by a
# factor of 2
upper *= 2
# Since upper is the first value for which start_date + upper * delta
# exceeds dt, upper // 2 is below dt and therefore forms a lower limited
# for the i we are looking for
lower = upper // 2
# We now continue to intersect the interval between
# start_date + lower * delta and start_date + upper * delta
# until we find the closest value
while True:
# Invariant: start + lower * delta < dt <= start + upper * delta
# If start_date + (lower + 1)*delta exceeds dt, then either lower or
# lower+1 has to be the solution we are searching for
if start_date + (lower + 1) * delta >= dt:
# Check if start_date + (lower + 1)*delta or
# start_date + lower*delta is closer to dt and return the solution
if (start_date + (lower + 1) * delta) - dt <= dt - (start_date + lower * delta):
return start_date + (lower + 1) * delta
else:
return start_date + lower * delta
# We intersect the interval and either replace the lower or upper
# limit with the candidate
candidate = lower + (upper - lower) // 2
if start_date + candidate * delta >= dt:
upper = candidate
else:
lower = candidate
# in the special case when start_date > dt the search for upper will
# immediately stop for upper == 1 which results in lower = upper // 2 = 0
# and this function returns start_date.
TimeUnit = Literal["days", "hours", "minutes", "seconds"]
def infer_time_unit(time_seconds_arr: Collection[float]) -> TimeUnit:
"""Determine the most appropriate time unit for given durations (in seconds).
e.g. 5400 seconds => 'minutes', 36000 seconds => 'hours'
"""
if len(time_seconds_arr) == 0:
return "hours"
max_time_seconds = max(time_seconds_arr)
if max_time_seconds <= 60 * 2:
return "seconds"
elif max_time_seconds <= 60 * 60 * 2:
return "minutes"
elif max_time_seconds <= 24 * 60 * 60 * 2:
return "hours"
else:
return "days"
def scale_time_units(time_seconds_arr: Collection[float], unit: TimeUnit) -> Collection[float]:
"""Convert an array of time durations in seconds to the specified time unit."""
if unit == "minutes":
return list(map(lambda x: x / 60, time_seconds_arr))
elif unit == "hours":
return list(map(lambda x: x / (60 * 60), time_seconds_arr))
elif unit == "days":
return list(map(lambda x: x / (24 * 60 * 60), time_seconds_arr))
return time_seconds_arr
def days_ago(n, hour=0, minute=0, second=0, microsecond=0):
"""Get a datetime object representing *n* days ago.
By default the time is set to midnight.
"""
warnings.warn(
"Function `days_ago` is deprecated and will be removed in Airflow 3.0. "
"You can achieve equivalent behavior with `pendulum.today('UTC').add(days=-N, ...)`",
RemovedInAirflow3Warning,
stacklevel=2,
)
today = timezone.utcnow().replace(hour=hour, minute=minute, second=second, microsecond=microsecond)
return today - timedelta(days=n)
def parse_execution_date(execution_date_str):
"""Parse execution date string to datetime object."""
return timezone.parse(execution_date_str)
| 10,868 | 38.813187 | 103 | py |
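A few calls to the non-deprecated helpers above; the expected values follow the docstrings and are shown as comments.

from datetime import datetime, timedelta

from airflow.utils.dates import infer_time_unit, round_time, scale_time_units

round_time(datetime(2015, 9, 16, 6, 0), timedelta(days=1), datetime(2015, 9, 14, 0, 0))
# -> datetime(2015, 9, 16, 0, 0): the closest start_date + i * delta to the given dt

infer_time_unit([5400, 7200])              # 'minutes'
scale_time_units([5400, 7200], "minutes")  # [90.0, 120.0]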
airflow | airflow-main/airflow/utils/cli_app_builder.py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from contextlib import contextmanager
from functools import lru_cache
from typing import Generator
from flask import Flask
import airflow
from airflow.www.extensions.init_appbuilder import AirflowAppBuilder, init_appbuilder
from airflow.www.extensions.init_views import init_plugins
@lru_cache(maxsize=None)
def _return_appbuilder(app: Flask) -> AirflowAppBuilder:
"""Returns an appbuilder instance for the given app."""
init_appbuilder(app)
init_plugins(app)
return app.appbuilder # type: ignore[attr-defined]
@contextmanager
def get_application_builder() -> Generator[AirflowAppBuilder, None, None]:
static_folder = os.path.join(os.path.dirname(airflow.__file__), "www", "static")
flask_app = Flask(__name__, static_folder=static_folder)
with flask_app.app_context():
yield _return_appbuilder(flask_app)
| 1,681 | 35.565217 | 85 | py |
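A sketch of the context manager in use; it assumes a configured Airflow environment, since building the app builder initialises the webserver's Flask extensions.

from airflow.utils.cli_app_builder import get_application_builder

with get_application_builder() as appbuilder:
    # CLI commands typically use the attached security manager to manage users and roles.
    print(type(appbuilder.sm).__name__)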
airflow | airflow-main/airflow/utils/decorators.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sys
import warnings
from collections import deque
from functools import wraps
from typing import Callable, TypeVar, cast
from airflow.exceptions import RemovedInAirflow3Warning
T = TypeVar("T", bound=Callable)
def apply_defaults(func: T) -> T:
"""
This decorator is deprecated.
In previous versions, all subclasses of BaseOperator must use apply_default decorator for the
`default_args` feature to work properly.
In current version, it is optional. The decorator is applied automatically using the metaclass.
"""
warnings.warn(
"This decorator is deprecated. \n"
"\n"
"In previous versions, all subclasses of BaseOperator must use apply_default decorator for the "
"`default_args` feature to work properly.\n"
"\n"
"In current version, it is optional. The decorator is applied automatically using the metaclass.\n",
RemovedInAirflow3Warning,
stacklevel=3,
)
# Make it still be a wrapper to keep the previous behaviour of an extra stack frame
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return cast(T, wrapper)
def remove_task_decorator(python_source: str, task_decorator_name: str) -> str:
"""
Removes @task or similar decorators as well as @setup and @teardown.
:param python_source: The python source code
:param task_decorator_name: the decorator name
"""
def _remove_task_decorator(py_source, decorator_name):
if decorator_name not in py_source:
return py_source
split = py_source.split(decorator_name)
before_decorator, after_decorator = split[0], split[1]
if after_decorator[0] == "(":
after_decorator = _balance_parens(after_decorator)
if after_decorator[0] == "\n":
after_decorator = after_decorator[1:]
return before_decorator + after_decorator
decorators = ["@setup", "@teardown", task_decorator_name]
for decorator in decorators:
python_source = _remove_task_decorator(python_source, decorator)
return python_source
def _balance_parens(after_decorator):
num_paren = 1
after_decorator = deque(after_decorator)
after_decorator.popleft()
while num_paren:
current = after_decorator.popleft()
if current == "(":
num_paren = num_paren + 1
elif current == ")":
num_paren = num_paren - 1
return "".join(after_decorator)
class _autostacklevel_warn:
def __init__(self):
self.warnings = __import__("warnings")
def __getattr__(self, name):
return getattr(self.warnings, name)
def __dir__(self):
return dir(self.warnings)
def warn(self, message, category=None, stacklevel=1, source=None):
self.warnings.warn(message, category, stacklevel + 2, source)
def fixup_decorator_warning_stack(func):
if func.__globals__.get("warnings") is sys.modules["warnings"]:
# Yes, this is more than slightly hacky, but it _automatically_ sets the right stacklevel parameter to
# `warnings.warn` to ignore the decorator.
func.__globals__["warnings"] = _autostacklevel_warn()
| 4,035 | 33.793103 | 110 | py |
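A small illustration of `remove_task_decorator` stripping a decorator together with its call arguments; the decorator name and source snippet are made up for the example.

from airflow.utils.decorators import remove_task_decorator

src = "@task.virtualenv(use_dill=True)\ndef f():\n    return 1\n"
cleaned = remove_task_decorator(src, "@task.virtualenv")
# cleaned == "def f():\n    return 1\n"  (@setup/@teardown decorators would be stripped too)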
airflow | airflow-main/airflow/utils/strings.py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Common utility functions with strings."""
from __future__ import annotations
import string
from random import choice
def get_random_string(length=8, choices=string.ascii_letters + string.digits):
"""Generate random string."""
return "".join(choice(choices) for _ in range(length))
TRUE_LIKE_VALUES = {"on", "t", "true", "y", "yes", "1"}
def to_boolean(astring: str | None) -> bool:
"""Convert a string to a boolean."""
if astring is None:
return False
if astring.lower() in TRUE_LIKE_VALUES:
return True
return False
| 1,352 | 33.692308 | 78 | py |
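Quick examples of the two helpers above; expected results are in the comments.

from airflow.utils.strings import get_random_string, to_boolean

to_boolean("Yes")      # True  ("yes" is in TRUE_LIKE_VALUES; comparison is case-insensitive)
to_boolean("0")        # False ("0" is not a true-like value)
to_boolean(None)       # False
get_random_string(12)  # e.g. 'a8ZqK3xB0QpL' -- 12 random ASCII letters/digits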
airflow | airflow-main/airflow/utils/dot_renderer.py |
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Renderer DAG (tasks and dependencies) to the graphviz object."""
from __future__ import annotations
from typing import Any
import graphviz
from airflow import AirflowException
from airflow.models import TaskInstance
from airflow.models.baseoperator import BaseOperator
from airflow.models.dag import DAG
from airflow.models.mappedoperator import MappedOperator
from airflow.models.taskmixin import DependencyMixin
from airflow.serialization.serialized_objects import DagDependency
from airflow.utils.dag_edges import dag_edges
from airflow.utils.state import State
from airflow.utils.task_group import TaskGroup
def _refine_color(color: str):
"""
Converts a color in #RGB (12-bit) format to #RRGGBB (24-bit) format, if possible.
Otherwise, it returns the original value. Graphviz does not support colors in #RGB format.
:param color: Text representation of color
:return: Refined representation of color
"""
if len(color) == 4 and color[0] == "#":
color_r = color[1]
color_g = color[2]
color_b = color[3]
return "#" + color_r + color_r + color_g + color_g + color_b + color_b
return color
def _draw_task(
task: MappedOperator | BaseOperator,
parent_graph: graphviz.Digraph,
states_by_task_id: dict[Any, Any] | None,
) -> None:
"""Draw a single task on the given parent_graph."""
if states_by_task_id:
state = states_by_task_id.get(task.task_id)
color = State.color_fg(state)
fill_color = State.color(state)
else:
color = task.ui_fgcolor
fill_color = task.ui_color
parent_graph.node(
task.task_id,
_attributes={
"label": task.label,
"shape": "rectangle",
"style": "filled,rounded",
"color": _refine_color(color),
"fillcolor": _refine_color(fill_color),
},
)
def _draw_task_group(
task_group: TaskGroup, parent_graph: graphviz.Digraph, states_by_task_id: dict[str, str] | None
) -> None:
"""Draw the given task_group and its children on the given parent_graph."""
# Draw joins
if task_group.upstream_group_ids or task_group.upstream_task_ids:
parent_graph.node(
task_group.upstream_join_id,
_attributes={
"label": "",
"shape": "circle",
"style": "filled,rounded",
"color": _refine_color(task_group.ui_fgcolor),
"fillcolor": _refine_color(task_group.ui_color),
"width": "0.2",
"height": "0.2",
},
)
if task_group.downstream_group_ids or task_group.downstream_task_ids:
parent_graph.node(
task_group.downstream_join_id,
_attributes={
"label": "",
"shape": "circle",
"style": "filled,rounded",
"color": _refine_color(task_group.ui_fgcolor),
"fillcolor": _refine_color(task_group.ui_color),
"width": "0.2",
"height": "0.2",
},
)
# Draw children
for child in sorted(task_group.children.values(), key=lambda t: t.node_id if t.node_id else ""):
_draw_nodes(child, parent_graph, states_by_task_id)
def _draw_nodes(
node: DependencyMixin, parent_graph: graphviz.Digraph, states_by_task_id: dict[str, str] | None
) -> None:
"""Draw the node and its children on the given parent_graph recursively."""
if isinstance(node, BaseOperator) or isinstance(node, MappedOperator):
_draw_task(node, parent_graph, states_by_task_id)
else:
if not isinstance(node, TaskGroup):
raise AirflowException(f"The node {node} should be TaskGroup and is not")
# Draw TaskGroup
if node.is_root:
# No need to draw background for root TaskGroup.
_draw_task_group(node, parent_graph, states_by_task_id)
else:
with parent_graph.subgraph(name=f"cluster_{node.group_id}") as sub:
sub.attr(
shape="rectangle",
style="filled",
color=_refine_color(node.ui_fgcolor),
# Partially transparent CornflowerBlue
fillcolor="#6495ed7f",
label=node.label,
)
_draw_task_group(node, sub, states_by_task_id)
def render_dag_dependencies(deps: dict[str, list[DagDependency]]) -> graphviz.Digraph:
"""
Renders the DAG dependency to the DOT object.
:param deps: List of DAG dependencies
:return: Graphviz object
"""
dot = graphviz.Digraph(graph_attr={"rankdir": "LR"})
for dag, dependencies in deps.items():
for dep in dependencies:
with dot.subgraph(
name=dag,
graph_attr={
"rankdir": "LR",
"labelloc": "t",
"label": dag,
},
) as dep_subgraph:
dep_subgraph.edge(dep.source, dep.dependency_id)
dep_subgraph.edge(dep.dependency_id, dep.target)
return dot
def render_dag(dag: DAG, tis: list[TaskInstance] | None = None) -> graphviz.Digraph:
"""
Renders the DAG object to the DOT object.
If a task instance list is passed, the nodes will be painted according to task statuses.
:param dag: DAG that will be rendered.
:param tis: List of task instances
:return: Graphviz object
"""
dot = graphviz.Digraph(
dag.dag_id,
graph_attr={
"rankdir": dag.orientation if dag.orientation else "LR",
"labelloc": "t",
"label": dag.dag_id,
},
)
states_by_task_id = None
if tis is not None:
states_by_task_id = {ti.task_id: ti.state for ti in tis}
_draw_nodes(dag.task_group, dot, states_by_task_id)
for edge in dag_edges(dag):
# Gets an optional label for the edge; this will be None if none is specified.
label = dag.get_edge_info(edge["source_id"], edge["target_id"]).get("label")
# Add the edge to the graph with optional label
# (we can just use the maybe-None label variable directly)
dot.edge(edge["source_id"], edge["target_id"], label)
return dot
| 7,135 | 34.326733 | 100 | py |
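A sketch of rendering a DAG to DOT and SVG. It assumes the `graphviz` Python package and the Graphviz binaries are installed and that `EmptyOperator` is available; the DAG and file names are illustrative.

from datetime import datetime

from airflow.models.dag import DAG
from airflow.operators.empty import EmptyOperator
from airflow.utils.dot_renderer import render_dag

with DAG(dag_id="render_demo", start_date=datetime(2023, 1, 1)) as dag:
    EmptyOperator(task_id="extract") >> EmptyOperator(task_id="load")

dot = render_dag(dag)   # optionally pass tis=[...] to colour nodes by task state
print(dot.source)       # raw DOT text
dot.render("render_demo", format="svg", cleanup=True)  # writes render_demo.svg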
airflow | airflow-main/airflow/utils/entry_points.py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import collections
import functools
import logging
from typing import Iterator, Tuple
try:
import importlib_metadata as metadata
except ImportError:
from importlib import metadata # type: ignore[no-redef]
log = logging.getLogger(__name__)
EPnD = Tuple[metadata.EntryPoint, metadata.Distribution]
@functools.lru_cache(maxsize=None)
def _get_grouped_entry_points() -> dict[str, list[EPnD]]:
mapping: dict[str, list[EPnD]] = collections.defaultdict(list)
for dist in metadata.distributions():
try:
for e in dist.entry_points:
mapping[e.group].append((e, dist))
except Exception as e:
log.warning("Error when retrieving package metadata (skipping it): %s, %s", dist, e)
return mapping
def entry_points_with_dist(group: str) -> Iterator[EPnD]:
"""Retrieve entry points of the given group.
This is like the ``entry_points()`` function from ``importlib.metadata``,
except it also returns the distribution the entry point was loaded from.
Note that this may return multiple distributions for the same package if they
are loaded from different ``sys.path`` entries. The caller site should
implement appropriate deduplication logic if needed.
:param group: Filter results to only this entrypoint group
:return: Generator of (EntryPoint, Distribution) objects for the specified groups
"""
return iter(_get_grouped_entry_points()[group])
| 2,273 | 36.9 | 96 | py |
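Example iteration over an entry point group; `apache_airflow_provider` is assumed here to be the group that provider packages register under, based on Airflow's provider mechanism.

from airflow.utils.entry_points import entry_points_with_dist

for entry_point, dist in entry_points_with_dist("apache_airflow_provider"):
    # The same package may appear more than once; deduplicate here if that matters.
    print(dist.metadata["Name"], "->", entry_point.value)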
airflow | airflow-main/airflow/utils/log/json_formatter.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""json_formatter module stores all related to ElasticSearch specific logger classes."""
from __future__ import annotations
import json
import logging
from airflow.utils.helpers import merge_dicts
class JSONFormatter(logging.Formatter):
"""JSONFormatter instances are used to convert a log record to json."""
def __init__(self, fmt=None, datefmt=None, style="%", json_fields=None, extras=None):
super().__init__(fmt, datefmt, style)
if extras is None:
extras = {}
if json_fields is None:
json_fields = []
self.json_fields = json_fields
self.extras = extras
def usesTime(self):
return self.json_fields.count("asctime") > 0
def format(self, record):
super().format(record)
record_dict = {label: getattr(record, label, None) for label in self.json_fields}
if "message" in self.json_fields:
msg = record_dict["message"]
if record.exc_text:
if msg[-1:] != "\n":
msg = msg + "\n"
msg = msg + record.exc_text
if record.stack_info:
if msg[-1:] != "\n":
msg = msg + "\n"
msg = msg + self.formatStack(record.stack_info)
record_dict["message"] = msg
merged_record = merge_dicts(record_dict, self.extras)
return json.dumps(merged_record)
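# Illustrative usage sketch (editorial addition, not part of the original module). It wires the
# formatter into a plain StreamHandler; the field names and the "environment" extra are example
# values only.
def _example_json_formatter_usage() -> None:
    handler = logging.StreamHandler()
    handler.setFormatter(
        JSONFormatter(json_fields=["asctime", "levelname", "message"], extras={"environment": "dev"})
    )
    example_logger = logging.getLogger("example.json")
    example_logger.addHandler(handler)
    example_logger.warning("something happened")  # emitted as a single JSON document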
| 2,200 | 36.948276 | 89 |
py
|
airflow
|
airflow-main/airflow/utils/log/non_caching_file_handler.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from logging import FileHandler
from logging.handlers import RotatingFileHandler
from typing import IO
def make_file_io_non_caching(io: IO[str]) -> IO[str]:
try:
fd = io.fileno()
os.posix_fadvise(fd, 0, 0, os.POSIX_FADV_DONTNEED)
except Exception:
# in case either file descriptor cannot be retrieved or fadvise is not available
# we should simply return the wrapper retrieved by FileHandler's open method
# the advice to the kernel is just an advice and if we cannot give it, we won't
pass
return io
class NonCachingFileHandler(FileHandler):
"""
An extension of FileHandler, advises the Kernel to not cache the file in PageCache when it is written.
    While there is nothing wrong with such a cache (it will be cleaned when memory is needed), it
    causes ever-growing memory usage when the scheduler is running, as it keeps writing new log
    files that are never rotated later on. This might lead to confusion for our users,
    who monitor the memory usage of the scheduler without realising that it is harmless and
    expected in this case.
See https://github.com/apache/airflow/issues/14924
Adding the advice to Kernel might help with not generating the cache memory growth in the first place.
"""
def _open(self):
return make_file_io_non_caching(super()._open())
class NonCachingRotatingFileHandler(RotatingFileHandler):
"""
An extension of RotatingFileHandler, advises the Kernel to not cache the file in PageCache when written.
    While there is nothing wrong with such a cache (it will be cleaned when memory is needed), it
    causes ever-growing memory usage when the scheduler is running, as it keeps writing new log
    files that are never rotated later on. This might lead to confusion for our users,
    who monitor the memory usage of the scheduler without realising that it is harmless and
    expected in this case.
See https://github.com/apache/airflow/issues/27065
Adding the advice to Kernel might help with not generating the cache memory growth in the first place.
"""
def _open(self):
return make_file_io_non_caching(super()._open())
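# Illustrative usage sketch (editorial addition, not part of the original module). The log path
# and rotation settings below are example values only.
def _example_non_caching_handler_usage() -> None:
    import logging
    handler = NonCachingRotatingFileHandler(
        "/tmp/example-scheduler.log", maxBytes=10 * 1024 * 1024, backupCount=5
    )
    handler.setLevel(logging.INFO)
    logging.getLogger("example.scheduler").addHandler(handler)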
| 3,042 | 40.684932 | 108 |
py
|
airflow
|
airflow-main/airflow/utils/log/colored_log.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Class responsible for colouring logs based on log level."""
from __future__ import annotations
import sys
from logging import LogRecord
from typing import Any
import re2
from colorlog import TTYColoredFormatter
from colorlog.escape_codes import esc, escape_codes
from airflow.utils.log.timezone_aware import TimezoneAware
DEFAULT_COLORS = {
"DEBUG": "green",
"INFO": "",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red",
}
BOLD_ON = escape_codes["bold"]
BOLD_OFF = esc("22")
class CustomTTYColoredFormatter(TTYColoredFormatter, TimezoneAware):
"""
Custom log formatter.
Extends `colored.TTYColoredFormatter` by adding attributes
to message arguments and coloring error traceback.
"""
def __init__(self, *args, **kwargs):
kwargs["stream"] = sys.stdout or kwargs.get("stream")
kwargs["log_colors"] = DEFAULT_COLORS
super().__init__(*args, **kwargs)
@staticmethod
def _color_arg(arg: Any) -> str | float | int:
if isinstance(arg, (int, float)):
# In case of %d or %f formatting
return arg
return BOLD_ON + str(arg) + BOLD_OFF
@staticmethod
def _count_number_of_arguments_in_message(record: LogRecord) -> int:
matches = re2.findall(r"%.", record.msg)
return len(matches) if matches else 0
def _color_record_args(self, record: LogRecord) -> LogRecord:
if isinstance(record.args, (tuple, list)):
record.args = tuple(self._color_arg(arg) for arg in record.args)
elif isinstance(record.args, dict):
if self._count_number_of_arguments_in_message(record) > 1:
# Case of logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
record.args = {key: self._color_arg(value) for key, value in record.args.items()}
else:
# Case of single dict passed to formatted string
record.args = self._color_arg(record.args) # type: ignore
elif isinstance(record.args, str):
record.args = self._color_arg(record.args)
return record
def _color_record_traceback(self, record: LogRecord) -> LogRecord:
if record.exc_info:
# Cache the traceback text to avoid converting it multiple times
# (it's constant anyway)
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
record.exc_text = (
self.color(self.log_colors, record.levelname) + record.exc_text + escape_codes["reset"]
)
return record
def format(self, record: LogRecord) -> str:
try:
if self.stream.isatty():
record = self._color_record_args(record)
record = self._color_record_traceback(record)
return super().format(record)
except ValueError: # I/O operation on closed file
from logging import Formatter
return Formatter().format(record)
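# Illustrative usage sketch (editorial addition, not part of the original module). The format
# string is an example value; "%(log_color)s" is where colorlog injects the level color.
def _example_colored_formatter_usage() -> None:
    import logging
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
        CustomTTYColoredFormatter(fmt="%(log_color)s[%(asctime)s] %(levelname)s - %(message)s")
    )
    logging.getLogger("example.colored").addHandler(handler)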
| 3,841 | 35.245283 | 107 |
py
|
airflow
|
airflow-main/airflow/utils/log/file_task_handler.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""File logging handler for tasks."""
from __future__ import annotations
import logging
import os
import warnings
from contextlib import suppress
from enum import Enum
from functools import cached_property
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Iterable
from urllib.parse import urljoin
import pendulum
from airflow.configuration import conf
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.executors.executor_loader import ExecutorLoader
from airflow.utils.context import Context
from airflow.utils.helpers import parse_template_string, render_template_to_string
from airflow.utils.log.logging_mixin import SetContextPropagate
from airflow.utils.log.non_caching_file_handler import NonCachingFileHandler
from airflow.utils.session import create_session
from airflow.utils.state import State, TaskInstanceState
if TYPE_CHECKING:
from airflow.models import TaskInstance
logger = logging.getLogger(__name__)
class LogType(str, Enum):
"""
Type of service from which we retrieve logs.
:meta private:
"""
TRIGGER = "trigger"
WORKER = "worker"
def _set_task_deferred_context_var():
"""
Tell task log handler that task exited with deferral.
This exists for the sole purpose of telling elasticsearch handler not to
emit end_of_log mark after task deferral.
Depending on how the task is run, we may need to set this in task command or in local task job.
Kubernetes executor requires the local task job invocation; local executor requires the task
command invocation.
:meta private:
"""
logger = logging.getLogger()
with suppress(StopIteration):
h = next(h for h in logger.handlers if hasattr(h, "ctx_task_deferred"))
h.ctx_task_deferred = True
def _fetch_logs_from_service(url, log_relative_path):
import httpx
from airflow.utils.jwt_signer import JWTSigner
timeout = conf.getint("webserver", "log_fetch_timeout_sec", fallback=None)
signer = JWTSigner(
secret_key=conf.get("webserver", "secret_key"),
expiration_time_in_seconds=conf.getint("webserver", "log_request_clock_grace", fallback=30),
audience="task-instance-logs",
)
response = httpx.get(
url,
timeout=timeout,
headers={"Authorization": signer.generate_signed_token({"filename": log_relative_path})},
)
response.encoding = "utf-8"
return response
_parse_timestamp = conf.getimport("logging", "interleave_timestamp_parser", fallback=None)
if not _parse_timestamp:
def _parse_timestamp(line: str):
timestamp_str, _ = line.split(" ", 1)
return pendulum.parse(timestamp_str.strip("[]"))
def _parse_timestamps_in_log_file(lines: Iterable[str]):
timestamp = None
next_timestamp = None
for idx, line in enumerate(lines):
if not line:
continue
with suppress(Exception):
# next_timestamp unchanged if line can't be parsed
next_timestamp = _parse_timestamp(line)
if next_timestamp:
timestamp = next_timestamp
yield timestamp, idx, line
def _interleave_logs(*logs):
records = []
for log in logs:
records.extend(_parse_timestamps_in_log_file(log.splitlines()))
last = None
for _, _, v in sorted(
records, key=lambda x: (x[0], x[1]) if x[0] else (pendulum.datetime(2000, 1, 1), x[1])
):
if v != last: # dedupe
yield v
last = v
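# Illustrative sketch (editorial addition, not part of the original module) of what
# _interleave_logs() does: lines from several sources are merged by parsed timestamp and
# consecutive duplicates are dropped. The two strings below are made-up example data.
def _example_interleave_logs() -> list[str]:
    worker_log = "[2023-01-01T00:00:01+00:00] task started\n[2023-01-01T00:00:03+00:00] task finished"
    trigger_log = "[2023-01-01T00:00:02+00:00] trigger fired"
    # returns the three lines ordered by timestamp, regardless of which blob they came from
    return list(_interleave_logs(worker_log, trigger_log))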
class FileTaskHandler(logging.Handler):
"""
FileTaskHandler is a python log handler that handles and reads task instance logs.
It creates and delegates log handling to `logging.FileHandler` after receiving task
    instance context. It reads logs from the task instance's host machine.
:param base_log_folder: Base log folder to place logs.
:param filename_template: template filename string
"""
trigger_should_wrap = True
def __init__(self, base_log_folder: str, filename_template: str | None = None):
super().__init__()
self.handler: logging.FileHandler | None = None
self.local_base = base_log_folder
if filename_template is not None:
warnings.warn(
"Passing filename_template to a log handler is deprecated and has no effect",
RemovedInAirflow3Warning,
# We want to reference the stack that actually instantiates the
# handler, not the one that calls super()__init__.
stacklevel=(2 if type(self) == FileTaskHandler else 3),
)
self.maintain_propagate: bool = False
"""
If true, overrides default behavior of setting propagate=False
:meta private:
"""
self.ctx_task_deferred = False
"""
If true, task exited with deferral to trigger.
Some handlers emit "end of log" markers, and may not wish to do so when task defers.
"""
def set_context(self, ti: TaskInstance) -> None | SetContextPropagate:
"""
Provide task_instance context to airflow task handler.
Generally speaking returns None. But if attr `maintain_propagate` has
been set to propagate, then returns sentinel MAINTAIN_PROPAGATE. This
has the effect of overriding the default behavior to set `propagate`
to False whenever set_context is called. At time of writing, this
functionality is only used in unit testing.
:param ti: task instance object
"""
local_loc = self._init_file(ti)
self.handler = NonCachingFileHandler(local_loc, encoding="utf-8")
if self.formatter:
self.handler.setFormatter(self.formatter)
self.handler.setLevel(self.level)
return SetContextPropagate.MAINTAIN_PROPAGATE if self.maintain_propagate else None
@staticmethod
def add_triggerer_suffix(full_path, job_id=None):
"""
Helper for deriving trigger log filename from task log filename.
E.g. given /path/to/file.log returns /path/to/file.log.trigger.123.log, where 123
is the triggerer id. We use the triggerer ID instead of trigger ID to distinguish
the files because, rarely, the same trigger could get picked up by two different
triggerer instances.
"""
full_path = Path(full_path).as_posix()
full_path += f".{LogType.TRIGGER.value}"
if job_id:
full_path += f".{job_id}.log"
return full_path
def emit(self, record):
if self.handler:
self.handler.emit(record)
def flush(self):
if self.handler:
self.handler.flush()
def close(self):
if self.handler:
self.handler.close()
def _render_filename(self, ti: TaskInstance, try_number: int) -> str:
"""Returns the worker log filename."""
with create_session() as session:
dag_run = ti.get_dagrun(session=session)
template = dag_run.get_log_template(session=session).filename
str_tpl, jinja_tpl = parse_template_string(template)
if jinja_tpl:
if hasattr(ti, "task"):
context = ti.get_template_context(session=session)
else:
context = Context(ti=ti, ts=dag_run.logical_date.isoformat())
context["try_number"] = try_number
return render_template_to_string(jinja_tpl, context)
if str_tpl:
try:
dag = ti.task.dag
except AttributeError: # ti.task is not always set.
data_interval = (dag_run.data_interval_start, dag_run.data_interval_end)
else:
if TYPE_CHECKING:
assert dag is not None
data_interval = dag.get_run_data_interval(dag_run)
if data_interval[0]:
data_interval_start = data_interval[0].isoformat()
else:
data_interval_start = ""
if data_interval[1]:
data_interval_end = data_interval[1].isoformat()
else:
data_interval_end = ""
return str_tpl.format(
dag_id=ti.dag_id,
task_id=ti.task_id,
run_id=ti.run_id,
data_interval_start=data_interval_start,
data_interval_end=data_interval_end,
execution_date=ti.get_dagrun().logical_date.isoformat(),
try_number=try_number,
)
else:
raise RuntimeError(f"Unable to render log filename for {ti}. This should never happen")
def _read_grouped_logs(self):
return False
@cached_property
def _executor_get_task_log(self) -> Callable[[TaskInstance, int], tuple[list[str], list[str]]]:
"""This cached property avoids loading executor repeatedly."""
executor = ExecutorLoader.get_default_executor()
return executor.get_task_log
def _read(
self,
ti: TaskInstance,
try_number: int,
metadata: dict[str, Any] | None = None,
):
"""
Template method that contains custom logic of reading logs given the try_number.
:param ti: task instance record
:param try_number: current try_number to read log from
:param metadata: log metadata,
            can be used for streaming log reading and auto-tailing.
            Following attributes are used:
            log_pos: (absolute) Char position up to which the log was retrieved
                in previous calls; this part will be skipped and only the
                following text will be returned, to be added to the tail.
:return: log message as a string and metadata.
Following attributes are used in metadata:
end_of_log: Boolean, True if end of log is reached or False
if further calls might get more log text.
This is determined by the status of the TaskInstance
log_pos: (absolute) Char position to which the log is retrieved
"""
# Task instance here might be different from task instance when
# initializing the handler. Thus explicitly getting log location
# is needed to get correct log path.
worker_log_rel_path = self._render_filename(ti, try_number)
messages_list: list[str] = []
remote_logs: list[str] = []
local_logs: list[str] = []
executor_messages: list[str] = []
executor_logs: list[str] = []
served_logs: list[str] = []
with suppress(NotImplementedError):
remote_messages, remote_logs = self._read_remote_logs(ti, try_number, metadata)
messages_list.extend(remote_messages)
if ti.state == TaskInstanceState.RUNNING:
response = self._executor_get_task_log(ti, try_number)
if response:
executor_messages, executor_logs = response
if executor_messages:
messages_list.extend(executor_messages)
if not (remote_logs and ti.state not in State.unfinished):
# when finished, if we have remote logs, no need to check local
worker_log_full_path = Path(self.local_base, worker_log_rel_path)
local_messages, local_logs = self._read_from_local(worker_log_full_path)
messages_list.extend(local_messages)
if ti.state in (TaskInstanceState.RUNNING, TaskInstanceState.DEFERRED) and not executor_messages:
served_messages, served_logs = self._read_from_logs_server(ti, worker_log_rel_path)
messages_list.extend(served_messages)
elif ti.state not in State.unfinished and not (local_logs or remote_logs):
# ordinarily we don't check served logs, with the assumption that users set up
# remote logging or shared drive for logs for persistence, but that's not always true
# so even if task is done, if no local logs or remote logs are found, we'll check the worker
served_messages, served_logs = self._read_from_logs_server(ti, worker_log_rel_path)
messages_list.extend(served_messages)
logs = "\n".join(
_interleave_logs(
*local_logs,
*remote_logs,
*(executor_logs or []),
*served_logs,
)
)
log_pos = len(logs)
messages = "".join([f"*** {x}\n" for x in messages_list])
end_of_log = ti.try_number != try_number or ti.state not in (
TaskInstanceState.RUNNING,
TaskInstanceState.DEFERRED,
)
if metadata and "log_pos" in metadata:
previous_chars = metadata["log_pos"]
logs = logs[previous_chars:] # Cut off previously passed log test as new tail
out_message = logs if "log_pos" in (metadata or {}) else messages + logs
return out_message, {"end_of_log": end_of_log, "log_pos": log_pos}
@staticmethod
def _get_pod_namespace(ti: TaskInstance):
pod_override = ti.executor_config.get("pod_override")
namespace = None
with suppress(Exception):
namespace = pod_override.metadata.namespace
return namespace or conf.get("kubernetes_executor", "namespace", fallback="default")
def _get_log_retrieval_url(
self, ti: TaskInstance, log_relative_path: str, log_type: LogType | None = None
) -> tuple[str, str]:
"""Given TI, generate URL with which to fetch logs from service log server."""
if log_type == LogType.TRIGGER:
if not ti.triggerer_job:
raise RuntimeError("Could not build triggerer log URL; no triggerer job.")
config_key = "triggerer_log_server_port"
config_default = 8794
hostname = ti.triggerer_job.hostname
log_relative_path = self.add_triggerer_suffix(log_relative_path, job_id=ti.triggerer_job.id)
else:
hostname = ti.hostname
config_key = "worker_log_server_port"
config_default = 8793
return (
urljoin(
f"http://{hostname}:{conf.get('logging', config_key, fallback=config_default)}/log/",
log_relative_path,
),
log_relative_path,
)
def read(self, task_instance, try_number=None, metadata=None):
"""
Read logs of given task instance from local machine.
:param task_instance: task instance object
:param try_number: task instance try_number to read logs from. If None
it returns all logs separated by try_number
        :param metadata: log metadata, can be used for streaming log reading and auto-tailing.
:return: a list of listed tuples which order log string by host
"""
# Task instance increments its try number when it starts to run.
# So the log for a particular task try will only show up when
        # try number gets incremented in the DB, i.e. logs produced in the time
        # after the CLI run starts and before try_number + 1 is set in the DB will not be displayed.
if try_number is None:
next_try = task_instance.next_try_number
try_numbers = list(range(1, next_try))
elif try_number < 1:
logs = [
[("default_host", f"Error fetching the logs. Try number {try_number} is invalid.")],
]
return logs, [{"end_of_log": True}]
else:
try_numbers = [try_number]
logs = [""] * len(try_numbers)
metadata_array = [{}] * len(try_numbers)
# subclasses implement _read and may not have log_type, which was added recently
for i, try_number_element in enumerate(try_numbers):
log, out_metadata = self._read(task_instance, try_number_element, metadata)
            # es_task_handler returns logs grouped by host; wrap other handlers returning a log string
            # with a default/empty host so that the UI can render the response in the same way
logs[i] = log if self._read_grouped_logs() else [(task_instance.hostname, log)]
metadata_array[i] = out_metadata
return logs, metadata_array
def _prepare_log_folder(self, directory: Path):
"""
Prepare the log folder and ensure its mode is as configured.
To handle log writing when tasks are impersonated, the log files need to
be writable by the user that runs the Airflow command and the user
that is impersonated. This is mainly to handle corner cases with the
SubDagOperator. When the SubDagOperator is run, all of the operators
run under the impersonated user and create appropriate log files
as the impersonated user. However, if the user manually runs tasks
of the SubDagOperator through the UI, then the log files are created
by the user that runs the Airflow command. For example, the Airflow
run command may be run by the `airflow_sudoable` user, but the Airflow
tasks may be run by the `airflow` user. If the log files are not
writable by both users, then it's possible that re-running a task
via the UI (or vice versa) results in a permission error as the task
tries to write to a log file created by the other user.
We leave it up to the user to manage their permissions by exposing configuration for both
new folders and new log files. Default is to make new log folders and files group-writeable
to handle most common impersonation use cases. The requirement in this case will be to make
sure that the same group is set as default group for both - impersonated user and main airflow
user.
"""
new_folder_permissions = int(
conf.get("logging", "file_task_handler_new_folder_permissions", fallback="0o775"), 8
)
directory.mkdir(mode=new_folder_permissions, parents=True, exist_ok=True)
if directory.stat().st_mode % 0o1000 != new_folder_permissions % 0o1000:
print(f"Changing {directory} permission to {new_folder_permissions}")
try:
directory.chmod(new_folder_permissions)
except PermissionError as e:
# In some circumstances (depends on user and filesystem) we might not be able to
# change the permission for the folder (when the folder was created by another user
# before or when the filesystem does not allow to change permission). We should not
# fail in this case but rather ignore it.
print(f"Failed to change {directory} permission to {new_folder_permissions}: {e}")
pass
def _init_file(self, ti):
"""
Create log directory and give it permissions that are configured.
See above _prepare_log_folder method for more detailed explanation.
:param ti: task instance object
        :return: full path of the log file for the given task instance
"""
new_file_permissions = int(
conf.get("logging", "file_task_handler_new_file_permissions", fallback="0o664"), 8
)
local_relative_path = self._render_filename(ti, ti.try_number)
full_path = os.path.join(self.local_base, local_relative_path)
if ti.is_trigger_log_context is True:
# if this is true, we're invoked via set_context in the context of
# setting up individual trigger logging. return trigger log path.
full_path = self.add_triggerer_suffix(full_path=full_path, job_id=ti.triggerer_job.id)
self._prepare_log_folder(Path(full_path).parent)
if not os.path.exists(full_path):
open(full_path, "a").close()
try:
os.chmod(full_path, new_file_permissions)
except OSError as e:
logging.warning("OSError while changing ownership of the log file. ", e)
return full_path
@staticmethod
def _read_from_local(worker_log_path: Path) -> tuple[list[str], list[str]]:
messages = []
logs = []
files = list(worker_log_path.parent.glob(worker_log_path.name + "*"))
if files:
messages.extend(["Found local files:", *[f" * {x}" for x in sorted(files)]])
for file in sorted(files):
logs.append(Path(file).read_text())
return messages, logs
def _read_from_logs_server(self, ti, worker_log_rel_path) -> tuple[list[str], list[str]]:
messages = []
logs = []
try:
log_type = LogType.TRIGGER if ti.triggerer_job else LogType.WORKER
url, rel_path = self._get_log_retrieval_url(ti, worker_log_rel_path, log_type=log_type)
response = _fetch_logs_from_service(url, rel_path)
if response.status_code == 403:
messages.append(
"!!!! Please make sure that all your Airflow components (e.g. "
"schedulers, webservers, workers and triggerer) have "
"the same 'secret_key' configured in 'webserver' section and "
"time is synchronized on all your machines (for example with ntpd)\n"
"See more at https://airflow.apache.org/docs/apache-airflow/"
"stable/configurations-ref.html#secret-key"
)
# Check if the resource was properly fetched
response.raise_for_status()
if response.text:
messages.append(f"Found logs served from host {url}")
logs.append(response.text)
except Exception as e:
messages.append(f"Could not read served logs: {str(e)}")
logger.exception("Could not read served logs")
return messages, logs
def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[list[str], list[str]]:
"""
Implement in subclasses to read from the remote service.
This method should return two lists, messages and logs.
* Each element in the messages list should be a single message,
such as, "reading from x file".
* Each element in the logs list should be the content of one file.
"""
raise NotImplementedError
| 23,290 | 42.211503 | 105 |
py
|
airflow
|
airflow-main/airflow/utils/log/logging_mixin.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import abc
import enum
import logging
import sys
from io import IOBase
from logging import Handler, Logger, StreamHandler
from typing import IO, Any, TypeVar, cast
import re2
# 7-bit C1 ANSI escape sequences
ANSI_ESCAPE = re2.compile(r"\x1B[@-_][0-?]*[ -/]*[@-~]")
# Private: A sentinel objects
class SetContextPropagate(enum.Enum):
"""Sentinel objects for log propagation contexts.
:meta private:
"""
    # If a `set_context` function wants to _keep_ propagation set on its logger it needs to return this
# special value.
MAINTAIN_PROPAGATE = object()
# Don't use this one anymore!
DISABLE_PROPAGATE = object()
def __getattr__(name):
if name in ("DISABLE_PROPOGATE", "DISABLE_PROPAGATE"):
        # Compat for the old misspelling, on the off chance someone is using it directly,
        # and for an old object that isn't needed anymore
return SetContextPropagate.DISABLE_PROPAGATE
raise AttributeError(f"module {__name__} has no attribute {name}")
def remove_escape_codes(text: str) -> str:
"""Remove ANSI escapes codes from string; used to remove "colors" from log messages."""
return ANSI_ESCAPE.sub("", text)
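# Illustrative sketch (editorial addition, not part of the original module): stripping the color
# codes produced by e.g. colorlog before persisting a line.
def _example_remove_escape_codes() -> str:
    colored_line = "\x1b[31mERROR\x1b[0m - something failed"
    return remove_escape_codes(colored_line)  # "ERROR - something failed"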
_T = TypeVar("_T")
class LoggingMixin:
"""Convenience super-class to have a logger configured with the class name."""
_log: logging.Logger | None = None
def __init__(self, context=None):
self._set_context(context)
@staticmethod
def _get_log(obj: Any, clazz: type[_T]) -> Logger:
if obj._log is None:
obj._log = logging.getLogger(f"{clazz.__module__}.{clazz.__name__}")
return obj._log
@classmethod
def logger(cls) -> Logger:
"""Returns a logger."""
return LoggingMixin._get_log(cls, cls)
@property
def log(self) -> Logger:
"""Returns a logger."""
return LoggingMixin._get_log(self, self.__class__)
def _set_context(self, context):
if context is not None:
set_context(self.log, context)
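# Illustrative usage sketch (editorial addition, not part of the original module): any class can
# inherit LoggingMixin to get a `self.log` logger named "<module>.<ClassName>".
def _example_logging_mixin_usage() -> None:
    class ExampleHook(LoggingMixin):
        def run(self) -> None:
            self.log.info("running example hook")
    ExampleHook().run()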
class ExternalLoggingMixin:
"""Define a log handler based on an external service (e.g. ELK, StackDriver)."""
@property
@abc.abstractmethod
def log_name(self) -> str:
"""Return log name."""
@abc.abstractmethod
def get_external_log_url(self, task_instance, try_number) -> str:
"""Return the URL for log visualization in the external service."""
@property
@abc.abstractmethod
def supports_external_link(self) -> bool:
"""Return whether handler is able to support external links."""
# We have to ignore typing errors here because Python I/O classes are a mess, and they do not
# have the same type hierarchy defined as the `typing.IO` - they violate Liskov Substitution Principle
# While it is ok to make your class derive from IOBase (and its good thing to do as they provide
# base implementation for IO-implementing classes, it's impossible to make them work with
# IO generics (and apparently it has not even been intended)
# See more: https://giters.com/python/typeshed/issues/6077
class StreamLogWriter(IOBase, IO[str]): # type: ignore[misc]
"""
    Allows redirecting stdout and stderr to a logger.
    :param logger: the logger to write messages to
    :param level: the log level (e.g. ``logging.INFO``) to use when writing
"""
encoding: None = None
def __init__(self, logger, level):
self.logger = logger
self.level = level
self._buffer = ""
def close(self):
"""
Provide close method, for compatibility with the io.IOBase interface.
This is a no-op method.
"""
@property
def closed(self):
"""
Return False to indicate that the stream is not closed.
Streams will be open for the duration of Airflow's lifecycle.
For compatibility with the io.IOBase interface.
"""
return False
def _propagate_log(self, message):
"""Propagate message removing escape codes."""
self.logger.log(self.level, remove_escape_codes(message))
def write(self, message):
"""
Do whatever it takes to actually log the specified logging record.
:param message: message to log
"""
if not message.endswith("\n"):
self._buffer += message
else:
self._buffer += message.rstrip()
self.flush()
def flush(self):
"""Ensure all logging output has been flushed."""
buf = self._buffer
if len(buf) > 0:
self._buffer = ""
self._propagate_log(buf)
def isatty(self):
"""
Returns False to indicate the fd is not connected to a tty(-like) device.
For compatibility reasons.
"""
return False
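# Illustrative usage sketch (editorial addition, not part of the original module): routing
# print() output into a logger, which is roughly how Airflow captures task stdout.
def _example_stream_log_writer_usage() -> None:
    import contextlib
    writer = StreamLogWriter(logging.getLogger("example.stdout"), logging.INFO)
    with contextlib.redirect_stdout(writer):
        print("this line ends up in the logger, not on stdout")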
class RedirectStdHandler(StreamHandler):
"""
Custom StreamHandler that uses current sys.stderr/stdout as the stream for logging.
This class is like a StreamHandler using sys.stderr/stdout, but uses
whatever sys.stderr/stdout is currently set to rather than the value of
sys.stderr/stdout at handler construction time, except when running a
task in a kubernetes executor pod.
"""
def __init__(self, stream):
if not isinstance(stream, str):
raise Exception(
"Cannot use file like objects. Use 'stdout' or 'stderr' as a str and without 'ext://'."
)
self._use_stderr = True
if "stdout" in stream:
self._use_stderr = False
self._orig_stream = sys.stdout
else:
self._orig_stream = sys.stderr
# StreamHandler tries to set self.stream
Handler.__init__(self)
@property
def stream(self):
"""Returns current stream."""
from airflow.settings import IS_K8S_EXECUTOR_POD
if IS_K8S_EXECUTOR_POD:
return self._orig_stream
if self._use_stderr:
return sys.stderr
return sys.stdout
def set_context(logger, value):
"""
Walks the tree of loggers and tries to set the context for each handler.
:param logger: logger
:param value: value to set
"""
while logger:
orig_propagate = logger.propagate
for handler in logger.handlers:
# Not all handlers need to have context passed in so we ignore
# the error when handlers do not have set_context defined.
            # Don't use getattr so we have type checking. And we don't care if handler is actually a
# FileTaskHandler, it just needs to have a set_context function!
if hasattr(handler, "set_context"):
from airflow.utils.log.file_task_handler import FileTaskHandler
flag = cast(FileTaskHandler, handler).set_context(value)
# By default we disable propagate once we have configured the logger, unless that handler
# explicitly asks us to keep it on.
if flag is not SetContextPropagate.MAINTAIN_PROPAGATE:
logger.propagate = False
if orig_propagate is True:
        # If we were set to propagate before we turned it off, then keep passing set_context up
logger = logger.parent
else:
break
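# Illustrative usage sketch (editorial addition, not part of the original module): this is,
# roughly, how Airflow hands a task instance to the task log handlers before a task runs;
# `task_instance` is assumed to be a TaskInstance.
def _example_set_context(task_instance) -> None:
    set_context(logging.getLogger("airflow.task"), task_instance)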
| 8,002 | 31.40081 | 105 |
py
|
airflow
|
airflow-main/airflow/utils/log/trigger_handler.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
import logging
from contextvars import ContextVar
from copy import copy
from logging.handlers import QueueHandler
from airflow.utils.log.file_task_handler import FileTaskHandler
ctx_task_instance: ContextVar = ContextVar("task_instance")
ctx_trigger_id: ContextVar = ContextVar("trigger_id")
ctx_trigger_end: ContextVar = ContextVar("trigger_end")
ctx_indiv_trigger: ContextVar = ContextVar("__individual_trigger")
class TriggerMetadataFilter(logging.Filter):
"""
Injects TI key, triggerer job_id, and trigger_id into the log record.
:meta private:
"""
def filter(self, record):
for var in (
ctx_task_instance,
ctx_trigger_id,
ctx_trigger_end,
ctx_indiv_trigger,
):
val = var.get(None)
if val is not None:
setattr(record, var.name, val)
return True
class DropTriggerLogsFilter(logging.Filter):
"""
    Filter out log records that have an attribute named ``ctx_indiv_trigger``.
    The purpose here is to prevent individual trigger logs from going to stdout
    in the triggerer service.
:meta private:
"""
def filter(self, record):
return getattr(record, ctx_indiv_trigger.name, None) is None
class TriggererHandlerWrapper(logging.Handler):
"""
Wrap inheritors of FileTaskHandler and direct log messages to them based on trigger_id.
:meta private:
"""
trigger_should_queue = True
def __init__(self, base_handler: FileTaskHandler, level=logging.NOTSET):
super().__init__(level=level)
self.base_handler: FileTaskHandler = base_handler
self.handlers: dict[int, FileTaskHandler] = {}
def _make_handler(self, ti):
h = copy(self.base_handler)
h.set_context(ti=ti)
return h
def _get_or_create_handler(self, trigger_id, ti):
if trigger_id not in self.handlers:
self.handlers[trigger_id] = self._make_handler(ti)
return self.handlers[trigger_id]
def emit(self, record):
h = self._get_or_create_handler(record.trigger_id, record.task_instance)
h.emit(record)
def handle(self, record):
if not getattr(record, ctx_indiv_trigger.name, None):
return False
if record.trigger_end:
self.close_one(record.trigger_id)
return False
emit = self.filter(record)
if emit:
self.emit(record)
return emit
def close_one(self, trigger_id):
h = self.handlers.get(trigger_id)
if h:
h.close()
del self.handlers[trigger_id]
def flush(self):
for _, h in self.handlers.items():
h.flush()
def close(self):
for trigger_id in list(self.handlers.keys()):
h = self.handlers[trigger_id]
h.close()
del self.handlers[trigger_id]
class LocalQueueHandler(QueueHandler):
"""
Send messages to queue.
:meta private:
"""
def emit(self, record: logging.LogRecord) -> None:
# There is no need to call `prepare` because queue is in same process.
try:
self.enqueue(record)
except asyncio.CancelledError:
raise
except Exception:
self.handleError(record)
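# Illustrative usage sketch (editorial addition, not part of the original module): the handler
# only enqueues records; something else (e.g. a thread draining the queue) must consume them.
def _example_local_queue_handler_usage() -> None:
    import queue
    record_queue: queue.SimpleQueue = queue.SimpleQueue()
    logging.getLogger("example.triggerer").addHandler(LocalQueueHandler(record_queue))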
| 4,123 | 28.669065 | 91 |
py
|
airflow
|
airflow-main/airflow/utils/log/file_processor_handler.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import os
from datetime import datetime
from pathlib import Path
from airflow import settings
from airflow.utils.helpers import parse_template_string
from airflow.utils.log.logging_mixin import DISABLE_PROPOGATE
from airflow.utils.log.non_caching_file_handler import NonCachingFileHandler
class FileProcessorHandler(logging.Handler):
"""
FileProcessorHandler is a python log handler that handles dag processor logs.
It creates and delegates log handling to `logging.FileHandler`
after receiving dag processor context.
:param base_log_folder: Base log folder to place logs.
:param filename_template: template filename string
"""
def __init__(self, base_log_folder, filename_template):
super().__init__()
self.handler = None
self.base_log_folder = base_log_folder
self.dag_dir = os.path.expanduser(settings.DAGS_FOLDER)
self.filename_template, self.filename_jinja_template = parse_template_string(filename_template)
self._cur_date = datetime.today()
Path(self._get_log_directory()).mkdir(parents=True, exist_ok=True)
self._symlink_latest_log_directory()
def set_context(self, filename):
"""
        Provide filename context to the airflow dag processor log handler.
:param filename: filename in which the dag is located
"""
local_loc = self._init_file(filename)
self.handler = NonCachingFileHandler(local_loc)
self.handler.setFormatter(self.formatter)
self.handler.setLevel(self.level)
if self._cur_date < datetime.today():
self._symlink_latest_log_directory()
self._cur_date = datetime.today()
return DISABLE_PROPOGATE
def emit(self, record):
if self.handler is not None:
self.handler.emit(record)
def flush(self):
if self.handler is not None:
self.handler.flush()
def close(self):
if self.handler is not None:
self.handler.close()
def _render_filename(self, filename):
        # Airflow log path used to be generated by `os.path.relpath(filename, self.dag_dir)`; however, DAGs
        # shipped inside the airflow source code are not located in the DAG dir like other DAGs.
        # That would create a log filepath which is not under control, since it could be outside
        # of the log dir. The change here is to make sure the log path for DAGs in airflow code
        # is always inside the log dir, as for other DAGs. To differentiate them from regular DAGs,
        # their logs will be in `log_dir/native_dags`.
import airflow
airflow_directory = airflow.__path__[0]
if filename.startswith(airflow_directory):
filename = os.path.join("native_dags", os.path.relpath(filename, airflow_directory))
else:
filename = os.path.relpath(filename, self.dag_dir)
ctx = {"filename": filename}
if self.filename_jinja_template:
return self.filename_jinja_template.render(**ctx)
return self.filename_template.format(filename=ctx["filename"])
def _get_log_directory(self):
now = datetime.utcnow()
return os.path.join(self.base_log_folder, now.strftime("%Y-%m-%d"))
def _symlink_latest_log_directory(self):
"""
Create symbolic link to the current day's log directory.
Allows easy access to the latest scheduler log files.
:return: None
"""
log_directory = self._get_log_directory()
latest_log_directory_path = os.path.join(self.base_log_folder, "latest")
if os.path.isdir(log_directory):
try:
# if symlink exists but is stale, update it
if os.path.islink(latest_log_directory_path):
if os.readlink(latest_log_directory_path) != log_directory:
os.unlink(latest_log_directory_path)
os.symlink(log_directory, latest_log_directory_path)
elif os.path.isdir(latest_log_directory_path) or os.path.isfile(latest_log_directory_path):
logging.warning(
"%s already exists as a dir/file. Skip creating symlink.", latest_log_directory_path
)
else:
os.symlink(log_directory, latest_log_directory_path)
except OSError:
logging.warning("OSError while attempting to symlink the latest log directory")
def _init_file(self, filename):
"""
Create log file and directory if required.
        :param filename: path of the dag file being processed
        :return: absolute log file path for the given filename
"""
relative_log_file_path = os.path.join(self._get_log_directory(), self._render_filename(filename))
log_file_path = os.path.abspath(relative_log_file_path)
directory = os.path.dirname(log_file_path)
Path(directory).mkdir(parents=True, exist_ok=True)
if not os.path.exists(log_file_path):
open(log_file_path, "a").close()
return log_file_path
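# Illustrative usage sketch (editorial addition, not part of the original module). The folder and
# the "{{ filename }}.log" template are example values resembling Airflow's defaults; the comment
# about the resulting path assumes DAGS_FOLDER is /opt/airflow/dags.
def _example_file_processor_handler_usage() -> None:
    handler = FileProcessorHandler(
        base_log_folder="/tmp/example-logs/dag_processor",
        filename_template="{{ filename }}.log",
    )
    # under the assumption above, logs go to /tmp/example-logs/dag_processor/<YYYY-MM-DD>/example_dag.py.log
    handler.set_context("/opt/airflow/dags/example_dag.py")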
| 5,935 | 37.797386 | 110 |
py
|
airflow
|
airflow-main/airflow/utils/log/log_reader.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import time
from functools import cached_property
from typing import Iterator
from sqlalchemy.orm.session import Session
from airflow.configuration import conf
from airflow.models.taskinstance import TaskInstance
from airflow.utils.helpers import render_log_filename
from airflow.utils.log.logging_mixin import ExternalLoggingMixin
from airflow.utils.session import NEW_SESSION, provide_session
from airflow.utils.state import TaskInstanceState
class TaskLogReader:
"""Task log reader."""
STREAM_LOOP_SLEEP_SECONDS = 0.5
"""Time to sleep between loops while waiting for more logs"""
def read_log_chunks(
self, ti: TaskInstance, try_number: int | None, metadata
) -> tuple[list[tuple[tuple[str, str]]], dict[str, str]]:
"""
Reads chunks of Task Instance logs.
:param ti: The taskInstance
:param try_number: If provided, logs for the given try will be returned.
Otherwise, logs from all attempts are returned.
:param metadata: A dictionary containing information about how to read the task log
The following is an example of how to use this method to read log:
.. code-block:: python
logs, metadata = task_log_reader.read_log_chunks(ti, try_number, metadata)
logs = logs[0] if try_number is not None else logs
where task_log_reader is an instance of TaskLogReader. The metadata will always
        contain information about the task log which can enable you to read logs to the
end.
"""
logs, metadatas = self.log_handler.read(ti, try_number, metadata=metadata)
metadata = metadatas[0]
return logs, metadata
def read_log_stream(self, ti: TaskInstance, try_number: int | None, metadata: dict) -> Iterator[str]:
"""
        Continuously read logs until the end of the log is reached.
:param ti: The Task Instance
:param try_number: the task try number
:param metadata: A dictionary containing information about how to read the task log
"""
if try_number is None:
next_try = ti.next_try_number
try_numbers = list(range(1, next_try))
else:
try_numbers = [try_number]
for current_try_number in try_numbers:
metadata.pop("end_of_log", None)
metadata.pop("max_offset", None)
metadata.pop("offset", None)
metadata.pop("log_pos", None)
while True:
logs, metadata = self.read_log_chunks(ti, current_try_number, metadata)
for host, log in logs[0]:
yield "\n".join([host or "", log]) + "\n"
if "end_of_log" not in metadata or (
not metadata["end_of_log"]
and ti.state not in (TaskInstanceState.RUNNING, TaskInstanceState.DEFERRED)
):
if not logs[0]:
# we did not receive any logs in this loop
# sleeping to conserve resources / limit requests on external services
time.sleep(self.STREAM_LOOP_SLEEP_SECONDS)
else:
break
@cached_property
def log_handler(self):
"""Get the log handler which is configured to read logs."""
task_log_reader = conf.get("logging", "task_log_reader")
def handlers():
"""
Yield all handlers first from airflow.task logger then root logger.
Depending on whether we're in a running task, it could be in either of these locations.
"""
yield from logging.getLogger("airflow.task").handlers
yield from logging.getLogger().handlers
return next((h for h in handlers() if h.name == task_log_reader), None)
@property
def supports_read(self):
"""Checks if a read operation is supported by a current log handler."""
return hasattr(self.log_handler, "read")
@property
def supports_external_link(self) -> bool:
"""Check if the logging handler supports external links (e.g. to Elasticsearch, Stackdriver, etc)."""
if not isinstance(self.log_handler, ExternalLoggingMixin):
return False
return self.log_handler.supports_external_link
@provide_session
def render_log_filename(
self,
ti: TaskInstance,
try_number: int | None = None,
*,
session: Session = NEW_SESSION,
) -> str:
"""
Renders the log attachment filename.
:param ti: The task instance
:param try_number: The task try number
"""
dagrun = ti.get_dagrun(session=session)
attachment_filename = render_log_filename(
ti=ti,
try_number="all" if try_number is None else try_number,
filename_template=dagrun.get_log_template(session=session).filename,
)
return attachment_filename
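# Illustrative usage sketch (editorial addition, not part of the original module): streaming all
# attempts of a task instance to stdout, roughly as the log API endpoint does; `ti` is assumed to
# be a TaskInstance loaded from the metadata database.
def _example_stream_task_logs(ti: TaskInstance) -> None:
    reader = TaskLogReader()
    if reader.supports_read:
        for chunk in reader.read_log_stream(ti, try_number=None, metadata={}):
            print(chunk, end="")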
| 5,789 | 37.85906 | 109 |
py
|
airflow
|
airflow-main/airflow/utils/log/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# fmt: off
from __future__ import annotations
from airflow.utils.deprecation_tools import add_deprecated_classes
__deprecated_classes = {
'cloudwatch_task_handler': {
'CloudwatchTaskHandler': (
'airflow.providers.amazon.aws.log.cloudwatch_task_handler.CloudwatchTaskHandler'
),
},
'es_task_handler': {
'ElasticsearchTaskHandler': (
'airflow.providers.elasticsearch.log.es_task_handler.ElasticsearchTaskHandler'
),
},
'gcs_task_handler': {
'GCSTaskHandler': 'airflow.providers.google.cloud.log.gcs_task_handler.GCSTaskHandler',
},
's3_task_handler': {
'S3TaskHandler': 'airflow.providers.amazon.aws.log.s3_task_handler.S3TaskHandler',
},
'stackdriver_task_handler': {
'StackdriverTaskHandler': (
'airflow.providers.google.cloud.log.stackdriver_task_handler.StackdriverTaskHandler'
),
},
'wasb_task_handler': {
'WasbTaskHandler': 'airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
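# Illustrative note (editorial addition): with the mapping above, a legacy import such as
#   from airflow.utils.log.s3_task_handler import S3TaskHandler
# still resolves (emitting a deprecation warning) to the class shipped in the corresponding
# provider package, here apache-airflow-providers-amazon.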
| 1,909 | 36.45098 | 101 |
py
|
airflow
|
airflow-main/airflow/utils/log/task_handler_with_custom_formatter.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Custom logging formatter for Airflow."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from airflow.configuration import conf
from airflow.utils.helpers import parse_template_string, render_template_to_string
if TYPE_CHECKING:
from jinja2 import Template
from airflow.models.taskinstance import TaskInstance
class TaskHandlerWithCustomFormatter(logging.StreamHandler):
"""Custom implementation of StreamHandler, a class which writes logging records for Airflow."""
prefix_jinja_template: Template | None = None
def set_context(self, ti) -> None:
"""
Accept the run-time context (i.e. the current task) and configure the formatter accordingly.
        :param ti: the task instance supplying the template context
        :return: None
"""
if ti.raw or self.formatter is None:
return
prefix = conf.get("logging", "task_log_prefix_template")
if prefix:
_, self.prefix_jinja_template = parse_template_string(prefix)
rendered_prefix = self._render_prefix(ti)
else:
rendered_prefix = ""
formatter = logging.Formatter(f"{rendered_prefix}:{self.formatter._fmt}")
self.setFormatter(formatter)
self.setLevel(self.level)
def _render_prefix(self, ti: TaskInstance) -> str:
if self.prefix_jinja_template:
jinja_context = ti.get_template_context()
return render_template_to_string(self.prefix_jinja_template, jinja_context)
logging.warning("'task_log_prefix_template' is in invalid format, ignoring the variable value")
return ""
| 2,397 | 37.063492 | 103 |
py
|
airflow
|
airflow-main/airflow/utils/log/secrets_masker.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Mask sensitive information from logs."""
from __future__ import annotations
import collections.abc
import logging
import sys
from functools import cached_property
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Generator,
Iterable,
Iterator,
List,
Pattern,
TextIO,
Tuple,
TypeVar,
Union,
)
import re2
from airflow import settings
from airflow.compat.functools import cache
from airflow.typing_compat import TypeGuard
if TYPE_CHECKING:
from kubernetes.client import V1EnvVar
Redactable = TypeVar("Redactable", str, "V1EnvVar", Dict[Any, Any], Tuple[Any, ...], List[Any])
Redacted = Union[Redactable, str]
log = logging.getLogger(__name__)
DEFAULT_SENSITIVE_FIELDS = frozenset(
{
"access_token",
"api_key",
"apikey",
"authorization",
"passphrase",
"passwd",
"password",
"private_key",
"secret",
"token",
"keyfile_dict",
"service_account",
}
)
"""Names of fields (Connection extra, Variable key name etc.) that are deemed sensitive"""
SECRETS_TO_SKIP_MASKING_FOR_TESTS = {"airflow"}
@cache
def get_sensitive_variables_fields():
"""Get comma-separated sensitive Variable Fields from airflow.cfg."""
from airflow.configuration import conf
sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy()
sensitive_variable_fields = conf.get("core", "sensitive_var_conn_names")
if sensitive_variable_fields:
sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(",")})
return sensitive_fields
def should_hide_value_for_key(name):
"""Should the value for this given name (Variable name, or key in conn.extra_dejson) be hidden."""
from airflow import settings
if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS:
name = name.strip().lower()
return any(s in name for s in get_sensitive_variables_fields())
return False
def mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None:
"""
Mask a secret from appearing in the task logs.
If ``name`` is provided, then it will only be masked if the name matches
one of the configured "sensitive" names.
    If ``secret`` is a dict or an iterable (excluding str) then it will be
recursively walked and keys with sensitive names will be hidden.
"""
# Filtering all log messages is not a free process, so we only do it when
# running tasks
if not secret:
return
_secrets_masker().add_mask(secret, name)
def redact(value: Redactable, name: str | None = None, max_depth: int | None = None) -> Redacted:
"""Redact any secrets found in ``value``."""
return _secrets_masker().redact(value, name, max_depth)
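# Illustrative usage sketch (editorial addition, not part of the original module). It assumes
# Airflow logging is already configured, i.e. a SecretsMasker filter is attached to the
# "airflow.task" logger; otherwise _secrets_masker() raises a RuntimeError.
def _example_mask_and_redact() -> None:
    mask_secret("s3cr3t-value")  # any future occurrence of this string in task logs becomes ***
    connection_extra = {"password": "hunter2", "host": "db.example.com"}
    redacted = redact(connection_extra)
    # "password" is a sensitive key name, so its value is replaced with "***";
    # "host" is kept unless its value matches a previously registered mask.
    log.info("redacted extra: %s", redacted)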
@cache
def _secrets_masker() -> SecretsMasker:
for flt in logging.getLogger("airflow.task").filters:
if isinstance(flt, SecretsMasker):
return flt
raise RuntimeError(
"Logging Configuration Error! No SecretsMasker found! If you have custom logging, please make "
"sure you configure it taking airflow configuration as a base as explained at "
"https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/logging-tasks.html"
"#advanced-configuration"
)
@cache
def _get_v1_env_var_type() -> type:
try:
from kubernetes.client import V1EnvVar
except ImportError:
return type("V1EnvVar", (), {})
return V1EnvVar
def _is_v1_env_var(v: Any) -> TypeGuard[V1EnvVar]:
return isinstance(v, _get_v1_env_var_type())
class SecretsMasker(logging.Filter):
"""Redact secrets from logs."""
replacer: Pattern | None = None
patterns: set[str]
ALREADY_FILTERED_FLAG = "__SecretsMasker_filtered"
MAX_RECURSION_DEPTH = 5
def __init__(self):
super().__init__()
self.patterns = set()
@cached_property
def _record_attrs_to_ignore(self) -> Iterable[str]:
# Doing log.info(..., extra={'foo': 2}) sets extra properties on
# record, i.e. record.foo. And we need to filter those too. Fun
#
# Create a record, and look at what attributes are on it, and ignore
# all the default ones!
record = logging.getLogRecordFactory()(
# name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None,
"x",
logging.INFO,
__file__,
1,
"",
tuple(),
exc_info=None,
func="funcname",
)
return frozenset(record.__dict__).difference({"msg", "args"})
def _redact_exception_with_context(self, exception):
# Exception class may not be modifiable (e.g. declared by an
# extension module such as JDBC).
try:
exception.args = (self.redact(v) for v in exception.args)
except AttributeError:
pass
if exception.__context__:
self._redact_exception_with_context(exception.__context__)
if exception.__cause__ and exception.__cause__ is not exception.__context__:
self._redact_exception_with_context(exception.__cause__)
def filter(self, record) -> bool:
if settings.MASK_SECRETS_IN_LOGS is not True:
return True
if self.ALREADY_FILTERED_FLAG in record.__dict__:
# Filters are attached to multiple handlers and logs, keep a
# "private" flag that stops us needing to process it more than once
return True
if self.replacer:
for k, v in record.__dict__.items():
if k in self._record_attrs_to_ignore:
continue
record.__dict__[k] = self.redact(v)
if record.exc_info and record.exc_info[1] is not None:
exc = record.exc_info[1]
self._redact_exception_with_context(exc)
record.__dict__[self.ALREADY_FILTERED_FLAG] = True
return True
# Default on `max_depth` is to support versions of the OpenLineage plugin (not the provider) which called
# this function directly. New versions of that provider, and this class itself call it with a value
def _redact_all(self, item: Redactable, depth: int, max_depth: int = MAX_RECURSION_DEPTH) -> Redacted:
if depth > max_depth or isinstance(item, str):
return "***"
if isinstance(item, dict):
return {
dict_key: self._redact_all(subval, depth + 1, max_depth) for dict_key, subval in item.items()
}
elif isinstance(item, (tuple, set)):
            # Turn set into tuple!
return tuple(self._redact_all(subval, depth + 1, max_depth) for subval in item)
elif isinstance(item, list):
return list(self._redact_all(subval, depth + 1, max_depth) for subval in item)
else:
return item
def _redact(self, item: Redactable, name: str | None, depth: int, max_depth: int) -> Redacted:
        # Avoid spending too much effort on redacting deeply nested
        # structures. This also avoids infinite recursion if a structure has
        # a reference to itself.
if depth > max_depth:
return item
try:
if name and should_hide_value_for_key(name):
return self._redact_all(item, depth, max_depth)
if isinstance(item, dict):
to_return = {
dict_key: self._redact(subval, name=dict_key, depth=(depth + 1), max_depth=max_depth)
for dict_key, subval in item.items()
}
return to_return
elif _is_v1_env_var(item):
tmp: dict = item.to_dict()
if should_hide_value_for_key(tmp.get("name", "")) and "value" in tmp:
tmp["value"] = "***"
else:
return self._redact(item=tmp, name=name, depth=depth, max_depth=max_depth)
return tmp
elif isinstance(item, str):
if self.replacer:
# We can't replace specific values, but the key-based redacting
# can still happen, so we can't short-circuit, we need to walk
# the structure.
return self.replacer.sub("***", item)
return item
elif isinstance(item, (tuple, set)):
                # Turn set into tuple!
return tuple(
self._redact(subval, name=None, depth=(depth + 1), max_depth=max_depth) for subval in item
)
elif isinstance(item, list):
return [
self._redact(subval, name=None, depth=(depth + 1), max_depth=max_depth) for subval in item
]
else:
return item
# I think this should never happen, but it does not hurt to leave it just in case
# Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)
# but it caused infinite recursion, so we need to cast it to str first.
except Exception as e:
log.warning(
"Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. "
"Error was: %s: %s",
repr(item),
type(e).__name__,
str(e),
)
return item
def redact(self, item: Redactable, name: str | None = None, max_depth: int | None = None) -> Redacted:
"""Redact an any secrets found in ``item``, if it is a string.
If ``name`` is given, and it's a "sensitive" name (see
:func:`should_hide_value_for_key`) then all string values in the item
is redacted.
"""
return self._redact(item, name, depth=0, max_depth=max_depth or self.MAX_RECURSION_DEPTH)
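    # Illustrative behaviour of ``redact`` (a sketch; assumes "password" is among
    # the configured sensitive key names, as it is by default, and that the
    # masker instance is obtained via ``_secrets_masker()``):
    #
    #     masker.redact({"password": "hunter2", "host": "db"})
    #     # -> {"password": "***", "host": "db"}
    #
    #     masker.redact("login with hunter2")
    #     # -> "login with ***"   (only if "hunter2" was previously added via add_mask())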
@cached_property
def _mask_adapter(self) -> None | Callable:
"""Pulls the secret mask adapter from config.
This lives in a function here to be cached and only hit the config once.
"""
from airflow.configuration import conf
return conf.getimport("logging", "secret_mask_adapter", fallback=None)
@cached_property
def _test_mode(self) -> bool:
"""Pulls the unit test mode flag from config.
This lives in a function here to be cached and only hit the config once.
"""
from airflow.configuration import conf
return conf.getboolean("core", "unit_test_mode")
def _adaptations(self, secret: str) -> Generator[str, None, None]:
"""Yields the secret along with any adaptations to the secret that should be masked."""
yield secret
if self._mask_adapter:
# This can return an iterable of secrets to mask OR a single secret as a string
secret_or_secrets = self._mask_adapter(secret)
if not isinstance(secret_or_secrets, str):
                # if it's not a string, it must be an iterable
yield from secret_or_secrets
else:
yield secret_or_secrets
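    # Sketch of a custom adapter, wired up via ``[logging] secret_mask_adapter``
    # (the function below is hypothetical; per the comment above it only has to
    # accept a secret and return either a string or an iterable of strings):
    #
    #     def urlencoded_variant(secret: str) -> list[str]:
    #         from urllib.parse import quote
    #         return [quote(secret, safe="")]
    #
    # Each value yielded here is then escaped and added to the masking pattern
    # by ``add_mask`` below.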
def add_mask(self, secret: str | dict | Iterable, name: str | None = None):
"""Add a new secret to be masked to this filter instance."""
if isinstance(secret, dict):
for k, v in secret.items():
self.add_mask(v, k)
elif isinstance(secret, str):
if not secret or (self._test_mode and secret in SECRETS_TO_SKIP_MASKING_FOR_TESTS):
return
new_mask = False
for s in self._adaptations(secret):
if s:
pattern = re2.escape(s)
if pattern not in self.patterns and (not name or should_hide_value_for_key(name)):
self.patterns.add(pattern)
new_mask = True
if new_mask:
self.replacer = re2.compile("|".join(self.patterns))
elif isinstance(secret, collections.abc.Iterable):
for v in secret:
self.add_mask(v, name)
class RedactedIO(TextIO):
"""IO class that redacts values going into stdout.
Expected usage::
with contextlib.redirect_stdout(RedactedIO()):
... # Writes to stdout will be redacted.
"""
def __init__(self):
self.target = sys.stdout
def __enter__(self) -> TextIO:
return self.target.__enter__()
def __exit__(self, t, v, b) -> None:
return self.target.__exit__(t, v, b)
def __iter__(self) -> Iterator[str]:
return iter(self.target)
def __next__(self) -> str:
return next(self.target)
def close(self) -> None:
return self.target.close()
def fileno(self) -> int:
return self.target.fileno()
def flush(self) -> None:
return self.target.flush()
def isatty(self) -> bool:
return self.target.isatty()
def read(self, n: int = -1) -> str:
return self.target.read(n)
def readable(self) -> bool:
return self.target.readable()
def readline(self, n: int = -1) -> str:
return self.target.readline(n)
def readlines(self, n: int = -1) -> list[str]:
return self.target.readlines(n)
def seek(self, offset: int, whence: int = 0) -> int:
return self.target.seek(offset, whence)
def seekable(self) -> bool:
return self.target.seekable()
def tell(self) -> int:
return self.target.tell()
def truncate(self, s: int | None = None) -> int:
return self.target.truncate(s)
def writable(self) -> bool:
return self.target.writable()
def write(self, s: str) -> int:
s = redact(s)
return self.target.write(s)
def writelines(self, lines) -> None:
self.target.writelines(lines)
| 14,706 | 34.100239 | 110 |
py
|
airflow
|
airflow-main/airflow/utils/log/action_logger.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def action_event_from_permission(prefix: str, permission: str) -> str:
if permission.startswith("can_"):
permission = permission[4:]
if prefix:
return f"{prefix}.{permission}"
return permission
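# Worked examples for the helper above (values are illustrative):
#
#     action_event_from_permission("dag", "can_read")  # -> "dag.read"
#     action_event_from_permission("", "can_edit")     # -> "edit"
#     action_event_from_permission("ui", "menu")       # -> "ui.menu"  (no "can_" prefix to strip)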
| 1,046 | 37.777778 | 70 |
py
|
airflow
|
airflow-main/airflow/utils/log/timezone_aware.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import pendulum
class TimezoneAware(logging.Formatter):
"""Override time-formatting methods to include UTC offset.
    Since Airflow parses the logs to perform time conversion, the UTC offset is
    critical information. This formatter ensures ``%(asctime)s`` is rendered with
    the offset in ISO 8601 form, e.g. ``2022-06-12T13:00:00.123+0000``.
"""
default_time_format = "%Y-%m-%dT%H:%M:%S"
default_msec_format = "%s.%03d"
default_tz_format = "%z"
def formatTime(self, record, datefmt=None):
"""Format time in record.
This returns the creation time of the specified LogRecord in ISO 8601
date and time format in the local time zone.
"""
dt = pendulum.from_timestamp(record.created, tz=pendulum.local_timezone())
if datefmt:
s = dt.strftime(datefmt)
else:
s = dt.strftime(self.default_time_format)
if self.default_msec_format:
s = self.default_msec_format % (s, record.msecs)
if self.default_tz_format:
s += dt.strftime(self.default_tz_format)
return s
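# Minimal wiring sketch (standard library logging only; the handler and format
# string below are made up for illustration):
#
#     import logging
#
#     handler = logging.StreamHandler()
#     handler.setFormatter(TimezoneAware("%(asctime)s %(levelname)s - %(message)s"))
#     logging.getLogger("airflow.task").addHandler(handler)
#
# With this in place, %(asctime)s is rendered with the UTC offset appended,
# e.g. 2022-06-12T13:00:00.123+0000.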
| 1,950 | 35.811321 | 82 |
py
|
airflow
|
airflow-main/airflow/contrib/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated."""
| 821 | 42.263158 | 62 |
py
|
airflow
|
airflow-main/airflow/contrib/secrets/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated. Please use `airflow.secrets` or `airflow.providers.*.secrets`."""
from __future__ import annotations
import warnings
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.utils.deprecation_tools import add_deprecated_classes
warnings.warn(
"This module is deprecated. Please use airflow.providers.*.secrets.",
RemovedInAirflow3Warning,
stacklevel=2
)
__deprecated_classes = {
'aws_secrets_manager': {
'SecretsManagerBackend': 'airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend',
},
'aws_systems_manager': {
'SystemsManagerParameterStoreBackend': (
'airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend'
),
},
'azure_key_vault': {
'AzureKeyVaultBackend': 'airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend',
},
'gcp_secrets_manager': {
'CloudSecretManagerBackend': (
'airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend'
),
'CloudSecretsManagerBackend': (
'airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend'
),
},
'hashicorp_vault': {
'VaultBackend': 'airflow.providers.hashicorp.secrets.vault.VaultBackend',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
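# Effect of the shim above, sketched (the specific import is illustrative):
#
#     from airflow.contrib.secrets.hashicorp_vault import VaultBackend
#     # -> emits a deprecation warning and resolves to
#     #    airflow.providers.hashicorp.secrets.vault.VaultBackend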
| 2,207 | 37.736842 | 110 |
py
|
airflow
|
airflow-main/airflow/contrib/task_runner/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated. Please use `airflow.task.task_runner`."""
from __future__ import annotations
import warnings
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.utils.deprecation_tools import add_deprecated_classes
warnings.warn(
"This module is deprecated. Please use airflow.task.task_runner.", RemovedInAirflow3Warning, stacklevel=2
)
__deprecated_classes = {
'cgroup_task_runner': {
'CgroupTaskRunner': 'airflow.task.task_runner.cgroup_task_runner.CgroupTaskRunner',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
| 1,375 | 36.189189 | 109 |
py
|
airflow
|
airflow-main/airflow/contrib/operators/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated. Please use `airflow.operators` or `airflow.providers.*.operators`."""
from __future__ import annotations
import warnings
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.utils.deprecation_tools import add_deprecated_classes
warnings.warn(
"This package is deprecated. Please use `airflow.operators` or `airflow.providers.*.operators`.",
RemovedInAirflow3Warning,
stacklevel=2,
)
__deprecated_classes = {
'adls_list_operator': {
'ADLSListOperator': 'airflow.providers.microsoft.azure.operators.adls.ADLSListOperator',
'AzureDataLakeStorageListOperator': (
'airflow.providers.microsoft.azure.operators.adls.ADLSListOperator'
),
},
'adls_to_gcs': {
'ADLSToGCSOperator': 'airflow.providers.google.cloud.transfers.adls_to_gcs.ADLSToGCSOperator',
'AdlsToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.adls_to_gcs.ADLSToGCSOperator'
),
},
'aws_athena_operator': {
'AWSAthenaOperator': 'airflow.providers.amazon.aws.operators.athena.AthenaOperator',
},
'aws_sqs_publish_operator': {
'SqsPublishOperator': 'airflow.providers.amazon.aws.operators.sqs.SqsPublishOperator',
'SQSPublishOperator': 'airflow.providers.amazon.aws.operators.sqs.SqsPublishOperator',
},
'awsbatch_operator': {
'BatchProtocol': 'airflow.providers.amazon.aws.hooks.batch_client.BatchProtocol',
'BatchOperator': 'airflow.providers.amazon.aws.operators.batch.BatchOperator',
'AWSBatchOperator': 'airflow.providers.amazon.aws.operators.batch.BatchOperator',
},
'azure_container_instances_operator': {
'AzureContainerInstancesOperator': (
'airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator'
),
},
'azure_cosmos_operator': {
'AzureCosmosInsertDocumentOperator': (
'airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator'
),
},
'bigquery_check_operator': {
'BigQueryCheckOperator': 'airflow.providers.google.cloud.operators.bigquery.BigQueryCheckOperator',
'BigQueryIntervalCheckOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryIntervalCheckOperator'
),
'BigQueryValueCheckOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryValueCheckOperator'
),
},
'bigquery_get_data': {
'BigQueryGetDataOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryGetDataOperator'
),
},
'bigquery_operator': {
'BigQueryCreateEmptyDatasetOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator'
),
'BigQueryCreateEmptyTableOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryCreateEmptyTableOperator'
),
'BigQueryCreateExternalTableOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator'
),
'BigQueryDeleteDatasetOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteDatasetOperator'
),
'BigQueryExecuteQueryOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator'
),
'BigQueryGetDatasetOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetOperator'
),
'BigQueryGetDatasetTablesOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryGetDatasetTablesOperator'
),
'BigQueryPatchDatasetOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryPatchDatasetOperator'
),
'BigQueryUpdateDatasetOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryUpdateDatasetOperator'
),
'BigQueryUpsertTableOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryUpsertTableOperator'
),
'BigQueryOperator': 'airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator',
},
'bigquery_table_delete_operator': {
'BigQueryDeleteTableOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator'
),
'BigQueryTableDeleteOperator': (
'airflow.providers.google.cloud.operators.bigquery.BigQueryDeleteTableOperator'
),
},
'bigquery_to_bigquery': {
'BigQueryToBigQueryOperator': (
'airflow.providers.google.cloud.transfers.bigquery_to_bigquery.BigQueryToBigQueryOperator'
),
},
'bigquery_to_gcs': {
'BigQueryToGCSOperator': (
'airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator'
),
'BigQueryToCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator'
),
},
'bigquery_to_mysql_operator': {
'BigQueryToMySqlOperator': (
'airflow.providers.google.cloud.transfers.bigquery_to_mysql.BigQueryToMySqlOperator'
),
},
'cassandra_to_gcs': {
'CassandraToGCSOperator': (
'airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator'
),
'CassandraToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator'
),
},
'databricks_operator': {
'DatabricksRunNowOperator': (
'airflow.providers.databricks.operators.databricks.DatabricksRunNowOperator'
),
'DatabricksSubmitRunOperator': (
'airflow.providers.databricks.operators.databricks.DatabricksSubmitRunOperator'
),
},
'dataflow_operator': {
'DataflowCreateJavaJobOperator': (
'airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator'
),
'DataflowCreatePythonJobOperator': (
'airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator'
),
'DataflowTemplatedJobStartOperator': (
'airflow.providers.google.cloud.operators.dataflow.DataflowTemplatedJobStartOperator'
),
'DataFlowJavaOperator': (
'airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator'
),
'DataFlowPythonOperator': (
'airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator'
),
'DataflowTemplateOperator': (
'airflow.providers.google.cloud.operators.dataflow.DataflowTemplatedJobStartOperator'
),
},
'dataproc_operator': {
'DataprocCreateClusterOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator'
),
'DataprocDeleteClusterOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocDeleteClusterOperator'
),
'DataprocInstantiateInlineWorkflowTemplateOperator':
'airflow.providers.google.cloud.operators.dataproc.'
'DataprocInstantiateInlineWorkflowTemplateOperator',
'DataprocInstantiateWorkflowTemplateOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator'
),
'DataprocJobBaseOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocJobBaseOperator'
),
'DataprocScaleClusterOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocScaleClusterOperator'
),
'DataprocSubmitHadoopJobOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator'
),
'DataprocSubmitHiveJobOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator'
),
'DataprocSubmitPigJobOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator'
),
'DataprocSubmitPySparkJobOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator'
),
'DataprocSubmitSparkJobOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator'
),
'DataprocSubmitSparkSqlJobOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator'
),
'DataprocClusterCreateOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocCreateClusterOperator'
),
'DataprocClusterDeleteOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocDeleteClusterOperator'
),
'DataprocClusterScaleOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocScaleClusterOperator'
),
'DataProcHadoopOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator'
),
'DataProcHiveOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator'
),
'DataProcJobBaseOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocJobBaseOperator'
),
'DataProcPigOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator'
),
'DataProcPySparkOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator'
),
'DataProcSparkOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator'
),
'DataProcSparkSqlOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator'
),
'DataprocWorkflowTemplateInstantiateInlineOperator':
'airflow.providers.google.cloud.operators.dataproc.'
'DataprocInstantiateInlineWorkflowTemplateOperator',
'DataprocWorkflowTemplateInstantiateOperator': (
'airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator'
),
},
'datastore_export_operator': {
'CloudDatastoreExportEntitiesOperator': (
'airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator'
),
'DatastoreExportOperator': (
'airflow.providers.google.cloud.operators.datastore.CloudDatastoreExportEntitiesOperator'
),
},
'datastore_import_operator': {
'CloudDatastoreImportEntitiesOperator': (
'airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator'
),
'DatastoreImportOperator': (
'airflow.providers.google.cloud.operators.datastore.CloudDatastoreImportEntitiesOperator'
),
},
'dingding_operator': {
'DingdingOperator': 'airflow.providers.dingding.operators.dingding.DingdingOperator',
},
'discord_webhook_operator': {
'DiscordWebhookOperator': (
'airflow.providers.discord.operators.discord_webhook.DiscordWebhookOperator'
),
},
'docker_swarm_operator': {
'DockerSwarmOperator': 'airflow.providers.docker.operators.docker_swarm.DockerSwarmOperator',
},
'druid_operator': {
'DruidOperator': 'airflow.providers.apache.druid.operators.druid.DruidOperator',
},
'dynamodb_to_s3': {
'DynamoDBToS3Operator': 'airflow.providers.amazon.aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator',
},
'ecs_operator': {
'EcsProtocol': 'airflow.providers.amazon.aws.hooks.ecs.EcsProtocol',
'EcsRunTaskOperator': 'airflow.providers.amazon.aws.operators.ecs.EcsRunTaskOperator',
'EcsOperator': 'airflow.providers.amazon.aws.operators.ecs.EcsRunTaskOperator',
},
'file_to_gcs': {
'LocalFilesystemToGCSOperator': (
'airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator'
),
'FileToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator'
),
},
'file_to_wasb': {
'LocalFilesystemToWasbOperator': (
'airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator'
),
'FileToWasbOperator': (
'airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator'
),
},
'gcp_bigtable_operator': {
'BigtableCreateInstanceOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableCreateInstanceOperator'
),
'BigtableCreateTableOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableCreateTableOperator'
),
'BigtableDeleteInstanceOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableDeleteInstanceOperator'
),
'BigtableDeleteTableOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableDeleteTableOperator'
),
'BigtableUpdateClusterOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableUpdateClusterOperator'
),
'BigtableTableReplicationCompletedSensor': (
'airflow.providers.google.cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor'
),
'BigtableClusterUpdateOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableUpdateClusterOperator'
),
'BigtableInstanceCreateOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableCreateInstanceOperator'
),
'BigtableInstanceDeleteOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableDeleteInstanceOperator'
),
'BigtableTableCreateOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableCreateTableOperator'
),
'BigtableTableDeleteOperator': (
'airflow.providers.google.cloud.operators.bigtable.BigtableDeleteTableOperator'
),
'BigtableTableWaitForReplicationSensor': (
'airflow.providers.google.cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor'
),
},
'gcp_cloud_build_operator': {
'CloudBuildCreateBuildOperator': (
'airflow.providers.google.cloud.operators.cloud_build.CloudBuildCreateBuildOperator'
),
},
'gcp_compute_operator': {
'ComputeEngineBaseOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineBaseOperator'
),
'ComputeEngineCopyInstanceTemplateOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineCopyInstanceTemplateOperator'
),
'ComputeEngineInstanceGroupUpdateManagerTemplateOperator':
'airflow.providers.google.cloud.operators.compute.'
'ComputeEngineInstanceGroupUpdateManagerTemplateOperator',
'ComputeEngineSetMachineTypeOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineSetMachineTypeOperator'
),
'ComputeEngineStartInstanceOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineStartInstanceOperator'
),
'ComputeEngineStopInstanceOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineStopInstanceOperator'
),
'GceBaseOperator': 'airflow.providers.google.cloud.operators.compute.ComputeEngineBaseOperator',
'GceInstanceGroupManagerUpdateTemplateOperator':
'airflow.providers.google.cloud.operators.compute.'
'ComputeEngineInstanceGroupUpdateManagerTemplateOperator',
'GceInstanceStartOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineStartInstanceOperator'
),
'GceInstanceStopOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineStopInstanceOperator'
),
'GceInstanceTemplateCopyOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineCopyInstanceTemplateOperator'
),
'GceSetMachineTypeOperator': (
'airflow.providers.google.cloud.operators.compute.ComputeEngineSetMachineTypeOperator'
),
},
'gcp_container_operator': {
'GKECreateClusterOperator': (
'airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator'
),
'GKEDeleteClusterOperator': (
'airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator'
),
'GKEStartPodOperator': (
'airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator'
),
'GKEClusterCreateOperator': (
'airflow.providers.google.cloud.operators.kubernetes_engine.GKECreateClusterOperator'
),
'GKEClusterDeleteOperator': (
'airflow.providers.google.cloud.operators.kubernetes_engine.GKEDeleteClusterOperator'
),
'GKEPodOperator': 'airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator',
},
'gcp_dlp_operator': {
'CloudDLPCancelDLPJobOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPCancelDLPJobOperator'
),
'CloudDLPCreateDeidentifyTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator'
),
'CloudDLPCreateDLPJobOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDLPJobOperator'
),
'CloudDLPCreateInspectTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateInspectTemplateOperator'
),
'CloudDLPCreateJobTriggerOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateJobTriggerOperator'
),
'CloudDLPCreateStoredInfoTypeOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateStoredInfoTypeOperator'
),
'CloudDLPDeidentifyContentOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeidentifyContentOperator'
),
'CloudDLPDeleteDeidentifyTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator'
),
'CloudDLPDeleteDLPJobOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDLPJobOperator'
),
'CloudDLPDeleteInspectTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteInspectTemplateOperator'
),
'CloudDLPDeleteJobTriggerOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteJobTriggerOperator'
),
'CloudDLPDeleteStoredInfoTypeOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteStoredInfoTypeOperator'
),
'CloudDLPGetDeidentifyTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator'
),
'CloudDLPGetDLPJobOperator': 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobOperator',
'CloudDLPGetDLPJobTriggerOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator'
),
'CloudDLPGetInspectTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetInspectTemplateOperator'
),
'CloudDLPGetStoredInfoTypeOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator'
),
'CloudDLPInspectContentOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPInspectContentOperator'
),
'CloudDLPListDeidentifyTemplatesOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator'
),
'CloudDLPListDLPJobsOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPListDLPJobsOperator'
),
'CloudDLPListInfoTypesOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPListInfoTypesOperator'
),
'CloudDLPListInspectTemplatesOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPListInspectTemplatesOperator'
),
'CloudDLPListJobTriggersOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPListJobTriggersOperator'
),
'CloudDLPListStoredInfoTypesOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator'
),
'CloudDLPRedactImageOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPRedactImageOperator'
),
'CloudDLPReidentifyContentOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPReidentifyContentOperator'
),
'CloudDLPUpdateDeidentifyTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator'
),
'CloudDLPUpdateInspectTemplateOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator'
),
'CloudDLPUpdateJobTriggerOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateJobTriggerOperator'
),
'CloudDLPUpdateStoredInfoTypeOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateStoredInfoTypeOperator'
),
'CloudDLPDeleteDlpJobOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDLPJobOperator'
),
'CloudDLPGetDlpJobOperator': 'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobOperator',
'CloudDLPGetJobTripperOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator'
),
'CloudDLPListDlpJobsOperator': (
'airflow.providers.google.cloud.operators.dlp.CloudDLPListDLPJobsOperator'
),
},
'gcp_function_operator': {
'CloudFunctionDeleteFunctionOperator': (
'airflow.providers.google.cloud.operators.functions.CloudFunctionDeleteFunctionOperator'
),
'CloudFunctionDeployFunctionOperator': (
'airflow.providers.google.cloud.operators.functions.CloudFunctionDeployFunctionOperator'
),
'GcfFunctionDeleteOperator': (
'airflow.providers.google.cloud.operators.functions.CloudFunctionDeleteFunctionOperator'
),
'GcfFunctionDeployOperator': (
'airflow.providers.google.cloud.operators.functions.CloudFunctionDeployFunctionOperator'
),
},
'gcp_natural_language_operator': {
'CloudNaturalLanguageAnalyzeEntitiesOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageAnalyzeEntitiesOperator',
'CloudNaturalLanguageAnalyzeEntitySentimentOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageAnalyzeEntitySentimentOperator',
'CloudNaturalLanguageAnalyzeSentimentOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageAnalyzeSentimentOperator',
'CloudNaturalLanguageClassifyTextOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageClassifyTextOperator',
'CloudLanguageAnalyzeEntitiesOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageAnalyzeEntitiesOperator',
'CloudLanguageAnalyzeEntitySentimentOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageAnalyzeEntitySentimentOperator',
'CloudLanguageAnalyzeSentimentOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageAnalyzeSentimentOperator',
'CloudLanguageClassifyTextOperator':
'airflow.providers.google.cloud.operators.natural_language.'
'CloudNaturalLanguageClassifyTextOperator',
},
'gcp_spanner_operator': {
'SpannerDeleteDatabaseInstanceOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator'
),
'SpannerDeleteInstanceOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeleteInstanceOperator'
),
'SpannerDeployDatabaseInstanceOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator'
),
'SpannerDeployInstanceOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeployInstanceOperator'
),
'SpannerQueryDatabaseInstanceOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator'
),
'SpannerUpdateDatabaseInstanceOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator'
),
'CloudSpannerInstanceDatabaseDeleteOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator'
),
'CloudSpannerInstanceDatabaseDeployOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator'
),
'CloudSpannerInstanceDatabaseQueryOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator'
),
'CloudSpannerInstanceDatabaseUpdateOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator'
),
'CloudSpannerInstanceDeleteOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeleteInstanceOperator'
),
'CloudSpannerInstanceDeployOperator': (
'airflow.providers.google.cloud.operators.spanner.SpannerDeployInstanceOperator'
),
},
'gcp_speech_to_text_operator': {
'CloudSpeechToTextRecognizeSpeechOperator': (
'airflow.providers.google.cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator'
),
'GcpSpeechToTextRecognizeSpeechOperator': (
'airflow.providers.google.cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator'
),
},
'gcp_sql_operator': {
'CloudSQLBaseOperator': 'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLBaseOperator',
'CloudSQLCreateInstanceDatabaseOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLCreateInstanceDatabaseOperator'
),
'CloudSQLCreateInstanceOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLCreateInstanceOperator'
),
'CloudSQLDeleteInstanceDatabaseOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLDeleteInstanceDatabaseOperator'
),
'CloudSQLDeleteInstanceOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLDeleteInstanceOperator'
),
'CloudSQLExecuteQueryOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLExecuteQueryOperator'
),
'CloudSQLExportInstanceOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLExportInstanceOperator'
),
'CloudSQLImportInstanceOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLImportInstanceOperator'
),
'CloudSQLInstancePatchOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLInstancePatchOperator'
),
'CloudSQLPatchInstanceDatabaseOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLPatchInstanceDatabaseOperator'
),
'CloudSqlBaseOperator': 'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLBaseOperator',
'CloudSqlInstanceCreateOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLCreateInstanceOperator'
),
'CloudSqlInstanceDatabaseCreateOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLCreateInstanceDatabaseOperator'
),
'CloudSqlInstanceDatabaseDeleteOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLDeleteInstanceDatabaseOperator'
),
'CloudSqlInstanceDatabasePatchOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLPatchInstanceDatabaseOperator'
),
'CloudSqlInstanceDeleteOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLDeleteInstanceOperator'
),
'CloudSqlInstanceExportOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLExportInstanceOperator'
),
'CloudSqlInstanceImportOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLImportInstanceOperator'
),
'CloudSqlInstancePatchOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLInstancePatchOperator'
),
'CloudSqlQueryOperator': (
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLExecuteQueryOperator'
),
},
'gcp_tasks_operator': {
'CloudTasksQueueCreateOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueueCreateOperator'
),
'CloudTasksQueueDeleteOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueueDeleteOperator'
),
'CloudTasksQueueGetOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueueGetOperator'
),
'CloudTasksQueuePauseOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePauseOperator'
),
'CloudTasksQueuePurgeOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueuePurgeOperator'
),
'CloudTasksQueueResumeOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueueResumeOperator'
),
'CloudTasksQueuesListOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueuesListOperator'
),
'CloudTasksQueueUpdateOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksQueueUpdateOperator'
),
'CloudTasksTaskCreateOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksTaskCreateOperator'
),
'CloudTasksTaskDeleteOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksTaskDeleteOperator'
),
'CloudTasksTaskGetOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksTaskGetOperator'
),
'CloudTasksTaskRunOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksTaskRunOperator'
),
'CloudTasksTasksListOperator': (
'airflow.providers.google.cloud.operators.tasks.CloudTasksTasksListOperator'
),
},
'gcp_text_to_speech_operator': {
'CloudTextToSpeechSynthesizeOperator': (
'airflow.providers.google.cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator'
),
'GcpTextToSpeechSynthesizeOperator': (
'airflow.providers.google.cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator'
),
},
'gcp_transfer_operator': {
'CloudDataTransferServiceCancelOperationOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceCancelOperationOperator',
'CloudDataTransferServiceCreateJobOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceCreateJobOperator',
'CloudDataTransferServiceDeleteJobOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceDeleteJobOperator',
'CloudDataTransferServiceGCSToGCSOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceGCSToGCSOperator',
'CloudDataTransferServiceGetOperationOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceGetOperationOperator',
'CloudDataTransferServiceListOperationsOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceListOperationsOperator',
'CloudDataTransferServicePauseOperationOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServicePauseOperationOperator',
'CloudDataTransferServiceResumeOperationOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceResumeOperationOperator',
'CloudDataTransferServiceS3ToGCSOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceS3ToGCSOperator',
'CloudDataTransferServiceUpdateJobOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceUpdateJobOperator',
'GcpTransferServiceJobCreateOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceCreateJobOperator',
'GcpTransferServiceJobDeleteOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceDeleteJobOperator',
'GcpTransferServiceJobUpdateOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceUpdateJobOperator',
'GcpTransferServiceOperationCancelOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceCancelOperationOperator',
'GcpTransferServiceOperationGetOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceGetOperationOperator',
'GcpTransferServiceOperationPauseOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServicePauseOperationOperator',
'GcpTransferServiceOperationResumeOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceResumeOperationOperator',
'GcpTransferServiceOperationsListOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceListOperationsOperator',
'GoogleCloudStorageToGoogleCloudStorageTransferOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceGCSToGCSOperator',
'S3ToGoogleCloudStorageTransferOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceS3ToGCSOperator',
},
'gcp_translate_operator': {
'CloudTranslateTextOperator': (
'airflow.providers.google.cloud.operators.translate.CloudTranslateTextOperator'
),
},
'gcp_translate_speech_operator': {
'CloudTranslateSpeechOperator': (
'airflow.providers.google.cloud.operators.translate_speech.CloudTranslateSpeechOperator'
),
'GcpTranslateSpeechOperator': (
'airflow.providers.google.cloud.operators.translate_speech.CloudTranslateSpeechOperator'
),
},
'gcp_video_intelligence_operator': {
'CloudVideoIntelligenceDetectVideoExplicitContentOperator':
'airflow.providers.google.cloud.operators.video_intelligence.'
'CloudVideoIntelligenceDetectVideoExplicitContentOperator',
'CloudVideoIntelligenceDetectVideoLabelsOperator':
'airflow.providers.google.cloud.operators.video_intelligence.'
'CloudVideoIntelligenceDetectVideoLabelsOperator',
'CloudVideoIntelligenceDetectVideoShotsOperator':
'airflow.providers.google.cloud.operators.video_intelligence.'
'CloudVideoIntelligenceDetectVideoShotsOperator',
},
'gcp_vision_operator': {
'CloudVisionAddProductToProductSetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionAddProductToProductSetOperator'
),
'CloudVisionCreateProductOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductOperator'
),
'CloudVisionCreateProductSetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductSetOperator'
),
'CloudVisionCreateReferenceImageOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionCreateReferenceImageOperator'
),
'CloudVisionDeleteProductOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductOperator'
),
'CloudVisionDeleteProductSetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductSetOperator'
),
'CloudVisionDetectImageLabelsOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageLabelsOperator'
),
'CloudVisionDetectImageSafeSearchOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageSafeSearchOperator'
),
'CloudVisionDetectTextOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionDetectTextOperator'
),
'CloudVisionGetProductOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionGetProductOperator'
),
'CloudVisionGetProductSetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionGetProductSetOperator'
),
'CloudVisionImageAnnotateOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator'
),
'CloudVisionRemoveProductFromProductSetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator'
),
'CloudVisionTextDetectOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionTextDetectOperator'
),
'CloudVisionUpdateProductOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductOperator'
),
'CloudVisionUpdateProductSetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductSetOperator'
),
'CloudVisionAnnotateImageOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator'
),
'CloudVisionDetectDocumentTextOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionTextDetectOperator'
),
'CloudVisionProductCreateOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductOperator'
),
'CloudVisionProductDeleteOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductOperator'
),
'CloudVisionProductGetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionGetProductOperator'
),
'CloudVisionProductSetCreateOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductSetOperator'
),
'CloudVisionProductSetDeleteOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductSetOperator'
),
'CloudVisionProductSetGetOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionGetProductSetOperator'
),
'CloudVisionProductSetUpdateOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductSetOperator'
),
'CloudVisionProductUpdateOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductOperator'
),
'CloudVisionReferenceImageCreateOperator': (
'airflow.providers.google.cloud.operators.vision.CloudVisionCreateReferenceImageOperator'
),
},
'gcs_acl_operator': {
'GCSBucketCreateAclEntryOperator': (
'airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator'
),
'GCSObjectCreateAclEntryOperator': (
'airflow.providers.google.cloud.operators.gcs.GCSObjectCreateAclEntryOperator'
),
'GoogleCloudStorageBucketCreateAclEntryOperator': (
'airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator'
),
'GoogleCloudStorageObjectCreateAclEntryOperator': (
'airflow.providers.google.cloud.operators.gcs.GCSObjectCreateAclEntryOperator'
),
},
'gcs_delete_operator': {
'GCSDeleteObjectsOperator': 'airflow.providers.google.cloud.operators.gcs.GCSDeleteObjectsOperator',
'GoogleCloudStorageDeleteOperator': (
'airflow.providers.google.cloud.operators.gcs.GCSDeleteObjectsOperator'
),
},
'gcs_download_operator': {
'GCSToLocalFilesystemOperator': (
'airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator'
),
'GoogleCloudStorageDownloadOperator': (
'airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator'
),
},
'gcs_list_operator': {
'GCSListObjectsOperator': 'airflow.providers.google.cloud.operators.gcs.GCSListObjectsOperator',
'GoogleCloudStorageListOperator': (
'airflow.providers.google.cloud.operators.gcs.GCSListObjectsOperator'
),
},
'gcs_operator': {
'GCSCreateBucketOperator': 'airflow.providers.google.cloud.operators.gcs.GCSCreateBucketOperator',
'GoogleCloudStorageCreateBucketOperator': (
'airflow.providers.google.cloud.operators.gcs.GCSCreateBucketOperator'
),
},
'gcs_to_bq': {
'GCSToBigQueryOperator': (
'airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator'
),
'GoogleCloudStorageToBigQueryOperator': (
'airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator'
),
},
'gcs_to_gcs': {
'GCSToGCSOperator': 'airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator',
'GoogleCloudStorageToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator'
),
},
'gcs_to_gdrive_operator': {
'GCSToGoogleDriveOperator': (
'airflow.providers.google.suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator'
),
},
'gcs_to_s3': {
'GCSToS3Operator': 'airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator',
'GoogleCloudStorageToS3Operator': 'airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator',
},
'grpc_operator': {
'GrpcOperator': 'airflow.providers.grpc.operators.grpc.GrpcOperator',
},
'hive_to_dynamodb': {
'HiveToDynamoDBOperator': (
'airflow.providers.amazon.aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator'
),
},
'imap_attachment_to_s3_operator': {
'ImapAttachmentToS3Operator': (
'airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator'
),
},
'jenkins_job_trigger_operator': {
'JenkinsJobTriggerOperator': (
'airflow.providers.jenkins.operators.jenkins_job_trigger.JenkinsJobTriggerOperator'
),
},
'jira_operator': {
'JiraOperator': 'airflow.providers.atlassian.jira.operators.jira.JiraOperator',
},
'kubernetes_pod_operator': {
'KubernetesPodOperator': (
'airflow.providers.cncf.kubernetes.operators.pod.KubernetesPodOperator'
),
},
'mlengine_operator': {
'MLEngineManageModelOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineManageModelOperator'
),
'MLEngineManageVersionOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineManageVersionOperator'
),
'MLEngineStartBatchPredictionJobOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator'
),
'MLEngineStartTrainingJobOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineStartTrainingJobOperator'
),
'MLEngineBatchPredictionOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator'
),
'MLEngineModelOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineManageModelOperator'
),
'MLEngineTrainingOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineStartTrainingJobOperator'
),
'MLEngineVersionOperator': (
'airflow.providers.google.cloud.operators.mlengine.MLEngineManageVersionOperator'
),
},
'mongo_to_s3': {
'MongoToS3Operator': 'airflow.providers.amazon.aws.transfers.mongo_to_s3.MongoToS3Operator',
},
'mssql_to_gcs': {
'MSSQLToGCSOperator': 'airflow.providers.google.cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator',
'MsSqlToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator'
),
},
'mysql_to_gcs': {
'MySQLToGCSOperator': 'airflow.providers.google.cloud.transfers.mysql_to_gcs.MySQLToGCSOperator',
'MySqlToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.mysql_to_gcs.MySQLToGCSOperator'
),
},
'opsgenie_alert_operator': {
'OpsgenieCreateAlertOperator': (
'airflow.providers.opsgenie.operators.opsgenie.OpsgenieCreateAlertOperator'
),
'OpsgenieAlertOperator': 'airflow.providers.opsgenie.operators.opsgenie.OpsgenieCreateAlertOperator',
},
'oracle_to_azure_data_lake_transfer': {
'OracleToAzureDataLakeOperator':
'airflow.providers.microsoft.azure.transfers.'
'oracle_to_azure_data_lake.OracleToAzureDataLakeOperator',
},
'oracle_to_oracle_transfer': {
'OracleToOracleOperator': (
'airflow.providers.oracle.transfers.oracle_to_oracle.OracleToOracleOperator'
),
'OracleToOracleTransfer': (
'airflow.providers.oracle.transfers.oracle_to_oracle.OracleToOracleOperator'
),
},
'postgres_to_gcs_operator': {
'PostgresToGCSOperator': (
'airflow.providers.google.cloud.transfers.postgres_to_gcs.PostgresToGCSOperator'
),
'PostgresToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.postgres_to_gcs.PostgresToGCSOperator'
),
},
'pubsub_operator': {
'PubSubCreateSubscriptionOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubCreateSubscriptionOperator'
),
'PubSubCreateTopicOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubCreateTopicOperator'
),
'PubSubDeleteSubscriptionOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubDeleteSubscriptionOperator'
),
'PubSubDeleteTopicOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubDeleteTopicOperator'
),
'PubSubPublishMessageOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubPublishMessageOperator'
),
'PubSubPublishOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubPublishMessageOperator'
),
'PubSubSubscriptionCreateOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubCreateSubscriptionOperator'
),
'PubSubSubscriptionDeleteOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubDeleteSubscriptionOperator'
),
'PubSubTopicCreateOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubCreateTopicOperator'
),
'PubSubTopicDeleteOperator': (
'airflow.providers.google.cloud.operators.pubsub.PubSubDeleteTopicOperator'
),
},
'qubole_check_operator': {
'QuboleCheckOperator': 'airflow.providers.qubole.operators.qubole_check.QuboleCheckOperator',
'QuboleValueCheckOperator': (
'airflow.providers.qubole.operators.qubole_check.QuboleValueCheckOperator'
),
},
'qubole_operator': {
'QuboleOperator': 'airflow.providers.qubole.operators.qubole.QuboleOperator',
},
'redis_publish_operator': {
'RedisPublishOperator': 'airflow.providers.redis.operators.redis_publish.RedisPublishOperator',
},
's3_to_gcs_operator': {
'S3ToGCSOperator': 'airflow.providers.google.cloud.transfers.s3_to_gcs.S3ToGCSOperator',
},
's3_to_gcs_transfer_operator': {
'CloudDataTransferServiceS3ToGCSOperator':
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service.'
'CloudDataTransferServiceS3ToGCSOperator',
},
's3_to_sftp_operator': {
'S3ToSFTPOperator': 'airflow.providers.amazon.aws.transfers.s3_to_sftp.S3ToSFTPOperator',
},
'segment_track_event_operator': {
'SegmentTrackEventOperator': (
'airflow.providers.segment.operators.segment_track_event.SegmentTrackEventOperator'
),
},
'sftp_operator': {
'SFTPOperator': 'airflow.providers.sftp.operators.sftp.SFTPOperator',
},
'sftp_to_s3_operator': {
'SFTPToS3Operator': 'airflow.providers.amazon.aws.transfers.sftp_to_s3.SFTPToS3Operator',
},
'slack_webhook_operator': {
'SlackWebhookOperator': 'airflow.providers.slack.operators.slack_webhook.SlackWebhookOperator',
},
'snowflake_operator': {
'SnowflakeOperator': 'airflow.providers.snowflake.operators.snowflake.SnowflakeOperator',
},
'sns_publish_operator': {
'SnsPublishOperator': 'airflow.providers.amazon.aws.operators.sns.SnsPublishOperator',
},
'spark_jdbc_operator': {
'SparkJDBCOperator': 'airflow.providers.apache.spark.operators.spark_jdbc.SparkJDBCOperator',
'SparkSubmitOperator': 'airflow.providers.apache.spark.operators.spark_jdbc.SparkSubmitOperator',
},
'spark_sql_operator': {
'SparkSqlOperator': 'airflow.providers.apache.spark.operators.spark_sql.SparkSqlOperator',
},
'spark_submit_operator': {
'SparkSubmitOperator': 'airflow.providers.apache.spark.operators.spark_submit.SparkSubmitOperator',
},
'sql_to_gcs': {
'BaseSQLToGCSOperator': 'airflow.providers.google.cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator',
'BaseSQLToGoogleCloudStorageOperator': (
'airflow.providers.google.cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator'
),
},
'sqoop_operator': {
'SqoopOperator': 'airflow.providers.apache.sqoop.operators.sqoop.SqoopOperator',
},
'ssh_operator': {
'SSHOperator': 'airflow.providers.ssh.operators.ssh.SSHOperator',
},
'vertica_operator': {
'VerticaOperator': 'airflow.providers.vertica.operators.vertica.VerticaOperator',
},
'vertica_to_hive': {
'VerticaToHiveOperator': (
'airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator'
),
'VerticaToHiveTransfer': (
'airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator'
),
},
'vertica_to_mysql': {
'VerticaToMySqlOperator': 'airflow.providers.mysql.transfers.vertica_to_mysql.VerticaToMySqlOperator',
'VerticaToMySqlTransfer': 'airflow.providers.mysql.transfers.vertica_to_mysql.VerticaToMySqlOperator',
},
'wasb_delete_blob_operator': {
'WasbDeleteBlobOperator': (
'airflow.providers.microsoft.azure.operators.wasb_delete_blob.WasbDeleteBlobOperator'
),
},
'winrm_operator': {
'WinRMOperator': 'airflow.providers.microsoft.winrm.operators.winrm.WinRMOperator',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
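# A minimal sketch with hypothetical choices: once the mapping above is registered via
# ``add_deprecated_classes``, the old ``airflow.contrib`` import path keeps working but
# resolves lazily to the provider class and emits a deprecation warning on access. The
# operator picked here and the presence of the Google provider are assumptions about
# the surrounding installation, not guarantees of this shim.
def _example_deprecated_import():
    import warnings
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        from airflow.contrib.operators.s3_to_gcs_operator import S3ToGCSOperator
    # The deprecated name is just an alias for the provider implementation.
    return S3ToGCSOperator.__module__, [type(w.message).__name__ for w in caught]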
| 55,340 | 47.417323 | 110 |
py
|
airflow
|
airflow-main/airflow/contrib/hooks/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated. Please use `airflow.hooks` or `airflow.providers.*.hooks`."""
from __future__ import annotations
import warnings
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.utils.deprecation_tools import add_deprecated_classes
warnings.warn(
"This package is deprecated. Please use `airflow.hooks` or `airflow.providers.*.hooks`.",
RemovedInAirflow3Warning,
stacklevel=2,
)
__deprecated_classes = {
'aws_athena_hook': {
'AWSAthenaHook': 'airflow.providers.amazon.aws.hooks.athena.AthenaHook',
},
'aws_datasync_hook': {
'AWSDataSyncHook': 'airflow.providers.amazon.aws.hooks.datasync.DataSyncHook',
},
'aws_dynamodb_hook': {
'AwsDynamoDBHook': 'airflow.providers.amazon.aws.hooks.dynamodb.DynamoDBHook',
},
'aws_firehose_hook': {
'FirehoseHook': 'airflow.providers.amazon.aws.hooks.kinesis.FirehoseHook',
},
'aws_glue_catalog_hook': {
'AwsGlueCatalogHook': 'airflow.providers.amazon.aws.hooks.glue_catalog.GlueCatalogHook',
},
'aws_hook': {
'AwsBaseHook': 'airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook',
'_parse_s3_config': 'airflow.providers.amazon.aws.hooks.base_aws._parse_s3_config',
'boto3': 'airflow.providers.amazon.aws.hooks.base_aws.boto3',
'AwsHook': 'airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook',
},
'aws_lambda_hook': {
'AwsLambdaHook': 'airflow.providers.amazon.aws.hooks.lambda_function.LambdaHook',
},
'aws_logs_hook': {
'AwsLogsHook': 'airflow.providers.amazon.aws.hooks.logs.AwsLogsHook',
},
'aws_sns_hook': {
'AwsSnsHook': 'airflow.providers.amazon.aws.hooks.sns.SnsHook',
},
'aws_sqs_hook': {
'SqsHook': 'airflow.providers.amazon.aws.hooks.sqs.SqsHook',
'SQSHook': 'airflow.providers.amazon.aws.hooks.sqs.SqsHook',
},
'azure_container_instance_hook': {
'AzureContainerInstanceHook': (
'airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook'
),
},
'azure_container_registry_hook': {
'AzureContainerRegistryHook': (
'airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook'
),
},
'azure_container_volume_hook': {
'AzureContainerVolumeHook': (
'airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook'
),
},
'azure_cosmos_hook': {
'AzureCosmosDBHook': 'airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook',
},
'azure_data_lake_hook': {
'AzureDataLakeHook': 'airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook',
},
'azure_fileshare_hook': {
'AzureFileShareHook': 'airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook',
},
'bigquery_hook': {
'BigQueryBaseCursor': 'airflow.providers.google.cloud.hooks.bigquery.BigQueryBaseCursor',
'BigQueryConnection': 'airflow.providers.google.cloud.hooks.bigquery.BigQueryConnection',
'BigQueryCursor': 'airflow.providers.google.cloud.hooks.bigquery.BigQueryCursor',
'BigQueryHook': 'airflow.providers.google.cloud.hooks.bigquery.BigQueryHook',
'GbqConnector': 'airflow.providers.google.cloud.hooks.bigquery.GbqConnector',
},
'cassandra_hook': {
'CassandraHook': 'airflow.providers.apache.cassandra.hooks.cassandra.CassandraHook',
},
'cloudant_hook': {
'CloudantHook': 'airflow.providers.cloudant.hooks.cloudant.CloudantHook',
},
'databricks_hook': {
'CANCEL_RUN_ENDPOINT': 'airflow.providers.databricks.hooks.databricks.CANCEL_RUN_ENDPOINT',
'GET_RUN_ENDPOINT': 'airflow.providers.databricks.hooks.databricks.GET_RUN_ENDPOINT',
'RESTART_CLUSTER_ENDPOINT': 'airflow.providers.databricks.hooks.databricks.RESTART_CLUSTER_ENDPOINT',
'RUN_LIFE_CYCLE_STATES': 'airflow.providers.databricks.hooks.databricks.RUN_LIFE_CYCLE_STATES',
'RUN_NOW_ENDPOINT': 'airflow.providers.databricks.hooks.databricks.RUN_NOW_ENDPOINT',
'START_CLUSTER_ENDPOINT': 'airflow.providers.databricks.hooks.databricks.START_CLUSTER_ENDPOINT',
'SUBMIT_RUN_ENDPOINT': 'airflow.providers.databricks.hooks.databricks.SUBMIT_RUN_ENDPOINT',
'TERMINATE_CLUSTER_ENDPOINT': (
'airflow.providers.databricks.hooks.databricks.TERMINATE_CLUSTER_ENDPOINT'
),
'DatabricksHook': 'airflow.providers.databricks.hooks.databricks.DatabricksHook',
'RunState': 'airflow.providers.databricks.hooks.databricks.RunState',
},
'datadog_hook': {
'DatadogHook': 'airflow.providers.datadog.hooks.datadog.DatadogHook',
},
'datastore_hook': {
'DatastoreHook': 'airflow.providers.google.cloud.hooks.datastore.DatastoreHook',
},
'dingding_hook': {
'DingdingHook': 'airflow.providers.dingding.hooks.dingding.DingdingHook',
'requests': 'airflow.providers.dingding.hooks.dingding.requests',
},
'discord_webhook_hook': {
'DiscordWebhookHook': 'airflow.providers.discord.hooks.discord_webhook.DiscordWebhookHook',
},
'emr_hook': {
'EmrHook': 'airflow.providers.amazon.aws.hooks.emr.EmrHook',
},
'fs_hook': {
'FSHook': 'airflow.hooks.filesystem.FSHook',
},
'ftp_hook': {
'FTPHook': 'airflow.providers.ftp.hooks.ftp.FTPHook',
'FTPSHook': 'airflow.providers.ftp.hooks.ftp.FTPSHook',
},
'gcp_api_base_hook': {
'GoogleBaseHook': 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook',
'GoogleCloudBaseHook': 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook',
},
'gcp_bigtable_hook': {
'BigtableHook': 'airflow.providers.google.cloud.hooks.bigtable.BigtableHook',
},
'gcp_cloud_build_hook': {
'CloudBuildHook': 'airflow.providers.google.cloud.hooks.cloud_build.CloudBuildHook',
},
'gcp_compute_hook': {
'ComputeEngineHook': 'airflow.providers.google.cloud.hooks.compute.ComputeEngineHook',
'GceHook': 'airflow.providers.google.cloud.hooks.compute.ComputeEngineHook',
},
'gcp_container_hook': {
'GKEHook': 'airflow.providers.google.cloud.hooks.kubernetes_engine.GKEHook',
'GKEClusterHook': 'airflow.providers.google.cloud.hooks.kubernetes_engine.GKEHook',
},
'gcp_dataflow_hook': {
'DataflowHook': 'airflow.providers.google.cloud.hooks.dataflow.DataflowHook',
'DataFlowHook': 'airflow.providers.google.cloud.hooks.dataflow.DataflowHook',
},
'gcp_dataproc_hook': {
'DataprocHook': 'airflow.providers.google.cloud.hooks.dataproc.DataprocHook',
'DataProcHook': 'airflow.providers.google.cloud.hooks.dataproc.DataprocHook',
},
'gcp_dlp_hook': {
'CloudDLPHook': 'airflow.providers.google.cloud.hooks.dlp.CloudDLPHook',
'DlpJob': 'airflow.providers.google.cloud.hooks.dlp.DlpJob',
},
'gcp_function_hook': {
'CloudFunctionsHook': 'airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook',
'GcfHook': 'airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook',
},
'gcp_kms_hook': {
'CloudKMSHook': 'airflow.providers.google.cloud.hooks.kms.CloudKMSHook',
'GoogleCloudKMSHook': 'airflow.providers.google.cloud.hooks.kms.CloudKMSHook',
},
'gcp_mlengine_hook': {
'MLEngineHook': 'airflow.providers.google.cloud.hooks.mlengine.MLEngineHook',
},
'gcp_natural_language_hook': {
'CloudNaturalLanguageHook': (
'airflow.providers.google.cloud.hooks.natural_language.CloudNaturalLanguageHook'
),
},
'gcp_pubsub_hook': {
'PubSubException': 'airflow.providers.google.cloud.hooks.pubsub.PubSubException',
'PubSubHook': 'airflow.providers.google.cloud.hooks.pubsub.PubSubHook',
},
'gcp_spanner_hook': {
'SpannerHook': 'airflow.providers.google.cloud.hooks.spanner.SpannerHook',
'CloudSpannerHook': 'airflow.providers.google.cloud.hooks.spanner.SpannerHook',
},
'gcp_speech_to_text_hook': {
'CloudSpeechToTextHook': 'airflow.providers.google.cloud.hooks.speech_to_text.CloudSpeechToTextHook',
'GCPSpeechToTextHook': 'airflow.providers.google.cloud.hooks.speech_to_text.CloudSpeechToTextHook',
},
'gcp_sql_hook': {
'CloudSQLDatabaseHook': 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook',
'CloudSQLHook': 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook',
'CloudSqlDatabaseHook': 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook',
'CloudSqlHook': 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook',
},
'gcp_tasks_hook': {
'CloudTasksHook': 'airflow.providers.google.cloud.hooks.tasks.CloudTasksHook',
},
'gcp_text_to_speech_hook': {
'CloudTextToSpeechHook': 'airflow.providers.google.cloud.hooks.text_to_speech.CloudTextToSpeechHook',
'GCPTextToSpeechHook': 'airflow.providers.google.cloud.hooks.text_to_speech.CloudTextToSpeechHook',
},
'gcp_transfer_hook': {
'CloudDataTransferServiceHook': (
'airflow.providers.google.cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook'
),
'GCPTransferServiceHook': (
'airflow.providers.google.cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook'
),
},
'gcp_translate_hook': {
'CloudTranslateHook': 'airflow.providers.google.cloud.hooks.translate.CloudTranslateHook',
},
'gcp_video_intelligence_hook': {
'CloudVideoIntelligenceHook': (
'airflow.providers.google.cloud.hooks.video_intelligence.CloudVideoIntelligenceHook'
),
},
'gcp_vision_hook': {
'CloudVisionHook': 'airflow.providers.google.cloud.hooks.vision.CloudVisionHook',
},
'gcs_hook': {
'GCSHook': 'airflow.providers.google.cloud.hooks.gcs.GCSHook',
'GoogleCloudStorageHook': 'airflow.providers.google.cloud.hooks.gcs.GCSHook',
},
'gdrive_hook': {
'GoogleDriveHook': 'airflow.providers.google.suite.hooks.drive.GoogleDriveHook',
},
'grpc_hook': {
'GrpcHook': 'airflow.providers.grpc.hooks.grpc.GrpcHook',
},
'imap_hook': {
'ImapHook': 'airflow.providers.imap.hooks.imap.ImapHook',
'Mail': 'airflow.providers.imap.hooks.imap.Mail',
'MailPart': 'airflow.providers.imap.hooks.imap.MailPart',
},
'jenkins_hook': {
'JenkinsHook': 'airflow.providers.jenkins.hooks.jenkins.JenkinsHook',
},
'jira_hook': {
'JiraHook': 'airflow.providers.atlassian.jira.hooks.jira.JiraHook',
},
'mongo_hook': {
'MongoHook': 'airflow.providers.mongo.hooks.mongo.MongoHook',
},
'openfaas_hook': {
'OK_STATUS_CODE': 'airflow.providers.openfaas.hooks.openfaas.OK_STATUS_CODE',
'OpenFaasHook': 'airflow.providers.openfaas.hooks.openfaas.OpenFaasHook',
'requests': 'airflow.providers.openfaas.hooks.openfaas.requests',
},
'opsgenie_alert_hook': {
'OpsgenieAlertHook': 'airflow.providers.opsgenie.hooks.opsgenie.OpsgenieAlertHook',
},
'pagerduty_hook': {
'PagerdutyHook': 'airflow.providers.pagerduty.hooks.pagerduty.PagerdutyHook',
},
'pinot_hook': {
'PinotAdminHook': 'airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook',
'PinotDbApiHook': 'airflow.providers.apache.pinot.hooks.pinot.PinotDbApiHook',
},
'qubole_check_hook': {
'QuboleCheckHook': 'airflow.providers.qubole.hooks.qubole_check.QuboleCheckHook',
},
'qubole_hook': {
'QuboleHook': 'airflow.providers.qubole.hooks.qubole.QuboleHook',
},
'redis_hook': {
'RedisHook': 'airflow.providers.redis.hooks.redis.RedisHook',
},
'redshift_hook': {
'RedshiftHook': 'airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftHook',
},
'sagemaker_hook': {
'LogState': 'airflow.providers.amazon.aws.hooks.sagemaker.LogState',
'Position': 'airflow.providers.amazon.aws.hooks.sagemaker.Position',
'SageMakerHook': 'airflow.providers.amazon.aws.hooks.sagemaker.SageMakerHook',
'argmin': 'airflow.providers.amazon.aws.hooks.sagemaker.argmin',
'secondary_training_status_changed': (
'airflow.providers.amazon.aws.hooks.sagemaker.secondary_training_status_changed'
),
'secondary_training_status_message': (
'airflow.providers.amazon.aws.hooks.sagemaker.secondary_training_status_message'
),
},
'salesforce_hook': {
'SalesforceHook': 'airflow.providers.salesforce.hooks.salesforce.SalesforceHook',
'pd': 'airflow.providers.salesforce.hooks.salesforce.pd',
},
'segment_hook': {
'SegmentHook': 'airflow.providers.segment.hooks.segment.SegmentHook',
'analytics': 'airflow.providers.segment.hooks.segment.analytics',
},
'sftp_hook': {
'SFTPHook': 'airflow.providers.sftp.hooks.sftp.SFTPHook',
},
'slack_webhook_hook': {
'SlackWebhookHook': 'airflow.providers.slack.hooks.slack_webhook.SlackWebhookHook',
},
'snowflake_hook': {
'SnowflakeHook': 'airflow.providers.snowflake.hooks.snowflake.SnowflakeHook',
},
'spark_jdbc_hook': {
'SparkJDBCHook': 'airflow.providers.apache.spark.hooks.spark_jdbc.SparkJDBCHook',
},
'spark_sql_hook': {
'SparkSqlHook': 'airflow.providers.apache.spark.hooks.spark_sql.SparkSqlHook',
},
'spark_submit_hook': {
'SparkSubmitHook': 'airflow.providers.apache.spark.hooks.spark_submit.SparkSubmitHook',
},
'sqoop_hook': {
'SqoopHook': 'airflow.providers.apache.sqoop.hooks.sqoop.SqoopHook',
},
'ssh_hook': {
'SSHHook': 'airflow.providers.ssh.hooks.ssh.SSHHook',
},
'vertica_hook': {
'VerticaHook': 'airflow.providers.vertica.hooks.vertica.VerticaHook',
},
'wasb_hook': {
'WasbHook': 'airflow.providers.microsoft.azure.hooks.wasb.WasbHook',
},
'winrm_hook': {
'WinRMHook': 'airflow.providers.microsoft.winrm.hooks.winrm.WinRMHook',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
| 15,158 | 43.849112 | 110 |
py
|
airflow
|
airflow-main/airflow/contrib/utils/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated. Please use `airflow.utils`."""
from __future__ import annotations
import warnings
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.utils.deprecation_tools import add_deprecated_classes
warnings.warn(
"This module is deprecated. Please use `airflow.utils`.",
RemovedInAirflow3Warning,
stacklevel=2
)
__deprecated_classes = {
'gcp_field_sanitizer': {
'GcpBodyFieldSanitizer': 'airflow.providers.google.cloud.utils.field_sanitizer.GcpBodyFieldSanitizer',
'GcpFieldSanitizerException': (
'airflow.providers.google.cloud.utils.field_sanitizer.GcpFieldSanitizerException'
),
},
'gcp_field_validator': {
'GcpBodyFieldValidator': 'airflow.providers.google.cloud.utils.field_validator.GcpBodyFieldValidator',
'GcpFieldValidationException': (
'airflow.providers.google.cloud.utils.field_validator.GcpFieldValidationException'
),
'GcpValidationSpecificationException': (
'airflow.providers.google.cloud.utils.field_validator.GcpValidationSpecificationException'
),
},
'mlengine_operator_utils': {
'create_evaluate_ops': (
'airflow.providers.google.cloud.utils.mlengine_operator_utils.create_evaluate_ops'
),
},
'mlengine_prediction_summary': {
'JsonCoder': 'airflow.providers.google.cloud.utils.mlengine_prediction_summary.JsonCoder',
'MakeSummary': 'airflow.providers.google.cloud.utils.mlengine_prediction_summary.MakeSummary',
},
'sendgrid': {
'import_string': 'airflow.utils.module_loading.import_string',
},
'weekday': {
'WeekDay': 'airflow.utils.weekday.WeekDay',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
| 2,582 | 38.136364 | 110 |
py
|
airflow
|
airflow-main/airflow/contrib/utils/log/__init__.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated. Please use `airflow.utils.log`."""
from __future__ import annotations
import warnings
from airflow.utils.deprecation_tools import add_deprecated_classes
warnings.warn("This module is deprecated. Please use `airflow.utils.log`.", DeprecationWarning, stacklevel=2)
__deprecated_classes = {
'task_handler_with_custom_formatter': {
'TaskHandlerWithCustomFormatter': (
'airflow.utils.log.task_handler_with_custom_formatter.TaskHandlerWithCustomFormatter'
),
},
}
add_deprecated_classes(__deprecated_classes, __name__)
| 1,370 | 38.171429 | 109 |
py
|
airflow
|
airflow-main/airflow/contrib/sensors/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This package is deprecated. Please use `airflow.sensors` or `airflow.providers.*.sensors`."""
from __future__ import annotations
import warnings
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.utils.deprecation_tools import add_deprecated_classes
warnings.warn(
"This package is deprecated. Please use `airflow.sensors` or `airflow.providers.*.sensors`.",
RemovedInAirflow3Warning,
stacklevel=2,
)
__deprecated_classes = {
'aws_athena_sensor': {
'AthenaSensor': 'airflow.providers.amazon.aws.sensors.athena.AthenaSensor',
},
'aws_glue_catalog_partition_sensor': {
'AwsGlueCatalogPartitionSensor': (
'airflow.providers.amazon.aws.sensors.glue_catalog_partition.GlueCatalogPartitionSensor'
),
},
'aws_redshift_cluster_sensor': {
'AwsRedshiftClusterSensor': (
'airflow.providers.amazon.aws.sensors.redshift_cluster.RedshiftClusterSensor'
),
},
'aws_sqs_sensor': {
'SqsSensor': 'airflow.providers.amazon.aws.sensors.sqs.SqsSensor',
'SQSSensor': 'airflow.providers.amazon.aws.sensors.sqs.SqsSensor',
},
'azure_cosmos_sensor': {
'AzureCosmosDocumentSensor': (
'airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor'
),
},
'bash_sensor': {
'STDOUT': 'airflow.sensors.bash.STDOUT',
'BashSensor': 'airflow.sensors.bash.BashSensor',
'Popen': 'airflow.sensors.bash.Popen',
'TemporaryDirectory': 'airflow.sensors.bash.TemporaryDirectory',
'gettempdir': 'airflow.sensors.bash.gettempdir',
},
'bigquery_sensor': {
'BigQueryTableExistenceSensor': (
'airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor'
),
'BigQueryTableSensor': 'airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceSensor',
},
'cassandra_record_sensor': {
'CassandraRecordSensor': 'airflow.providers.apache.cassandra.sensors.record.CassandraRecordSensor',
},
'cassandra_table_sensor': {
'CassandraTableSensor': 'airflow.providers.apache.cassandra.sensors.table.CassandraTableSensor',
},
'celery_queue_sensor': {
'CeleryQueueSensor': 'airflow.providers.celery.sensors.celery_queue.CeleryQueueSensor',
},
'datadog_sensor': {
'DatadogSensor': 'airflow.providers.datadog.sensors.datadog.DatadogSensor',
},
'file_sensor': {
'FileSensor': 'airflow.sensors.filesystem.FileSensor',
},
'ftp_sensor': {
'FTPSensor': 'airflow.providers.ftp.sensors.ftp.FTPSensor',
'FTPSSensor': 'airflow.providers.ftp.sensors.ftp.FTPSSensor',
},
'gcp_transfer_sensor': {
'CloudDataTransferServiceJobStatusSensor':
'airflow.providers.google.cloud.sensors.cloud_storage_transfer_service.'
'CloudDataTransferServiceJobStatusSensor',
'GCPTransferServiceWaitForJobStatusSensor':
'airflow.providers.google.cloud.sensors.cloud_storage_transfer_service.'
'CloudDataTransferServiceJobStatusSensor',
},
'gcs_sensor': {
'GCSObjectExistenceSensor': 'airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor',
'GCSObjectsWithPrefixExistenceSensor': (
'airflow.providers.google.cloud.sensors.gcs.GCSObjectsWithPrefixExistenceSensor'
),
'GCSObjectUpdateSensor': 'airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor',
'GCSUploadSessionCompleteSensor': (
'airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor'
),
'GoogleCloudStorageObjectSensor': (
'airflow.providers.google.cloud.sensors.gcs.GCSObjectExistenceSensor'
),
'GoogleCloudStorageObjectUpdatedSensor': (
'airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor'
),
'GoogleCloudStoragePrefixSensor': (
'airflow.providers.google.cloud.sensors.gcs.GCSObjectsWithPrefixExistenceSensor'
),
'GoogleCloudStorageUploadSessionCompleteSensor': (
'airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor'
),
},
'imap_attachment_sensor': {
'ImapAttachmentSensor': 'airflow.providers.imap.sensors.imap_attachment.ImapAttachmentSensor',
},
'jira_sensor': {
'JiraSensor': 'airflow.providers.atlassian.jira.sensors.jira.JiraSensor',
'JiraTicketSensor': 'airflow.providers.atlassian.jira.sensors.jira.JiraTicketSensor',
},
'mongo_sensor': {
'MongoSensor': 'airflow.providers.mongo.sensors.mongo.MongoSensor',
},
'pubsub_sensor': {
'PubSubPullSensor': 'airflow.providers.google.cloud.sensors.pubsub.PubSubPullSensor',
},
'python_sensor': {
'PythonSensor': 'airflow.sensors.python.PythonSensor',
},
'qubole_sensor': {
'QuboleFileSensor': 'airflow.providers.qubole.sensors.qubole.QuboleFileSensor',
'QubolePartitionSensor': 'airflow.providers.qubole.sensors.qubole.QubolePartitionSensor',
'QuboleSensor': 'airflow.providers.qubole.sensors.qubole.QuboleSensor',
},
'redis_key_sensor': {
'RedisKeySensor': 'airflow.providers.redis.sensors.redis_key.RedisKeySensor',
},
'redis_pub_sub_sensor': {
'RedisPubSubSensor': 'airflow.providers.redis.sensors.redis_pub_sub.RedisPubSubSensor',
},
'sagemaker_training_sensor': {
'SageMakerHook': 'airflow.providers.amazon.aws.sensors.sagemaker.SageMakerHook',
'SageMakerTrainingSensor': 'airflow.providers.amazon.aws.sensors.sagemaker.SageMakerTrainingSensor',
},
'sftp_sensor': {
'SFTPSensor': 'airflow.providers.sftp.sensors.sftp.SFTPSensor',
},
'wasb_sensor': {
'WasbBlobSensor': 'airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor',
'WasbPrefixSensor': 'airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor',
},
'weekday_sensor': {
'DayOfWeekSensor': 'airflow.sensors.weekday.DayOfWeekSensor',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
| 6,968 | 42.285714 | 110 |
py
|
airflow
|
airflow-main/airflow/sensors/base.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import functools
import hashlib
import logging
import time
import traceback
from datetime import timedelta
from typing import Any, Callable, Iterable
from airflow import settings
from airflow.configuration import conf
from airflow.exceptions import (
AirflowException,
AirflowFailException,
AirflowRescheduleException,
AirflowSensorTimeout,
AirflowSkipException,
AirflowTaskTimeout,
)
from airflow.executors.executor_loader import ExecutorLoader
from airflow.models.baseoperator import BaseOperator
from airflow.models.skipmixin import SkipMixin
from airflow.models.taskreschedule import TaskReschedule
from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep
from airflow.utils import timezone
from airflow.utils.context import Context
# We need to keep the import here because GCSToLocalFilesystemOperator released in
# Google Provider before 3.0.0 imported apply_defaults from here.
# See https://github.com/apache/airflow/issues/16035
from airflow.utils.decorators import apply_defaults # noqa: F401
# As documented in https://dev.mysql.com/doc/refman/5.7/en/datetime.html.
_MYSQL_TIMESTAMP_MAX = datetime.datetime(2038, 1, 19, 3, 14, 7, tzinfo=timezone.utc)
@functools.lru_cache(maxsize=None)
def _is_metadatabase_mysql() -> bool:
if settings.engine is None:
raise AirflowException("Must initialize ORM first")
return settings.engine.url.get_backend_name() == "mysql"
class PokeReturnValue:
"""
Optional return value for poke methods.
Sensors can optionally return an instance of the PokeReturnValue class in the poke method.
If an XCom value is supplied when the sensor is done, then the XCom value will be
pushed through the operator return value.
:param is_done: Set to true to indicate the sensor can stop poking.
:param xcom_value: An optional XCOM value to be returned by the operator.
"""
def __init__(self, is_done: bool, xcom_value: Any | None = None) -> None:
self.xcom_value = xcom_value
self.is_done = is_done
def __bool__(self) -> bool:
return self.is_done
class BaseSensorOperator(BaseOperator, SkipMixin):
"""
Sensor operators are derived from this class and inherit these attributes.
Sensor operators keep executing at a time interval and succeed when
a criteria is met and fail if and when they time out.
:param soft_fail: Set to true to mark the task as SKIPPED on failure
:param poke_interval: Time that the job should wait in between each try.
Can be ``timedelta`` or ``float`` seconds.
:param timeout: Time elapsed before the task times out and fails.
Can be ``timedelta`` or ``float`` seconds.
This should not be confused with ``execution_timeout`` of the
``BaseOperator`` class. ``timeout`` measures the time elapsed between the
first poke and the current time (taking into account any
reschedule delay between each poke), while ``execution_timeout``
checks the **running** time of the task (leaving out any reschedule
delay). In case that the ``mode`` is ``poke`` (see below), both of
them are equivalent (as the sensor is never rescheduled), which is not
the case in ``reschedule`` mode.
:param mode: How the sensor operates.
Options are: ``{ poke | reschedule }``, default is ``poke``.
When set to ``poke`` the sensor is taking up a worker slot for its
whole execution time and sleeps between pokes. Use this mode if the
expected runtime of the sensor is short or if a short poke interval
is required. Note that the sensor will hold onto a worker slot and
a pool slot for the duration of the sensor's runtime in this mode.
When set to ``reschedule`` the sensor task frees the worker slot when
the criteria is not yet met and it's rescheduled at a later time. Use
this mode if the time before the criteria is met is expected to be
quite long. The poke interval should be more than one minute to
prevent too much load on the scheduler.
:param exponential_backoff: allow progressive longer waits between
pokes by using exponential backoff algorithm
:param max_wait: maximum wait interval between pokes, can be ``timedelta`` or ``float`` seconds
:param silent_fail: If true, and poke method raises an exception different from
AirflowSensorTimeout, AirflowTaskTimeout, AirflowSkipException
and AirflowFailException, the sensor will log the error and continue
its execution. Otherwise, the sensor task fails, and it can be retried
based on the provided `retries` parameter.
"""
ui_color: str = "#e6f1f2"
valid_modes: Iterable[str] = ["poke", "reschedule"]
# Adds one additional dependency for all sensor operators that checks if a
# sensor task instance can be rescheduled.
deps = BaseOperator.deps | {ReadyToRescheduleDep()}
def __init__(
self,
*,
poke_interval: timedelta | float = 60,
timeout: timedelta | float = conf.getfloat("sensors", "default_timeout"),
soft_fail: bool = False,
mode: str = "poke",
exponential_backoff: bool = False,
max_wait: timedelta | float | None = None,
silent_fail: bool = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.poke_interval = self._coerce_poke_interval(poke_interval).total_seconds()
self.soft_fail = soft_fail
self.timeout = self._coerce_timeout(timeout).total_seconds()
self.mode = mode
self.exponential_backoff = exponential_backoff
self.max_wait = self._coerce_max_wait(max_wait)
self.silent_fail = silent_fail
self._validate_input_values()
@staticmethod
def _coerce_poke_interval(poke_interval: float | timedelta) -> timedelta:
if isinstance(poke_interval, timedelta):
return poke_interval
if isinstance(poke_interval, (int, float)) and poke_interval >= 0:
return timedelta(seconds=poke_interval)
raise AirflowException(
"Operator arg `poke_interval` must be timedelta object or a non-negative number"
)
@staticmethod
def _coerce_timeout(timeout: float | timedelta) -> timedelta:
if isinstance(timeout, timedelta):
return timeout
if isinstance(timeout, (int, float)) and timeout >= 0:
return timedelta(seconds=timeout)
raise AirflowException("Operator arg `timeout` must be timedelta object or a non-negative number")
@staticmethod
def _coerce_max_wait(max_wait: float | timedelta | None) -> timedelta | None:
if max_wait is None or isinstance(max_wait, timedelta):
return max_wait
if isinstance(max_wait, (int, float)) and max_wait >= 0:
return timedelta(seconds=max_wait)
raise AirflowException("Operator arg `max_wait` must be timedelta object or a non-negative number")
def _validate_input_values(self) -> None:
if not isinstance(self.poke_interval, (int, float)) or self.poke_interval < 0:
raise AirflowException("The poke_interval must be a non-negative number")
if not isinstance(self.timeout, (int, float)) or self.timeout < 0:
raise AirflowException("The timeout must be a non-negative number")
if self.mode not in self.valid_modes:
raise AirflowException(
f"The mode must be one of {self.valid_modes},'{self.dag.dag_id if self.has_dag() else ''} "
f".{self.task_id}'; received '{self.mode}'."
)
        # Quick check that poke_interval isn't immediately over MySQL's TIMESTAMP limit.
# This check is only rudimentary to catch trivial user errors, e.g. mistakenly
# set the value to milliseconds instead of seconds. There's another check when
# we actually try to reschedule to ensure database coherence.
if self.reschedule and _is_metadatabase_mysql():
if timezone.utcnow() + datetime.timedelta(seconds=self.poke_interval) > _MYSQL_TIMESTAMP_MAX:
raise AirflowException(
f"Cannot set poke_interval to {self.poke_interval} seconds in reschedule "
f"mode since it will take reschedule time over MySQL's TIMESTAMP limit."
)
def poke(self, context: Context) -> bool | PokeReturnValue:
"""Function defined by the sensors while deriving this class should override."""
raise AirflowException("Override me.")
def execute(self, context: Context) -> Any:
started_at: datetime.datetime | float
if self.reschedule:
# If reschedule, use the start date of the first try (first try can be either the very
# first execution of the task, or the first execution after the task was cleared.)
first_try_number = context["ti"].max_tries - self.retries + 1
task_reschedules = TaskReschedule.find_for_task_instance(
context["ti"], try_number=first_try_number
)
if not task_reschedules:
start_date = timezone.utcnow()
else:
start_date = task_reschedules[0].start_date
started_at = start_date
def run_duration() -> float:
# If we are in reschedule mode, then we have to compute diff
# based on the time in a DB, so can't use time.monotonic
return (timezone.utcnow() - start_date).total_seconds()
else:
started_at = start_monotonic = time.monotonic()
def run_duration() -> float:
return time.monotonic() - start_monotonic
try_number = 1
log_dag_id = self.dag.dag_id if self.has_dag() else ""
xcom_value = None
while True:
try:
poke_return = self.poke(context)
except (
AirflowSensorTimeout,
AirflowTaskTimeout,
AirflowSkipException,
AirflowFailException,
) as e:
raise e
except Exception as e:
if self.silent_fail:
logging.error("Sensor poke failed: \n %s", traceback.format_exc())
poke_return = False
else:
raise e
if poke_return:
if isinstance(poke_return, PokeReturnValue):
xcom_value = poke_return.xcom_value
break
if run_duration() > self.timeout:
# If sensor is in soft fail mode but times out raise AirflowSkipException.
message = (
f"Sensor has timed out; run duration of {run_duration()} seconds exceeds "
f"the specified timeout of {self.timeout}."
)
if self.soft_fail:
raise AirflowSkipException(message)
else:
raise AirflowSensorTimeout(message)
if self.reschedule:
next_poke_interval = self._get_next_poke_interval(started_at, run_duration, try_number)
reschedule_date = timezone.utcnow() + timedelta(seconds=next_poke_interval)
if _is_metadatabase_mysql() and reschedule_date > _MYSQL_TIMESTAMP_MAX:
raise AirflowSensorTimeout(
f"Cannot reschedule DAG {log_dag_id} to {reschedule_date.isoformat()} "
f"since it is over MySQL's TIMESTAMP storage limit."
)
raise AirflowRescheduleException(reschedule_date)
else:
time.sleep(self._get_next_poke_interval(started_at, run_duration, try_number))
try_number += 1
self.log.info("Success criteria met. Exiting.")
return xcom_value
def _get_next_poke_interval(
self,
started_at: datetime.datetime | float,
run_duration: Callable[[], float],
try_number: int,
) -> float:
"""Using the similar logic which is used for exponential backoff retry delay for operators."""
if not self.exponential_backoff:
return self.poke_interval
# The value of min_backoff should always be greater than or equal to 1.
min_backoff = max(int(self.poke_interval * (2 ** (try_number - 2))), 1)
run_hash = int(
hashlib.sha1(f"{self.dag_id}#{self.task_id}#{started_at}#{try_number}".encode()).hexdigest(),
16,
)
modded_hash = min_backoff + run_hash % min_backoff
delay_backoff_in_seconds = min(modded_hash, timedelta.max.total_seconds() - 1)
new_interval = min(self.timeout - int(run_duration()), delay_backoff_in_seconds)
if self.max_wait:
new_interval = min(self.max_wait.total_seconds(), new_interval)
self.log.info("new %s interval is %s", self.mode, new_interval)
return new_interval
def prepare_for_execution(self) -> BaseOperator:
task = super().prepare_for_execution()
        # Sensors in `poke` mode can block execution of DAGs when running
        # with a single-process executor, so we change the mode to `reschedule`
        # to allow tasks to be scheduled and executed in parallel.
executor, _ = ExecutorLoader.import_default_executor_cls()
if executor.change_sensor_mode_to_reschedule:
self.log.warning("%s changes sensor mode to 'reschedule'.", executor.__name__)
task.mode = "reschedule"
return task
@property
def reschedule(self):
"""Define mode rescheduled sensors."""
return self.mode == "reschedule"
@classmethod
def get_serialized_fields(cls):
return super().get_serialized_fields() | {"reschedule"}
def poke_mode_only(cls):
"""
    Decorate a subclass of BaseSensorOperator whose instances are only safe to use in poke mode.
    The decorator replaces the ``mode`` attribute with a property that rejects any value
    other than 'poke', so the mode cannot be changed after the class is defined.
    :param cls: BaseSensorOperator subclass to restrict to 'poke' mode.
"""
def decorate(cls_type):
def mode_getter(_):
return "poke"
def mode_setter(_, value):
if value != "poke":
raise ValueError(f"Cannot set mode to '{value}'. Only 'poke' is acceptable")
if not issubclass(cls_type, BaseSensorOperator):
raise ValueError(
f"poke_mode_only decorator should only be "
f"applied to subclasses of BaseSensorOperator,"
f" got:{cls_type}."
)
cls_type.mode = property(mode_getter, mode_setter)
return cls_type
return decorate(cls)
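# A minimal usage sketch built on the pieces above. ``WaitForRowsSensor`` and its
# ``_count_rows`` helper are hypothetical names used only for illustration; ``poke``
# reports completion together with an XCom value, and ``@poke_mode_only`` pins
# instances to poke mode.
@poke_mode_only
class WaitForRowsSensor(BaseSensorOperator):
    def __init__(self, *, table: str, **kwargs) -> None:
        super().__init__(**kwargs)
        self.table = table
    def poke(self, context: Context) -> PokeReturnValue | bool:
        row_count = self._count_rows()  # placeholder for a check against an external system
        if row_count > 0:
            # Done: push the row count as the sensor's return value (an XCom).
            return PokeReturnValue(is_done=True, xcom_value=row_count)
        return False
    def _count_rows(self) -> int:
        return 0  # kept trivial so the sketch stays self-contained
# Typical instantiation (poke mode is enforced by the decorator), e.g. inside a DAG:
# WaitForRowsSensor(task_id="wait_rows", table="events", poke_interval=30, timeout=60 * 60)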
| 15,736 | 41.997268 | 107 |
py
|
airflow
|
airflow-main/airflow/sensors/python.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any, Callable, Mapping, Sequence
from airflow.sensors.base import BaseSensorOperator, PokeReturnValue
from airflow.utils.context import Context, context_merge
from airflow.utils.operator_helpers import determine_kwargs
class PythonSensor(BaseSensorOperator):
"""
Waits for a Python callable to return True.
    Users can pass input arguments through ``templates_dict``,
    e.g. ``templates_dict={'start_ds': 1970}``,
    and access them in the callable via ``kwargs['templates_dict']['start_ds']``.
:param python_callable: A reference to an object that is callable
:param op_kwargs: a dictionary of keyword arguments that will get unpacked
in your function
:param op_args: a list of positional arguments that will get unpacked when
calling your callable
:param templates_dict: a dictionary where the values are templates that
will get templated by the Airflow engine sometime between
``__init__`` and ``execute`` takes place and are made available
in your callable's context after the template has been applied.
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:PythonSensor`
"""
template_fields: Sequence[str] = ("templates_dict", "op_args", "op_kwargs")
def __init__(
self,
*,
python_callable: Callable,
op_args: list | None = None,
op_kwargs: Mapping[str, Any] | None = None,
templates_dict: dict | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.python_callable = python_callable
self.op_args = op_args or []
self.op_kwargs = op_kwargs or {}
self.templates_dict = templates_dict
def poke(self, context: Context) -> PokeReturnValue | bool:
context_merge(context, self.op_kwargs, templates_dict=self.templates_dict)
self.op_kwargs = determine_kwargs(self.python_callable, self.op_args, context)
self.log.info("Poking callable: %s", str(self.python_callable))
return_value = self.python_callable(*self.op_args, **self.op_kwargs)
if isinstance(return_value, PokeReturnValue):
return return_value
else:
return PokeReturnValue(bool(return_value))
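# A minimal usage sketch with hypothetical names: the callable receives the rendered
# ``templates_dict`` through its keyword arguments and compares it with a value passed
# via ``op_kwargs``.
def _enough_events(threshold: int, templates_dict: dict | None = None, **_) -> bool:
    # In a real DAG this might query an external system; here we just compare numbers.
    observed = int(templates_dict["observed"]) if templates_dict else 0
    return observed >= threshold
# Example instantiation inside a DAG definition (task id and template are hypothetical):
# PythonSensor(
#     task_id="wait_for_events",
#     python_callable=_enough_events,
#     op_kwargs={"threshold": 10},
#     templates_dict={"observed": "{{ ti.xcom_pull(task_ids='count_events') }}"},
#     poke_interval=60,
# )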
| 3,145 | 39.333333 | 86 |
py
|
airflow
|
airflow-main/airflow/sensors/time_delta.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.sensors.base import BaseSensorOperator
from airflow.triggers.temporal import DateTimeTrigger
from airflow.utils import timezone
from airflow.utils.context import Context
class TimeDeltaSensor(BaseSensorOperator):
"""
Waits for a timedelta after the run's data interval.
:param delta: time length to wait after the data interval before succeeding.
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:TimeDeltaSensor`
"""
def __init__(self, *, delta, **kwargs):
super().__init__(**kwargs)
self.delta = delta
def poke(self, context: Context):
target_dttm = context["data_interval_end"]
target_dttm += self.delta
self.log.info("Checking if the time (%s) has come", target_dttm)
return timezone.utcnow() > target_dttm
class TimeDeltaSensorAsync(TimeDeltaSensor):
"""
A deferrable drop-in replacement for TimeDeltaSensor.
    It defers itself to avoid taking up a worker slot while it is waiting.
:param delta: time length to wait after the data interval before succeeding.
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:TimeDeltaSensorAsync`
"""
def execute(self, context: Context):
target_dttm = context["data_interval_end"]
target_dttm += self.delta
self.defer(trigger=DateTimeTrigger(moment=target_dttm), method_name="execute_complete")
def execute_complete(self, context, event=None):
"""Callback for when the trigger fires - returns immediately."""
return None
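# A minimal usage sketch with hypothetical task ids and delay: both sensors wait until
# ``delta`` after the end of the run's data interval; the Async variant defers itself
# instead of sleeping in a worker slot.
def _example_time_delta_sensors() -> tuple[TimeDeltaSensor, TimeDeltaSensorAsync]:
    from datetime import timedelta
    wait = TimeDeltaSensor(task_id="wait_two_hours", delta=timedelta(hours=2))
    wait_deferred = TimeDeltaSensorAsync(task_id="wait_two_hours_deferred", delta=timedelta(hours=2))
    return wait, wait_deferred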
| 2,508 | 33.847222 | 95 |
py
|
airflow
|
airflow-main/airflow/sensors/filesystem.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import os
from glob import glob
from typing import Sequence
from airflow.hooks.filesystem import FSHook
from airflow.sensors.base import BaseSensorOperator
from airflow.utils.context import Context
class FileSensor(BaseSensorOperator):
"""
Waits for a file or folder to land in a filesystem.
    If the given path is a directory, this sensor only returns True if
    any files exist inside it (either directly, or within a subdirectory).
:param fs_conn_id: reference to the File (path)
connection id
:param filepath: File or folder name (relative to
the base path set within the connection), can be a glob.
:param recursive: when set to ``True``, enables recursive directory matching behavior of
``**`` in glob filepath parameter. Defaults to ``False``.
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:FileSensor`
"""
template_fields: Sequence[str] = ("filepath",)
ui_color = "#91818a"
def __init__(self, *, filepath, fs_conn_id="fs_default", recursive=False, **kwargs):
super().__init__(**kwargs)
self.filepath = filepath
self.fs_conn_id = fs_conn_id
self.recursive = recursive
def poke(self, context: Context):
hook = FSHook(self.fs_conn_id)
basepath = hook.get_path()
full_path = os.path.join(basepath, self.filepath)
self.log.info("Poking for file %s", full_path)
for path in glob(full_path, recursive=self.recursive):
if os.path.isfile(path):
mod_time = datetime.datetime.fromtimestamp(os.path.getmtime(path)).strftime("%Y%m%d%H%M%S")
self.log.info("Found File %s last modified: %s", str(path), mod_time)
return True
for _, _, files in os.walk(path):
if len(files) > 0:
return True
return False
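# A minimal usage sketch with hypothetical names, assuming the default ``fs_default``
# connection has its base path set to ``/``: the sensor waits for any CSV file under
# /data/incoming, matching recursively through subdirectories.
def _example_file_sensor() -> FileSensor:
    return FileSensor(
        task_id="wait_for_incoming_csv",
        fs_conn_id="fs_default",
        filepath="data/incoming/**/*.csv",
        recursive=True,
        poke_interval=30,
        mode="reschedule",
    )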
| 2,791 | 35.736842 | 107 |
py
|
airflow
|
airflow-main/airflow/sensors/external_task.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import os
import warnings
from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable
import attr
from sqlalchemy import func
from airflow.exceptions import AirflowException, AirflowSkipException, RemovedInAirflow3Warning
from airflow.models.baseoperator import BaseOperatorLink
from airflow.models.dag import DagModel
from airflow.models.dagbag import DagBag
from airflow.models.dagrun import DagRun
from airflow.models.taskinstance import TaskInstance
from airflow.operators.empty import EmptyOperator
from airflow.sensors.base import BaseSensorOperator
from airflow.utils.file import correct_maybe_zipped
from airflow.utils.helpers import build_airflow_url_with_query
from airflow.utils.session import NEW_SESSION, provide_session
from airflow.utils.sqlalchemy import tuple_in_condition
from airflow.utils.state import State, TaskInstanceState
if TYPE_CHECKING:
from sqlalchemy.orm import Query, Session
from airflow.utils.context import Context
class ExternalDagLink(BaseOperatorLink):
"""
Operator link for ExternalTaskSensor and ExternalTaskMarker.
It allows users to access DAG waited with ExternalTaskSensor or cleared by ExternalTaskMarker.
"""
name = "External DAG"
def get_link(self, operator, dttm):
ti = TaskInstance(task=operator, execution_date=dttm)
operator.render_template_fields(ti.get_template_context())
query = {"dag_id": operator.external_dag_id, "execution_date": dttm.isoformat()}
return build_airflow_url_with_query(query)
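# A minimal usage sketch for the sensor class defined below. The dag and task ids are
# hypothetical; the sensor waits for ``load_orders`` in ``upstream_dag`` at the same
# logical date, skips if that task skipped, and (with no failed_states set) keeps
# poking until its own timeout rather than failing when the upstream task fails.
def _example_external_task_sensor() -> ExternalTaskSensor:
    return ExternalTaskSensor(
        task_id="wait_for_load_orders",
        external_dag_id="upstream_dag",
        external_task_id="load_orders",
        allowed_states=[TaskInstanceState.SUCCESS],
        skipped_states=[TaskInstanceState.SKIPPED],
        mode="reschedule",
        poke_interval=5 * 60,
        timeout=6 * 60 * 60,
    )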
class ExternalTaskSensor(BaseSensorOperator):
"""
Waits for a different DAG, task group, or task to complete for a specific logical date.
If both `external_task_group_id` and `external_task_id` are ``None`` (default), the sensor
waits for the DAG.
Values for `external_task_group_id` and `external_task_id` can't be set at the same time.
By default, the ExternalTaskSensor will wait for the external task to
succeed, at which point it will also succeed. However, by default it will
*not* fail if the external task fails, but will continue to check the status
until the sensor times out (thus giving you time to retry the external task
without also having to clear the sensor).
By default, the ExternalTaskSensor will not skip if the external task skips.
To change this, simply set ``skipped_states=[TaskInstanceState.SKIPPED]``.
Note that if you are monitoring multiple tasks, and one enters error state
and the other enters a skipped state, then the external task will react to
whichever one it sees first. If both happen together, then the failed state
takes priority.
It is possible to alter the default behavior by setting states which
cause the sensor to fail, e.g. by setting ``allowed_states=[DagRunState.FAILED]``
and ``failed_states=[DagRunState.SUCCESS]`` you will flip the behaviour to
get a sensor which goes green when the external task *fails* and immediately
goes red if the external task *succeeds*!
Note that ``soft_fail`` is respected when examining the failed_states. Thus
if the external task enters a failed state and ``soft_fail == True`` the
sensor will _skip_ rather than fail. As a result, setting ``soft_fail=True``
and ``failed_states=[DagRunState.SKIPPED]`` will result in the sensor
skipping if the external task skips. However, this is a contrived
example---consider using ``skipped_states`` if you would like this
behaviour. Using ``skipped_states`` allows the sensor to skip if the target
fails, but still enter failed state on timeout. Using ``soft_fail == True``
as above will cause the sensor to skip if the target fails, but also if it
times out.
:param external_dag_id: The dag_id that contains the task you want to
wait for. (templated)
:param external_task_id: The task_id that contains the task you want to
wait for. (templated)
:param external_task_ids: The list of task_ids that you want to wait for. (templated)
If ``None`` (default value) the sensor waits for the DAG. Either
external_task_id or external_task_ids can be passed to
ExternalTaskSensor, but not both.
:param external_task_group_id: The task_group_id that contains the task you want to
wait for. (templated)
:param allowed_states: Iterable of allowed states, default is ``['success']``
:param skipped_states: Iterable of states to make this task mark as skipped, default is ``None``
:param failed_states: Iterable of failed or dis-allowed states, default is ``None``
:param execution_delta: time difference with the previous execution to
look at, the default is the same logical date as the current task or DAG.
For yesterday, use [positive!] datetime.timedelta(days=1). Either
execution_delta or execution_date_fn can be passed to
ExternalTaskSensor, but not both.
:param execution_date_fn: function that receives the current execution's logical date as the first
positional argument and optionally any number of keyword arguments available in the
context dictionary, and returns the desired logical dates to query.
Either execution_delta or execution_date_fn can be passed to ExternalTaskSensor,
but not both.
:param check_existence: Set to `True` to check if the external task exists (when
external_task_id is not None) or check if the DAG to wait for exists (when
external_task_id is None), and immediately cease waiting if the external task
or DAG does not exist (default value: False).
"""
template_fields = ["external_dag_id", "external_task_id", "external_task_ids", "external_task_group_id"]
ui_color = "#19647e"
operator_extra_links = [ExternalDagLink()]
def __init__(
self,
*,
external_dag_id: str,
external_task_id: str | None = None,
external_task_ids: Collection[str] | None = None,
external_task_group_id: str | None = None,
allowed_states: Iterable[str] | None = None,
skipped_states: Iterable[str] | None = None,
failed_states: Iterable[str] | None = None,
execution_delta: datetime.timedelta | None = None,
execution_date_fn: Callable | None = None,
check_existence: bool = False,
**kwargs,
):
super().__init__(**kwargs)
self.allowed_states = list(allowed_states) if allowed_states else [TaskInstanceState.SUCCESS.value]
self.skipped_states = list(skipped_states) if skipped_states else []
self.failed_states = list(failed_states) if failed_states else []
total_states = set(self.allowed_states + self.skipped_states + self.failed_states)
if len(total_states) != len(self.allowed_states) + len(self.skipped_states) + len(self.failed_states):
raise AirflowException(
"Duplicate values provided across allowed_states, skipped_states and failed_states."
)
# convert [] to None
if not external_task_ids:
external_task_ids = None
# can't set both single task id and a list of task ids
if external_task_id is not None and external_task_ids is not None:
raise ValueError(
"Only one of `external_task_id` or `external_task_ids` may "
"be provided to ExternalTaskSensor; "
"use external_task_id or external_task_ids or external_task_group_id."
)
# since both not set, convert the single id to a 1-elt list - from here on, we only consider the list
if external_task_id is not None:
external_task_ids = [external_task_id]
if external_task_group_id is not None and external_task_ids is not None:
raise ValueError(
"Only one of `external_task_group_id` or `external_task_ids` may "
"be provided to ExternalTaskSensor; "
"use external_task_id or external_task_ids or external_task_group_id."
)
# check the requested states are all valid states for the target type, be it dag or task
if external_task_ids or external_task_group_id:
if not total_states <= set(State.task_states):
raise ValueError(
"Valid values for `allowed_states`, `skipped_states` and `failed_states` "
"when `external_task_id` or `external_task_ids` or `external_task_group_id` "
f"is not `None`: {State.task_states}"
)
elif not total_states <= set(State.dag_states):
raise ValueError(
"Valid values for `allowed_states`, `skipped_states` and `failed_states` "
f"when `external_task_id` and `external_task_group_id` is `None`: {State.dag_states}"
)
if execution_delta is not None and execution_date_fn is not None:
raise ValueError(
"Only one of `execution_delta` or `execution_date_fn` may "
"be provided to ExternalTaskSensor; not both."
)
self.execution_delta = execution_delta
self.execution_date_fn = execution_date_fn
self.external_dag_id = external_dag_id
self.external_task_id = external_task_id
self.external_task_ids = external_task_ids
self.external_task_group_id = external_task_group_id
self.check_existence = check_existence
self._has_checked_existence = False
def _get_dttm_filter(self, context):
if self.execution_delta:
dttm = context["logical_date"] - self.execution_delta
elif self.execution_date_fn:
dttm = self._handle_execution_date_fn(context=context)
else:
dttm = context["logical_date"]
return dttm if isinstance(dttm, list) else [dttm]
@provide_session
def poke(self, context: Context, session: Session = NEW_SESSION) -> bool:
# delay check to poke rather than __init__ in case it was supplied as XComArgs
if self.external_task_ids and len(self.external_task_ids) > len(set(self.external_task_ids)):
raise ValueError("Duplicate task_ids passed in external_task_ids parameter")
dttm_filter = self._get_dttm_filter(context)
serialized_dttm_filter = ",".join(dt.isoformat() for dt in dttm_filter)
if self.external_task_ids:
self.log.info(
"Poking for tasks %s in dag %s on %s ... ",
self.external_task_ids,
self.external_dag_id,
serialized_dttm_filter,
)
if self.external_task_group_id:
self.log.info(
"Poking for task_group '%s' in dag '%s' on %s ... ",
self.external_task_group_id,
self.external_dag_id,
serialized_dttm_filter,
)
if self.external_dag_id and not self.external_task_group_id and not self.external_task_ids:
self.log.info(
"Poking for DAG '%s' on %s ... ",
self.external_dag_id,
serialized_dttm_filter,
)
# In poke mode this will check dag existence only once
if self.check_existence and not self._has_checked_existence:
self._check_for_existence(session=session)
count_failed = -1
if self.failed_states:
count_failed = self.get_count(dttm_filter, session, self.failed_states)
# Fail if anything in the list has failed.
if count_failed > 0:
if self.external_task_ids:
if self.soft_fail:
raise AirflowSkipException(
f"Some of the external tasks {self.external_task_ids} "
f"in DAG {self.external_dag_id} failed. Skipping due to soft_fail."
)
raise AirflowException(
f"Some of the external tasks {self.external_task_ids} "
f"in DAG {self.external_dag_id} failed."
)
elif self.external_task_group_id:
if self.soft_fail:
raise AirflowSkipException(
f"The external task_group '{self.external_task_group_id}' "
f"in DAG '{self.external_dag_id}' failed. Skipping due to soft_fail."
)
raise AirflowException(
f"The external task_group '{self.external_task_group_id}' "
f"in DAG '{self.external_dag_id}' failed."
)
else:
if self.soft_fail:
raise AirflowSkipException(
f"The external DAG {self.external_dag_id} failed. Skipping due to soft_fail."
)
raise AirflowException(f"The external DAG {self.external_dag_id} failed.")
count_skipped = -1
if self.skipped_states:
count_skipped = self.get_count(dttm_filter, session, self.skipped_states)
# Skip if anything in the list has skipped. Note if we are checking multiple tasks and one skips
# before another errors, we'll skip first.
if count_skipped > 0:
if self.external_task_ids:
raise AirflowSkipException(
f"Some of the external tasks {self.external_task_ids} "
f"in DAG {self.external_dag_id} reached a state in our states-to-skip-on list. Skipping."
)
elif self.external_task_group_id:
raise AirflowSkipException(
f"The external task_group '{self.external_task_group_id}' "
f"in DAG {self.external_dag_id} reached a state in our states-to-skip-on list. Skipping."
)
else:
raise AirflowSkipException(
f"The external DAG {self.external_dag_id} reached a state in our states-to-skip-on list. "
"Skipping."
)
# only go green if every single task has reached an allowed state
count_allowed = self.get_count(dttm_filter, session, self.allowed_states)
return count_allowed == len(dttm_filter)
def _check_for_existence(self, session) -> None:
dag_to_wait = DagModel.get_current(self.external_dag_id, session)
if not dag_to_wait:
raise AirflowException(f"The external DAG {self.external_dag_id} does not exist.")
if not os.path.exists(correct_maybe_zipped(dag_to_wait.fileloc)):
raise AirflowException(f"The external DAG {self.external_dag_id} was deleted.")
if self.external_task_ids:
refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(self.external_dag_id)
for external_task_id in self.external_task_ids:
if not refreshed_dag_info.has_task(external_task_id):
raise AirflowException(
f"The external task {external_task_id} in "
f"DAG {self.external_dag_id} does not exist."
)
if self.external_task_group_id:
refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(self.external_dag_id)
if not refreshed_dag_info.has_task_group(self.external_task_group_id):
raise AirflowException(
f"The external task group '{self.external_task_group_id}' in "
f"DAG '{self.external_dag_id}' does not exist."
)
self._has_checked_existence = True
def get_count(self, dttm_filter, session, states) -> int:
"""
Get the count of records against dttm filter and states.
:param dttm_filter: date time filter for execution date
:param session: airflow session object
:param states: task or dag states
        :return: count of records against the filters
"""
TI = TaskInstance
DR = DagRun
if not dttm_filter:
return 0
if self.external_task_ids:
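            # Divide by the number of task ids so the caller can compare against the number of
            # dag runs: the value reaches len(dttm_filter) only when every requested task in
            # every run is in one of the requested states.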
count = (
self._count_query(TI, session, states, dttm_filter)
.filter(TI.task_id.in_(self.external_task_ids))
.scalar()
) / len(self.external_task_ids)
elif self.external_task_group_id:
external_task_group_task_ids = self.get_external_task_group_task_ids(session, dttm_filter)
if not external_task_group_task_ids:
count = 0
else:
count = (
self._count_query(TI, session, states, dttm_filter)
.filter(tuple_in_condition((TI.task_id, TI.map_index), external_task_group_task_ids))
.scalar()
) / len(external_task_group_task_ids)
else:
count = self._count_query(DR, session, states, dttm_filter).scalar()
return count
def _count_query(self, model, session, states, dttm_filter) -> Query:
query = session.query(func.count()).filter(
model.dag_id == self.external_dag_id,
model.state.in_(states),
model.execution_date.in_(dttm_filter),
)
return query
def get_external_task_group_task_ids(self, session, dttm_filter):
refreshed_dag_info = DagBag(read_dags_from_db=True).get_dag(self.external_dag_id, session)
task_group = refreshed_dag_info.task_group_dict.get(self.external_task_group_id)
if task_group:
group_tasks = session.query(TaskInstance).filter(
TaskInstance.dag_id == self.external_dag_id,
TaskInstance.task_id.in_(task.task_id for task in task_group),
TaskInstance.execution_date.in_(dttm_filter),
)
return [(t.task_id, t.map_index) for t in group_tasks]
        # Fall back to the group_id itself as the task_id. This avoids an immediate failure
        # when 'check_existence=False' and lets the sensor fail on timeout instead.
return [(self.external_task_group_id, -1)]
def _handle_execution_date_fn(self, context) -> Any:
"""
Handle backward compatibility.
        This function handles backwards compatibility with the previous behaviour of this
        operator, which passed only the execution date, while also allowing the newer
        implementation to pass all context variables as keyword arguments so that more
        sophisticated date logic can be expressed.
"""
from airflow.utils.operator_helpers import make_kwargs_callable
# Remove "logical_date" because it is already a mandatory positional argument
logical_date = context["logical_date"]
kwargs = {k: v for k, v in context.items() if k not in {"execution_date", "logical_date"}}
# Add "context" in the kwargs for backward compatibility (because context used to be
# an acceptable argument of execution_date_fn)
kwargs["context"] = context
if TYPE_CHECKING:
assert self.execution_date_fn is not None
kwargs_callable = make_kwargs_callable(self.execution_date_fn)
return kwargs_callable(logical_date, **kwargs)
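# A minimal sketch (hypothetical DAG and task ids) of an ``execution_date_fn`` shaped for
# the backward-compatible handling above: the sensor calls it with the logical date as a
# positional argument and passes every context variable (plus ``context`` itself) as
# keyword arguments. Illustrative only, not part of this module's API.
def _example_execution_date_fn_usage() -> ExternalTaskSensor:
    def one_hour_earlier(logical_date, **kwargs):
        # Wait on the external run that started one hour before this task's run.
        return logical_date - datetime.timedelta(hours=1)

    return ExternalTaskSensor(
        task_id="wait_for_upstream",  # hypothetical task id
        external_dag_id="upstream_dag",  # hypothetical dag id
        external_task_id="produce_data",  # hypothetical task id
        execution_date_fn=one_hour_earlier,
    )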
class ExternalTaskMarker(EmptyOperator):
"""
Use this operator to indicate that a task on a different DAG depends on this task.
When this task is cleared with "Recursive" selected, Airflow will clear the task on
the other DAG and its downstream tasks recursively. Transitive dependencies are followed
until the recursion_depth is reached.
:param external_dag_id: The dag_id that contains the dependent task that needs to be cleared.
:param external_task_id: The task_id of the dependent task that needs to be cleared.
:param execution_date: The logical date of the dependent task execution that needs to be cleared.
:param recursion_depth: The maximum level of transitive dependencies allowed. Default is 10.
This is mostly used for preventing cyclic dependencies. It is fine to increase
this number if necessary. However, too many levels of transitive dependencies will make
it slower to clear tasks in the web UI.
"""
template_fields = ["external_dag_id", "external_task_id", "execution_date"]
ui_color = "#19647e"
operator_extra_links = [ExternalDagLink()]
# The _serialized_fields are lazily loaded when get_serialized_fields() method is called
__serialized_fields: frozenset[str] | None = None
def __init__(
self,
*,
external_dag_id: str,
external_task_id: str,
execution_date: str | datetime.datetime | None = "{{ logical_date.isoformat() }}",
recursion_depth: int = 10,
**kwargs,
):
super().__init__(**kwargs)
self.external_dag_id = external_dag_id
self.external_task_id = external_task_id
if isinstance(execution_date, datetime.datetime):
self.execution_date = execution_date.isoformat()
elif isinstance(execution_date, str):
self.execution_date = execution_date
else:
raise TypeError(
f"Expected str or datetime.datetime type for execution_date. Got {type(execution_date)}"
)
if recursion_depth <= 0:
raise ValueError("recursion_depth should be a positive integer")
self.recursion_depth = recursion_depth
@classmethod
def get_serialized_fields(cls):
"""Serialized ExternalTaskMarker contain exactly these fields + templated_fields ."""
if not cls.__serialized_fields:
cls.__serialized_fields = frozenset(super().get_serialized_fields() | {"recursion_depth"})
return cls.__serialized_fields
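# A minimal sketch (hypothetical DAG and task ids) of how ExternalTaskMarker is typically
# paired with ExternalTaskSensor: the marker lives in the upstream DAG and points at the
# waiting task in the downstream DAG, so clearing the marker with "Recursive" selected
# also clears that downstream task. Illustrative only, not part of this module's API.
def _example_external_task_marker_usage() -> ExternalTaskMarker:
    return ExternalTaskMarker(
        task_id="notify_child_dag",  # hypothetical task id in the upstream DAG
        external_dag_id="child_dag",  # hypothetical downstream dag id
        external_task_id="wait_for_parent",  # hypothetical waiting task in the downstream DAG
    )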
@attr.s(auto_attribs=True)
class ExternalTaskSensorLink(ExternalDagLink):
"""
This external link is deprecated.
Please use :class:`airflow.sensors.external_task.ExternalDagLink`.
"""
def __attrs_post_init__(self):
warnings.warn(
"This external link is deprecated. "
"Please use :class:`airflow.sensors.external_task.ExternalDagLink`.",
RemovedInAirflow3Warning,
stacklevel=2,
)
| 23,161 | 45.047714 | 110 |
py
|
airflow
|
airflow-main/airflow/sensors/weekday.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from typing import Iterable
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.sensors.base import BaseSensorOperator
from airflow.utils import timezone
from airflow.utils.context import Context
from airflow.utils.weekday import WeekDay
class DayOfWeekSensor(BaseSensorOperator):
"""
Waits until the first specified day of the week.
For example, if the execution day of the task is '2018-12-22' (Saturday)
and you pass 'FRIDAY', the task will wait until next Friday.
**Example** (with single day): ::
weekend_check = DayOfWeekSensor(
task_id='weekend_check',
week_day='Saturday',
use_task_logical_date=True,
dag=dag)
**Example** (with multiple day using set): ::
weekend_check = DayOfWeekSensor(
task_id='weekend_check',
week_day={'Saturday', 'Sunday'},
use_task_logical_date=True,
dag=dag)
**Example** (with :class:`~airflow.utils.weekday.WeekDay` enum): ::
# import WeekDay Enum
from airflow.utils.weekday import WeekDay
weekend_check = DayOfWeekSensor(
task_id='weekend_check',
week_day={WeekDay.SATURDAY, WeekDay.SUNDAY},
use_task_logical_date=True,
dag=dag)
:param week_day: Day of the week to check (full name). Optionally, a set
of days can also be provided using a set.
Example values:
* ``"MONDAY"``,
* ``{"Saturday", "Sunday"}``
* ``{WeekDay.TUESDAY}``
* ``{WeekDay.SATURDAY, WeekDay.SUNDAY}``
To use ``WeekDay`` enum, import it from ``airflow.utils.weekday``
    :param use_task_logical_date: If ``True``, uses the task's logical date to compare
        with ``week_day``; the logical date is useful for backfilling.
        If ``False``, uses the system's current day of the week; useful when you
        want the check to reflect the day the task actually runs.
:param use_task_execution_day: deprecated parameter, same effect as `use_task_logical_date`
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:DayOfWeekSensor`
"""
def __init__(
self,
*,
week_day: str | Iterable[str] | WeekDay | Iterable[WeekDay],
use_task_logical_date: bool = False,
use_task_execution_day: bool = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.week_day = week_day
self.use_task_logical_date = use_task_logical_date
if use_task_execution_day:
self.use_task_logical_date = use_task_execution_day
warnings.warn(
"Parameter ``use_task_execution_day`` is deprecated. Use ``use_task_logical_date``.",
RemovedInAirflow3Warning,
stacklevel=2,
)
self._week_day_num = WeekDay.validate_week_day(week_day)
def poke(self, context: Context) -> bool:
self.log.info(
"Poking until weekday is in %s, Today is %s",
self.week_day,
WeekDay(timezone.utcnow().isoweekday()).name,
)
if self.use_task_logical_date:
return context["logical_date"].isoweekday() in self._week_day_num
else:
return timezone.utcnow().isoweekday() in self._week_day_num
| 4,229 | 35.153846 | 101 |
py
|
airflow
|
airflow-main/airflow/sensors/time_sensor.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from airflow.sensors.base import BaseSensorOperator
from airflow.triggers.temporal import DateTimeTrigger
from airflow.utils import timezone
from airflow.utils.context import Context
class TimeSensor(BaseSensorOperator):
"""
Waits until the specified time of the day.
:param target_time: time after which the job succeeds
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:TimeSensor`
"""
def __init__(self, *, target_time, **kwargs):
super().__init__(**kwargs)
self.target_time = target_time
def poke(self, context: Context):
self.log.info("Checking if the time (%s) has come", self.target_time)
return timezone.make_naive(timezone.utcnow(), self.dag.timezone).time() > self.target_time
class TimeSensorAsync(BaseSensorOperator):
"""
Waits until the specified time of the day.
This frees up a worker slot while it is waiting.
:param target_time: time after which the job succeeds
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:TimeSensorAsync`
"""
def __init__(self, *, target_time, **kwargs):
super().__init__(**kwargs)
self.target_time = target_time
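        # Anchor the naive time-of-day to today's date at instantiation time, then
        # normalize it to UTC so the deferral trigger fires at an unambiguous moment.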
aware_time = timezone.coerce_datetime(
datetime.datetime.combine(datetime.datetime.today(), self.target_time)
)
self.target_datetime = timezone.convert_to_utc(aware_time)
def execute(self, context: Context):
self.defer(
trigger=DateTimeTrigger(moment=self.target_datetime),
method_name="execute_complete",
)
def execute_complete(self, context, event=None):
"""Callback for when the trigger fires - returns immediately."""
return None
| 2,698 | 32.320988 | 98 |
py
|
airflow
|
airflow-main/airflow/sensors/date_time.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from typing import Sequence
from airflow.sensors.base import BaseSensorOperator
from airflow.triggers.temporal import DateTimeTrigger
from airflow.utils import timezone
from airflow.utils.context import Context
class DateTimeSensor(BaseSensorOperator):
"""
Waits until the specified datetime.
A major advantage of this sensor is idempotence for the ``target_time``.
It handles some cases for which ``TimeSensor`` and ``TimeDeltaSensor`` are not suited.
**Example** 1 :
If a task needs to wait for 11am on each ``execution_date``. Using
``TimeSensor`` or ``TimeDeltaSensor``, all backfill tasks started at
1am have to wait for 10 hours. This is unnecessary, e.g. a backfill
task with ``{{ ds }} = '1970-01-01'`` does not need to wait because
``1970-01-01T11:00:00`` has already passed.
**Example** 2 :
If a DAG is scheduled to run at 23:00 daily, but one of the tasks is
required to run at 01:00 next day, using ``TimeSensor`` will return
``True`` immediately because 23:00 > 01:00. Instead, we can do this:
.. code-block:: python
DateTimeSensor(
task_id="wait_for_0100",
target_time="{{ next_execution_date.tomorrow().replace(hour=1) }}",
)
:param target_time: datetime after which the job succeeds. (templated)
"""
template_fields: Sequence[str] = ("target_time",)
def __init__(self, *, target_time: str | datetime.datetime, **kwargs) -> None:
super().__init__(**kwargs)
# self.target_time can't be a datetime object as it is a template_field
if isinstance(target_time, datetime.datetime):
self.target_time = target_time.isoformat()
elif isinstance(target_time, str):
self.target_time = target_time
else:
raise TypeError(
f"Expected str or datetime.datetime type for target_time. Got {type(target_time)}"
)
def poke(self, context: Context) -> bool:
self.log.info("Checking if the time (%s) has come", self.target_time)
return timezone.utcnow() > timezone.parse(self.target_time)
class DateTimeSensorAsync(DateTimeSensor):
"""
Waits until the specified datetime occurs.
    It defers itself to avoid taking up a worker slot while waiting.
It is a drop-in replacement for DateTimeSensor.
:param target_time: datetime after which the job succeeds. (templated)
"""
def execute(self, context: Context):
self.defer(
trigger=DateTimeTrigger(moment=timezone.parse(self.target_time)),
method_name="execute_complete",
)
def execute_complete(self, context, event=None):
"""Callback for when the trigger fires - returns immediately."""
return None
| 3,677 | 36.917526 | 98 |
py
|
airflow
|
airflow-main/airflow/sensors/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# fmt: off
"""Sensors."""
from __future__ import annotations
from airflow.utils.deprecation_tools import add_deprecated_classes
__deprecated_classes = {
'base_sensor_operator': {
'BaseSensorOperator': 'airflow.sensors.base.BaseSensorOperator',
},
'date_time_sensor': {
'DateTimeSensor': 'airflow.sensors.date_time.DateTimeSensor',
},
'external_task_sensor': {
'ExternalTaskMarker': 'airflow.sensors.external_task.ExternalTaskMarker',
'ExternalTaskSensor': 'airflow.sensors.external_task.ExternalTaskSensor',
'ExternalTaskSensorLink': 'airflow.sensors.external_task.ExternalTaskSensorLink',
},
'hive_partition_sensor': {
'HivePartitionSensor': 'airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor',
},
'http_sensor': {
'HttpSensor': 'airflow.providers.http.sensors.http.HttpSensor',
},
'metastore_partition_sensor': {
'MetastorePartitionSensor': (
'airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor'
),
},
'named_hive_partition_sensor': {
'NamedHivePartitionSensor': (
'airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor'
),
},
's3_key_sensor': {
'S3KeySensor': 'airflow.providers.amazon.aws.sensors.s3.S3KeySensor',
},
'sql': {
'SqlSensor': 'airflow.providers.common.sql.sensors.sql.SqlSensor',
},
'sql_sensor': {
'SqlSensor': 'airflow.providers.common.sql.sensors.sql.SqlSensor',
},
'time_delta_sensor': {
'TimeDeltaSensor': 'airflow.sensors.time_delta.TimeDeltaSensor',
},
'web_hdfs_sensor': {
'WebHdfsSensor': 'airflow.providers.apache.hdfs.sensors.web_hdfs.WebHdfsSensor',
},
}
add_deprecated_classes(__deprecated_classes, __name__)
| 2,662 | 37.042857 | 106 |
py
|
airflow
|
airflow-main/airflow/sensors/bash.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from subprocess import PIPE, STDOUT, Popen
from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
from typing import Sequence
from airflow.exceptions import AirflowFailException
from airflow.sensors.base import BaseSensorOperator
from airflow.utils.context import Context
class BashSensor(BaseSensorOperator):
"""
Executes a bash command/script.
Return True if and only if the return code is 0.
:param bash_command: The command, set of commands or reference to a
bash script (must be '.sh') to be executed.
:param env: If env is not None, it must be a mapping that defines the
environment variables for the new process; these are used instead
of inheriting the current process environment, which is the default
behavior. (templated)
:param output_encoding: output encoding of bash command.
:param retry_exit_code: If task exits with this code, treat the sensor
as not-yet-complete and retry the check later according to the
usual retry/timeout settings. Any other non-zero return code will
be treated as an error, and cause the sensor to fail. If set to
``None`` (the default), any non-zero exit code will cause a retry
and the task will never raise an error except on time-out.
.. seealso::
        For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:BashSensor`
"""
template_fields: Sequence[str] = ("bash_command", "env")
def __init__(
self, *, bash_command, env=None, output_encoding="utf-8", retry_exit_code: int | None = None, **kwargs
):
super().__init__(**kwargs)
self.bash_command = bash_command
self.env = env
self.output_encoding = output_encoding
self.retry_exit_code = retry_exit_code
def poke(self, context: Context):
"""Execute the bash command in a temporary directory."""
bash_command = self.bash_command
self.log.info("Tmp dir root location: %s", gettempdir())
with TemporaryDirectory(prefix="airflowtmp") as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f:
f.write(bytes(bash_command, "utf_8"))
f.flush()
fname = f.name
                script_location = fname  # NamedTemporaryFile.name is already the full path
self.log.info("Temporary script location: %s", script_location)
self.log.info("Running command: %s", bash_command)
with Popen(
["bash", fname],
stdout=PIPE,
stderr=STDOUT,
close_fds=True,
cwd=tmp_dir,
env=self.env,
preexec_fn=os.setsid,
) as resp:
if resp.stdout:
self.log.info("Output:")
for line in iter(resp.stdout.readline, b""):
self.log.info(line.decode(self.output_encoding).strip())
resp.wait()
self.log.info("Command exited with return code %s", resp.returncode)
# zero code means success, the sensor can go green
if resp.returncode == 0:
return True
# we have a retry exit code, sensor retries if return code matches, otherwise error
elif self.retry_exit_code is not None:
if resp.returncode == self.retry_exit_code:
self.log.info("Return code matches retry code, will retry later")
return False
else:
raise AirflowFailException(f"Command exited with return code {resp.returncode}")
# backwards compatibility: sensor retries no matter the error code
else:
self.log.info("Non-zero return code and no retry code set, will retry later")
return False
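# A minimal sketch (hypothetical task id and command) of the retry_exit_code contract
# implemented above: exit code 0 turns the sensor green, the configured retry code makes
# it poke again later, and any other non-zero code fails the task immediately.
# Illustrative only, not part of this module's API.
def _example_retry_exit_code_usage() -> BashSensor:
    return BashSensor(
        task_id="wait_for_partition",  # hypothetical task id
        bash_command="test -f /tmp/partition_ready || exit 99",  # hypothetical check
        retry_exit_code=99,  # 99 -> keep poking; any other non-zero code -> fail the task
        poke_interval=60,
    )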
| 4,918 | 42.919643 | 110 |
py
|
airflow
|
airflow-main/airflow/api_internal/internal_api_call.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import inspect
import json
from functools import wraps
from typing import Callable, TypeVar
import requests
from airflow.configuration import conf
from airflow.exceptions import AirflowConfigException, AirflowException
from airflow.settings import _ENABLE_AIP_44
from airflow.typing_compat import ParamSpec
PS = ParamSpec("PS")
RT = TypeVar("RT")
class InternalApiConfig:
"""Stores and caches configuration for Internal API."""
_initialized = False
_use_internal_api = False
_internal_api_endpoint = ""
@staticmethod
def force_database_direct_access():
"""Current component will not use Internal API.
All methods decorated with internal_api_call will always be executed locally.
This mode is needed for "trusted" components like Scheduler, Webserver or Internal Api server.
"""
InternalApiConfig._initialized = True
InternalApiConfig._use_internal_api = False
@staticmethod
def get_use_internal_api():
if not InternalApiConfig._initialized:
InternalApiConfig._init_values()
return InternalApiConfig._use_internal_api
@staticmethod
def get_internal_api_endpoint():
if not InternalApiConfig._initialized:
InternalApiConfig._init_values()
return InternalApiConfig._internal_api_endpoint
@staticmethod
def _init_values():
use_internal_api = conf.getboolean("core", "database_access_isolation", fallback=False)
if use_internal_api and not _ENABLE_AIP_44:
raise RuntimeError("The AIP_44 is not enabled so you cannot use it.")
internal_api_endpoint = ""
if use_internal_api:
internal_api_url = conf.get("core", "internal_api_url")
internal_api_endpoint = internal_api_url + "/internal_api/v1/rpcapi"
if not internal_api_endpoint.startswith("http://"):
raise AirflowConfigException("[core]internal_api_url must start with http://")
InternalApiConfig._initialized = True
InternalApiConfig._use_internal_api = use_internal_api
InternalApiConfig._internal_api_endpoint = internal_api_endpoint
def internal_api_call(func: Callable[PS, RT]) -> Callable[PS, RT]:
"""Decorator for methods which may be executed in database isolation mode.
    If [core]database_access_isolation is true then such methods are not executed locally;
    instead an RPC call is made to the Database API (aka Internal API). This decouples some
    components from direct Airflow database access.
Each decorated method must be present in METHODS list in airflow.api_internal.endpoints.rpc_api_endpoint.
Only static methods can be decorated. This decorator must be before "provide_session".
See [AIP-44](https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-44+Airflow+Internal+API)
    for more information.
"""
headers = {
"Content-Type": "application/json",
}
def make_jsonrpc_request(method_name: str, params_json: str) -> bytes:
data = {"jsonrpc": "2.0", "method": method_name, "params": params_json}
internal_api_endpoint = InternalApiConfig.get_internal_api_endpoint()
response = requests.post(url=internal_api_endpoint, data=json.dumps(data), headers=headers)
if response.status_code != 200:
raise AirflowException(
f"Got {response.status_code}:{response.reason} when sending the internal api request."
)
return response.content
@wraps(func)
def wrapper(*args, **kwargs) -> RT:
use_internal_api = InternalApiConfig.get_use_internal_api()
if not use_internal_api:
return func(*args, **kwargs)
from airflow.serialization.serialized_objects import BaseSerialization # avoid circular import
bound = inspect.signature(func).bind(*args, **kwargs)
arguments_dict = dict(bound.arguments)
if "session" in arguments_dict:
del arguments_dict["session"]
if "cls" in arguments_dict: # used by @classmethod
del arguments_dict["cls"]
args_json = json.dumps(BaseSerialization.serialize(arguments_dict, use_pydantic_models=True))
method_name = f"{func.__module__}.{func.__qualname__}"
result = make_jsonrpc_request(method_name, args_json)
return BaseSerialization.deserialize(json.loads(result), use_pydantic_models=True)
return wrapper
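# A minimal sketch (hypothetical model and method) of how the decorator is meant to be
# stacked: ``internal_api_call`` must sit above ``provide_session`` so that the session
# argument is stripped from the serialized RPC payload and the server-side execution can
# supply its own session.
#
#     class MyModel(Base):
#         @staticmethod
#         @internal_api_call
#         @provide_session
#         def get_widget(widget_id: str, session: Session = NEW_SESSION) -> "Widget | None":
#             return session.get(Widget, widget_id)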
| 5,267 | 39.837209 | 109 |
py
|
airflow
|
airflow-main/airflow/api_internal/__init__.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 |
py
|
airflow
|
airflow-main/airflow/api_internal/endpoints/__init__.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 |
py
|
airflow
|
airflow-main/airflow/api_internal/endpoints/rpc_api_endpoint.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import functools
import json
import logging
from typing import Any, Callable
from flask import Response
from airflow.api_connexion.types import APIResponse
from airflow.serialization.serialized_objects import BaseSerialization
log = logging.getLogger(__name__)
@functools.lru_cache
def _initialize_map() -> dict[str, Callable]:
from airflow.dag_processing.manager import DagFileProcessorManager
from airflow.dag_processing.processor import DagFileProcessor
from airflow.models import Trigger, Variable, XCom
from airflow.models.dag import DagModel
from airflow.models.dagwarning import DagWarning
functions: list[Callable] = [
DagFileProcessor.update_import_errors,
DagFileProcessor.manage_slas,
DagFileProcessorManager.deactivate_stale_dags,
DagModel.deactivate_deleted_dags,
DagModel.get_paused_dag_ids,
DagFileProcessorManager.clear_nonexistent_import_errors,
DagWarning.purge_inactive_dag_warnings,
XCom.get_value,
XCom.get_one,
XCom.get_many,
XCom.clear,
Variable.set,
Variable.update,
Variable.delete,
Trigger.from_object,
Trigger.bulk_fetch,
Trigger.clean_unused,
Trigger.submit_event,
Trigger.submit_failure,
Trigger.ids_for_triggerer,
Trigger.assign_unassigned,
]
return {f"{func.__module__}.{func.__qualname__}": func for func in functions}
def internal_airflow_api(body: dict[str, Any]) -> APIResponse:
"""Handler for Internal API /internal_api/v1/rpcapi endpoint."""
log.debug("Got request")
json_rpc = body.get("jsonrpc")
if json_rpc != "2.0":
log.error("Not jsonrpc-2.0 request.")
return Response(response="Expected jsonrpc 2.0 request.", status=400)
methods_map = _initialize_map()
method_name = body.get("method")
if method_name not in methods_map:
log.error("Unrecognized method: %s.", method_name)
return Response(response=f"Unrecognized method: {method_name}.", status=400)
handler = methods_map[method_name]
params = {}
try:
if body.get("params"):
params_json = json.loads(str(body.get("params")))
params = BaseSerialization.deserialize(params_json, use_pydantic_models=True)
except Exception as err:
log.error("Error deserializing parameters.")
log.error(err)
return Response(response="Error deserializing parameters.", status=400)
log.debug("Calling method %.", {method_name})
try:
output = handler(**params)
output_json = BaseSerialization.serialize(output, use_pydantic_models=True)
log.debug("Returning response")
return Response(
response=json.dumps(output_json or "{}"), headers={"Content-Type": "application/json"}
)
except Exception as e:
log.error("Error when calling method %s.", method_name)
log.error(e)
return Response(response=f"Error executing method: {method_name}.", status=500)
| 3,870 | 36.221154 | 98 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_trigger_target_dag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example usage of the TriggerDagRunOperator. This example holds 2 DAGs:
1. 1st DAG (example_trigger_controller_dag) holds a TriggerDagRunOperator, which will trigger the 2nd DAG
2. 2nd DAG (example_trigger_target_dag) which will be triggered by the TriggerDagRunOperator in the 1st DAG
"""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.decorators import task
from airflow.operators.bash import BashOperator
@task(task_id="run_this")
def run_this_func(dag_run=None):
"""
Print the payload "message" passed to the DagRun conf attribute.
:param dag_run: The DagRun object
"""
print(f"Remotely received value of {dag_run.conf.get('message')} for key=message")
with DAG(
dag_id="example_trigger_target_dag",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule=None,
tags=["example"],
) as dag:
run_this = run_this_func()
bash_task = BashOperator(
task_id="bash_task",
bash_command='echo "Here is the message: $message"',
env={"message": '{{ dag_run.conf.get("message") }}'},
)
| 1,912 | 33.160714 | 107 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_bash_operator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the BashOperator."""
from __future__ import annotations
import datetime
import pendulum
from airflow import DAG
from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
with DAG(
dag_id="example_bash_operator",
schedule="0 0 * * *",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
dagrun_timeout=datetime.timedelta(minutes=60),
tags=["example", "example2"],
params={"example_key": "example_value"},
) as dag:
run_this_last = EmptyOperator(
task_id="run_this_last",
)
# [START howto_operator_bash]
run_this = BashOperator(
task_id="run_after_loop",
bash_command="echo 1",
)
# [END howto_operator_bash]
run_this >> run_this_last
for i in range(3):
task = BashOperator(
task_id="runme_" + str(i),
bash_command='echo "{{ task_instance_key_str }}" && sleep 1',
)
task >> run_this
# [START howto_operator_bash_template]
also_run_this = BashOperator(
task_id="also_run_this",
bash_command='echo "ti_key={{ task_instance_key_str }}"',
)
# [END howto_operator_bash_template]
also_run_this >> run_this_last
# [START howto_operator_bash_skip]
this_will_skip = BashOperator(
task_id="this_will_skip",
bash_command='echo "hello world"; exit 99;',
dag=dag,
)
# [END howto_operator_bash_skip]
this_will_skip >> run_this_last
if __name__ == "__main__":
dag.test()
| 2,331 | 29.285714 | 73 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_branch_day_of_week_operator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating the usage of BranchDayOfWeekOperator.
"""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.utils.weekday import WeekDay
with DAG(
dag_id="example_weekday_branch_operator",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
schedule="@daily",
) as dag:
# [START howto_operator_day_of_week_branch]
empty_task_1 = EmptyOperator(task_id="branch_true")
empty_task_2 = EmptyOperator(task_id="branch_false")
empty_task_3 = EmptyOperator(task_id="branch_weekend")
empty_task_4 = EmptyOperator(task_id="branch_mid_week")
branch = BranchDayOfWeekOperator(
task_id="make_choice",
follow_task_ids_if_true="branch_true",
follow_task_ids_if_false="branch_false",
week_day="Monday",
)
branch_weekend = BranchDayOfWeekOperator(
task_id="make_weekend_choice",
follow_task_ids_if_true="branch_weekend",
follow_task_ids_if_false="branch_mid_week",
week_day={WeekDay.SATURDAY, WeekDay.SUNDAY},
)
# Run empty_task_1 if branch executes on Monday, empty_task_2 otherwise
branch >> [empty_task_1, empty_task_2]
# Run empty_task_3 if it's a weekend, empty_task_4 otherwise
empty_task_2 >> branch_weekend >> [empty_task_3, empty_task_4]
# [END howto_operator_day_of_week_branch]
| 2,300 | 36.721311 | 75 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_task_group_decorator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the @taskgroup decorator."""
from __future__ import annotations
import pendulum
from airflow.decorators import task, task_group
from airflow.models.dag import DAG
# [START howto_task_group_decorator]
# Creating Tasks
@task
def task_start():
"""Empty Task which is First Task of Dag"""
return "[Task_start]"
@task
def task_1(value: int) -> str:
"""Empty Task1"""
return f"[ Task1 {value} ]"
@task
def task_2(value: str) -> str:
"""Empty Task2"""
return f"[ Task2 {value} ]"
@task
def task_3(value: str) -> None:
"""Empty Task3"""
print(f"[ Task3 {value} ]")
@task
def task_end() -> None:
"""Empty Task which is Last Task of Dag"""
print("[ Task_End ]")
# Creating TaskGroups
@task_group
def task_group_function(value: int) -> None:
"""TaskGroup for grouping related Tasks"""
task_3(task_2(task_1(value)))
# Executing Tasks and TaskGroups
with DAG(
dag_id="example_task_group_decorator",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
start_task = task_start()
end_task = task_end()
for i in range(5):
current_task_group = task_group_function(i)
start_task >> current_task_group >> end_task
# [END howto_task_group_decorator]
| 2,111 | 25.4 | 70 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_task_group.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the TaskGroup."""
from __future__ import annotations
import pendulum
from airflow.models.dag import DAG
from airflow.operators.bash import BashOperator
from airflow.operators.empty import EmptyOperator
from airflow.utils.task_group import TaskGroup
# [START howto_task_group]
with DAG(
dag_id="example_task_group",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
start = EmptyOperator(task_id="start")
# [START howto_task_group_section_1]
with TaskGroup("section_1", tooltip="Tasks for section_1") as section_1:
task_1 = EmptyOperator(task_id="task_1")
task_2 = BashOperator(task_id="task_2", bash_command="echo 1")
task_3 = EmptyOperator(task_id="task_3")
task_1 >> [task_2, task_3]
# [END howto_task_group_section_1]
# [START howto_task_group_section_2]
with TaskGroup("section_2", tooltip="Tasks for section_2") as section_2:
task_1 = EmptyOperator(task_id="task_1")
# [START howto_task_group_inner_section_2]
with TaskGroup("inner_section_2", tooltip="Tasks for inner_section2") as inner_section_2:
task_2 = BashOperator(task_id="task_2", bash_command="echo 1")
task_3 = EmptyOperator(task_id="task_3")
task_4 = EmptyOperator(task_id="task_4")
[task_2, task_3] >> task_4
# [END howto_task_group_inner_section_2]
# [END howto_task_group_section_2]
end = EmptyOperator(task_id="end")
start >> section_1 >> section_2 >> end
# [END howto_task_group]
| 2,401 | 35.953846 | 97 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_latest_only_with_trigger.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example LatestOnlyOperator and TriggerRule interactions
"""
from __future__ import annotations
# [START example]
import datetime
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.operators.latest_only import LatestOnlyOperator
from airflow.utils.trigger_rule import TriggerRule
with DAG(
dag_id="latest_only_with_trigger",
schedule=datetime.timedelta(hours=4),
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example3"],
) as dag:
latest_only = LatestOnlyOperator(task_id="latest_only")
task1 = EmptyOperator(task_id="task1")
task2 = EmptyOperator(task_id="task2")
task3 = EmptyOperator(task_id="task3")
task4 = EmptyOperator(task_id="task4", trigger_rule=TriggerRule.ALL_DONE)
latest_only >> task1 >> [task3, task4]
task2 >> [task3, task4]
# [END example]
| 1,680 | 33.306122 | 77 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_xcomargs.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the XComArgs."""
from __future__ import annotations
import logging
import pendulum
from airflow import DAG
from airflow.decorators import task
from airflow.operators.bash import BashOperator
log = logging.getLogger(__name__)
@task
def generate_value():
"""Empty function"""
return "Bring me a shrubbery!"
@task
def print_value(value, ts=None):
"""Empty function"""
log.info("The knights of Ni say: %s (at %s)", value, ts)
with DAG(
dag_id="example_xcom_args",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule=None,
tags=["example"],
) as dag:
print_value(generate_value())
with DAG(
"example_xcom_args_with_operators",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule=None,
tags=["example"],
) as dag2:
bash_op1 = BashOperator(task_id="c", bash_command="echo c")
bash_op2 = BashOperator(task_id="d", bash_command="echo c")
xcom_args_a = print_value("first!")
xcom_args_b = print_value("second!")
bash_op1 >> xcom_args_a >> xcom_args_b >> bash_op2
| 1,922 | 28.136364 | 63 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_branch_python_dop_operator_3.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating the usage of ``@task.branch`` TaskFlow API decorator with depends_on_past=True,
where tasks may be run or skipped on alternating runs.
"""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.decorators import task
from airflow.operators.empty import EmptyOperator
@task.branch()
def should_run(**kwargs) -> str:
"""
    Determine which empty_task should be run based on whether the execution date minute is even or odd.
:param dict kwargs: Context
:return: Id of the task to run
"""
print(
f"------------- exec dttm = {kwargs['execution_date']} and minute = {kwargs['execution_date'].minute}"
)
if kwargs["execution_date"].minute % 2 == 0:
return "empty_task_1"
else:
return "empty_task_2"
with DAG(
dag_id="example_branch_dop_operator_v3",
schedule="*/1 * * * *",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
default_args={"depends_on_past": True},
tags=["example"],
) as dag:
cond = should_run()
empty_task_1 = EmptyOperator(task_id="empty_task_1")
empty_task_2 = EmptyOperator(task_id="empty_task_2")
cond >> [empty_task_1, empty_task_2]
| 2,015 | 32.04918 | 110 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_sensors.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime, timedelta
import pendulum
from pytz import UTC
from airflow.models import DAG
from airflow.operators.bash import BashOperator
from airflow.sensors.bash import BashSensor
from airflow.sensors.filesystem import FileSensor
from airflow.sensors.python import PythonSensor
from airflow.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync
from airflow.sensors.time_sensor import TimeSensor, TimeSensorAsync
from airflow.sensors.weekday import DayOfWeekSensor
from airflow.utils.trigger_rule import TriggerRule
from airflow.utils.weekday import WeekDay
# [START example_callables]
def success_callable():
return True
def failure_callable():
return False
# [END example_callables]
with DAG(
dag_id="example_sensors",
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
# [START example_time_delta_sensor]
t0 = TimeDeltaSensor(task_id="wait_some_seconds", delta=timedelta(seconds=2))
# [END example_time_delta_sensor]
# [START example_time_delta_sensor_async]
t0a = TimeDeltaSensorAsync(task_id="wait_some_seconds_async", delta=timedelta(seconds=2))
# [END example_time_delta_sensor_async]
# [START example_time_sensors]
t1 = TimeSensor(task_id="fire_immediately", target_time=datetime.now(tz=UTC).time())
t2 = TimeSensor(
task_id="timeout_after_second_date_in_the_future",
timeout=1,
soft_fail=True,
target_time=(datetime.now(tz=UTC) + timedelta(hours=1)).time(),
)
# [END example_time_sensors]
# [START example_time_sensors_async]
t1a = TimeSensorAsync(task_id="fire_immediately_async", target_time=datetime.now(tz=UTC).time())
t2a = TimeSensorAsync(
task_id="timeout_after_second_date_in_the_future_async",
timeout=1,
soft_fail=True,
target_time=(datetime.now(tz=UTC) + timedelta(hours=1)).time(),
)
# [END example_time_sensors_async]
# [START example_bash_sensors]
t3 = BashSensor(task_id="Sensor_succeeds", bash_command="exit 0")
t4 = BashSensor(task_id="Sensor_fails_after_3_seconds", timeout=3, soft_fail=True, bash_command="exit 1")
# [END example_bash_sensors]
t5 = BashOperator(task_id="remove_file", bash_command="rm -rf /tmp/temporary_file_for_testing")
# [START example_file_sensor]
t6 = FileSensor(task_id="wait_for_file", filepath="/tmp/temporary_file_for_testing")
# [END example_file_sensor]
t7 = BashOperator(
task_id="create_file_after_3_seconds", bash_command="sleep 3; touch /tmp/temporary_file_for_testing"
)
# [START example_python_sensors]
t8 = PythonSensor(task_id="success_sensor_python", python_callable=success_callable)
t9 = PythonSensor(
task_id="failure_timeout_sensor_python", timeout=3, soft_fail=True, python_callable=failure_callable
)
# [END example_python_sensors]
# [START example_day_of_week_sensor]
t10 = DayOfWeekSensor(
task_id="week_day_sensor_failing_on_timeout", timeout=3, soft_fail=True, week_day=WeekDay.MONDAY
)
# [END example_day_of_week_sensor]
tx = BashOperator(task_id="print_date_in_bash", bash_command="date")
tx.trigger_rule = TriggerRule.NONE_FAILED
[t0, t0a, t1, t1a, t2, t2a, t3, t4] >> tx
t5 >> t6 >> tx
t7 >> tx
[t8, t9] >> tx
t10 >> tx
| 4,221 | 33.048387 | 109 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_python_operator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating the usage of the TaskFlow API to execute Python functions natively and within a
virtual environment.
"""
from __future__ import annotations
import logging
import shutil
import sys
import tempfile
import time
from pprint import pprint
import pendulum
from airflow import DAG
from airflow.decorators import task
from airflow.operators.python import ExternalPythonOperator, PythonVirtualenvOperator
log = logging.getLogger(__name__)
PATH_TO_PYTHON_BINARY = sys.executable
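# Note: sys.executable is used here only to keep the example self-contained; a real
# ExternalPythonOperator setup would point at a separately provisioned Python environment.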
BASE_DIR = tempfile.gettempdir()
def x():
pass
with DAG(
dag_id="example_python_operator",
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
# [START howto_operator_python]
@task(task_id="print_the_context")
def print_context(ds=None, **kwargs):
"""Print the Airflow context and ds variable from the context."""
pprint(kwargs)
print(ds)
return "Whatever you return gets printed in the logs"
run_this = print_context()
# [END howto_operator_python]
# [START howto_operator_python_render_sql]
@task(task_id="log_sql_query", templates_dict={"query": "sql/sample.sql"}, templates_exts=[".sql"])
def log_sql(**kwargs):
logging.info("Python task decorator query: %s", str(kwargs["templates_dict"]["query"]))
log_the_sql = log_sql()
# [END howto_operator_python_render_sql]
# [START howto_operator_python_kwargs]
# Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
for i in range(5):
@task(task_id=f"sleep_for_{i}")
def my_sleeping_function(random_base):
"""This is a function that will run within the DAG execution"""
time.sleep(random_base)
sleeping_task = my_sleeping_function(random_base=float(i) / 10)
run_this >> log_the_sql >> sleeping_task
# [END howto_operator_python_kwargs]
if not shutil.which("virtualenv"):
log.warning("The virtalenv_python example task requires virtualenv, please install it.")
else:
# [START howto_operator_python_venv]
@task.virtualenv(
task_id="virtualenv_python", requirements=["colorama==0.4.0"], system_site_packages=False
)
def callable_virtualenv():
"""
Example function that will be performed in a virtual environment.
Importing at the module level ensures that it will not attempt to import the
library before it is installed.
"""
from time import sleep
from colorama import Back, Fore, Style
print(Fore.RED + "some red text")
print(Back.GREEN + "and with a green background")
print(Style.DIM + "and in dim text")
print(Style.RESET_ALL)
for _ in range(4):
print(Style.DIM + "Please wait...", flush=True)
sleep(1)
print("Finished")
virtualenv_task = callable_virtualenv()
# [END howto_operator_python_venv]
sleeping_task >> virtualenv_task
# [START howto_operator_external_python]
@task.external_python(task_id="external_python", python=PATH_TO_PYTHON_BINARY)
def callable_external_python():
"""
Example function that will be performed in a virtual environment.
Importing at the module level ensures that it will not attempt to import the
library before it is installed.
"""
import sys
from time import sleep
print(f"Running task via {sys.executable}")
print("Sleeping")
for _ in range(4):
print("Please wait...", flush=True)
sleep(1)
print("Finished")
external_python_task = callable_external_python()
# [END howto_operator_external_python]
# [START howto_operator_external_python_classic]
external_classic = ExternalPythonOperator(
task_id="external_python_classic",
python=PATH_TO_PYTHON_BINARY,
python_callable=x,
)
# [END howto_operator_external_python_classic]
# [START howto_operator_python_venv_classic]
virtual_classic = PythonVirtualenvOperator(
task_id="virtualenv_classic",
requirements="colorama==0.4.0",
python_callable=x,
)
# [END howto_operator_python_venv_classic]
run_this >> external_classic >> external_python_task >> virtual_classic
| 5,383 | 32.65 | 105 | py |
| airflow | airflow-main/airflow/example_dags/example_external_task_marker_dag.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating setting up inter-DAG dependencies using ExternalTaskSensor and
ExternalTaskMarker.
In this example, child_task1 in example_external_task_marker_child depends on parent_task in
example_external_task_marker_parent. When parent_task is cleared with 'Recursive' selected,
the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its downstream tasks.
ExternalTaskSensor will keep poking for the status of the remote ExternalTaskMarker task at a regular
interval until one of the following happens:
1. ExternalTaskMarker reaches one of the states listed in allowed_states.
In this case, ExternalTaskSensor exits with a success status code.
2. ExternalTaskMarker reaches one of the states listed in failed_states.
In this case, ExternalTaskSensor raises an AirflowException and the user needs to handle this,
possibly with multiple downstream tasks.
3. ExternalTaskSensor times out. In this case, it raises an AirflowSkipException
or an AirflowSensorTimeout exception.
"""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.sensors.external_task import ExternalTaskMarker, ExternalTaskSensor
start_date = pendulum.datetime(2021, 1, 1, tz="UTC")
with DAG(
dag_id="example_external_task_marker_parent",
start_date=start_date,
catchup=False,
schedule=None,
tags=["example2"],
) as parent_dag:
# [START howto_operator_external_task_marker]
parent_task = ExternalTaskMarker(
task_id="parent_task",
external_dag_id="example_external_task_marker_child",
external_task_id="child_task1",
)
# [END howto_operator_external_task_marker]
with DAG(
dag_id="example_external_task_marker_child",
start_date=start_date,
schedule=None,
catchup=False,
tags=["example2"],
) as child_dag:
# [START howto_operator_external_task_sensor]
child_task1 = ExternalTaskSensor(
task_id="child_task1",
external_dag_id=parent_dag.dag_id,
external_task_id=parent_task.task_id,
timeout=600,
allowed_states=["success"],
failed_states=["failed", "skipped"],
mode="reschedule",
)
# [END howto_operator_external_task_sensor]
# [START howto_operator_external_task_sensor_with_task_group]
child_task2 = ExternalTaskSensor(
task_id="child_task2",
external_dag_id=parent_dag.dag_id,
external_task_group_id="parent_dag_task_group_id",
timeout=600,
allowed_states=["success"],
failed_states=["failed", "skipped"],
mode="reschedule",
)
# [END howto_operator_external_task_sensor_with_task_group]
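# Note: external_task_group_id waits for an entire task group in the parent DAG; the parent DAG
# above does not define such a group, so this sensor is illustrative only.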
child_task3 = EmptyOperator(task_id="child_task3")
child_task1 >> child_task2 >> child_task3
| 3,607 | 35.816327 | 97 | py |
| airflow | airflow-main/airflow/example_dags/example_trigger_controller_dag.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example usage of the TriggerDagRunOperator. This example holds 2 DAGs:
1. 1st DAG (example_trigger_controller_dag) holds a TriggerDagRunOperator, which will trigger the 2nd DAG
2. 2nd DAG (example_trigger_target_dag) which will be triggered by the TriggerDagRunOperator in the 1st DAG
"""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
with DAG(
dag_id="example_trigger_controller_dag",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule="@once",
tags=["example"],
) as dag:
trigger = TriggerDagRunOperator(
task_id="test_trigger_dagrun",
trigger_dag_id="example_trigger_target_dag", # Ensure this equals the dag_id of the DAG to trigger
conf={"message": "Hello World"},
)
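# The conf dict is passed to the triggered run and is available there as dag_run.conf.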
| 1,640 | 38.071429 | 107 | py |
| airflow | airflow-main/airflow/example_dags/example_datasets.py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG for demonstrating behavior of Datasets feature.
Notes on usage:
Turn on all the dags.
DAG dataset_produces_1 should run because it's on a schedule.
After dataset_produces_1 runs, dataset_consumes_1 should be triggered immediately
because its only dataset dependency is managed by dataset_produces_1.
No other dags should be triggered. Note that even though dataset_consumes_1_and_2 depends on
the dataset in dataset_produces_1, it will not be triggered until dataset_produces_2 runs
(and dataset_produces_2 is left with no schedule so that we can trigger it manually).
Next, trigger dataset_produces_2. After dataset_produces_2 finishes,
dataset_consumes_1_and_2 should run.
Dags dataset_consumes_1_never_scheduled and dataset_consumes_unknown_never_scheduled should not run because
they depend on datasets that never get updated.
"""
from __future__ import annotations
import pendulum
from airflow import DAG, Dataset
from airflow.operators.bash import BashOperator
# [START dataset_def]
dag1_dataset = Dataset("s3://dag1/output_1.txt", extra={"hi": "bye"})
# [END dataset_def]
dag2_dataset = Dataset("s3://dag2/output_1.txt", extra={"hi": "bye"})
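# Datasets are identified by their URI; the extra dict is free-form metadata and does not affect scheduling.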
with DAG(
dag_id="dataset_produces_1",
catchup=False,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
schedule="@daily",
tags=["produces", "dataset-scheduled"],
) as dag1:
# [START task_outlet]
BashOperator(outlets=[dag1_dataset], task_id="producing_task_1", bash_command="sleep 5")
# [END task_outlet]
with DAG(
dag_id="dataset_produces_2",
catchup=False,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
schedule=None,
tags=["produces", "dataset-scheduled"],
) as dag2:
BashOperator(outlets=[dag2_dataset], task_id="producing_task_2", bash_command="sleep 5")
# [START dag_dep]
with DAG(
dag_id="dataset_consumes_1",
catchup=False,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
schedule=[dag1_dataset],
tags=["consumes", "dataset-scheduled"],
) as dag3:
# [END dag_dep]
BashOperator(
outlets=[Dataset("s3://consuming_1_task/dataset_other.txt")],
task_id="consuming_1",
bash_command="sleep 5",
)
with DAG(
dag_id="dataset_consumes_1_and_2",
catchup=False,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
schedule=[dag1_dataset, dag2_dataset],
tags=["consumes", "dataset-scheduled"],
) as dag4:
BashOperator(
outlets=[Dataset("s3://consuming_2_task/dataset_other_unknown.txt")],
task_id="consuming_2",
bash_command="sleep 5",
)
with DAG(
dag_id="dataset_consumes_1_never_scheduled",
catchup=False,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
schedule=[
dag1_dataset,
Dataset("s3://this-dataset-doesnt-get-triggered"),
],
tags=["consumes", "dataset-scheduled"],
) as dag5:
BashOperator(
outlets=[Dataset("s3://consuming_2_task/dataset_other_unknown.txt")],
task_id="consuming_3",
bash_command="sleep 5",
)
with DAG(
dag_id="dataset_consumes_unknown_never_scheduled",
catchup=False,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
schedule=[
Dataset("s3://unrelated/dataset3.txt"),
Dataset("s3://unrelated/dataset_other_unknown.txt"),
],
tags=["dataset-scheduled"],
) as dag6:
BashOperator(
task_id="unrelated_task",
outlets=[Dataset("s3://unrelated_task/dataset_other_unknown.txt")],
bash_command="sleep 5",
)
| 4,313 | 32.184615 | 107 | py |
| airflow | airflow-main/airflow/example_dags/example_short_circuit_operator.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the ShortCircuitOperator."""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import ShortCircuitOperator
from airflow.utils.trigger_rule import TriggerRule
with DAG(
dag_id="example_short_circuit_operator",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
cond_true = ShortCircuitOperator(
task_id="condition_is_True",
python_callable=lambda: True,
)
cond_false = ShortCircuitOperator(
task_id="condition_is_False",
python_callable=lambda: False,
)
ds_true = [EmptyOperator(task_id="true_" + str(i)) for i in [1, 2]]
ds_false = [EmptyOperator(task_id="false_" + str(i)) for i in [1, 2]]
chain(cond_true, *ds_true)
chain(cond_false, *ds_false)
[task_1, task_2, task_3, task_4, task_5, task_6] = [
EmptyOperator(task_id=f"task_{i}") for i in range(1, 7)
]
task_7 = EmptyOperator(task_id="task_7", trigger_rule=TriggerRule.ALL_DONE)
short_circuit = ShortCircuitOperator(
task_id="short_circuit", ignore_downstream_trigger_rules=False, python_callable=lambda: False
)
chain(task_1, [task_2, short_circuit], [task_3, task_4], [task_5, task_6], task_7)
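# With ignore_downstream_trigger_rules=False the operator skips only its direct downstream tasks;
# tasks further downstream are evaluated by their own trigger rules, so task_7 (ALL_DONE) still runs.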
| 2,205 | 34.580645 | 101 | py |
| airflow | airflow-main/airflow/example_dags/example_latest_only.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example of the LatestOnlyOperator"""
from __future__ import annotations
import datetime as dt
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.operators.latest_only import LatestOnlyOperator
with DAG(
dag_id="latest_only",
schedule=dt.timedelta(hours=4),
start_date=dt.datetime(2021, 1, 1),
catchup=False,
tags=["example2", "example3"],
) as dag:
latest_only = LatestOnlyOperator(task_id="latest_only")
task1 = EmptyOperator(task_id="task1")
latest_only >> task1
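# LatestOnlyOperator skips its downstream tasks for any run that is not the latest scheduled interval.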
| 1,327 | 33.947368 | 62 | py |
| airflow | airflow-main/airflow/example_dags/example_skip_dag.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the EmptyOperator and a custom EmptySkipOperator which skips by default."""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.exceptions import AirflowSkipException
from airflow.models.baseoperator import BaseOperator
from airflow.operators.empty import EmptyOperator
from airflow.utils.context import Context
from airflow.utils.trigger_rule import TriggerRule
# Create some placeholder operators
class EmptySkipOperator(BaseOperator):
"""Empty operator which always skips the task."""
ui_color = "#e8b7e4"
def execute(self, context: Context):
raise AirflowSkipException
def create_test_pipeline(suffix, trigger_rule):
"""
Instantiate a number of operators for the given DAG.
:param str suffix: Suffix to append to the operator task_ids
:param str trigger_rule: TriggerRule for the join task
"""
skip_operator = EmptySkipOperator(task_id=f"skip_operator_{suffix}")
always_true = EmptyOperator(task_id=f"always_true_{suffix}")
join = EmptyOperator(task_id=trigger_rule, trigger_rule=trigger_rule)
final = EmptyOperator(task_id=f"final_{suffix}")
skip_operator >> join
always_true >> join
join >> final
with DAG(
dag_id="example_skip_dag",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
create_test_pipeline("1", TriggerRule.ALL_SUCCESS)
create_test_pipeline("2", TriggerRule.ONE_SUCCESS)
| 2,346 | 34.029851 | 104 | py |
| airflow | airflow-main/airflow/example_dags/example_subdag_operator.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the SubDagOperator."""
from __future__ import annotations
# [START example_subdag_operator]
import datetime
from airflow import DAG
from airflow.example_dags.subdags.subdag import subdag
from airflow.operators.empty import EmptyOperator
from airflow.operators.subdag import SubDagOperator
DAG_NAME = "example_subdag_operator"
with DAG(
dag_id=DAG_NAME,
default_args={"retries": 2},
start_date=datetime.datetime(2022, 1, 1),
schedule="@once",
tags=["example"],
) as dag:
start = EmptyOperator(
task_id="start",
)
section_1 = SubDagOperator(
task_id="section-1",
subdag=subdag(DAG_NAME, "section-1", dag.default_args),
)
some_other_task = EmptyOperator(
task_id="some-other-task",
)
section_2 = SubDagOperator(
task_id="section-2",
subdag=subdag(DAG_NAME, "section-2", dag.default_args),
)
end = EmptyOperator(
task_id="end",
)
start >> section_1 >> some_other_task >> section_2 >> end
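# Note: SubDAGs are deprecated; TaskGroups are the recommended way to group tasks in current Airflow versions.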
# [END example_subdag_operator]
| 1,875 | 28.777778 | 64 | py |
| airflow | airflow-main/airflow/example_dags/tutorial.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
### Tutorial Documentation
Documentation that goes along with the Airflow tutorial located
[here](https://airflow.apache.org/tutorial.html)
"""
from __future__ import annotations
# [START tutorial]
# [START import_module]
from datetime import datetime, timedelta
from textwrap import dedent
# The DAG object; we'll need this to instantiate a DAG
from airflow import DAG
# Operators; we need this to operate!
from airflow.operators.bash import BashOperator
# [END import_module]
# [START instantiate_dag]
with DAG(
"tutorial",
# [START default_args]
# These args will get passed on to each operator
# You can override them on a per-task basis during operator initialization
default_args={
"depends_on_past": False,
"email": ["[email protected]"],
"email_on_failure": False,
"email_on_retry": False,
"retries": 1,
"retry_delay": timedelta(minutes=5),
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
# 'wait_for_downstream': False,
# 'sla': timedelta(hours=2),
# 'execution_timeout': timedelta(seconds=300),
# 'on_failure_callback': some_function, # or list of functions
# 'on_success_callback': some_other_function, # or list of functions
# 'on_retry_callback': another_function, # or list of functions
# 'sla_miss_callback': yet_another_function, # or list of functions
# 'trigger_rule': 'all_success'
},
# [END default_args]
description="A simple tutorial DAG",
schedule=timedelta(days=1),
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["example"],
) as dag:
# [END instantiate_dag]
# t1, t2 and t3 are examples of tasks created by instantiating operators
# [START basic_task]
t1 = BashOperator(
task_id="print_date",
bash_command="date",
)
t2 = BashOperator(
task_id="sleep",
depends_on_past=False,
bash_command="sleep 5",
retries=3,
)
# [END basic_task]
# [START documentation]
t1.doc_md = dedent(
"""\
#### Task Documentation
You can document your task using the attributes `doc_md` (markdown),
`doc` (plain text), `doc_rst`, `doc_json`, `doc_yaml` which gets
rendered in the UI's Task Instance Details page.

**Image Credit:** Randall Munroe, [XKCD](https://xkcd.com/license.html)
"""
)
dag.doc_md = __doc__ # providing that you have a docstring at the beginning of the DAG; OR
dag.doc_md = """
This is documentation placed anywhere
""" # otherwise, type it like this
# [END documentation]
# [START jinja_template]
templated_command = dedent(
"""
{% for i in range(5) %}
echo "{{ ds }}"
echo "{{ macros.ds_add(ds, 7)}}"
{% endfor %}
"""
)
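# ds and macros.ds_add are built-in template variables/macros; the command is rendered by Jinja before execution.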
t3 = BashOperator(
task_id="templated",
depends_on_past=False,
bash_command=templated_command,
)
# [END jinja_template]
t1 >> [t2, t3]
# [END tutorial]
| 3,980 | 30.595238 | 95 | py |
| airflow | airflow-main/airflow/example_dags/example_complex.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that shows the complex DAG structure.
"""
from __future__ import annotations
import pendulum
from airflow import models
from airflow.models.baseoperator import chain
from airflow.operators.bash import BashOperator
with models.DAG(
dag_id="example_complex",
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example", "example2", "example3"],
) as dag:
# Create
create_entry_group = BashOperator(task_id="create_entry_group", bash_command="echo create_entry_group")
create_entry_group_result = BashOperator(
task_id="create_entry_group_result", bash_command="echo create_entry_group_result"
)
create_entry_group_result2 = BashOperator(
task_id="create_entry_group_result2", bash_command="echo create_entry_group_result2"
)
create_entry_gcs = BashOperator(task_id="create_entry_gcs", bash_command="echo create_entry_gcs")
create_entry_gcs_result = BashOperator(
task_id="create_entry_gcs_result", bash_command="echo create_entry_gcs_result"
)
create_entry_gcs_result2 = BashOperator(
task_id="create_entry_gcs_result2", bash_command="echo create_entry_gcs_result2"
)
create_tag = BashOperator(task_id="create_tag", bash_command="echo create_tag")
create_tag_result = BashOperator(task_id="create_tag_result", bash_command="echo create_tag_result")
create_tag_result2 = BashOperator(task_id="create_tag_result2", bash_command="echo create_tag_result2")
create_tag_template = BashOperator(task_id="create_tag_template", bash_command="echo create_tag_template")
create_tag_template_result = BashOperator(
task_id="create_tag_template_result", bash_command="echo create_tag_template_result"
)
create_tag_template_result2 = BashOperator(
task_id="create_tag_template_result2", bash_command="echo create_tag_template_result2"
)
create_tag_template_field = BashOperator(
task_id="create_tag_template_field", bash_command="echo create_tag_template_field"
)
create_tag_template_field_result = BashOperator(
task_id="create_tag_template_field_result", bash_command="echo create_tag_template_field_result"
)
create_tag_template_field_result2 = BashOperator(
task_id="create_tag_template_field_result2", bash_command="echo create_tag_template_field_result"
)
# Delete
delete_entry = BashOperator(task_id="delete_entry", bash_command="echo delete_entry")
create_entry_gcs >> delete_entry
delete_entry_group = BashOperator(task_id="delete_entry_group", bash_command="echo delete_entry_group")
create_entry_group >> delete_entry_group
delete_tag = BashOperator(task_id="delete_tag", bash_command="echo delete_tag")
create_tag >> delete_tag
delete_tag_template_field = BashOperator(
task_id="delete_tag_template_field", bash_command="echo delete_tag_template_field"
)
delete_tag_template = BashOperator(task_id="delete_tag_template", bash_command="echo delete_tag_template")
# Get
get_entry_group = BashOperator(task_id="get_entry_group", bash_command="echo get_entry_group")
get_entry_group_result = BashOperator(
task_id="get_entry_group_result", bash_command="echo get_entry_group_result"
)
get_entry = BashOperator(task_id="get_entry", bash_command="echo get_entry")
get_entry_result = BashOperator(task_id="get_entry_result", bash_command="echo get_entry_result")
get_tag_template = BashOperator(task_id="get_tag_template", bash_command="echo get_tag_template")
get_tag_template_result = BashOperator(
task_id="get_tag_template_result", bash_command="echo get_tag_template_result"
)
# List
list_tags = BashOperator(task_id="list_tags", bash_command="echo list_tags")
list_tags_result = BashOperator(task_id="list_tags_result", bash_command="echo list_tags_result")
# Lookup
lookup_entry = BashOperator(task_id="lookup_entry", bash_command="echo lookup_entry")
lookup_entry_result = BashOperator(task_id="lookup_entry_result", bash_command="echo lookup_entry_result")
# Rename
rename_tag_template_field = BashOperator(
task_id="rename_tag_template_field", bash_command="echo rename_tag_template_field"
)
# Search
search_catalog = BashOperator(task_id="search_catalog", bash_command="echo search_catalog")
search_catalog_result = BashOperator(
task_id="search_catalog_result", bash_command="echo search_catalog_result"
)
# Update
update_entry = BashOperator(task_id="update_entry", bash_command="echo update_entry")
update_tag = BashOperator(task_id="update_tag", bash_command="echo update_tag")
update_tag_template = BashOperator(task_id="update_tag_template", bash_command="echo update_tag_template")
update_tag_template_field = BashOperator(
task_id="update_tag_template_field", bash_command="echo update_tag_template_field"
)
# Create
create_tasks = [
create_entry_group,
create_entry_gcs,
create_tag_template,
create_tag_template_field,
create_tag,
]
chain(*create_tasks)
create_entry_group >> delete_entry_group
create_entry_group >> create_entry_group_result
create_entry_group >> create_entry_group_result2
create_entry_gcs >> delete_entry
create_entry_gcs >> create_entry_gcs_result
create_entry_gcs >> create_entry_gcs_result2
create_tag_template >> delete_tag_template_field
create_tag_template >> create_tag_template_result
create_tag_template >> create_tag_template_result2
create_tag_template_field >> delete_tag_template_field
create_tag_template_field >> create_tag_template_field_result
create_tag_template_field >> create_tag_template_field_result2
create_tag >> delete_tag
create_tag >> create_tag_result
create_tag >> create_tag_result2
# Delete
delete_tasks = [
delete_tag,
delete_tag_template_field,
delete_tag_template,
delete_entry_group,
delete_entry,
]
chain(*delete_tasks)
# Get
create_tag_template >> get_tag_template >> delete_tag_template
get_tag_template >> get_tag_template_result
create_entry_gcs >> get_entry >> delete_entry
get_entry >> get_entry_result
create_entry_group >> get_entry_group >> delete_entry_group
get_entry_group >> get_entry_group_result
# List
create_tag >> list_tags >> delete_tag
list_tags >> list_tags_result
# Lookup
create_entry_gcs >> lookup_entry >> delete_entry
lookup_entry >> lookup_entry_result
# Rename
create_tag_template_field >> rename_tag_template_field >> delete_tag_template_field
# Search
chain(create_tasks, search_catalog, delete_tasks)
search_catalog >> search_catalog_result
# Update
create_entry_gcs >> update_entry >> delete_entry
create_tag >> update_tag >> delete_tag
create_tag_template >> update_tag_template >> delete_tag_template
create_tag_template_field >> update_tag_template_field >> rename_tag_template_field
| 7,944 | 34.950226 | 110 | py |
| airflow | airflow-main/airflow/example_dags/example_dag_decorator.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
import httpx
import pendulum
from airflow.decorators import dag, task
from airflow.models.baseoperator import BaseOperator
from airflow.operators.email import EmailOperator
from airflow.utils.context import Context
class GetRequestOperator(BaseOperator):
"""Custom operator to send GET request to provided url"""
def __init__(self, *, url: str, **kwargs):
super().__init__(**kwargs)
self.url = url
def execute(self, context: Context):
return httpx.get(self.url).json()
# [START dag_decorator_usage]
@dag(
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
)
def example_dag_decorator(email: str = "[email protected]"):
"""
DAG to send server IP to email.
:param email: Email to send IP to. Defaults to [email protected].
"""
get_ip = GetRequestOperator(task_id="get_ip", url="http://httpbin.org/get")
@task(multiple_outputs=True)
def prepare_email(raw_json: dict[str, Any]) -> dict[str, str]:
external_ip = raw_json["origin"]
return {
"subject": f"Server connected from {external_ip}",
"body": f"Seems like today your server executing Airflow is connected from IP {external_ip}<br>",
}
email_info = prepare_email(get_ip.output)
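# get_ip.output is an XComArg: passing it creates the dependency and pulls the upstream return value
# at runtime. Because prepare_email uses multiple_outputs=True, its dict keys are separate XComs.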
EmailOperator(
task_id="send_email", to=email, subject=email_info["subject"], html_content=email_info["body"]
)
example_dag = example_dag_decorator()
# [END dag_decorator_usage]
| 2,367 | 31 | 109 | py |
| airflow | airflow-main/airflow/example_dags/example_xcom.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of XComs."""
from __future__ import annotations
import pendulum
from airflow import DAG, XComArg
from airflow.decorators import task
from airflow.operators.bash import BashOperator
value_1 = [1, 2, 3]
value_2 = {"a": "b"}
@task
def push(ti=None):
"""Pushes an XCom without a specific target"""
ti.xcom_push(key="value from pusher 1", value=value_1)
@task
def push_by_returning():
"""Pushes an XCom without a specific target, just by returning it"""
return value_2
def _compare_values(pulled_value, check_value):
if pulled_value != check_value:
raise ValueError(f"The two values differ {pulled_value} and {check_value}")
@task
def puller(pulled_value_2, ti=None):
"""Pull all previously pushed XComs and check if the pushed values match the pulled values."""
pulled_value_1 = ti.xcom_pull(task_ids="push", key="value from pusher 1")
_compare_values(pulled_value_1, value_1)
_compare_values(pulled_value_2, value_2)
@task
def pull_value_from_bash_push(ti=None):
bash_pushed_via_return_value = ti.xcom_pull(key="return_value", task_ids="bash_push")
bash_manually_pushed_value = ti.xcom_pull(key="manually_pushed_value", task_ids="bash_push")
print(f"The xcom value pushed by task push via return value is {bash_pushed_via_return_value}")
print(f"The xcom value pushed by task push manually is {bash_manually_pushed_value}")
with DAG(
"example_xcom",
schedule="@once",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
bash_push = BashOperator(
task_id="bash_push",
bash_command='echo "bash_push demo" && '
'echo "Manually set xcom value '
'{{ ti.xcom_push(key="manually_pushed_value", value="manually_pushed_value") }}" && '
'echo "value_by_return"',
)
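# The Jinja expression above is evaluated when the command is rendered; ti.xcom_push returns None,
# and the BashOperator's return_value XCom is the last line written to stdout ("value_by_return").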
bash_pull = BashOperator(
task_id="bash_pull",
bash_command='echo "bash pull demo" && '
f'echo "The xcom pushed manually is {XComArg(bash_push, key="manually_pushed_value")}" && '
f'echo "The returned_value xcom is {XComArg(bash_push)}" && '
'echo "finished"',
do_xcom_push=False,
)
python_pull_from_bash = pull_value_from_bash_push()
[bash_pull, python_pull_from_bash] << bash_push
puller(push_by_returning()) << push()
| 3,159 | 32.617021 | 99 | py |
| airflow | airflow-main/airflow/example_dags/example_setup_teardown.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of setup and teardown tasks."""
from __future__ import annotations
import pendulum
from airflow.models.dag import DAG
from airflow.operators.bash import BashOperator
from airflow.utils.task_group import TaskGroup
with DAG(
dag_id="example_setup_teardown",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
root_setup = BashOperator(task_id="root_setup", bash_command="echo 'Hello from root_setup'").as_setup()
root_normal = BashOperator(task_id="normal", bash_command="echo 'I am just a normal task'")
root_teardown = BashOperator(
task_id="root_teardown", bash_command="echo 'Goodbye from root_teardown'"
).as_teardown(setups=root_setup)
root_setup >> root_normal >> root_teardown
with TaskGroup("section_1") as section_1:
inner_setup = BashOperator(
task_id="taskgroup_setup", bash_command="echo 'Hello from taskgroup_setup'"
).as_setup()
inner_normal = BashOperator(task_id="normal", bash_command="echo 'I am just a normal task'")
inner_teardown = BashOperator(
task_id="taskgroup_teardown", bash_command="echo 'Hello from taskgroup_teardown'"
).as_teardown(setups=inner_setup)
inner_setup >> inner_normal >> inner_teardown
root_normal >> section_1
| 2,150 | 42.897959 | 107 | py |
| airflow | airflow-main/airflow/example_dags/tutorial_dag.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
### DAG Tutorial Documentation
This DAG is demonstrating an Extract -> Transform -> Load pipeline
"""
from __future__ import annotations
# [START tutorial]
# [START import_module]
import json
from textwrap import dedent
import pendulum
# The DAG object; we'll need this to instantiate a DAG
from airflow import DAG
# Operators; we need this to operate!
from airflow.operators.python import PythonOperator
# [END import_module]
# [START instantiate_dag]
with DAG(
"tutorial_dag",
# [START default_args]
# These args will get passed on to each operator
# You can override them on a per-task basis during operator initialization
default_args={"retries": 2},
# [END default_args]
description="DAG tutorial",
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
# [END instantiate_dag]
# [START documentation]
dag.doc_md = __doc__
# [END documentation]
# [START extract_function]
def extract(**kwargs):
ti = kwargs["ti"]
data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}'
ti.xcom_push("order_data", data_string)
# [END extract_function]
# [START transform_function]
def transform(**kwargs):
ti = kwargs["ti"]
extract_data_string = ti.xcom_pull(task_ids="extract", key="order_data")
order_data = json.loads(extract_data_string)
total_order_value = 0
for value in order_data.values():
total_order_value += value
total_value = {"total_order_value": total_order_value}
total_value_json_string = json.dumps(total_value)
ti.xcom_push("total_order_value", total_value_json_string)
# [END transform_function]
# [START load_function]
def load(**kwargs):
ti = kwargs["ti"]
total_value_string = ti.xcom_pull(task_ids="transform", key="total_order_value")
total_order_value = json.loads(total_value_string)
print(total_order_value)
# [END load_function]
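# xcom_push(key, value) stores a value under the given key; xcom_pull(task_ids=..., key=...)
# retrieves it in a downstream task.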
# [START main_flow]
extract_task = PythonOperator(
task_id="extract",
python_callable=extract,
)
extract_task.doc_md = dedent(
"""\
#### Extract task
A simple Extract task to get data ready for the rest of the data pipeline.
In this case, getting data is simulated by reading from a hardcoded JSON string.
This data is then put into xcom, so that it can be processed by the next task.
"""
)
transform_task = PythonOperator(
task_id="transform",
python_callable=transform,
)
transform_task.doc_md = dedent(
"""\
#### Transform task
A simple Transform task which takes in the collection of order data from xcom
and computes the total order value.
This computed value is then put into xcom, so that it can be processed by the next task.
"""
)
load_task = PythonOperator(
task_id="load",
python_callable=load,
)
load_task.doc_md = dedent(
"""\
#### Load task
A simple Load task which takes in the result of the Transform task, by reading it
from xcom and instead of saving it to end user review, just prints it out.
"""
)
extract_task >> transform_task >> load_task
# [END main_flow]
# [END tutorial]
| 4,129 | 29.367647 | 92 | py |
| airflow | airflow-main/airflow/example_dags/tutorial_taskflow_api.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
# [START tutorial]
# [START import_module]
import json
import pendulum
from airflow.decorators import dag, task
# [END import_module]
# [START instantiate_dag]
@dag(
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
)
def tutorial_taskflow_api():
"""
### TaskFlow API Tutorial Documentation
This is a simple data pipeline example which demonstrates the use of
the TaskFlow API using three simple tasks for Extract, Transform, and Load.
Documentation that goes along with the Airflow TaskFlow API tutorial is
located
[here](https://airflow.apache.org/docs/apache-airflow/stable/tutorial_taskflow_api.html)
"""
# [END instantiate_dag]
# [START extract]
@task()
def extract():
"""
#### Extract task
A simple Extract task to get data ready for the rest of the data
pipeline. In this case, getting data is simulated by reading from a
hardcoded JSON string.
"""
data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}'
order_data_dict = json.loads(data_string)
return order_data_dict
# [END extract]
# [START transform]
@task(multiple_outputs=True)
def transform(order_data_dict: dict):
"""
#### Transform task
A simple Transform task which takes in the collection of order data and
computes the total order value.
"""
total_order_value = 0
for value in order_data_dict.values():
total_order_value += value
return {"total_order_value": total_order_value}
# [END transform]
# [START load]
@task()
def load(total_order_value: float):
"""
#### Load task
A simple Load task which takes in the result of the Transform task and
instead of saving it to end user review, just prints it out.
"""
print(f"Total order value is: {total_order_value:.2f}")
# [END load]
# [START main_flow]
order_data = extract()
order_summary = transform(order_data)
load(order_summary["total_order_value"])
# [END main_flow]
# [START dag_invocation]
tutorial_taskflow_api()
# [END dag_invocation]
# [END tutorial]
| 3,095 | 27.934579 | 92 | py |
| airflow | airflow-main/airflow/example_dags/example_params_trigger_ui.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage DAG params to model a trigger UI with a user form.
This example DAG generates greetings to a list of provided names in selected languages in the logs.
"""
from __future__ import annotations
import datetime
from pathlib import Path
from airflow import DAG
from airflow.decorators import task
from airflow.models.dagrun import DagRun
from airflow.models.param import Param
from airflow.models.taskinstance import TaskInstance
from airflow.utils.trigger_rule import TriggerRule
with DAG(
dag_id=Path(__file__).stem,
description=__doc__[0 : __doc__.find(".")],
doc_md=__doc__,
schedule=None,
start_date=datetime.datetime(2022, 3, 4),
catchup=False,
tags=["example_ui"],
params={
"names": Param(
["Linda", "Martha", "Thomas"],
type="array",
description="Define the list of names for which greetings should be generated in the logs."
" Please have one name per line.",
title="Names to greet",
),
"english": Param(True, type="boolean", title="English"),
"german": Param(True, type="boolean", title="German (Formal)"),
"french": Param(True, type="boolean", title="French"),
},
) as dag:
@task(task_id="get_names")
def get_names(**kwargs) -> list[str]:
ti: TaskInstance = kwargs["ti"]
dag_run: DagRun = ti.dag_run
if "names" not in dag_run.conf:
print("Uuups, no names given, was no UI used to trigger?")
return []
return dag_run.conf["names"]
@task.branch(task_id="select_languages")
def select_languages(**kwargs) -> list[str]:
ti: TaskInstance = kwargs["ti"]
dag_run: DagRun = ti.dag_run
selected_languages = []
for lang in ["english", "german", "french"]:
if lang in dag_run.conf and dag_run.conf[lang]:
selected_languages.append(f"generate_{lang}_greeting")
return selected_languages
@task(task_id="generate_english_greeting")
def generate_english_greeting(name: str) -> str:
return f"Hello {name}!"
@task(task_id="generate_german_greeting")
def generate_german_greeting(name: str) -> str:
return f"Sehr geehrter Herr/Frau {name}."
@task(task_id="generate_french_greeting")
def generate_french_greeting(name: str) -> str:
return f"Bonjour {name}!"
@task(task_id="print_greetings", trigger_rule=TriggerRule.ALL_DONE)
def print_greetings(greetings1, greetings2, greetings3) -> None:
for g in greetings1 if greetings1 else []:
print(g)
for g in greetings2 if greetings2 else []:
print(g)
for g in greetings3 if greetings3 else []:
print(g)
if not greetings1 and not greetings2 and not greetings3:
print("sad, nobody to greet :-(")
lang_select = select_languages()
names = get_names()
english_greetings = generate_english_greeting.expand(name=names)
german_greetings = generate_german_greeting.expand(name=names)
french_greetings = generate_french_greeting.expand(name=names)
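# .expand() performs dynamic task mapping: one mapped task instance is created per name at runtime.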
lang_select >> [english_greetings, german_greetings, french_greetings]
results_print = print_greetings(english_greetings, german_greetings, french_greetings)
| 4,103 | 38.085714 | 103 | py |
| airflow | airflow-main/airflow/example_dags/example_setup_teardown_taskflow.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of setup and teardown tasks."""
from __future__ import annotations
import pendulum
from airflow.decorators import setup, task, task_group, teardown
from airflow.models.dag import DAG
with DAG(
dag_id="example_setup_teardown_taskflow",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
@task
def task_1():
print("Hello 1")
@task
def task_2():
print("Hello 2")
@task
def task_3():
print("Hello 3")
# you can set setup / teardown relationships with the `as_teardown` method.
t1 = task_1()
t2 = task_2()
t3 = task_3()
t1 >> t2 >> t3.as_teardown(setups=t1)
# the method `as_teardown` will mark t3 as teardown, t1 as setup, and add the dependency t1 >> t3
# now if you clear t2 (downstream), then t1 will be cleared in addition to t3
# it's also possible to use a decorator to mark a task as setup or
# teardown when you define it. see below.
@setup
def dag_setup():
print("I am dag_setup")
@teardown
def dag_teardown():
print("I am dag_teardown")
@task
def dag_normal_task():
print("I am just a normal task")
s = dag_setup()
t = dag_teardown()
# by using the decorators, dag_setup and dag_teardown are already marked as setup / teardown
# now we just need to make sure they are linked directly
# what we need to do is this::
# s >> t
# s >> dag_normal_task() >> t
# but we can use a context manager to make it cleaner
with s >> t:
dag_normal_task()
@task_group
def section_1():
@task
def my_setup():
print("I set up")
@task
def my_teardown():
print("I tear down")
@task
def hello():
print("I say hello")
(s := my_setup()) >> hello() >> my_teardown().as_teardown(setups=s)
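# The walrus operator keeps a reference to the setup task so it can be passed to as_teardown
# within the same expression.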
# and let's put section 1 inside the "dag setup" and "dag teardown"
s >> section_1() >> t
| 2,842 | 27.717172 | 96 | py |
| airflow | airflow-main/airflow/example_dags/example_time_delta_sensor_async.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating ``TimeDeltaSensorAsync``, a drop-in replacement for ``TimeDeltaSensor`` that
defers and does not occupy a worker slot while it waits.
"""
from __future__ import annotations
import datetime
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.sensors.time_delta import TimeDeltaSensorAsync
with DAG(
dag_id="example_time_delta_sensor_async",
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
) as dag:
wait = TimeDeltaSensorAsync(task_id="wait", delta=datetime.timedelta(seconds=30))
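# While deferred, the sensor hands off to the triggerer and frees its worker slot until the delta has passed.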
finish = EmptyOperator(task_id="finish")
wait >> finish
| 1,490 | 34.5 | 102 | py |
| airflow | airflow-main/airflow/example_dags/example_short_circuit_decorator.py |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the `@task.short_circuit()` TaskFlow decorator."""
from __future__ import annotations
import pendulum
from airflow.decorators import dag, task
from airflow.models.baseoperator import chain
from airflow.operators.empty import EmptyOperator
from airflow.utils.trigger_rule import TriggerRule
@dag(start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), catchup=False, tags=["example"])
def example_short_circuit_decorator():
# [START howto_operator_short_circuit]
@task.short_circuit()
def check_condition(condition):
return condition
ds_true = [EmptyOperator(task_id="true_" + str(i)) for i in [1, 2]]
ds_false = [EmptyOperator(task_id="false_" + str(i)) for i in [1, 2]]
condition_is_true = check_condition.override(task_id="condition_is_true")(condition=True)
condition_is_false = check_condition.override(task_id="condition_is_false")(condition=False)
chain(condition_is_true, *ds_true)
chain(condition_is_false, *ds_false)
# [END howto_operator_short_circuit]
# [START howto_operator_short_circuit_trigger_rules]
[task_1, task_2, task_3, task_4, task_5, task_6] = [
EmptyOperator(task_id=f"task_{i}") for i in range(1, 7)
]
task_7 = EmptyOperator(task_id="task_7", trigger_rule=TriggerRule.ALL_DONE)
short_circuit = check_condition.override(task_id="short_circuit", ignore_downstream_trigger_rules=False)(
condition=False
)
chain(task_1, [task_2, short_circuit], [task_3, task_4], [task_5, task_6], task_7)
# [END howto_operator_short_circuit_trigger_rules]
example_dag = example_short_circuit_decorator()
| 2,441 | 39.032787 | 109 | py |
| airflow | airflow-main/airflow/example_dags/example_local_kubernetes_executor.py |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This is an example dag for using a Local Kubernetes Executor Configuration.
"""
from __future__ import annotations
import logging
from datetime import datetime
from airflow import DAG
from airflow.configuration import conf
from airflow.decorators import task
from airflow.example_dags.libs.helper import print_stuff
log = logging.getLogger(__name__)
worker_container_repository = conf.get("kubernetes_executor", "worker_container_repository")
worker_container_tag = conf.get("kubernetes_executor", "worker_container_tag")
try:
from kubernetes.client import models as k8s
except ImportError:
log.warning("Could not import DAGs in example_local_kubernetes_executor.py", exc_info=True)
log.warning("Install Kubernetes dependencies with: pip install apache-airflow[cncf.kubernetes]")
k8s = None
if k8s:
with DAG(
dag_id="example_local_kubernetes_executor",
schedule=None,
start_date=datetime(2021, 1, 1),
catchup=False,
tags=["example3"],
) as dag:
# You can use annotations on your kubernetes pods!
start_task_executor_config = {
"pod_override": k8s.V1Pod(metadata=k8s.V1ObjectMeta(annotations={"test": "annotation"}))
}
@task(
executor_config=start_task_executor_config,
queue="kubernetes",
task_id="task_with_kubernetes_executor",
)
def task_with_template():
print_stuff()
@task(task_id="task_with_local_executor")
def task_with_local(ds=None, **kwargs):
"""Print the Airflow context and ds variable from the context."""
print(kwargs)
print(ds)
return "Whatever you return gets printed in the logs"
task_with_local() >> task_with_template()
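        # Hedged aside: the routing above only takes effect when the scheduler runs with the
        # LocalKubernetesExecutor. Tasks whose `queue` matches the configured kubernetes queue
        # name are handed to the KubernetesExecutor; the section/option below is an assumption
        # about the configuration key ([local_kubernetes_executor] kubernetes_queue).
        kubernetes_queue = conf.get("local_kubernetes_executor", "kubernetes_queue", fallback="kubernetes")
        log.info("Tasks queued on %r run on the KubernetesExecutor", kubernetes_queue)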
| 2,584 | 34.902778 | 100 |
py
|
airflow
|
airflow-main/airflow/example_dags/tutorial_taskflow_api_virtualenv.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import shutil
from datetime import datetime
from airflow.decorators import dag, task
log = logging.getLogger(__name__)
if not shutil.which("virtualenv"):
log.warning("The tutorial_taskflow_api_virtualenv example DAG requires virtualenv, please install it.")
else:
@dag(schedule=None, start_date=datetime(2021, 1, 1), catchup=False, tags=["example"])
def tutorial_taskflow_api_virtualenv():
"""
### TaskFlow API example using virtualenv
This is a simple data pipeline example which demonstrates the use of
the TaskFlow API using three simple tasks for Extract, Transform, and Load.
"""
@task.virtualenv(
use_dill=True,
system_site_packages=False,
requirements=["funcsigs"],
)
def extract():
"""
#### Extract task
A simple Extract task to get data ready for the rest of the data
pipeline. In this case, getting data is simulated by reading from a
hardcoded JSON string.
"""
import json
data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}'
order_data_dict = json.loads(data_string)
return order_data_dict
@task(multiple_outputs=True)
def transform(order_data_dict: dict):
"""
#### Transform task
A simple Transform task which takes in the collection of order data and
computes the total order value.
"""
total_order_value = 0
for value in order_data_dict.values():
total_order_value += value
return {"total_order_value": total_order_value}
@task()
def load(total_order_value: float):
"""
#### Load task
A simple Load task which takes in the result of the Transform task and
instead of saving it to end user review, just prints it out.
"""
print(f"Total order value is: {total_order_value:.2f}")
order_data = extract()
order_summary = transform(order_data)
load(order_summary["total_order_value"])
tutorial_dag = tutorial_taskflow_api_virtualenv()
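    # Hedged aside (not part of the tutorial): a leaner illustration of the same decorator. The
    # callable runs in a freshly built virtualenv, so its imports must live inside the function
    # body; the pinned requirement below is illustrative only.
    @task.virtualenv(requirements=["pendulum==2.1.2"], system_site_packages=False)
    def isolated_now() -> str:
        import pendulum  # available because it is listed in `requirements`

        return pendulum.now("UTC").isoformat()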
| 3,084 | 34.056818 | 107 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_passing_params_via_test_command.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the params arguments in templated arguments."""
from __future__ import annotations
import datetime
import os
from textwrap import dedent
import pendulum
from airflow import DAG
from airflow.decorators import task
from airflow.operators.bash import BashOperator
@task(task_id="run_this")
def my_py_command(params, test_mode=None, task=None):
"""
Print out the "foo" param passed in via
`airflow tasks test example_passing_params_via_test_command run_this <date>
-t '{"foo":"bar"}'`
"""
if test_mode:
print(
f" 'foo' was passed in via test={test_mode} command : kwargs[params][foo] = {task.params['foo']}"
)
# Print out the value of "miff", passed in below via the Python Operator
print(f" 'miff' was passed in via task params = {params['miff']}")
return 1
@task(task_id="env_var_test_task")
def print_env_vars(test_mode=None):
"""
Print out the "foo" param passed in via
`airflow tasks test example_passing_params_via_test_command env_var_test_task <date>
--env-vars '{"foo":"bar"}'`
"""
if test_mode:
print(f"foo={os.environ.get('foo')}")
print(f"AIRFLOW_TEST_MODE={os.environ.get('AIRFLOW_TEST_MODE')}")
with DAG(
"example_passing_params_via_test_command",
schedule="*/1 * * * *",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
dagrun_timeout=datetime.timedelta(minutes=4),
tags=["example"],
) as dag:
run_this = my_py_command(params={"miff": "agg"})
my_command = dedent(
"""
echo "'foo' was passed in via Airflow CLI Test command with value '$FOO'"
echo "'miff' was passed in via BashOperator with value '$MIFF'"
"""
)
also_run_this = BashOperator(
task_id="also_run_this",
bash_command=my_command,
params={"miff": "agg"},
env={"FOO": "{{ params.foo }}", "MIFF": "{{ params.miff }}"},
)
env_var_test_task = print_env_vars()
run_this >> also_run_this
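# Hedged aside: since Airflow 2.5 the same parameters can be exercised without the CLI by running
# this file directly; the `run_conf` value below is illustrative and surfaces as dag_run.conf.
if __name__ == "__main__":
    dag.test(run_conf={"foo": "bar"})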
| 2,834 | 31.586207 | 109 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_branch_operator_decorator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the ``@task.branch`` TaskFlow API decorator."""
from __future__ import annotations
import random
import pendulum
from airflow import DAG
from airflow.decorators import task
from airflow.operators.empty import EmptyOperator
from airflow.utils.edgemodifier import Label
from airflow.utils.trigger_rule import TriggerRule
with DAG(
dag_id="example_branch_python_operator_decorator",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule="@daily",
tags=["example", "example2"],
) as dag:
run_this_first = EmptyOperator(task_id="run_this_first")
options = ["branch_a", "branch_b", "branch_c", "branch_d"]
@task.branch(task_id="branching")
def random_choice(choices: list[str]) -> str:
return random.choice(choices)
random_choice_instance = random_choice(choices=options)
run_this_first >> random_choice_instance
join = EmptyOperator(task_id="join", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS)
for option in options:
t = EmptyOperator(task_id=option)
empty_follow = EmptyOperator(task_id="follow_" + option)
# Label is optional here, but it can help identify more complex branches
random_choice_instance >> Label(option) >> t >> empty_follow >> join
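    # Hedged aside (not wired into the DAG above): a branch callable may also return a list of
    # task ids so that several branches run at once; swapping this in for `random_choice` would
    # keep every returned branch and skip the rest.
    @task.branch(task_id="branching_multiple")
    def pick_two(choices: list[str]) -> list[str]:
        return random.choice([choices[:2], choices[2:]])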
| 2,109 | 34.762712 | 94 |
py
|
airflow
|
airflow-main/airflow/example_dags/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_kubernetes_executor.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This is an example dag for using a Kubernetes Executor Configuration.
"""
from __future__ import annotations
import logging
import os
import pendulum
from airflow import DAG
from airflow.configuration import conf
from airflow.decorators import task
from airflow.example_dags.libs.helper import print_stuff
log = logging.getLogger(__name__)
worker_container_repository = conf.get("kubernetes_executor", "worker_container_repository")
worker_container_tag = conf.get("kubernetes_executor", "worker_container_tag")
try:
from kubernetes.client import models as k8s
except ImportError:
log.warning(
"The example_kubernetes_executor example DAG requires the kubernetes provider."
" Please install it with: pip install apache-airflow[cncf.kubernetes]"
)
k8s = None
if k8s:
with DAG(
dag_id="example_kubernetes_executor",
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example3"],
) as dag:
# You can use annotations on your kubernetes pods!
start_task_executor_config = {
"pod_override": k8s.V1Pod(metadata=k8s.V1ObjectMeta(annotations={"test": "annotation"}))
}
@task(executor_config=start_task_executor_config)
def start_task():
print_stuff()
# [START task_with_volume]
executor_config_volume_mount = {
"pod_override": k8s.V1Pod(
spec=k8s.V1PodSpec(
containers=[
k8s.V1Container(
name="base",
volume_mounts=[
k8s.V1VolumeMount(mount_path="/foo/", name="example-kubernetes-test-volume")
],
)
],
volumes=[
k8s.V1Volume(
name="example-kubernetes-test-volume",
host_path=k8s.V1HostPathVolumeSource(path="/tmp/"),
)
],
)
),
}
@task(executor_config=executor_config_volume_mount)
def test_volume_mount():
"""
Tests whether the volume has been mounted.
"""
with open("/foo/volume_mount_test.txt", "w") as foo:
foo.write("Hello")
return_code = os.system("cat /foo/volume_mount_test.txt")
if return_code != 0:
raise ValueError(f"Error when checking volume mount. Return code {return_code}")
volume_task = test_volume_mount()
# [END task_with_volume]
# [START task_with_sidecar]
executor_config_sidecar = {
"pod_override": k8s.V1Pod(
spec=k8s.V1PodSpec(
containers=[
k8s.V1Container(
name="base",
volume_mounts=[k8s.V1VolumeMount(mount_path="/shared/", name="shared-empty-dir")],
),
k8s.V1Container(
name="sidecar",
image="ubuntu",
args=['echo "retrieved from mount" > /shared/test.txt'],
command=["bash", "-cx"],
volume_mounts=[k8s.V1VolumeMount(mount_path="/shared/", name="shared-empty-dir")],
),
],
volumes=[
k8s.V1Volume(name="shared-empty-dir", empty_dir=k8s.V1EmptyDirVolumeSource()),
],
)
),
}
@task(executor_config=executor_config_sidecar)
def test_sharedvolume_mount():
"""
Tests whether the volume has been mounted.
"""
for i in range(5):
try:
return_code = os.system("cat /shared/test.txt")
if return_code != 0:
raise ValueError(f"Error when checking volume mount. Return code {return_code}")
except ValueError as e:
                        if i == 4:  # re-raise on the final attempt so a persistent failure surfaces
                            raise e
sidecar_task = test_sharedvolume_mount()
# [END task_with_sidecar]
# You can add labels to pods
executor_config_non_root = {
"pod_override": k8s.V1Pod(metadata=k8s.V1ObjectMeta(labels={"release": "stable"}))
}
@task(executor_config=executor_config_non_root)
def non_root_task():
print_stuff()
third_task = non_root_task()
executor_config_other_ns = {
"pod_override": k8s.V1Pod(
metadata=k8s.V1ObjectMeta(namespace="test-namespace", labels={"release": "stable"})
)
}
@task(executor_config=executor_config_other_ns)
def other_namespace_task():
print_stuff()
other_ns_task = other_namespace_task()
        # You can also change the base image; here we use the worker image for demonstration.
        # Note that the image must have the same configuration as the worker image. For example,
        # you might want to run this task in a special Docker image that has a zip library
        # built-in; you would build that special image on top of your worker image.
kube_exec_config_special = {
"pod_override": k8s.V1Pod(
spec=k8s.V1PodSpec(
containers=[
k8s.V1Container(
name="base", image=f"{worker_container_repository}:{worker_container_tag}"
),
]
)
)
}
@task(executor_config=kube_exec_config_special)
def base_image_override_task():
print_stuff()
base_image_task = base_image_override_task()
# Use k8s_client.V1Affinity to define node affinity
k8s_affinity = k8s.V1Affinity(
pod_anti_affinity=k8s.V1PodAntiAffinity(
required_during_scheduling_ignored_during_execution=[
k8s.V1PodAffinityTerm(
label_selector=k8s.V1LabelSelector(
match_expressions=[
k8s.V1LabelSelectorRequirement(key="app", operator="In", values=["airflow"])
]
),
topology_key="kubernetes.io/hostname",
)
]
)
)
# Use k8s_client.V1Toleration to define node tolerations
k8s_tolerations = [k8s.V1Toleration(key="dedicated", operator="Equal", value="airflow")]
# Use k8s_client.V1ResourceRequirements to define resource limits
k8s_resource_requirements = k8s.V1ResourceRequirements(
requests={"memory": "512Mi"}, limits={"memory": "512Mi"}
)
kube_exec_config_resource_limits = {
"pod_override": k8s.V1Pod(
spec=k8s.V1PodSpec(
containers=[
k8s.V1Container(
name="base",
resources=k8s_resource_requirements,
)
],
affinity=k8s_affinity,
tolerations=k8s_tolerations,
)
)
}
@task(executor_config=kube_exec_config_resource_limits)
def task_with_resource_limits():
print_stuff()
four_task = task_with_resource_limits()
(
start_task()
>> [volume_task, other_ns_task, sidecar_task]
>> third_task
>> [base_image_task, four_task]
)
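        # Hedged aside: any other V1Pod field can be overridden in the same way; here an extra
        # environment variable is injected into the "base" container. The variable name is
        # illustrative and the task is deliberately left unwired.
        executor_config_env = {
            "pod_override": k8s.V1Pod(
                spec=k8s.V1PodSpec(
                    containers=[
                        k8s.V1Container(name="base", env=[k8s.V1EnvVar(name="EXAMPLE_FLAG", value="1")])
                    ]
                )
            )
        }

        @task(executor_config=executor_config_env)
        def env_override_task():
            print_stuff()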
| 8,654 | 35.0625 | 110 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_sensor_decorator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the sensor decorator."""
from __future__ import annotations
# [START tutorial]
# [START import_module]
import pendulum
from airflow.decorators import dag, task
from airflow.sensors.base import PokeReturnValue
# [END import_module]
# [START instantiate_dag]
@dag(
schedule=None,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
)
def example_sensor_decorator():
# [END instantiate_dag]
# [START wait_function]
# Using a sensor operator to wait for the upstream data to be ready.
@task.sensor(poke_interval=60, timeout=3600, mode="reschedule")
def wait_for_upstream() -> PokeReturnValue:
return PokeReturnValue(is_done=True, xcom_value="xcom_value")
# [END wait_function]
# [START dummy_function]
@task
def dummy_operator() -> None:
pass
# [END dummy_function]
# [START main_flow]
wait_for_upstream() >> dummy_operator()
# [END main_flow]
# [START dag_invocation]
tutorial_etl_dag = example_sensor_decorator()
# [END dag_invocation]
# [END tutorial]
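# Hedged aside (not part of the tutorial): a sensor's completion can depend on what it observes;
# the flag-file path below is purely illustrative.
import os

@task.sensor(poke_interval=30, timeout=600, mode="poke")
def wait_for_flag_file(path: str = "/tmp/ready.flag") -> PokeReturnValue:
    # is_done=False makes the sensor poke again after poke_interval seconds
    return PokeReturnValue(is_done=os.path.exists(path), xcom_value=path)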
| 1,912 | 27.132353 | 72 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_sla_dag.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating SLA use in Tasks"""
from __future__ import annotations
import datetime
import time
import pendulum
from airflow.decorators import dag, task
# [START howto_task_sla]
def sla_callback(dag, task_list, blocking_task_list, slas, blocking_tis):
print(
"The callback arguments are: ",
{
"dag": dag,
"task_list": task_list,
"blocking_task_list": blocking_task_list,
"slas": slas,
"blocking_tis": blocking_tis,
},
)
@dag(
schedule="*/2 * * * *",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
sla_miss_callback=sla_callback,
default_args={"email": "[email protected]"},
)
def example_sla_dag():
@task(sla=datetime.timedelta(seconds=10))
def sleep_20():
"""Sleep for 20 seconds"""
time.sleep(20)
@task
def sleep_30():
"""Sleep for 30 seconds"""
time.sleep(30)
sleep_20() >> sleep_30()
example_dag = example_sla_dag()
# [END howto_task_sla]
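# Hedged aside: an SLA can also be applied to every task of a DAG at once through `default_args`;
# the DAG name and the 30-minute value below are illustrative only.
@dag(
    schedule="@daily",
    start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
    catchup=False,
    default_args={"sla": datetime.timedelta(minutes=30)},
)
def example_sla_via_default_args():
    @task
    def quick_task():
        time.sleep(1)

    quick_task()

example_sla_default_args_dag = example_sla_via_default_args()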
| 1,840 | 26.893939 | 73 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_branch_operator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of the BranchPythonOperator."""
from __future__ import annotations
import random
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.operators.python import BranchPythonOperator
from airflow.utils.edgemodifier import Label
from airflow.utils.trigger_rule import TriggerRule
with DAG(
dag_id="example_branch_operator",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule="@daily",
tags=["example", "example2"],
) as dag:
run_this_first = EmptyOperator(
task_id="run_this_first",
)
options = ["branch_a", "branch_b", "branch_c", "branch_d"]
branching = BranchPythonOperator(
task_id="branching",
python_callable=lambda: random.choice(options),
)
run_this_first >> branching
join = EmptyOperator(
task_id="join",
trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS,
)
for option in options:
t = EmptyOperator(
task_id=option,
)
empty_follow = EmptyOperator(
task_id="follow_" + option,
)
# Label is optional here, but it can help identify more complex branches
branching >> Label(option) >> t >> empty_follow >> join
| 2,094 | 30.742424 | 80 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_branch_labels.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating the usage of labels with different branches.
"""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.utils.edgemodifier import Label
with DAG(
"example_branch_labels",
schedule="@daily",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
) as dag:
ingest = EmptyOperator(task_id="ingest")
analyse = EmptyOperator(task_id="analyze")
check = EmptyOperator(task_id="check_integrity")
describe = EmptyOperator(task_id="describe_integrity")
error = EmptyOperator(task_id="email_error")
save = EmptyOperator(task_id="save")
report = EmptyOperator(task_id="report")
ingest >> analyse >> check
check >> Label("No errors") >> save >> report
check >> Label("Errors found") >> describe >> error >> report
| 1,673 | 35.391304 | 70 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_nested_branch_dag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating a workflow with nested branching. The join tasks are created with
``none_failed_min_one_success`` trigger rule such that they are skipped whenever their corresponding
branching tasks are skipped.
"""
from __future__ import annotations
import pendulum
from airflow.decorators import task
from airflow.models import DAG
from airflow.operators.empty import EmptyOperator
from airflow.utils.trigger_rule import TriggerRule
with DAG(
dag_id="example_nested_branch_dag",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule="@daily",
tags=["example"],
) as dag:
@task.branch()
def branch(task_id_to_return: str) -> str:
return task_id_to_return
branch_1 = branch.override(task_id="branch_1")(task_id_to_return="true_1")
join_1 = EmptyOperator(task_id="join_1", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS)
true_1 = EmptyOperator(task_id="true_1")
false_1 = EmptyOperator(task_id="false_1")
branch_2 = branch.override(task_id="branch_2")(task_id_to_return="true_2")
join_2 = EmptyOperator(task_id="join_2", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS)
true_2 = EmptyOperator(task_id="true_2")
false_2 = EmptyOperator(task_id="false_2")
false_3 = EmptyOperator(task_id="false_3")
branch_1 >> true_1 >> join_1
branch_1 >> false_1 >> branch_2 >> [true_2, false_2] >> join_2 >> false_3 >> join_1
| 2,230 | 38.140351 | 100 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_dynamic_task_mapping_with_no_taskflow_operators.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of dynamic task mapping with non-TaskFlow operators."""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.models.baseoperator import BaseOperator
class AddOneOperator(BaseOperator):
"""A custom operator that adds one to the input."""
def __init__(self, value, **kwargs):
super().__init__(**kwargs)
self.value = value
def execute(self, context):
return self.value + 1
class SumItOperator(BaseOperator):
"""A custom operator that sums the input."""
template_fields = ("values",)
def __init__(self, values, **kwargs):
super().__init__(**kwargs)
self.values = values
def execute(self, context):
total = sum(self.values)
print(f"Total was {total}")
return total
with DAG(
dag_id="example_dynamic_task_mapping_with_no_taskflow_operators",
start_date=datetime(2022, 3, 4),
catchup=False,
):
# map the task to a list of values
add_one_task = AddOneOperator.partial(task_id="add_one").expand(value=[1, 2, 3])
# aggregate (reduce) the mapped tasks results
sum_it_task = SumItOperator(task_id="sum_it", values=add_one_task.output)
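    # Hedged aside: `partial()` pins constant operator arguments while `expand()` maps over the
    # rest; `retries` below is an ordinary BaseOperator argument used purely for illustration.
    add_one_with_retries = AddOneOperator.partial(task_id="add_one_with_retries", retries=2).expand(
        value=[10, 20, 30]
    )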
| 2,032 | 31.269841 | 94 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_params_ui_tutorial.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""DAG demonstrating various options for a trigger form generated by DAG params.
The DAG attribute `params` is used to define a default dictionary of parameters which are usually passed
to the DAG and which are used to render a trigger form.
"""
from __future__ import annotations
import datetime
import json
from pathlib import Path
from airflow import DAG
from airflow.decorators import task
from airflow.exceptions import AirflowSkipException
from airflow.models.dagrun import DagRun
from airflow.models.param import Param
from airflow.models.taskinstance import TaskInstance
with DAG(
dag_id=Path(__file__).stem,
description=__doc__[0 : __doc__.find(".")],
doc_md=__doc__,
schedule=None,
start_date=datetime.datetime(2022, 3, 4),
catchup=False,
tags=["example_ui"],
params={
# Let's start simple: Standard dict values are detected from type and offered as entry form fields.
# Detected types are numbers, text, boolean, lists and dicts.
# Note that such auto-detected parameters are treated as optional (not required to contain a value)
"x": 3,
"text": "Hello World!",
"flag": False,
"a_simple_list": ["one", "two", "three", "actually one value is made per line"],
# But of course you might want to have it nicer! Let's add some description to parameters.
        # Note that if you want to add any HTML formatting to the description, you need to use the
        # description_html attribute.
"most_loved_number": Param(
42,
type="integer",
title="Your favorite number",
description_html="""Everybody should have a favorite number. Not only math teachers.
If you can not think of any at the moment please think of the 42 which is very famous because
of the book
<a href='https://en.wikipedia.org/wiki/Phrases_from_The_Hitchhiker%27s_Guide_to_the_Galaxy#
The_Answer_to_the_Ultimate_Question_of_Life,_the_Universe,_and_Everything_is_42'>
The Hitchhiker's Guide to the Galaxy</a>""",
),
# If you want to have a selection list box then you can use the enum feature of JSON schema
"pick_one": Param(
"value 42",
type="string",
title="Select one Value",
description="You can use JSON schema enum's to generate drop down selection boxes.",
enum=[f"value {i}" for i in range(16, 64)],
),
        # You can also label the selected values via the values_display attribute
"pick_with_label": Param(
3,
type="number",
title="Select one Number",
description="With drop down selections you can also have nice display labels for the values.",
enum=[*range(1, 10)],
values_display={
1: "One",
2: "Two",
3: "Three",
4: "Four - is like you take three and get one for free!",
5: "Five",
6: "Six",
7: "Seven",
8: "Eight",
9: "Nine",
},
),
        # If you want a list box with proposals without enforcing a fixed list,
        # you can use the examples feature of JSON schema
"proposals": Param(
"some value",
type="string",
title="Field with proposals",
description="You can use JSON schema examples's to generate drop down selection boxes "
"but allow also to enter custom values. Try typing an 'a' and see options.",
examples=(
"Alpha,Bravo,Charlie,Delta,Echo,Foxtrot,Golf,Hotel,India,Juliett,Kilo,Lima,Mike,November,Oscar,Papa,"
"Quebec,Romeo,Sierra,Tango,Uniform,Victor,Whiskey,X-ray,Yankee,Zulu"
).split(","),
),
        # If you want to select multiple items from a fixed list, JSON schema does not allow using enum.
        # In this case the type "array" is used together with "examples" as the pick list
"multi_select": Param(
["two", "three"],
"Select from the list of options.",
type="array",
title="Multi Select",
examples=["one", "two", "three", "four", "five"],
),
# A multiple options selection can also be combined with values_display
"multi_select_with_label": Param(
["2", "3"],
"Select from the list of options. See that options can have nicer text and still technical values"
"are propagated as values during trigger to the DAG.",
type="array",
title="Multi Select with Labels",
examples=["1", "2", "3", "4", "5"],
values_display={
"1": "One box of choccolate",
"2": "Two bananas",
"3": "Three apples",
                # Note: The values_display mapping does not need to be complete.
},
),
# Boolean as proper parameter with description
"bool": Param(
True,
type="boolean",
title="Please confirm",
description="A On/Off selection with a proper description.",
),
# Dates and Times are also supported
"date_time": Param(
f"{datetime.date.today()}T{datetime.time(hour=12, minute=17, second=00)}+00:00",
type="string",
format="date-time",
title="Date-Time Picker",
description="Please select a date and time, use the button on the left for a pup-up calendar.",
),
"date": Param(
f"{datetime.date.today()}",
type="string",
format="date",
title="Date Picker",
description="Please select a date, use the button on the left for a pup-up calendar. "
"See that here are no times!",
),
"time": Param(
f"{datetime.time(hour=12, minute=13, second=14)}",
type=["string", "null"],
format="time",
title="Time Picker",
description="Please select a time, use the button on the left for a pup-up tool.",
),
        # Fields can be required or optional. If the defined fields are typed, they are required by default
        # (otherwise they would not pass JSON schema validation); to make a typed field optional you must
        # also permit the "null" type
"required_field": Param(
"You can not trigger if no text is given here!",
type="string",
title="Required text field",
description="This field is required. You can not submit without having text in here.",
),
"optional_field": Param(
"optional text, you can trigger also w/o text",
type=["null", "string"],
title="Optional text field",
description_html="This field is optional. As field content is JSON schema validated you must "
"allow the <code>null</code> type.",
),
        # You can arrange the entry fields in sections to give the user a better overview.
        # To do so, add the "section" attribute.
# The benefit of the Params class definition is that the full scope of JSON schema validation
# can be leveraged for form fields and they will be validated before DAG submission.
"checked_text": Param(
"length-checked-field",
type="string",
title="Text field with length check",
description_html="""This field is required. And you need to provide something between 10 and 30
characters. See the
<a href='https://json-schema.org/understanding-json-schema/reference/string.html'>
            JSON schema description (string)</a> for more details""",
minLength=10,
maxLength=20,
section="JSON Schema validation options",
),
"checked_number": Param(
100,
type="number",
title="Number field with value check",
description_html="""This field is required. You need to provide any number between 64 and 128.
See the <a href='https://json-schema.org/understanding-json-schema/reference/numeric.html'>
            JSON schema description (numbers)</a> for more details""",
minimum=64,
maximum=128,
section="JSON Schema validation options",
),
        # Some further cool stuff is possible with advanced options
        # You can have the user enter a dict object as JSON, with validation
"object": Param(
{"key": "value"},
type=["object", "null"],
title="JSON entry field",
section="Special advanced stuff with form fields",
),
# If you want to have static parameters which are always passed and not editable by the user
# then you can use the JSON schema option of passing constant values. These parameters
# will not be displayed but passed to the DAG
"hidden_secret_field": Param("constant value", const="constant value"),
        # Finally, besides the standard field generators, you can have your own HTML form code
        # injected - but be careful, you can also mess up the layout!
"color_picker": Param(
"#FF8800",
type="string",
title="Pick a color",
description_html="""This is a special HTML widget as custom implementation in the DAG code.
            It is templated with the following parameters to render proper HTML form fields:
<ul>
<li><code>{name}</code>: Name of the HTML input field that is expected.</li>
<li><code>{value}</code>:
(Default) value that should be displayed when showing/loading the form.</li>
<li>Note: If you have elements changing a value, call <code>updateJSONconf()</code> to update
the form data to be posted as <code>dag_run.conf</code>.</li>
</ul>
Example: <code><input name='{name}' value='{value}' onchange='updateJSONconf()' /></code>
""",
custom_html_form="""
<table width="100%" cellspacing="5"><tbody><tr><td>
<label for="r_{name}">Red:</label>
</td><td width="80%">
<input id="r_{name}" type="range" min="0" max="255" value="0" onchange="u_{name}()"/>
</td><td rowspan="3" style="padding-left: 10px;">
<div id="preview_{name}"
style="line-height: 40px; margin-bottom: 7px; width: 100%; background-color: {value};"
> </div>
<input class="form-control" type="text" maxlength="7" id="{name}" name="{name}"
value="{value}" onchange="v_{name}()" />
</td></tr><tr><td>
<label for="g_{name}">Green:</label>
</td><td>
<input id="g_{name}" type="range" min="0" max="255" value="0" onchange="u_{name}()"/>
</td></tr><tr><td>
<label for="b_{name}">Blue:</label>
</td><td>
<input id="b_{name}" type="range" min="0" max="255" value="0" onchange="u_{name}()"/>
</td></tr></tbody></table>
<script lang="javascript">
const hex_chars = "0123456789ABCDEF";
function i2hex(name) {
var i = document.getElementById(name).value;
return hex_chars.substr(parseInt(i / 16), 1) + hex_chars.substr(parseInt(i % 16), 1)
}
function u_{name}() {
var hex_val = "#"+i2hex("r_{name}")+i2hex("g_{name}")+i2hex("b_{name}");
document.getElementById("{name}").value = hex_val;
document.getElementById("preview_{name}").style.background = hex_val;
updateJSONconf();
}
function hex2i(text) {
return hex_chars.indexOf(text.substr(0,1)) * 16 + hex_chars.indexOf(text.substr(1,1));
}
function v_{name}() {
var value = document.getElementById("{name}").value.toUpperCase();
document.getElementById("r_{name}").value = hex2i(value.substr(1,2));
document.getElementById("g_{name}").value = hex2i(value.substr(3,2));
document.getElementById("b_{name}").value = hex2i(value.substr(5,2));
document.getElementById("preview_{name}").style.background = value;
}
v_{name}();
</script>""",
section="Special advanced stuff with form fields",
),
},
) as dag:
@task(task_id="show_params")
def show_params(**kwargs) -> None:
ti: TaskInstance = kwargs["ti"]
dag_run: DagRun = ti.dag_run
if not dag_run.conf:
print("Uups, no parameters supplied as DagRun.conf, was the trigger w/o form?")
raise AirflowSkipException("No DagRun.conf parameters supplied.")
print(f"This DAG was triggered with the following parameters:\n{json.dumps(dag_run.conf, indent=4)}")
show_params()
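    # Hedged aside: the validated params are also available to classic operators through Jinja
    # templating; the BashOperator below simply echoes two of the fields defined above.
    from airflow.operators.bash import BashOperator

    echo_params = BashOperator(
        task_id="echo_params",
        bash_command="echo number={{ params.most_loved_number }} choice='{{ params.pick_one }}'",
    )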
| 14,177 | 47.721649 | 117 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_dynamic_task_mapping.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Example DAG demonstrating the usage of dynamic task mapping."""
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.decorators import task
with DAG(dag_id="example_dynamic_task_mapping", start_date=datetime(2022, 3, 4)) as dag:
@task
def add_one(x: int):
return x + 1
@task
def sum_it(values):
total = sum(values)
print(f"Total was {total}")
added_values = add_one.expand(x=[1, 2, 3])
sum_it(added_values)
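    # Hedged aside: expanding over several keyword arguments produces the cross product of the
    # supplied lists, so `multiply` below is mapped into six task instances (values illustrative).
    @task
    def multiply(x: int, y: int):
        return x * y

    multiplied = multiply.expand(x=[1, 2, 3], y=[10, 100])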
| 1,300 | 32.358974 | 88 |
py
|
airflow
|
airflow-main/airflow/example_dags/example_branch_datetime_operator.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating the usage of BranchDateTimeOperator with datetime as well as time objects as
targets.
"""
from __future__ import annotations
import pendulum
from airflow import DAG
from airflow.operators.datetime import BranchDateTimeOperator
from airflow.operators.empty import EmptyOperator
dag1 = DAG(
dag_id="example_branch_datetime_operator",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
schedule="@daily",
)
# [START howto_branch_datetime_operator]
empty_task_11 = EmptyOperator(task_id="date_in_range", dag=dag1)
empty_task_21 = EmptyOperator(task_id="date_outside_range", dag=dag1)
cond1 = BranchDateTimeOperator(
task_id="datetime_branch",
follow_task_ids_if_true=["date_in_range"],
follow_task_ids_if_false=["date_outside_range"],
target_upper=pendulum.datetime(2020, 10, 10, 15, 0, 0),
target_lower=pendulum.datetime(2020, 10, 10, 14, 0, 0),
dag=dag1,
)
# Run empty_task_11 if cond1 executes between 2020-10-10 14:00:00 and 2020-10-10 15:00:00
cond1 >> [empty_task_11, empty_task_21]
# [END howto_branch_datetime_operator]
dag2 = DAG(
dag_id="example_branch_datetime_operator_2",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
schedule="@daily",
)
# [START howto_branch_datetime_operator_next_day]
empty_task_12 = EmptyOperator(task_id="date_in_range", dag=dag2)
empty_task_22 = EmptyOperator(task_id="date_outside_range", dag=dag2)
cond2 = BranchDateTimeOperator(
task_id="datetime_branch",
follow_task_ids_if_true=["date_in_range"],
follow_task_ids_if_false=["date_outside_range"],
target_upper=pendulum.time(0, 0, 0),
target_lower=pendulum.time(15, 0, 0),
dag=dag2,
)
# Since target_lower happens after target_upper, target_upper will be moved to the following day
# Run empty_task_12 if cond2 executes between 15:00:00, and 00:00:00 of the following day
cond2 >> [empty_task_12, empty_task_22]
# [END howto_branch_datetime_operator_next_day]
dag3 = DAG(
dag_id="example_branch_datetime_operator_3",
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
tags=["example"],
schedule="@daily",
)
# [START howto_branch_datetime_operator_logical_date]
empty_task_13 = EmptyOperator(task_id="date_in_range", dag=dag3)
empty_task_23 = EmptyOperator(task_id="date_outside_range", dag=dag3)
cond3 = BranchDateTimeOperator(
task_id="datetime_branch",
use_task_logical_date=True,
follow_task_ids_if_true=["date_in_range"],
follow_task_ids_if_false=["date_outside_range"],
target_upper=pendulum.datetime(2020, 10, 10, 15, 0, 0),
target_lower=pendulum.datetime(2020, 10, 10, 14, 0, 0),
dag=dag3,
)
# Run empty_task_13 if cond3 executes between 2020-10-10 14:00:00 and 2020-10-10 15:00:00
cond3 >> [empty_task_13, empty_task_23]
# [END howto_branch_datetime_operator_logical_date]
| 3,722 | 34.457143 | 102 |
py
|
airflow
|
airflow-main/airflow/example_dags/plugins/listener_plugin.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.example_dags.plugins import event_listener
from airflow.plugins_manager import AirflowPlugin
class MetadataCollectionPlugin(AirflowPlugin):
name = "MetadataCollectionPlugin"
listeners = [event_listener]
| 1,048 | 37.851852 | 62 |
py
|
airflow
|
airflow-main/airflow/example_dags/plugins/event_listener.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from airflow.listeners import hookimpl
if TYPE_CHECKING:
from airflow.models.dagrun import DagRun
from airflow.models.taskinstance import TaskInstance
from airflow.utils.state import TaskInstanceState
# [START howto_listen_ti_running_task]
@hookimpl
def on_task_instance_running(previous_state: TaskInstanceState, task_instance: TaskInstance, session):
"""
    This method is called when the task instance state changes to RUNNING.
    The callback receives parameters such as the previous task state and the task_instance object,
    which give access to the running task instance's dag_run, task and dag information.
"""
print("Task instance is in running state")
print(" Previous state of the Task instance:", previous_state)
state: TaskInstanceState = task_instance.state
name: str = task_instance.task_id
start_date = task_instance.start_date
dagrun = task_instance.dag_run
dagrun_status = dagrun.state
task = task_instance.task
dag = task.dag
dag_name = None
if dag:
dag_name = dag.dag_id
print(f"Current task name:{name} state:{state} start_date:{start_date}")
print(f"Dag name:{dag_name} and current dag run status:{dagrun_status}")
# [END howto_listen_ti_running_task]
# [START howto_listen_ti_success_task]
@hookimpl
def on_task_instance_success(previous_state: TaskInstanceState, task_instance: TaskInstance, session):
"""
    This method is called when the task instance state changes to SUCCESS.
    The callback receives parameters such as the previous task state and the task_instance object,
    which give access to the succeeded task instance's dag_run, task and dag information.
"""
print("Task instance in success state")
print(" Previous state of the Task instance:", previous_state)
dag_id = task_instance.dag_id
hostname = task_instance.hostname
operator = task_instance.operator
dagrun = task_instance.dag_run
queued_at = dagrun.queued_at
print(f"Dag name:{dag_id} queued_at:{queued_at}")
print(f"Task hostname:{hostname} operator:{operator}")
# [END howto_listen_ti_success_task]
# [START howto_listen_ti_failure_task]
@hookimpl
def on_task_instance_failed(previous_state: TaskInstanceState, task_instance: TaskInstance, session):
"""
    This method is called when the task instance state changes to FAILED.
    The callback receives parameters such as the previous task state and the task_instance object,
    which give access to the failed task instance's dag_run, task and dag information.
"""
print("Task instance in failure state")
start_date = task_instance.start_date
end_date = task_instance.end_date
duration = task_instance.duration
dagrun = task_instance.dag_run
task = task_instance.task
dag = task_instance.task.dag
print(f"Task start:{start_date} end:{end_date} duration:{duration}")
print(f"Task:{task} dag:{dag} dagrun:{dagrun}")
# [END howto_listen_ti_failure_task]
# [START howto_listen_dagrun_success_task]
@hookimpl
def on_dag_run_success(dag_run: DagRun, message: str):
"""
This method is called when dag run state changes to SUCCESS.
"""
print("Dag run in success state")
start_date = dag_run.start_date
end_date = dag_run.end_date
print(f"Dag run start:{start_date} end:{end_date}")
# [END howto_listen_dagrun_success_task]
# [START howto_listen_dagrun_failure_task]
@hookimpl
def on_dag_run_failed(dag_run: DagRun, message: str):
"""
This method is called when dag run state changes to FAILED.
"""
print("Dag run in failure state")
dag_id = dag_run.dag_id
run_id = dag_run.run_id
external_trigger = dag_run.external_trigger
print(f"Dag information:{dag_id} Run id: {run_id} external trigger: {external_trigger}")
# [END howto_listen_dagrun_failure_task]
# [START howto_listen_dagrun_running_task]
@hookimpl
def on_dag_run_running(dag_run: DagRun, message: str):
"""
This method is called when dag run state changes to RUNNING.
"""
print("Dag run in running state")
queued_at = dag_run.queued_at
dag_hash_info = dag_run.dag_hash
print(f"Dag information Queued at: {queued_at} hash info: {dag_hash_info}")
# [END howto_listen_dagrun_running_task]
| 5,206 | 32.165605 | 102 |
py
|
airflow
|
airflow-main/airflow/example_dags/plugins/__init__.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 |
py
|
airflow
|
airflow-main/airflow/example_dags/plugins/workday.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Plugin to demonstrate timetable registration and accommodate example DAGs."""
from __future__ import annotations
import logging
from datetime import timedelta
# [START howto_timetable]
from pendulum import UTC, Date, DateTime, Time
from airflow.plugins_manager import AirflowPlugin
from airflow.timetables.base import DagRunInfo, DataInterval, TimeRestriction, Timetable
log = logging.getLogger(__name__)
try:
from pandas.tseries.holiday import USFederalHolidayCalendar
holiday_calendar = USFederalHolidayCalendar()
except ImportError:
log.warning("Could not import pandas. Holidays will not be considered.")
holiday_calendar = None
class AfterWorkdayTimetable(Timetable):
def get_next_workday(self, d: DateTime, incr=1) -> DateTime:
next_start = d
while True:
            if next_start.weekday() in (5, 6):  # If next start falls on a weekend, go to the next day
next_start = next_start + incr * timedelta(days=1)
continue
if holiday_calendar is not None:
holidays = holiday_calendar.holidays(start=next_start, end=next_start).to_pydatetime()
                if next_start in holidays:  # If next start is a holiday, go to the next day
next_start = next_start + incr * timedelta(days=1)
continue
break
return next_start
# [START howto_timetable_infer_manual_data_interval]
def infer_manual_data_interval(self, run_after: DateTime) -> DataInterval:
start = DateTime.combine((run_after - timedelta(days=1)).date(), Time.min).replace(tzinfo=UTC)
# Skip backwards over weekends and holidays to find last run
start = self.get_next_workday(start, incr=-1)
return DataInterval(start=start, end=(start + timedelta(days=1)))
# [END howto_timetable_infer_manual_data_interval]
# [START howto_timetable_next_dagrun_info]
def next_dagrun_info(
self,
*,
last_automated_data_interval: DataInterval | None,
restriction: TimeRestriction,
) -> DagRunInfo | None:
if last_automated_data_interval is not None: # There was a previous run on the regular schedule.
last_start = last_automated_data_interval.start
next_start = DateTime.combine((last_start + timedelta(days=1)).date(), Time.min).replace(
tzinfo=UTC
)
else: # This is the first ever run on the regular schedule.
next_start = restriction.earliest
if next_start is None: # No start_date. Don't schedule.
return None
if not restriction.catchup:
# If the DAG has catchup=False, today is the earliest to consider.
next_start = max(next_start, DateTime.combine(Date.today(), Time.min).replace(tzinfo=UTC))
elif next_start.time() != Time.min:
# If earliest does not fall on midnight, skip to the next day.
next_start = DateTime.combine(next_start.date() + timedelta(days=1), Time.min).replace(
tzinfo=UTC
)
# Skip weekends and holidays
next_start = self.get_next_workday(next_start)
if restriction.latest is not None and next_start > restriction.latest:
return None # Over the DAG's scheduled end; don't schedule.
return DagRunInfo.interval(start=next_start, end=(next_start + timedelta(days=1)))
# [END howto_timetable_next_dagrun_info]
class WorkdayTimetablePlugin(AirflowPlugin):
name = "workday_timetable_plugin"
timetables = [AfterWorkdayTimetable]
# [END howto_timetable]
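# Hedged sketch (not part of the plugin): once the plugin is registered, a DAG can opt into the
# timetable by passing an instance via `schedule`; the dag_id below is illustrative only.
from airflow import DAG

with DAG(
    dag_id="example_after_workday_timetable",
    start_date=DateTime(2021, 1, 1, tzinfo=UTC),
    schedule=AfterWorkdayTimetable(),
    catchup=False,
):
    pass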
| 4,445 | 41.75 | 106 |
py
|
airflow
|
airflow-main/airflow/example_dags/subdags/subdag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Helper function to generate a DAG and operators given some arguments."""
from __future__ import annotations
# [START subdag]
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
def subdag(parent_dag_name, child_dag_name, args) -> DAG:
"""
Generate a DAG to be used as a subdag.
:param str parent_dag_name: Id of the parent DAG
:param str child_dag_name: Id of the child DAG
:param dict args: Default arguments to provide to the subdag
:return: DAG to use as a subdag
"""
dag_subdag = DAG(
dag_id=f"{parent_dag_name}.{child_dag_name}",
default_args=args,
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
schedule="@daily",
)
for i in range(5):
EmptyOperator(
task_id=f"{child_dag_name}-task-{i + 1}",
default_args=args,
dag=dag_subdag,
)
return dag_subdag
# [END subdag]
| 1,761 | 30.464286 | 75 |
py
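A minimal sketch of how this factory is typically wired into a parent DAG with ``SubDagOperator``; the parent DAG id and task id are illustrative, and SubDAGs are deprecated in newer Airflow releases in favour of TaskGroups, so this only shows how the factory's arguments line up.

# Illustrative parent DAG; assumes the factory is importable from the path shown above.
import pendulum

from airflow import DAG
from airflow.example_dags.subdags.subdag import subdag
from airflow.operators.subdag import SubDagOperator

DAG_NAME = "example_subdag_parent"  # illustrative id

with DAG(
    dag_id=DAG_NAME,
    start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
    schedule="@once",
    catchup=False,
) as parent_dag:
    SubDagOperator(
        task_id="section-1",  # must match the child_dag_name passed to the factory
        subdag=subdag(DAG_NAME, "section-1", parent_dag.default_args),
    )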
|
airflow
|
airflow-main/airflow/example_dags/subdags/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 |
py
|
airflow
|
airflow-main/airflow/example_dags/libs/helper.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
def print_stuff():
print("annotated!")
| 867 | 36.73913 | 62 |
py
|
airflow
|
airflow-main/airflow/example_dags/libs/__init__.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 |
py
|
airflow
|
airflow-main/airflow/kubernetes/secret.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Classes for interacting with Kubernetes API."""
from __future__ import annotations
import copy
import uuid
from kubernetes.client import models as k8s
from airflow.exceptions import AirflowConfigException
from airflow.kubernetes.k8s_model import K8SModel
class Secret(K8SModel):
"""Defines Kubernetes Secret Volume."""
def __init__(self, deploy_type, deploy_target, secret, key=None, items=None):
"""
Initialize a Kubernetes Secret Object.
Used to track requested secrets from the user.
:param deploy_type: The type of secret deploy in Kubernetes, either `env` or
`volume`
        :param deploy_target: (Optional) The environment variable to expose the secret under
            when `deploy_type` is `env`, or the file path to mount the secret at when
            `deploy_type` is `volume`. If `key` is not provided, `deploy_target` should be None.
:param secret: Name of the secrets object in Kubernetes
        :param key: (Optional) Key of the secret within the Kubernetes Secret.
            If not provided with `deploy_type` `env`, all keys of the secret object are exposed.
        :param items: (Optional) items that can be added to a volume secret for specifying
            projections of secret keys to specific paths, see
            https://kubernetes.io/docs/concepts/configuration/secret/#projection-of-secret-keys-to-specific-paths
"""
if deploy_type not in ("env", "volume"):
raise AirflowConfigException("deploy_type must be env or volume")
self.deploy_type = deploy_type
self.deploy_target = deploy_target
self.items = items or []
if deploy_target is not None and deploy_type == "env":
            # if deploying to env, uppercase the deploy target name
self.deploy_target = deploy_target.upper()
if key is not None and deploy_target is None:
raise AirflowConfigException("If `key` is set, `deploy_target` should not be None")
self.secret = secret
self.key = key
def to_env_secret(self) -> k8s.V1EnvVar:
"""Stores es environment secret."""
return k8s.V1EnvVar(
name=self.deploy_target,
value_from=k8s.V1EnvVarSource(
secret_key_ref=k8s.V1SecretKeySelector(name=self.secret, key=self.key)
),
)
def to_env_from_secret(self) -> k8s.V1EnvFromSource:
"""Reads from environment to secret."""
return k8s.V1EnvFromSource(secret_ref=k8s.V1SecretEnvSource(name=self.secret))
def to_volume_secret(self) -> tuple[k8s.V1Volume, k8s.V1VolumeMount]:
"""Converts to volume secret."""
vol_id = f"secretvol{uuid.uuid4()}"
volume = k8s.V1Volume(name=vol_id, secret=k8s.V1SecretVolumeSource(secret_name=self.secret))
if self.items:
volume.secret.items = self.items
return (volume, k8s.V1VolumeMount(mount_path=self.deploy_target, name=vol_id, read_only=True))
def attach_to_pod(self, pod: k8s.V1Pod) -> k8s.V1Pod:
"""Attaches to pod."""
cp_pod = copy.deepcopy(pod)
if self.deploy_type == "volume":
volume, volume_mount = self.to_volume_secret()
if cp_pod.spec.volumes is None:
cp_pod.spec.volumes = []
cp_pod.spec.volumes.append(volume)
if cp_pod.spec.containers[0].volume_mounts is None:
cp_pod.spec.containers[0].volume_mounts = []
cp_pod.spec.containers[0].volume_mounts.append(volume_mount)
if self.deploy_type == "env" and self.key is not None:
env = self.to_env_secret()
if cp_pod.spec.containers[0].env is None:
cp_pod.spec.containers[0].env = []
cp_pod.spec.containers[0].env.append(env)
if self.deploy_type == "env" and self.key is None:
env_from = self.to_env_from_secret()
if cp_pod.spec.containers[0].env_from is None:
cp_pod.spec.containers[0].env_from = []
cp_pod.spec.containers[0].env_from.append(env_from)
return cp_pod
def __eq__(self, other):
return (
self.deploy_type == other.deploy_type
and self.deploy_target == other.deploy_target
and self.secret == other.secret
and self.key == other.key
)
def __repr__(self):
return f"Secret({self.deploy_type}, {self.deploy_target}, {self.secret}, {self.key})"
| 5,198 | 40.592 | 109 |
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.