repo | file | code | file_length | avg_line_length | max_line_length | extension_type
stringlengths 2–99 | stringlengths 13–225 | stringlengths 0–18.3M | int64 0–18.3M | float64 0–1.36M | int64 0–4.26M | stringclasses 1
---|---|---|---|---|---|---
airflow | airflow-main/helm_tests/airflow_aux/test_migrate_database_job.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import jmespath
import pytest
from tests.charts.helm_template_generator import render_chart
class TestMigrateDatabaseJob:
"""Tests migrate DB job."""
def test_should_run_by_default(self):
docs = render_chart(show_only=["templates/jobs/migrate-database-job.yaml"])
assert "Job" == docs[0]["kind"]
assert "run-airflow-migrations" == jmespath.search("spec.template.spec.containers[0].name", docs[0])
assert 50000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0])
@pytest.mark.parametrize(
"migrate_database_job_enabled,created",
[
(False, False),
(True, True),
],
)
def test_enable_migrate_database_job(self, migrate_database_job_enabled, created):
docs = render_chart(
values={
"migrateDatabaseJob": {"enabled": migrate_database_job_enabled},
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert bool(docs) is created
def test_should_support_annotations(self):
docs = render_chart(
values={"migrateDatabaseJob": {"annotations": {"foo": "bar"}, "jobAnnotations": {"fiz": "fuz"}}},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
annotations = jmespath.search("spec.template.metadata.annotations", docs[0])
assert "foo" in annotations
assert "bar" == annotations["foo"]
job_annotations = jmespath.search("metadata.annotations", docs[0])
assert "fiz" in job_annotations
assert "fuz" == job_annotations["fiz"]
def test_should_create_valid_affinity_tolerations_and_node_selector(self):
docs = render_chart(
values={
"migrateDatabaseJob": {
"affinity": {
"nodeAffinity": {
"requiredDuringSchedulingIgnoredDuringExecution": {
"nodeSelectorTerms": [
{
"matchExpressions": [
{"key": "foo", "operator": "In", "values": ["true"]},
]
}
]
}
}
},
"tolerations": [
{"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"}
],
"nodeSelector": {"diskType": "ssd"},
}
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert "Job" == jmespath.search("kind", docs[0])
assert "foo" == jmespath.search(
"spec.template.spec.affinity.nodeAffinity."
"requiredDuringSchedulingIgnoredDuringExecution."
"nodeSelectorTerms[0]."
"matchExpressions[0]."
"key",
docs[0],
)
assert "ssd" == jmespath.search(
"spec.template.spec.nodeSelector.diskType",
docs[0],
)
assert "dynamic-pods" == jmespath.search(
"spec.template.spec.tolerations[0].key",
docs[0],
)
@pytest.mark.parametrize(
"use_default_image,expected_image",
[
(True, "apache/airflow:2.1.0"),
(False, "apache/airflow:user-image"),
],
)
def test_should_use_correct_image(self, use_default_image, expected_image):
docs = render_chart(
values={
"defaultAirflowRepository": "apache/airflow",
"defaultAirflowTag": "2.1.0",
"images": {
"airflow": {
"repository": "apache/airflow",
"tag": "user-image",
},
"useDefaultImageForMigration": use_default_image,
},
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert expected_image == jmespath.search("spec.template.spec.containers[0].image", docs[0])
def test_should_add_extra_containers(self):
docs = render_chart(
values={
"migrateDatabaseJob": {
"extraContainers": [
{"name": "test-container", "image": "test-registry/test-repo:test-tag"}
],
},
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert {
"name": "test-container",
"image": "test-registry/test-repo:test-tag",
} == jmespath.search("spec.template.spec.containers[-1]", docs[0])
def test_set_resources(self):
docs = render_chart(
values={
"migrateDatabaseJob": {
"resources": {
"requests": {
"cpu": "1000mi",
"memory": "512Mi",
},
"limits": {
"cpu": "1000mi",
"memory": "512Mi",
},
},
},
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert {
"requests": {
"cpu": "1000mi",
"memory": "512Mi",
},
"limits": {
"cpu": "1000mi",
"memory": "512Mi",
},
} == jmespath.search("spec.template.spec.containers[0].resources", docs[0])
def test_should_disable_default_helm_hooks(self):
docs = render_chart(
values={"migrateDatabaseJob": {"useHelmHooks": False}},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
annotations = jmespath.search("metadata.annotations", docs[0])
assert annotations is None
def test_should_set_correct_helm_hooks_weight(self):
docs = render_chart(
show_only=[
"templates/jobs/migrate-database-job.yaml",
],
)
annotations = jmespath.search("metadata.annotations", docs[0])
assert annotations["helm.sh/hook-weight"] == "1"
def test_should_add_extra_volumes(self):
docs = render_chart(
values={
"migrateDatabaseJob": {
"extraVolumes": [{"name": "myvolume-{{ .Chart.Name }}", "emptyDir": {}}],
},
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert {"name": "myvolume-airflow", "emptyDir": {}} == jmespath.search(
"spec.template.spec.volumes[-1]", docs[0]
)
def test_should_add_extra_volume_mounts(self):
docs = render_chart(
values={
"migrateDatabaseJob": {
"extraVolumeMounts": [{"name": "foobar-{{ .Chart.Name }}", "mountPath": "foo/bar"}],
},
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert {"name": "foobar-airflow", "mountPath": "foo/bar"} == jmespath.search(
"spec.template.spec.containers[0].volumeMounts[-1]", docs[0]
)
def test_should_add_global_volume_and_global_volume_mount(self):
docs = render_chart(
values={
"volumes": [{"name": "myvolume", "emptyDir": {}}],
"volumeMounts": [{"name": "foobar", "mountPath": "foo/bar"}],
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert {"name": "myvolume", "emptyDir": {}} == jmespath.search(
"spec.template.spec.volumes[-1]", docs[0]
)
assert {"name": "foobar", "mountPath": "foo/bar"} == jmespath.search(
"spec.template.spec.containers[0].volumeMounts[-1]", docs[0]
)
def test_job_ttl_after_finished(self):
docs = render_chart(
values={"migrateDatabaseJob": {"ttlSecondsAfterFinished": 1}},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
ttl = jmespath.search("spec.ttlSecondsAfterFinished", docs[0])
assert ttl == 1
def test_job_ttl_after_finished_zero(self):
docs = render_chart(
values={"migrateDatabaseJob": {"ttlSecondsAfterFinished": 0}},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
ttl = jmespath.search("spec.ttlSecondsAfterFinished", docs[0])
assert ttl == 0
def test_job_ttl_after_finished_nil(self):
docs = render_chart(
values={"migrateDatabaseJob": {"ttlSecondsAfterFinished": None}},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
spec = jmespath.search("spec", docs[0])
assert "ttlSecondsAfterFinished" not in spec
@pytest.mark.parametrize(
"airflow_version, expected_arg",
[
("1.10.14", "airflow upgradedb"),
("2.0.2", "airflow db upgrade"),
],
)
def test_default_command_and_args_airflow_version(self, airflow_version, expected_arg):
docs = render_chart(
values={
"airflowVersion": airflow_version,
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None
assert [
"bash",
"-c",
f"exec \\\n{expected_arg}",
] == jmespath.search("spec.template.spec.containers[0].args", docs[0])
@pytest.mark.parametrize("command", [None, ["custom", "command"]])
@pytest.mark.parametrize("args", [None, ["custom", "args"]])
def test_command_and_args_overrides(self, command, args):
docs = render_chart(
values={"migrateDatabaseJob": {"command": command, "args": args}},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0])
assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0])
def test_command_and_args_overrides_are_templated(self):
docs = render_chart(
values={
"migrateDatabaseJob": {"command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"]}
},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0])
assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0])
def test_no_airflow_local_settings(self):
docs = render_chart(
values={"airflowLocalSettings": None}, show_only=["templates/jobs/migrate-database-job.yaml"]
)
volume_mounts = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])
assert "airflow_local_settings.py" not in str(volume_mounts)
def test_airflow_local_settings(self):
docs = render_chart(
values={"airflowLocalSettings": "# Well hello!"},
show_only=["templates/jobs/migrate-database-job.yaml"],
)
assert {
"name": "config",
"mountPath": "/opt/airflow/config/airflow_local_settings.py",
"subPath": "airflow_local_settings.py",
"readOnly": True,
} in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])
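    # Rough map of the chart values exercised above to where they surface in the
    # rendered Job (summary for orientation only, not an exhaustive chart reference):
    #   migrateDatabaseJob.annotations             -> spec.template.metadata.annotations
    #   migrateDatabaseJob.jobAnnotations          -> metadata.annotations
    #   migrateDatabaseJob.resources               -> spec.template.spec.containers[0].resources
    #   migrateDatabaseJob.ttlSecondsAfterFinished -> spec.ttlSecondsAfterFinished
    #   migrateDatabaseJob.command / .args         -> spec.template.spec.containers[0].command / .args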
| 12,589 | 37.619632 | 110 | py |
airflow | airflow-main/helm_tests/airflow_aux/test_annotations.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from tests.charts.helm_template_generator import render_chart
def deployment_annotations(obj):
return obj["spec"]["template"]["metadata"]["annotations"]
def cronjob_annotations(obj):
return obj["spec"]["jobTemplate"]["spec"]["template"]["metadata"]["annotations"]
def get_object_annotations(obj):
if obj["kind"] == "CronJob":
return cronjob_annotations(obj)
return deployment_annotations(obj)
class TestServiceAccountAnnotations:
"""Tests Service Account Annotations."""
@pytest.mark.parametrize(
"values,show_only,expected_annotations",
[
(
{
"cleanup": {
"enabled": True,
"serviceAccount": {
"annotations": {
"example": "cleanup",
},
},
},
},
"templates/cleanup/cleanup-serviceaccount.yaml",
{
"example": "cleanup",
},
),
(
{
"scheduler": {
"serviceAccount": {
"annotations": {
"example": "scheduler",
},
},
},
},
"templates/scheduler/scheduler-serviceaccount.yaml",
{
"example": "scheduler",
},
),
(
{
"webserver": {
"serviceAccount": {
"annotations": {
"example": "webserver",
},
},
},
},
"templates/webserver/webserver-serviceaccount.yaml",
{
"example": "webserver",
},
),
(
{
"workers": {
"serviceAccount": {
"annotations": {
"example": "worker",
},
},
},
},
"templates/workers/worker-serviceaccount.yaml",
{
"example": "worker",
},
),
(
{
"flower": {
"enabled": True,
"serviceAccount": {
"annotations": {
"example": "flower",
},
},
},
},
"templates/flower/flower-serviceaccount.yaml",
{
"example": "flower",
},
),
(
{
"statsd": {
"serviceAccount": {
"annotations": {
"example": "statsd",
},
},
},
},
"templates/statsd/statsd-serviceaccount.yaml",
{
"example": "statsd",
},
),
(
{
"redis": {
"serviceAccount": {
"annotations": {
"example": "redis",
},
},
},
},
"templates/redis/redis-serviceaccount.yaml",
{
"example": "redis",
},
),
(
{
"pgbouncer": {
"enabled": True,
"serviceAccount": {
"annotations": {
"example": "pgbouncer",
},
},
},
},
"templates/pgbouncer/pgbouncer-serviceaccount.yaml",
{
"example": "pgbouncer",
},
),
(
{
"createUserJob": {
"serviceAccount": {
"annotations": {
"example": "createuser",
},
},
},
},
"templates/jobs/create-user-job-serviceaccount.yaml",
{
"example": "createuser",
},
),
(
{
"migrateDatabaseJob": {
"serviceAccount": {
"annotations": {
"example": "migratedb",
},
},
},
},
"templates/jobs/migrate-database-job-serviceaccount.yaml",
{
"example": "migratedb",
},
),
(
{
"triggerer": {
"serviceAccount": {
"annotations": {
"example": "triggerer",
},
},
},
},
"templates/triggerer/triggerer-serviceaccount.yaml",
{
"example": "triggerer",
},
),
(
{
"dagProcessor": {
"enabled": True,
"serviceAccount": {
"annotations": {
"example": "dag-processor",
},
},
},
},
"templates/dag-processor/dag-processor-serviceaccount.yaml",
{
"example": "dag-processor",
},
),
],
)
def test_annotations_are_added(self, values, show_only, expected_annotations):
k8s_objects = render_chart(
values=values,
show_only=[show_only],
)
# This test relies on the convention that the helm chart puts a single
# ServiceAccount in its own .yaml file, so by specifying `show_only`,
# we should only get a single k8s_object here - the target object that
# we hope to test on.
assert len(k8s_objects) == 1
obj = k8s_objects[0]
for k, v in expected_annotations.items():
assert k in obj["metadata"]["annotations"]
assert v == obj["metadata"]["annotations"][k]
@pytest.mark.parametrize(
"values,show_only,expected_annotations",
[
(
{
"scheduler": {
"podAnnotations": {
"example": "scheduler",
},
},
},
"templates/scheduler/scheduler-deployment.yaml",
{
"example": "scheduler",
},
),
(
{
"webserver": {
"podAnnotations": {
"example": "webserver",
},
},
},
"templates/webserver/webserver-deployment.yaml",
{
"example": "webserver",
},
),
(
{
"workers": {
"podAnnotations": {
"example": "worker",
},
},
},
"templates/workers/worker-deployment.yaml",
{
"example": "worker",
},
),
(
{
"flower": {
"enabled": True,
"podAnnotations": {
"example": "flower",
},
},
},
"templates/flower/flower-deployment.yaml",
{
"example": "flower",
},
),
(
{
"triggerer": {
"podAnnotations": {
"example": "triggerer",
},
},
},
"templates/triggerer/triggerer-deployment.yaml",
{
"example": "triggerer",
},
),
(
{
"dagProcessor": {
"enabled": True,
"podAnnotations": {
"example": "dag-processor",
},
},
},
"templates/dag-processor/dag-processor-deployment.yaml",
{
"example": "dag-processor",
},
),
(
{
"cleanup": {
"enabled": True,
"podAnnotations": {
"example": "cleanup",
},
}
},
"templates/cleanup/cleanup-cronjob.yaml",
{
"example": "cleanup",
},
),
(
{
"redis": {
"podAnnotations": {
"example": "redis",
},
},
},
"templates/redis/redis-statefulset.yaml",
{
"example": "redis",
},
),
(
{
"statsd": {
"podAnnotations": {
"example": "statsd",
},
},
},
"templates/statsd/statsd-deployment.yaml",
{
"example": "statsd",
},
),
(
{
"pgbouncer": {
"enabled": True,
"podAnnotations": {
"example": "pgbouncer",
},
},
},
"templates/pgbouncer/pgbouncer-deployment.yaml",
{
"example": "pgbouncer",
},
),
],
)
class TestPerComponentPodAnnotations:
"""Tests Per Component Pod Annotations."""
def test_annotations_are_added(self, values, show_only, expected_annotations):
k8s_objects = render_chart(
values=values,
show_only=[show_only],
)
# This test relies on the convention that the helm chart puts a single
# Deployment in its own .yaml file, so by specifying `show_only`,
# we should only get a single k8s_object here - the target object that
# we hope to test on.
assert len(k8s_objects) == 1
obj = k8s_objects[0]
annotations = get_object_annotations(obj)
for k, v in expected_annotations.items():
assert k in annotations
assert v == annotations[k]
def test_precedence(self, values, show_only, expected_annotations):
values_global_annotations = {"airflowPodAnnotations": {k: "GLOBAL" for k in expected_annotations}}
values_merged = {**values, **values_global_annotations}
k8s_objects = render_chart(
values=values_merged,
show_only=[show_only],
)
# This test relies on the convention that the helm chart puts a single
# Deployment in its own .yaml file, so by specifying `show_only`,
# we should only get a single k8s_object here - the target object that
# we hope to test on.
assert len(k8s_objects) == 1
obj = k8s_objects[0]
annotations = get_object_annotations(obj)
for k, v in expected_annotations.items():
assert k in annotations
assert v == annotations[k]
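    # Illustrative precedence sketch (values assumed): merging
    #   airflowPodAnnotations: {example: GLOBAL}
    #   scheduler: {podAnnotations: {example: scheduler}}
    # is expected to leave the scheduler pod annotated with example: scheduler,
    # i.e. the component-level podAnnotations override the global default.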
| 13,231 | 29.843823 | 106 | py |
airflow | airflow-main/docs/build_docs.py | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Builds documentation and runs spell checking
# isort:skip_file (needed to work around isort bug)
"""
from __future__ import annotations
import argparse
import multiprocessing
import os
import sys
from collections import defaultdict
from itertools import filterfalse, tee
from typing import Callable, Iterable, NamedTuple, TypeVar
from rich.console import Console
from tabulate import tabulate
from docs.exts.docs_build import dev_index_generator, lint_checks
from docs.exts.docs_build.code_utils import CONSOLE_WIDTH, PROVIDER_INIT_FILE
from docs.exts.docs_build.docs_builder import DOCS_DIR, AirflowDocsBuilder, get_available_packages
from docs.exts.docs_build.errors import DocBuildError, display_errors_summary
from docs.exts.docs_build.fetch_inventories import fetch_inventories
from docs.exts.docs_build.github_action_utils import with_group
from docs.exts.docs_build.package_filter import process_package_filters
from docs.exts.docs_build.spelling_checks import SpellingError, display_spelling_error_summary
TEXT_RED = "\033[31m"
TEXT_RESET = "\033[0m"
if __name__ not in ("__main__", "__mp_main__"):
raise SystemExit(
"This file is intended to be executed as an executable program. You cannot use it as a module."
"To run this script, run the ./build_docs.py command"
)
CHANNEL_INVITATION = """\
If you need help, write to #documentation channel on Airflow's Slack.
Channel link: https://apache-airflow.slack.com/archives/CJ1LVREHX
Invitation link: https://s.apache.org/airflow-slack\
"""
ERRORS_ELIGIBLE_TO_REBUILD = [
"failed to reach any of the inventories with the following issues",
"toctree contains reference to nonexisting document",
"undefined label:",
"unknown document:",
"Error loading airflow.providers",
]
ON_GITHUB_ACTIONS = os.environ.get("GITHUB_ACTIONS", "false") == "true"
console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH)
T = TypeVar("T")
def partition(pred: Callable[[T], bool], iterable: Iterable[T]) -> tuple[Iterable[T], Iterable[T]]:
"""Use a predicate to partition entries into false entries and true entries"""
iter_1, iter_2 = tee(iterable)
return filterfalse(pred, iter_1), filter(pred, iter_2)
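# Illustrative usage sketch (hypothetical package names), mirroring how `partition`
# is used further down to split packages by whether their inventory is missing:
#   normal, priority = partition(lambda p: p in {"apache-airflow-providers-foo"},
#                                ["apache-airflow", "apache-airflow-providers-foo"])
#   list(normal)   -> ["apache-airflow"]
#   list(priority) -> ["apache-airflow-providers-foo"]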
def _promote_new_flags():
console.print()
console.print("[yellow]Still tired of waiting for documentation to be built?[/]")
console.print()
if ON_GITHUB_ACTIONS:
console.print("You can quickly build documentation locally with just one command.")
console.print(" [info]breeze build-docs[/]")
console.print()
console.print("[yellow]Still too slow?[/]")
console.print()
console.print("You can only build one documentation package:")
console.print(" [info]breeze build-docs --package-filter <PACKAGE-NAME>[/]")
console.print()
console.print("This usually takes from [yellow]20 seconds[/] to [yellow]2 minutes[/].")
console.print()
console.print("You can also use other extra flags to iterate faster:")
console.print(" [info]--docs-only - Only build documentation[/]")
console.print(" [info]--spellcheck-only - Only perform spellchecking[/]")
console.print()
console.print("For more info:")
console.print(" [info]breeze build-docs --help[/]")
console.print()
def _get_parser():
available_packages_list = " * " + "\n * ".join(get_available_packages())
parser = argparse.ArgumentParser(
description="Builds documentation and runs spell checking",
epilog=f"List of supported documentation packages:\n{available_packages_list}",
)
parser.formatter_class = argparse.RawTextHelpFormatter
parser.add_argument(
"--disable-checks", dest="disable_checks", action="store_true", help="Disables extra checks"
)
parser.add_argument(
"--disable-provider-checks",
dest="disable_provider_checks",
action="store_true",
help="Disables extra checks for providers",
)
parser.add_argument(
"--one-pass-only",
dest="one_pass_only",
action="store_true",
help="Do not attempt multiple builds on error",
)
parser.add_argument(
"--package-filter",
action="append",
help=(
"Filter specifying for which packages the documentation is to be built. Wildcard are supported."
),
)
parser.add_argument("--docs-only", dest="docs_only", action="store_true", help="Only build documentation")
parser.add_argument(
"--spellcheck-only", dest="spellcheck_only", action="store_true", help="Only perform spellchecking"
)
parser.add_argument(
"-j",
"--jobs",
dest="jobs",
type=int,
default=0,
help=(
"""\
Number of parallel processes that will be spawned to build the docs.
If passed 0, the value will be determined based on the number of CPUs.
"""
),
)
parser.add_argument(
"-v",
"--verbose",
dest="verbose",
action="store_true",
help=(
"Increases the verbosity of the script i.e. always displays a full log of "
"the build process, not just when it encounters errors"
),
)
return parser
class BuildSpecification(NamedTuple):
"""Specification of single build."""
package_name: str
verbose: bool
class BuildDocsResult(NamedTuple):
"""Result of building documentation."""
package_name: str
log_file_name: str
errors: list[DocBuildError]
class SpellCheckResult(NamedTuple):
"""Result of spellcheck."""
package_name: str
log_file_name: str
errors: list[SpellingError]
def perform_docs_build_for_single_package(build_specification: BuildSpecification) -> BuildDocsResult:
"""Performs single package docs build."""
builder = AirflowDocsBuilder(package_name=build_specification.package_name)
console.print(f"[info]{build_specification.package_name:60}:[/] Building documentation")
result = BuildDocsResult(
package_name=build_specification.package_name,
errors=builder.build_sphinx_docs(
verbose=build_specification.verbose,
),
log_file_name=builder.log_build_filename,
)
return result
def perform_spell_check_for_single_package(build_specification: BuildSpecification) -> SpellCheckResult:
"""Performs single package spell check."""
builder = AirflowDocsBuilder(package_name=build_specification.package_name)
console.print(f"[info]{build_specification.package_name:60}:[/] Checking spelling started")
result = SpellCheckResult(
package_name=build_specification.package_name,
errors=builder.check_spelling(
verbose=build_specification.verbose,
),
log_file_name=builder.log_spelling_filename,
)
console.print(f"[info]{build_specification.package_name:60}:[/] Checking spelling completed")
return result
def build_docs_for_packages(
current_packages: list[str],
docs_only: bool,
spellcheck_only: bool,
jobs: int,
verbose: bool,
) -> tuple[dict[str, list[DocBuildError]], dict[str, list[SpellingError]]]:
"""Builds documentation for all packages and combines errors."""
all_build_errors: dict[str, list[DocBuildError]] = defaultdict(list)
all_spelling_errors: dict[str, list[SpellingError]] = defaultdict(list)
with with_group("Cleaning documentation files"):
for package_name in current_packages:
console.print(f"[info]{package_name:60}:[/] Cleaning files")
builder = AirflowDocsBuilder(package_name=package_name)
builder.clean_files()
if jobs > 1:
run_in_parallel(
all_build_errors,
all_spelling_errors,
current_packages,
docs_only,
jobs,
spellcheck_only,
verbose,
)
else:
run_sequentially(
all_build_errors,
all_spelling_errors,
current_packages,
docs_only,
spellcheck_only,
verbose,
)
return all_build_errors, all_spelling_errors
def run_sequentially(
all_build_errors,
all_spelling_errors,
current_packages,
docs_only,
spellcheck_only,
verbose,
):
"""Run both - spellcheck and docs build sequentially without multiprocessing"""
if not spellcheck_only:
for package_name in current_packages:
build_result = perform_docs_build_for_single_package(
build_specification=BuildSpecification(
package_name=package_name,
verbose=verbose,
)
)
if build_result.errors:
all_build_errors[package_name].extend(build_result.errors)
print_build_output(build_result)
if not docs_only:
for package_name in current_packages:
spellcheck_result = perform_spell_check_for_single_package(
build_specification=BuildSpecification(
package_name=package_name,
verbose=verbose,
)
)
if spellcheck_result.errors:
all_spelling_errors[package_name].extend(spellcheck_result.errors)
print_spelling_output(spellcheck_result)
def run_in_parallel(
all_build_errors,
all_spelling_errors,
current_packages,
docs_only,
jobs,
spellcheck_only,
verbose,
):
"""Run both - spellcheck and docs build sequentially without multiprocessing"""
with multiprocessing.Pool(processes=jobs) as pool:
if not spellcheck_only:
run_docs_build_in_parallel(
all_build_errors=all_build_errors,
current_packages=current_packages,
verbose=verbose,
pool=pool,
)
if not docs_only:
run_spell_check_in_parallel(
all_spelling_errors=all_spelling_errors,
current_packages=current_packages,
verbose=verbose,
pool=pool,
)
def print_build_output(result: BuildDocsResult):
"""Prints output of docs build job."""
with with_group(f"{TEXT_RED}Output for documentation build {result.package_name}{TEXT_RESET}"):
console.print()
console.print(f"[info]{result.package_name:60}: " + "#" * 80)
with open(result.log_file_name) as output:
for line in output.read().splitlines():
console.print(f"{result.package_name:60} {line}")
console.print(f"[info]{result.package_name:60}: " + "#" * 80)
def run_docs_build_in_parallel(
all_build_errors: dict[str, list[DocBuildError]],
current_packages: list[str],
verbose: bool,
pool,
):
"""Runs documentation building in parallel."""
doc_build_specifications: list[BuildSpecification] = []
with with_group("Scheduling documentation to build"):
for package_name in current_packages:
console.print(f"[info]{package_name:60}:[/] Scheduling documentation to build")
doc_build_specifications.append(
BuildSpecification(
package_name=package_name,
verbose=verbose,
)
)
with with_group("Running docs building"):
console.print()
result_list = pool.map(perform_docs_build_for_single_package, doc_build_specifications)
for result in result_list:
if result.errors:
all_build_errors[result.package_name].extend(result.errors)
print_build_output(result)
def print_spelling_output(result: SpellCheckResult):
"""Prints output of spell check job."""
with with_group(f"{TEXT_RED}Output for spelling check: {result.package_name}{TEXT_RESET}"):
console.print()
console.print(f"[info]{result.package_name:60}: " + "#" * 80)
with open(result.log_file_name) as output:
for line in output.read().splitlines():
console.print(f"{result.package_name:60} {line}")
console.print(f"[info]{result.package_name:60}: " + "#" * 80)
console.print()
def run_spell_check_in_parallel(
all_spelling_errors: dict[str, list[SpellingError]],
current_packages: list[str],
verbose: bool,
pool,
):
"""Runs spell check in parallel."""
spell_check_specifications: list[BuildSpecification] = []
with with_group("Scheduling spell checking of documentation"):
for package_name in current_packages:
console.print(f"[info]{package_name:60}:[/] Scheduling spellchecking")
spell_check_specifications.append(BuildSpecification(package_name=package_name, verbose=verbose))
with with_group("Running spell checking of documentation"):
console.print()
result_list = pool.map(perform_spell_check_for_single_package, spell_check_specifications)
for result in result_list:
if result.errors:
all_spelling_errors[result.package_name].extend(result.errors)
print_spelling_output(result)
def display_packages_summary(
build_errors: dict[str, list[DocBuildError]], spelling_errors: dict[str, list[SpellingError]]
):
"""Displays a summary that contains information on the number of errors in each packages"""
packages_names = {*build_errors.keys(), *spelling_errors.keys()}
tabular_data = [
{
"Package name": f"[info]{package_name}[/]",
"Count of doc build errors": len(build_errors.get(package_name, [])),
"Count of spelling errors": len(spelling_errors.get(package_name, [])),
}
for package_name in sorted(packages_names, key=lambda k: k or "")
]
console.print("#" * 20, " Packages errors summary ", "#" * 20)
console.print(tabulate(tabular_data=tabular_data, headers="keys"))
console.print("#" * 50)
def print_build_errors_and_exit(
build_errors: dict[str, list[DocBuildError]],
spelling_errors: dict[str, list[SpellingError]],
) -> None:
"""Prints build errors and exists."""
if build_errors or spelling_errors:
if build_errors:
display_errors_summary(build_errors)
console.print()
if spelling_errors:
display_spelling_error_summary(spelling_errors)
console.print()
console.print("The documentation has errors.")
display_packages_summary(build_errors, spelling_errors)
console.print()
console.print(CHANNEL_INVITATION)
sys.exit(1)
else:
console.print("[green]Documentation build is successful[/]")
def main():
"""Main code"""
args = _get_parser().parse_args()
available_packages = get_available_packages()
docs_only = args.docs_only
spellcheck_only = args.spellcheck_only
disable_provider_checks = args.disable_provider_checks
disable_checks = args.disable_checks
package_filters = args.package_filter
with with_group("Available packages"):
for pkg in sorted(available_packages):
console.print(f" - {pkg}")
if package_filters:
console.print("Current package filters: ", package_filters)
current_packages = process_package_filters(available_packages, package_filters)
with with_group("Fetching inventories"):
        # Packages whose inventories could not be retrieved should be built first.
        # This may mean they are new packages.
packages_without_inventories = fetch_inventories()
normal_packages, priority_packages = partition(
lambda d: d in packages_without_inventories, current_packages
)
normal_packages, priority_packages = list(normal_packages), list(priority_packages)
jobs = args.jobs if args.jobs != 0 else os.cpu_count()
with with_group(
f"Documentation will be built for {len(current_packages)} package(s) with {jobs} parallel jobs"
):
for pkg_no, pkg in enumerate(current_packages, start=1):
console.print(f"{pkg_no}. {pkg}")
all_build_errors: dict[str | None, list[DocBuildError]] = {}
all_spelling_errors: dict[str | None, list[SpellingError]] = {}
if priority_packages:
# Build priority packages
package_build_errors, package_spelling_errors = build_docs_for_packages(
current_packages=priority_packages,
docs_only=docs_only,
spellcheck_only=spellcheck_only,
jobs=jobs,
verbose=args.verbose,
)
if package_build_errors:
all_build_errors.update(package_build_errors)
if package_spelling_errors:
all_spelling_errors.update(package_spelling_errors)
# Build normal packages
# If only one inventory is missing, the remaining packages are correct. If we are missing
# two or more inventories, it is better to try to build for all packages as the previous packages
# may have failed as well.
package_build_errors, package_spelling_errors = build_docs_for_packages(
current_packages=current_packages if len(priority_packages) > 1 else normal_packages,
docs_only=docs_only,
spellcheck_only=spellcheck_only,
jobs=jobs,
verbose=args.verbose,
)
if package_build_errors:
all_build_errors.update(package_build_errors)
if package_spelling_errors:
all_spelling_errors.update(package_spelling_errors)
if not args.one_pass_only:
# Build documentation for some packages again if it can help them.
package_build_errors, package_spelling_errors = retry_building_docs_if_needed(
all_build_errors,
all_spelling_errors,
args,
docs_only,
jobs,
package_build_errors,
package_spelling_errors,
spellcheck_only,
)
# And try again in case one change spans across three-level dependencies
retry_building_docs_if_needed(
all_build_errors,
all_spelling_errors,
args,
docs_only,
jobs,
package_build_errors,
package_spelling_errors,
spellcheck_only,
)
if not disable_checks:
general_errors = lint_checks.run_all_check(disable_provider_checks=disable_provider_checks)
if general_errors:
all_build_errors[None] = general_errors
dev_index_generator.generate_index(f"{DOCS_DIR}/_build/index.html")
if not package_filters:
_promote_new_flags()
if os.path.exists(PROVIDER_INIT_FILE):
os.remove(PROVIDER_INIT_FILE)
print_build_errors_and_exit(
all_build_errors,
all_spelling_errors,
)
def retry_building_docs_if_needed(
all_build_errors,
all_spelling_errors,
args,
docs_only,
jobs,
package_build_errors,
package_spelling_errors,
spellcheck_only,
):
to_retry_packages = [
package_name
for package_name, errors in package_build_errors.items()
if any(any((m in e.message) for m in ERRORS_ELIGIBLE_TO_REBUILD) for e in errors)
]
if to_retry_packages:
for package_name in to_retry_packages:
if package_name in all_build_errors:
del all_build_errors[package_name]
if package_name in all_spelling_errors:
del all_spelling_errors[package_name]
package_build_errors, package_spelling_errors = build_docs_for_packages(
current_packages=to_retry_packages,
docs_only=docs_only,
spellcheck_only=spellcheck_only,
jobs=jobs,
verbose=args.verbose,
)
if package_build_errors:
all_build_errors.update(package_build_errors)
if package_spelling_errors:
all_spelling_errors.update(package_spelling_errors)
return package_build_errors, package_spelling_errors
return package_build_errors, package_spelling_errors
if __name__ == "__main__":
main()
| 20,909 | 35.492147 | 110 | py |
airflow | airflow-main/docs/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/docs/conf.py | # Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Configuration of Airflow Docs"""
from __future__ import annotations
# Airflow documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 9 20:50:01 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import json
import os
import pathlib
import re
import sys
from collections import defaultdict
from pathlib import Path
from typing import Any
import yaml
from packaging.version import parse as parse_version
import airflow
from airflow.configuration import AirflowConfigParser, default_config_yaml
sys.path.append(str(Path(__file__).parent / "exts"))
from docs_build.third_party_inventories import THIRD_PARTY_INDEXES # noqa: E402
CONF_DIR = pathlib.Path(__file__).parent.absolute()
INVENTORY_CACHE_DIR = CONF_DIR / "_inventory_cache"
ROOT_DIR = CONF_DIR.parent
# By default (e.g. on RTD), build docs for `airflow` package
PACKAGE_NAME = os.environ.get("AIRFLOW_PACKAGE_NAME", "apache-airflow")
PACKAGE_DIR: pathlib.Path
if PACKAGE_NAME == "apache-airflow":
PACKAGE_DIR = ROOT_DIR / "airflow"
PACKAGE_VERSION = airflow.__version__
SYSTEM_TESTS_DIR = None
elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
from provider_yaml_utils import load_package_data
ALL_PROVIDER_YAMLS = load_package_data()
try:
CURRENT_PROVIDER = next(
provider_yaml
for provider_yaml in ALL_PROVIDER_YAMLS
if provider_yaml["package-name"] == PACKAGE_NAME
)
except StopIteration:
raise Exception(f"Could not find provider.yaml file for package: {PACKAGE_NAME}")
PACKAGE_DIR = pathlib.Path(CURRENT_PROVIDER["package-dir"])
PACKAGE_VERSION = CURRENT_PROVIDER["versions"][0]
SYSTEM_TESTS_DIR = CURRENT_PROVIDER["system-tests-dir"]
elif PACKAGE_NAME == "apache-airflow-providers":
from provider_yaml_utils import load_package_data
PACKAGE_DIR = ROOT_DIR / "airflow" / "providers"
PACKAGE_VERSION = "devel"
ALL_PROVIDER_YAMLS = load_package_data()
SYSTEM_TESTS_DIR = None
elif PACKAGE_NAME == "helm-chart":
PACKAGE_DIR = ROOT_DIR / "chart"
chart_yaml_file = PACKAGE_DIR / "Chart.yaml"
with chart_yaml_file.open() as chart_file:
chart_yaml_contents = yaml.safe_load(chart_file)
PACKAGE_VERSION = chart_yaml_contents["version"]
SYSTEM_TESTS_DIR = None
else:
PACKAGE_VERSION = "devel"
SYSTEM_TESTS_DIR = None
# Adds to environment variables for easy access from other plugins like airflow_intersphinx.
os.environ["AIRFLOW_PACKAGE_NAME"] = PACKAGE_NAME
# Hack to allow changing for piece of the code to behave differently while
# the docs are being built. The main objective was to alter the
# behavior of the utils.apply_default that was hiding function headers
os.environ["BUILDING_AIRFLOW_DOCS"] = "TRUE"
# == Sphinx configuration ======================================================
# -- Project information -------------------------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
# General information about the project.
project = PACKAGE_NAME
# The version info for the project you're documenting
version = PACKAGE_VERSION
# The full version, including alpha/beta/rc tags.
release = PACKAGE_VERSION
rst_epilog = f"""
.. |version| replace:: {version}
.. |airflow-version| replace:: {airflow.__version__}
.. |experimental| replace:: This is an :ref:`experimental feature <experimental>`.
"""
smartquotes_excludes = {"builders": ["man", "text", "spelling"]}
# -- General configuration -----------------------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/configuration.html
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"provider_init_hack",
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
"sphinxarg.ext",
"sphinx.ext.intersphinx",
"exampleinclude",
"docroles",
"removemarktransform",
"sphinx_copybutton",
"airflow_intersphinx",
"sphinxcontrib.spelling",
"sphinx_airflow_theme",
"redirects",
"substitution_extensions",
]
if PACKAGE_NAME == "apache-airflow":
extensions.extend(
[
"sphinx_jinja",
"sphinx.ext.graphviz",
"sphinxcontrib.httpdomain",
"sphinxcontrib.httpdomain",
"extra_files_with_substitutions",
# First, generate redoc
"sphinxcontrib.redoc",
# Second, update redoc script
"sphinx_script_update",
]
)
if PACKAGE_NAME == "apache-airflow-providers":
extensions.extend(
[
"sphinx_jinja",
"operators_and_hooks_ref",
"providers_packages_ref",
]
)
elif PACKAGE_NAME == "helm-chart":
extensions.append("sphinx_jinja")
elif PACKAGE_NAME == "docker-stack":
# No extra extensions
pass
elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
extensions.extend(
[
"extra_provider_files_with_substitutions",
"autoapi.extension",
]
)
else:
extensions.append("autoapi.extension")
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns: list[str]
if PACKAGE_NAME == "apache-airflow":
exclude_patterns = [
# We only link to selected subpackages.
"_api/airflow/index.rst",
# Included in the cluster-policies doc
"_api/airflow/policies/index.rst",
"README.rst",
]
elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
extensions.extend(
[
"sphinx_jinja",
]
)
exclude_patterns = ["operators/_partials"]
else:
exclude_patterns = []
def _get_rst_filepath_from_path(filepath: pathlib.Path):
if filepath.is_dir():
result = filepath
else:
if filepath.name == "__init__.py":
result = filepath.parent
else:
result = filepath.with_name(filepath.stem)
result /= "index.rst"
return f"_api/{result.relative_to(ROOT_DIR)}"
if PACKAGE_NAME == "apache-airflow":
# Exclude top-level packages
# do not exclude these top-level modules from the doc build:
_allowed_top_level = ("exceptions.py", "policies.py")
browsable_packages = {
"hooks",
"decorators",
"example_dags",
"executors",
"models",
"operators",
"providers",
"secrets",
"sensors",
"timetables",
"triggers",
"utils",
}
browsable_utils: set[str] = set()
models_included: set[str] = {
"baseoperator.py",
"connection.py",
"dag.py",
"dagbag.py",
"param.py",
"taskinstance.py",
"taskinstancekey.py",
"variable.py",
"xcom.py",
}
root = ROOT_DIR / "airflow"
for path in root.iterdir():
if path.is_file() and path.name not in _allowed_top_level:
exclude_patterns.append(_get_rst_filepath_from_path(path))
if path.is_dir() and path.name not in browsable_packages:
exclude_patterns.append(f"_api/airflow/{path.name}")
    # Don't include all of utils, just the specific ones we decided to include
for path in (root / "utils").iterdir():
if path.name not in browsable_utils:
exclude_patterns.append(_get_rst_filepath_from_path(path))
for path in (root / "models").iterdir():
if path.name not in models_included:
exclude_patterns.append(_get_rst_filepath_from_path(path))
elif PACKAGE_NAME != "docker-stack":
exclude_patterns.extend(
_get_rst_filepath_from_path(f) for f in pathlib.Path(PACKAGE_DIR).glob("**/example_dags")
)
# Add any paths that contain templates here, relative to this directory.
templates_path = ["templates"]
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = True
# -- Options for HTML output ---------------------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_airflow_theme"
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
if PACKAGE_NAME == "apache-airflow":
html_title = "Airflow Documentation"
else:
html_title = f"{PACKAGE_NAME} Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = ""
# If given, this must be the name of an image file (path relative to the
# configuration directory) that is the favicon of the docs. Modern browsers
# use this as the icon for tabs, windows and bookmarks. It should be a
# Windows-style icon file (.ico), which is 16x16 or 32x32 pixels large.
html_favicon = "../airflow/www/static/pin_32.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
if PACKAGE_NAME in ["apache-airflow", "helm-chart"]:
html_static_path = [f"{PACKAGE_NAME}/static"]
else:
html_static_path = []
# A list of JavaScript filename. The entry must be a filename string or a
# tuple containing the filename string and the attributes dictionary. The
# filename must be relative to the html_static_path, or a full URI with
# scheme like http://example.org/script.js.
if PACKAGE_NAME in ["apache-airflow", "helm-chart"]:
html_js_files = ["gh-jira-links.js"]
else:
html_js_files = []
if PACKAGE_NAME == "apache-airflow":
html_extra_path = [
f"{ROOT_DIR}/docs/apache-airflow/howto/docker-compose/airflow.sh",
]
html_extra_with_substitutions = [
f"{ROOT_DIR}/docs/apache-airflow/howto/docker-compose/docker-compose.yaml",
f"{ROOT_DIR}/docs/docker-stack/build.rst",
]
# Replace "|version|" in links
manual_substitutions_in_generated_html = [
"installation/installing-from-pypi.html",
"installation/installing-from-sources.html",
]
if PACKAGE_NAME.startswith("apache-airflow-providers"):
manual_substitutions_in_generated_html = ["example-dags.html", "operators.html", "index.html"]
if PACKAGE_NAME == "docker-stack":
# Replace "|version|" inside ```` quotes
manual_substitutions_in_generated_html = ["build.html"]
# -- Theme configuration -------------------------------------------------------
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
"**": [
"version-selector.html",
"searchbox.html",
"globaltoc.html",
]
if PACKAGE_VERSION != "devel"
else [
"searchbox.html",
"globaltoc.html",
]
}
# If false, no index is generated.
html_use_index = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
html_theme_options: dict[str, Any] = {"hide_website_buttons": True, "sidebar_includehidden": True}
html_theme_options["navbar_links"] = [
{"href": "/community/", "text": "Community"},
{"href": "/meetups/", "text": "Meetups"},
{"href": "/docs/", "text": "Documentation"},
{"href": "/use-cases/", "text": "Use-cases"},
{"href": "/announcements/", "text": "Announcements"},
{"href": "/blog/", "text": "Blog"},
{"href": "/ecosystem/", "text": "Ecosystem"},
]
# A dictionary of values to pass into the template engine's context for all pages.
html_context = {
# Google Analytics ID.
# For more information look at:
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/layout.html#L222-L232
"theme_analytics_id": "UA-140539454-1",
# Variables used to build a button for editing the source code
#
# The path is created according to the following template:
#
# https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/
# {{ theme_vcs_pageview_mode|default("blob") }}/{{ github_version }}{{ conf_py_path }}
# {{ pagename }}{{ suffix }}
#
# More information:
# https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/doc_builder/templates/doc_builder/conf.py.tmpl#L100-L103
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/breadcrumbs.html#L45
# https://github.com/apache/airflow-site/blob/91f760c/sphinx_airflow_theme/sphinx_airflow_theme/suggest_change_button.html#L36-L40
#
"theme_vcs_pageview_mode": "edit",
"conf_py_path": f"/docs/{PACKAGE_NAME}/",
"github_user": "apache",
"github_repo": "airflow",
"github_version": "main",
"display_github": "main",
"suffix": ".rst",
}
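# Illustrative example (assumed page name): with the values above and the URL template
# documented inside html_context, a page called "howto/index" in the apache-airflow
# package would resolve to roughly
#   https://github.com/apache/airflow/edit/main/docs/apache-airflow/howto/index.rst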
# == Extensions configuration ==================================================
# -- Options for sphinx_jinja ------------------------------------------
# See: https://github.com/tardyp/sphinx-jinja
airflow_version = parse_version(
re.search( # type: ignore[union-attr,arg-type]
r"__version__ = \"([0-9\.]*)(\.dev[0-9]*)?\"",
(Path(__file__).parents[1] / "airflow" / "__init__.py").read_text(),
).groups(0)[0]
)
# Jinja context
if PACKAGE_NAME == "apache-airflow":
deprecated_options: dict[str, dict[str, tuple[str, str, str]]] = defaultdict(dict)
for (section, key), (
(deprecated_section, deprecated_key, since_version)
) in AirflowConfigParser.deprecated_options.items():
deprecated_options[deprecated_section][deprecated_key] = section, key, since_version
for (section, key), deprecated in AirflowConfigParser.many_to_one_deprecated_options.items():
for deprecated_section, deprecated_key, since_version in deprecated:
deprecated_options[deprecated_section][deprecated_key] = section, key, since_version
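    # Shape of the resulting mapping (the concrete entry below is illustrative only):
    #   deprecated_options["core"]["sql_alchemy_conn"] == ("database", "sql_alchemy_conn", "2.3.0")
    # i.e. old (deprecated) section/key -> (new section, new key, version the move happened in).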
configs = default_config_yaml()
# We want the default/example we show in the docs to reflect the value _after_
# the config has been templated, not before
# e.g. {{dag_id}} in default_config.cfg -> {dag_id} in airflow.cfg, and what we want in docs
keys_to_format = ["default", "example"]
for conf_name, conf_section in configs.items():
for option_name, option in list(conf_section["options"].items()):
for key in keys_to_format:
if option[key] and "{{" in option[key]:
option[key] = option[key].replace("{{", "{").replace("}}", "}")
version_added = option["version_added"]
if version_added is not None and parse_version(version_added) > airflow_version:
del conf_section["options"][option_name]
# Sort options, config and deprecated options for JINJA variables to display
for section_name, config in configs.items():
config["options"] = {k: v for k, v in sorted(config["options"].items())}
configs = {k: v for k, v in sorted(configs.items())}
for section in deprecated_options:
deprecated_options[section] = {k: v for k, v in sorted(deprecated_options[section].items())}
jinja_contexts = {
"config_ctx": {"configs": configs, "deprecated_options": deprecated_options},
"quick_start_ctx": {
"doc_root_url": f"https://airflow.apache.org/docs/apache-airflow/{PACKAGE_VERSION}/"
},
"official_download_page": {
"base_url": f"https://downloads.apache.org/airflow/{PACKAGE_VERSION}",
"closer_lua_url": f"https://www.apache.org/dyn/closer.lua/airflow/{PACKAGE_VERSION}",
"airflow_version": PACKAGE_VERSION,
},
}
elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
def _load_config():
file_path = PACKAGE_DIR / "config_templates" / "config.yml"
if not file_path.exists():
return {}
with file_path.open() as f:
return yaml.safe_load(f)
config = _load_config()
jinja_contexts = {
"config_ctx": {"configs": config},
"official_download_page": {
"base_url": "https://downloads.apache.org/airflow/providers",
"closer_lua_url": "https://www.apache.org/dyn/closer.lua/airflow/providers",
"package_name": PACKAGE_NAME,
"package_name_underscores": PACKAGE_NAME.replace("-", "_"),
"package_version": PACKAGE_VERSION,
},
}
elif PACKAGE_NAME == "apache-airflow-providers":
jinja_contexts = {
"official_download_page": {
"all_providers": ALL_PROVIDER_YAMLS,
},
}
elif PACKAGE_NAME == "helm-chart":
def _str_representer(dumper, data):
style = "|" if "\n" in data else None # show as a block scalar if we have more than 1 line
return dumper.represent_scalar("tag:yaml.org,2002:str", data, style)
yaml.add_representer(str, _str_representer)
def _format_default(value: Any) -> str:
if value == "":
return '""'
if value is None:
return "~"
return str(value)
def _format_examples(param_name: str, schema: dict) -> str | None:
if not schema.get("examples"):
return None
# Nicer to have the parameter name shown as well
out = ""
for ex in schema["examples"]:
if schema["type"] == "array":
ex = [ex]
out += yaml.dump({param_name: ex})
return out
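    # Rough behaviour sketch (hypothetical schema fragment): for an array-typed parameter,
    #   _format_examples("env", {"type": "array", "examples": [{"name": "FOO"}]})
    # is expected to yield YAML along the lines of "env:\n- name: FOO\n", so each
    # example renders with the parameter name as the top-level key.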
def _get_params(root_schema: dict, prefix: str = "", default_section: str = "") -> list[dict]:
"""
        Given a jsonschema object's properties dict, return a flattened list of all parameters
        from that object and any nested objects.
"""
# TODO: handle arrays? probably missing more cases too
out = []
for param_name, schema in root_schema.items():
prefixed_name = f"{prefix}.{param_name}" if prefix else param_name
section_name = schema["x-docsSection"] if "x-docsSection" in schema else default_section
if section_name and schema["description"] and "default" in schema:
out.append(
{
"section": section_name,
"name": prefixed_name,
"description": schema["description"],
"default": _format_default(schema["default"]),
"examples": _format_examples(param_name, schema),
}
)
if schema.get("properties"):
out += _get_params(schema["properties"], prefixed_name, section_name)
return out
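    # Minimal sketch of the flattening (hypothetical schema fragment):
    #   _get_params({"workers": {"x-docsSection": "Workers", "description": "Worker settings",
    #                            "default": {}, "properties": {
    #                                "replicas": {"description": "Replica count", "default": 1}}}})
    # would emit one entry named "workers" and one named "workers.replicas",
    # both assigned to the "Workers" section.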
schema_file = PACKAGE_DIR / "values.schema.json"
with schema_file.open() as config_file:
chart_schema = json.load(config_file)
params = _get_params(chart_schema["properties"])
# Now, split into sections
sections: dict[str, list[dict[str, str]]] = {}
for param in params:
if param["section"] not in sections:
sections[param["section"]] = []
sections[param["section"]].append(param)
# and order each section
for section in sections.values(): # type: ignore
section.sort(key=lambda i: i["name"]) # type: ignore
# and finally order the sections!
ordered_sections = []
for name in chart_schema["x-docsSectionOrder"]:
if name not in sections:
raise ValueError(f"Unable to find any parameters for section: {name}")
ordered_sections.append({"name": name, "params": sections.pop(name)})
if sections:
raise ValueError(f"Found section(s) which were not in `section_order`: {list(sections.keys())}")
jinja_contexts = {
"params_ctx": {"sections": ordered_sections},
"official_download_page": {
"base_url": "https://downloads.apache.org/airflow/helm-chart",
"closer_lua_url": "https://www.apache.org/dyn/closer.lua/airflow/helm-chart",
"package_name": PACKAGE_NAME,
"package_version": PACKAGE_VERSION,
},
}
# -- Options for sphinx.ext.autodoc --------------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
# This value contains a list of modules to be mocked up. This is useful when some external dependencies
# are not met at build time and break the building process.
autodoc_mock_imports = [
"MySQLdb",
"adal",
"alibabacloud_adb20211201",
"alibabacloud_tea_openapi",
"analytics",
"azure",
"azure.cosmos",
"azure.datalake",
"azure.kusto",
"azure.mgmt",
"boto3",
"botocore",
"bson",
"cassandra",
"celery",
"cloudant",
"cryptography",
"datadog",
"distributed",
"docker",
"google",
"google_auth_httplib2",
"googleapiclient",
"grpc",
"hdfs",
"httplib2",
"jaydebeapi",
"jenkins",
"jira",
"kubernetes",
"msrestazure",
"oss2",
"oracledb",
"pandas",
"pandas_gbq",
"paramiko",
"pinotdb",
"psycopg2",
"pydruid",
"pyhive",
"pyhive",
"pymongo",
"pymssql",
"pysftp",
"qds_sdk",
"redis",
"simple_salesforce",
"slack_sdk",
"smbclient",
"snowflake",
"sqlalchemy-drill",
"sshtunnel",
"telegram",
"tenacity",
"vertica_python",
"winrm",
"zenpy",
]
# The default options for autodoc directives. They are applied to all autodoc directives automatically.
autodoc_default_options = {"show-inheritance": True, "members": True}
autodoc_typehints = "description"
autodoc_typehints_description_target = "documented"
autodoc_typehints_format = "short"
# -- Options for sphinx.ext.intersphinx ----------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html
# This config value contains names of other projects that should
# be linked to in this documentation.
# Inventories are only downloaded once by docs/exts/docs_build/fetch_inventories.py.
intersphinx_mapping = {
pkg_name: (f"{THIRD_PARTY_INDEXES[pkg_name]}/", (f"{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv",))
for pkg_name in [
"boto3",
"celery",
"docker",
"hdfs",
"jinja2",
"mongodb",
"pandas",
"python",
"requests",
"sqlalchemy",
]
}
if PACKAGE_NAME in ("apache-airflow-providers-google", "apache-airflow"):
intersphinx_mapping.update(
{
pkg_name: (
f"{THIRD_PARTY_INDEXES[pkg_name]}/",
(f"{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv",),
)
for pkg_name in [
"google-api-core",
"google-cloud-automl",
"google-cloud-bigquery",
"google-cloud-bigquery-datatransfer",
"google-cloud-bigquery-storage",
"google-cloud-bigtable",
"google-cloud-container",
"google-cloud-core",
"google-cloud-datacatalog",
"google-cloud-datastore",
"google-cloud-dlp",
"google-cloud-kms",
"google-cloud-language",
"google-cloud-monitoring",
"google-cloud-pubsub",
"google-cloud-redis",
"google-cloud-spanner",
"google-cloud-speech",
"google-cloud-storage",
"google-cloud-tasks",
"google-cloud-texttospeech",
"google-cloud-translate",
"google-cloud-videointelligence",
"google-cloud-vision",
]
}
)
# -- Options for sphinx.ext.viewcode -------------------------------------------
# See: https://www.sphinx-doc.org/en/master/usage/extensions/viewcode.html
# If this is True, viewcode extension will emit viewcode-follow-imported event to resolve the name of
# the module by other extensions. The default is True.
viewcode_follow_imported_members = True
# -- Options for sphinx-autoapi ------------------------------------------------
# See: https://sphinx-autoapi.readthedocs.io/en/latest/config.html
# Paths (relative or absolute) to the source code that you wish to generate
# your API documentation from.
autoapi_dirs: list[os.PathLike] = []
if PACKAGE_NAME != "docker-stack":
autoapi_dirs.append(PACKAGE_DIR)
if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR):
autoapi_dirs.append(SYSTEM_TESTS_DIR)
# A directory that has user-defined templates to override our default templates.
if PACKAGE_NAME == "apache-airflow":
autoapi_template_dir = "autoapi_templates"
# A list of patterns to ignore when finding files
autoapi_ignore = [
"*/airflow/_vendor/*",
"*/airflow/executors/*",
"*/_internal*",
"*/node_modules/*",
"*/migrations/*",
"*/contrib/*",
"**/example_sla_dag.py",
"**/example_taskflow_api_docker_virtualenv.py",
"**/example_dag_decorator.py",
]
if PACKAGE_NAME == "apache-airflow":
autoapi_ignore.append("*/airflow/providers/*")
elif PACKAGE_NAME == "docker-stack":
autoapi_ignore.append("*/airflow/providers/*")
else:
autoapi_ignore.append("*/airflow/providers/cncf/kubernetes/backcompat/*")
autoapi_ignore.append("*/airflow/providers/google/ads/*")
autoapi_ignore.append("*/example_dags/*")
# Keep the AutoAPI generated files on the filesystem after the run.
# Useful for debugging.
autoapi_keep_files = True
# Relative path to output the AutoAPI files into. This can also be used to place the generated documentation
# anywhere in your documentation hierarchy.
autoapi_root = "_api"
# Whether to insert the generated documentation into the TOC tree. If this is False, the default AutoAPI
# index page is not generated and you will need to include the generated documentation in a
# TOC tree entry yourself.
autoapi_add_toctree_entry = False
# By default autoapi will include private members -- we don't want that!
autoapi_options = [
"members",
"undoc-members",
"show-inheritance",
"show-module-summary",
"special-members",
]
suppress_warnings = [
"autoapi.python_import_resolution",
]
# -- Options for ext.exampleinclude --------------------------------------------
exampleinclude_sourceroot = os.path.abspath("..")
# -- Options for ext.redirects -------------------------------------------------
redirects_file = "redirects.txt"
# -- Options for sphinxcontrib-spelling ----------------------------------------
spelling_word_list_filename = [os.path.join(CONF_DIR, "spelling_wordlist.txt")]
if PACKAGE_NAME == "apache-airflow":
spelling_exclude_patterns = ["project.rst", "changelog.rst"]
if PACKAGE_NAME == "helm-chart":
spelling_exclude_patterns = ["changelog.rst"]
spelling_ignore_contributor_names = False
spelling_ignore_importable_modules = True
graphviz_output_format = "svg"
# -- Options for sphinxcontrib.redoc -------------------------------------------
# See: https://sphinxcontrib-redoc.readthedocs.io/en/stable/
if PACKAGE_NAME == "apache-airflow":
OPENAPI_FILE = os.path.join(
os.path.dirname(__file__), "..", "airflow", "api_connexion", "openapi", "v1.yaml"
)
redoc = [
{
"name": "Airflow REST API",
"page": "stable-rest-api-ref",
"spec": OPENAPI_FILE,
"opts": {
"hide-hostname": True,
"no-auto-auth": True,
},
},
]
# Options for script updater
redoc_script_url = "https://cdn.jsdelivr.net/npm/[email protected]/bundles/redoc.standalone.js"
def skip_util_classes(app, what, name, obj, skip, options):
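    """Skip autoapi members that are explicitly marked private or flagged to be skipped."""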
if what == "data" and "STATICA_HACK" in name:
skip = True
elif ":sphinx-autoapi-skip:" in obj.docstring:
skip = True
elif ":meta private:" in obj.docstring:
skip = True
return skip
def setup(sphinx):
if "autoapi.extension" in extensions:
sphinx.connect("autoapi-skip-member", skip_util_classes)
| 29,234 | 34.436364 | 134 | py |
airflow | airflow-main/docs/apache-airflow/empty_plugin/empty_plugin.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Plugins example"""
from __future__ import annotations
from flask import Blueprint
from flask_appbuilder import BaseView, expose
from airflow.plugins_manager import AirflowPlugin
from airflow.security import permissions
from airflow.www.auth import has_access
class EmptyPluginView(BaseView):
"""Creating a Flask-AppBuilder View"""
default_view = "index"
@expose("/")
@has_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE),
]
)
def index(self):
"""Create default view"""
return self.render_template("empty_plugin/index.html", name="Empty Plugin")
# Creating a flask blueprint
bp = Blueprint(
"Empty Plugin",
__name__,
template_folder="templates",
static_folder="static",
static_url_path="/static/empty_plugin",
)
class EmptyPlugin(AirflowPlugin):
"""Defining the plugin class"""
name = "Empty Plugin"
flask_blueprints = [bp]
appbuilder_views = [{"name": "Empty Plugin", "category": "Extra Views", "view": EmptyPluginView()}]
| 1,849 | 29.327869 | 103 | py |
airflow | airflow-main/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This dag only runs some simple tasks to test Airflow's task execution."""
from __future__ import annotations
# [START dag]
import datetime
import pendulum
from airflow.models.dag import DAG
from airflow.operators.empty import EmptyOperator
now = pendulum.now(tz="UTC")
now_to_the_hour = (now - datetime.timedelta(0, 0, 0, 0, 0, 3)).replace(minute=0, second=0, microsecond=0)
START_DATE = now_to_the_hour
DAG_NAME = "test_dag_v1"
dag = DAG(
DAG_NAME,
schedule="*/10 * * * *",
default_args={"depends_on_past": True},
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
)
run_this_1 = EmptyOperator(task_id="run_this_1", dag=dag)
run_this_2 = EmptyOperator(task_id="run_this_2", dag=dag)
run_this_2.set_upstream(run_this_1)
run_this_3 = EmptyOperator(task_id="run_this_3", dag=dag)
run_this_3.set_upstream(run_this_2)
# [END dag]
| 1,658 | 33.5625 | 105 | py |
airflow | airflow-main/docs/exts/sphinx_script_update.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import hashlib
import json
import os
import shutil
import sys
import tempfile
from functools import lru_cache
import requests
from sphinx.builders import html as builders
from sphinx.util import logging
log = logging.getLogger(__name__)
def _copy_file(src: str, dst: str) -> None:
log.info("Copying %s -> %s", src, dst)
shutil.copy2(src, dst, follow_symlinks=False)
def _gethash(string: str):
hash_object = hashlib.sha256(string.encode())
return hash_object.hexdigest()
def _user_cache_dir(appname=None):
"""Return full path to the user-specific cache dir for this application"""
if sys.platform == "win32":
        # Windows has a complex procedure for determining the application data directory because it can
        # be changed in the Windows registry, so we use a temporary directory for the cache
path = os.path.join(tempfile.gettempdir(), appname)
elif sys.platform == "darwin":
path = os.path.expanduser("~/Library/Caches")
else:
path = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
path = os.path.join(path, appname)
return path
@lru_cache(maxsize=None)
def fetch_and_cache(script_url: str, output_filename: str):
"""Fetch URL to local cache and returns path."""
cache_key = _gethash(script_url)
cache_dir = _user_cache_dir("redoc-doc")
cache_metadata_filepath = os.path.join(cache_dir, "cache-metadata.json")
cache_filepath = os.path.join(cache_dir, f"{cache_key}-{output_filename}")
# Create cache directory
os.makedirs(cache_dir, exist_ok=True)
# Load cache metadata
cache_metadata: dict[str, str] = {}
if os.path.exists(cache_metadata_filepath):
try:
with open(cache_metadata_filepath) as cache_file:
cache_metadata = json.load(cache_file)
except json.JSONDecodeError:
os.remove(cache_metadata_filepath)
etag = cache_metadata.get(cache_key)
# If we have a file and etag, check the fast path
if os.path.exists(cache_filepath) and etag:
res = requests.get(script_url, headers={"If-None-Match": etag})
if res.status_code == 304:
return cache_filepath
    # Slow path: download the script and refresh the cache
res = requests.get(script_url)
res.raise_for_status()
with open(cache_filepath, "wb") as output_file:
output_file.write(res.content)
# Save cache metadata, if needed
etag = res.headers.get("etag", None)
if etag:
cache_metadata[cache_key] = etag
with open(cache_metadata_filepath, "w") as cache_file:
json.dump(cache_metadata, cache_file)
return cache_filepath
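# Illustrative flow of fetch_and_cache() above: the first call downloads the script and stores it under
# a sha256-derived file name together with the response ETag; subsequent calls send "If-None-Match" and
# reuse the cached copy whenever the server replies 304 Not Modified.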
def builder_inited(app):
"""Sphinx "builder-inited" event handler."""
script_url = app.config.redoc_script_url
output_filename = "script.js"
fetch_and_cache(script_url, output_filename)
def build_finished(app, exception):
"""Sphinx "build-finished" event handler."""
if exception or not isinstance(app.builder, builders.StandaloneHTMLBuilder):
return
script_url = app.config.redoc_script_url
output_filename = "script.js"
cache_filepath = fetch_and_cache(script_url, output_filename)
_copy_file(cache_filepath, os.path.join(app.builder.outdir, "_static", "redoc.js"))
def setup(app):
"""Setup plugin"""
app.add_config_value("redoc_script_url", None, "env")
app.connect("builder-inited", builder_inited)
app.connect("build-finished", build_finished)
return {"parallel_read_safe": True, "parallel_write_safe": True}
| 4,330 | 33.648 | 105 | py |
airflow | airflow-main/docs/exts/extra_files_with_substitutions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
def copy_docker_compose(app, exception):
"""Sphinx "build-finished" event handler."""
from sphinx.builders import html as builders
if exception or not isinstance(app.builder, builders.StandaloneHTMLBuilder):
return
# Replace `|version|` in the docker-compose.yaml that requires manual substitutions
for path in app.config.html_extra_with_substitutions:
with open(path) as file:
with open(os.path.join(app.outdir, os.path.basename(path)), "w") as output_file:
for line in file:
output_file.write(line.replace("|version|", app.config.version))
    # Replace `|version|` in the installation files that require manual substitution (in links)
for path in app.config.manual_substitutions_in_generated_html:
with open(os.path.join(app.outdir, os.path.dirname(path), os.path.basename(path))) as input_file:
content = input_file.readlines()
with open(
os.path.join(app.outdir, os.path.dirname(path), os.path.basename(path)), "w"
) as output_file:
for line in content:
output_file.write(line.replace("|version|", app.config.version))
def setup(app):
"""Setup plugin"""
app.connect("build-finished", copy_docker_compose)
app.add_config_value("html_extra_with_substitutions", [], "[str]")
app.add_config_value("manual_substitutions_in_generated_html", [], "[str]")
return {
"parallel_write_safe": True,
"parallel_read_safe": True,
}
| 2,367 | 39.827586 | 105 | py |
airflow | airflow-main/docs/exts/removemarktransform.py | # Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Remove Transform Mark for Sphinx"""
from __future__ import annotations
import re
from docutils import nodes
from pygments.lexers import Python3Lexer, PythonLexer, guess_lexer
from sphinx.transforms import SphinxTransform
from sphinx.transforms.post_transforms.code import TrimDoctestFlagsTransform
docmark_re = re.compile(r"\s*#\s*\[(START|END)\s*[a-z_A-Z]+].*$", re.MULTILINE)
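# Illustrative: the pattern above matches marker comments such as "# [START howto_operator_bash]" or
# "# [END howto_operator_bash]", which the transform below strips from rendered Python code blocks.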
class TrimDocMarkerFlagsTransform(SphinxTransform):
"""
Trim doc marker like ``# [START howto_concept]` from python code-blocks.
Based on:
https://github.com/sphinx-doc/sphinx/blob/master/sphinx/transforms/post_transforms/code.py
class TrimDoctestFlagsTransform
"""
default_priority = TrimDoctestFlagsTransform.default_priority + 1
def apply(self, **kwargs):
for node in self.document.traverse(nodes.literal_block):
if self.is_pycode(node):
source = node.rawsource
source = docmark_re.sub("", source)
node.rawsource = source
node[:] = [nodes.Text(source)]
@staticmethod
def is_pycode(node: nodes.literal_block) -> bool:
"""Checks if the node is literal block of python"""
if node.rawsource != node.astext():
return False # skip parsed-literal node
language = node.get("language")
if language in ("py", "py3", "python", "python3", "default"):
return True
elif language == "guess":
try:
lexer = guess_lexer(node.rawsource)
return isinstance(lexer, (PythonLexer, Python3Lexer))
except Exception:
pass
return False
def setup(app):
"""Sets the transform up"""
app.add_post_transform(TrimDocMarkerFlagsTransform)
return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True}
| 2,708 | 35.12 | 94 | py |
airflow | airflow-main/docs/exts/operators_and_hooks_ref.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from functools import lru_cache
from typing import Iterable
import jinja2
import rich_click as click
from docutils import nodes
from docutils.nodes import Element
# No stub exists for docutils.parsers.rst.directives. See https://github.com/python/typeshed/issues/5755.
from docutils.parsers.rst import Directive, directives # type: ignore[attr-defined]
from docutils.statemachine import StringList
from provider_yaml_utils import get_provider_yaml_paths, load_package_data
from sphinx.util import nested_parse_with_titles
from sphinx.util.docutils import switch_source_input
CMD_OPERATORS_AND_HOOKS = "operators-and-hooks"
CMD_TRANSFERS = "transfers"
"""
Directives for rendering tables with operators.
To test the template rendering process, you can also run this script as a standalone program.
PYTHONPATH=$PWD/../ python exts/operators_and_hooks_ref.py --help
"""
DEFAULT_HEADER_SEPARATOR = "="
CURRENT_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
DOCS_DIR = os.path.join(ROOT_DIR, "docs")
@lru_cache(maxsize=None)
def _get_jinja_env():
loader = jinja2.FileSystemLoader(CURRENT_DIR, followlinks=True)
env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined)
return env
def _render_template(template_name, **kwargs):
return _get_jinja_env().get_template(template_name).render(**kwargs)
def _docs_path(filepath: str):
if not filepath.startswith("/docs/"):
raise Exception(f"The path must starts with '/docs/'. Current value: {filepath}")
if not filepath.endswith(".rst"):
raise Exception(f"The path must ends with '.rst'. Current value: {filepath}")
if filepath.startswith("/docs/apache-airflow-providers-"):
_, _, provider, rest = filepath.split("/", maxsplit=3)
filepath = f"{provider}:{rest}"
else:
filepath = os.path.join(ROOT_DIR, filepath.lstrip("/"))
filepath = os.path.relpath(filepath, DOCS_DIR)
len_rst = len(".rst")
filepath = filepath[:-len_rst]
return filepath
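# Illustrative examples for _docs_path() (the referenced documents are hypothetical):
#   _docs_path("/docs/apache-airflow-providers-http/operators.rst")
#       -> "apache-airflow-providers-http:operators"
#   _docs_path("/docs/apache-airflow/howto/operator/bash.rst")
#       -> "apache-airflow/howto/operator/bash"   (relative to the docs/ directory, ".rst" stripped)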
def _prepare_resource_index(package_data, resource_type):
return {
integration["integration-name"]: {**integration, "package-name": provider["package-name"]}
for provider in package_data
for integration in provider.get(resource_type, [])
}
def _prepare_operators_data(tags: set[str] | None):
package_data = load_package_data()
all_integrations = _prepare_resource_index(package_data, "integrations")
if tags is None:
to_display_integration = all_integrations.values()
else:
to_display_integration = [
integration for integration in all_integrations.values() if tags.intersection(integration["tags"])
]
all_operators_by_integration = _prepare_resource_index(package_data, "operators")
all_hooks_by_integration = _prepare_resource_index(package_data, "hooks")
all_sensors_by_integration = _prepare_resource_index(package_data, "sensors")
results = []
for integration in to_display_integration:
item = {
"integration": integration,
}
operators = all_operators_by_integration.get(integration["integration-name"])
sensors = all_sensors_by_integration.get(integration["integration-name"])
hooks = all_hooks_by_integration.get(integration["integration-name"])
if "how-to-guide" in item["integration"]:
item["integration"]["how-to-guide"] = [_docs_path(d) for d in item["integration"]["how-to-guide"]]
if operators:
item["operators"] = operators
if sensors:
item["sensors"] = sensors
if hooks:
item["hooks"] = hooks
if operators or sensors or hooks:
results.append(item)
return sorted(results, key=lambda d: d["integration"]["integration-name"].lower())
def _render_operator_content(*, tags: set[str] | None, header_separator: str):
tabular_data = _prepare_operators_data(tags)
return _render_template(
"operators_and_hooks_ref.rst.jinja2", items=tabular_data, header_separator=header_separator
)
def _prepare_transfer_data(tags: set[str] | None):
package_data = load_package_data()
all_operators_by_integration = _prepare_resource_index(package_data, "integrations")
# Add edge case
for name in ["SQL", "Local"]:
all_operators_by_integration[name] = {"integration-name": name}
all_transfers = [
{
**transfer,
"package-name": provider["package-name"],
"source-integration": all_operators_by_integration[transfer["source-integration-name"]],
"target-integration": all_operators_by_integration[transfer["target-integration-name"]],
}
for provider in package_data
for transfer in provider.get("transfers", [])
]
if tags is None:
to_display_transfers = all_transfers
else:
to_display_transfers = [
transfer
for transfer in all_transfers
if tags.intersection(transfer["source-integration"].get("tags", set()))
or tags.intersection(transfer["target-integration"].get("tags", set()))
]
for transfer in to_display_transfers:
if "how-to-guide" not in transfer:
continue
transfer["how-to-guide"] = _docs_path(transfer["how-to-guide"])
return to_display_transfers
def _render_transfer_content(*, tags: set[str] | None, header_separator: str):
tabular_data = _prepare_transfer_data(tags)
return _render_template(
"operators_and_hooks_ref-transfers.rst.jinja2", items=tabular_data, header_separator=header_separator
)
class BaseJinjaReferenceDirective(Directive):
"""The base directive for OperatorsHooksReferenceDirective and TransfersReferenceDirective"""
optional_arguments = 1
option_spec = {"tags": directives.unchanged, "header-separator": directives.unchanged_required}
def run(self):
tags_arg = self.options.get("tags")
tags = {t.strip() for t in tags_arg.split(",")} if tags_arg else None
header_separator = self.options.get("header-separator")
new_content = self.render_content(tags=tags, header_separator=header_separator)
with switch_source_input(self.state, self.content):
new_content = StringList(new_content.splitlines(), source="")
node: Element = nodes.section()
# necessary so that the child nodes get the right source/line set
node.document = self.state.document
nested_parse_with_titles(self.state, new_content, node)
# record all filenames as dependencies -- this will at least
# partially make automatic invalidation possible
for filepath in get_provider_yaml_paths():
self.state.document.settings.record_dependencies.add(filepath)
return node.children
def render_content(self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR):
"""Return content in RST format"""
raise NotImplementedError("Tou need to override render_content method.")
def _common_render_list_content(*, header_separator: str, resource_type: str, template: str):
tabular_data = {
provider["package-name"]: {
"name": provider["name"],
resource_type: provider.get(resource_type, []),
}
for provider in load_package_data()
if provider.get(resource_type) is not None
}
return _render_template(template, items=tabular_data, header_separator=header_separator)
class OperatorsHooksReferenceDirective(BaseJinjaReferenceDirective):
"""Generates a list of operators, sensors, hooks"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _render_operator_content(
tags=tags,
header_separator=header_separator,
)
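# Illustrative .rst usage of the directive defined above (registered as ``operators-hooks-ref`` in
# setup() below; the tag value is an example):
#
#   .. operators-hooks-ref::
#      :tags: gcp
#      :header-separator: !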
class TransfersReferenceDirective(BaseJinjaReferenceDirective):
"""Generate a list of transfer operators"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _render_transfer_content(
tags=tags,
header_separator=header_separator,
)
class LoggingDirective(BaseJinjaReferenceDirective):
"""Generate list of logging handlers"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _common_render_list_content(
header_separator=header_separator, resource_type="logging", template="logging.rst.jinja2"
)
class AuthBackendDirective(BaseJinjaReferenceDirective):
"""Generate list of auth backend handlers"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _common_render_list_content(
header_separator=header_separator,
resource_type="auth-backends",
template="auth_backend.rst.jinja2",
)
class SecretsBackendDirective(BaseJinjaReferenceDirective):
"""Generate list of secret backend handlers"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _common_render_list_content(
header_separator=header_separator,
resource_type="secrets-backends",
template="secret_backend.rst.jinja2",
)
class ConnectionsDirective(BaseJinjaReferenceDirective):
"""Generate list of connections"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _common_render_list_content(
header_separator=header_separator,
resource_type="connection-types",
template="connections.rst.jinja2",
)
class ExtraLinksDirective(BaseJinjaReferenceDirective):
"""Generate list of extra links"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _common_render_list_content(
header_separator=header_separator, resource_type="extra-links", template="extra_links.rst.jinja2"
)
class NotificationsDirective(BaseJinjaReferenceDirective):
"""Generate list of notifiers"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _common_render_list_content(
header_separator=header_separator,
resource_type="notifications",
template="notifications.rst.jinja2",
)
class ExecutorsDirective(BaseJinjaReferenceDirective):
"""Generate list of executors"""
def render_content(
self, *, tags: set[str] | None, header_separator: str = DEFAULT_HEADER_SEPARATOR
) -> str:
return _common_render_list_content(
header_separator=header_separator, resource_type="executors", template="executors.rst.jinja2"
)
def setup(app):
"""Setup plugin"""
app.add_directive("operators-hooks-ref", OperatorsHooksReferenceDirective)
app.add_directive("transfers-ref", TransfersReferenceDirective)
app.add_directive("airflow-logging", LoggingDirective)
app.add_directive("airflow-auth-backends", AuthBackendDirective)
app.add_directive("airflow-secrets-backends", SecretsBackendDirective)
app.add_directive("airflow-connections", ConnectionsDirective)
app.add_directive("airflow-extra-links", ExtraLinksDirective)
app.add_directive("airflow-notifications", NotificationsDirective)
app.add_directive("airflow-executors", ExecutorsDirective)
return {"parallel_read_safe": True, "parallel_write_safe": True}
option_tag = click.option(
"--tag",
multiple=True,
help="If passed, displays integrations that have a matching tag",
)
option_header_separator = click.option(
"--header-separator", default=DEFAULT_HEADER_SEPARATOR, show_default=True
)
@click.group(context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 500})
def cli():
"""Render tables with integrations"""
@cli.command()
@option_tag
@option_header_separator
def operators_and_hooks(tag: Iterable[str], header_separator: str):
"""Renders Operators ahd Hooks content"""
print(_render_operator_content(tags=set(tag) if tag else None, header_separator=header_separator))
@cli.command()
@option_tag
@option_header_separator
def transfers(tag: Iterable[str], header_separator: str):
"""Renders Transfers content"""
print(_render_transfer_content(tags=set(tag) if tag else None, header_separator=header_separator))
@cli.command()
@option_header_separator
def logging(header_separator: str):
"""Renders Logger content"""
print(
_common_render_list_content(
header_separator=header_separator, resource_type="logging", template="logging.rst.jinja2"
)
)
@cli.command()
@option_header_separator
def auth_backends(header_separator: str):
"""Renders Logger content"""
print(
_common_render_list_content(
header_separator=header_separator,
resource_type="auth-backends",
template="auth_backend.rst.jinja2",
)
)
@cli.command()
@option_header_separator
def secret_backends(header_separator: str):
"""Renders Secret Backends content"""
print(
_common_render_list_content(
header_separator=header_separator,
resource_type="secrets-backends",
template="secret_backend.rst.jinja2",
)
)
@cli.command()
@option_header_separator
def connections(header_separator: str):
"""Renders Connections content"""
print(
_common_render_list_content(
header_separator=header_separator,
resource_type="connection-types",
template="connections.rst.jinja2",
)
)
@cli.command()
@option_header_separator
def extra_links(header_separator: str):
"""Renders Extra links content"""
print(
_common_render_list_content(
header_separator=header_separator, resource_type="extra-links", template="extra_links.rst.jinja2"
)
)
if __name__ == "__main__":
cli()
| 15,294 | 33.920091 | 110 | py |
airflow | airflow-main/docs/exts/exampleinclude.py | # flake8: noqa
# Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
"""Nice formatted include for examples"""
import traceback
from os import path
from docutils import nodes
# No stub exists for docutils.parsers.rst.directives. See https://github.com/python/typeshed/issues/5755.
from docutils.parsers.rst import directives # type: ignore[attr-defined]
from sphinx.directives.code import LiteralIncludeReader
from sphinx.ext.viewcode import viewcode_anchor
from sphinx.locale import _
from sphinx.pycode import ModuleAnalyzer
from sphinx.util import logging, parselinenos
from sphinx.util.docutils import SphinxDirective
from sphinx.util.nodes import set_source_info
try:
import sphinx_airflow_theme
airflow_theme_is_available = True
except ImportError:
airflow_theme_is_available = False
logger = logging.getLogger(__name__)
class ExampleHeader(nodes.reference, nodes.FixedTextElement):
"""Header for examples."""
class ExampleInclude(SphinxDirective):
"""
Like ``.. literalinclude:: ``, but it does not support caption option.
Adds a header with a reference to the full source code
Based on:
https://raw.githubusercontent.com/sphinx-doc/sphinx/v1.8.3/sphinx/directives/code.py
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
"dedent": int,
"linenos": directives.flag,
"lineno-start": int,
"lineno-match": directives.flag,
"tab-width": int,
"language": directives.unchanged_required,
"encoding": directives.encoding,
"pyobject": directives.unchanged_required,
"lines": directives.unchanged_required,
"start-after": directives.unchanged_required,
"end-before": directives.unchanged_required,
"start-at": directives.unchanged_required,
"end-at": directives.unchanged_required,
"prepend": directives.unchanged_required,
"append": directives.unchanged_required,
"emphasize-lines": directives.unchanged_required,
"class": directives.class_option,
"name": directives.unchanged,
"diff": directives.unchanged_required,
}
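    # Illustrative .rst usage of this directive (the included path is hypothetical):
    #
    #   .. exampleinclude:: /../../airflow/example_dags/example_bash_operator.py
    #       :language: python
    #       :start-after: [START howto_operator_bash]
    #       :end-before: [END howto_operator_bash]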
def run(self):
document = self.state.document
if not document.settings.file_insertion_enabled:
return [document.reporter.warning("File insertion disabled", line=self.lineno)]
        # convert options['diff'] to an absolute path
if "diff" in self.options:
_, a_path = self.env.relfn2path(self.options["diff"])
self.options["diff"] = a_path
try:
location = self.state_machine.get_source_and_line(self.lineno)
rel_filename, filename = self.env.relfn2path(self.arguments[0])
self.env.note_dependency(rel_filename)
reader = LiteralIncludeReader(filename, self.options, self.config)
text, lines = reader.read(location=location)
retnode = nodes.literal_block(text, text, source=filename)
set_source_info(self, retnode)
if self.options.get("diff"): # if diff is set, set udiff
retnode["language"] = "udiff"
elif "language" in self.options:
retnode["language"] = self.options["language"]
retnode["linenos"] = (
"linenos" in self.options or "lineno-start" in self.options or "lineno-match" in self.options
)
retnode["classes"] += self.options.get("class", [])
extra_args = retnode["highlight_args"] = {}
if "emphasize-lines" in self.options:
hl_lines = parselinenos(self.options["emphasize-lines"], lines)
if any(i >= lines for i in hl_lines):
logger.warning(
"line number spec is out of range(1-%d): %r", lines, self.options["emphasize-lines"]
)
extra_args["hl_lines"] = [x + 1 for x in hl_lines if x < lines]
extra_args["linenostart"] = reader.lineno_start
container_node = nodes.compound(classes=["example-block-wrapper"])
container_node += ExampleHeader(filename=filename)
container_node += retnode
retnode = container_node
return [retnode]
except Exception as exc:
return [document.reporter.warning(str(exc), line=self.lineno)]
def register_source(app, env, modname):
"""
Registers source code.
:param app: application
:param env: environment of the plugin
:param modname: name of the module to load
:return: True if the code is registered successfully, False otherwise
"""
entry = env._viewcode_modules.get(modname, None)
if entry is False:
print(f"[{modname}] Entry is false for ")
return False
code_tags = app.emit_firstresult("viewcode-find-source", modname)
if code_tags is None:
try:
analyzer = ModuleAnalyzer.for_module(modname)
except Exception as ex:
logger.info(
'Module "%s" could not be loaded. Full source will not be available. "%s"', modname, ex
)
# We cannot use regular warnings or exception methods because those warnings are interpreted
# by running python process and converted into "real" warnings, so we need to print the
# traceback here at info level
tb = traceback.format_exc()
logger.info("%s", tb)
env._viewcode_modules[modname] = False
return False
if not isinstance(analyzer.code, str):
code = analyzer.code.decode(analyzer.encoding)
else:
code = analyzer.code
analyzer.find_tags()
tags = analyzer.tags
else:
code, tags = code_tags
if entry is None or entry[0] != code:
entry = code, tags, {}, ""
env._viewcode_modules[modname] = entry
return True
def create_node(env, relative_path, show_button):
"""
Creates documentation node for example include.
:param env: environment of the documentation
:param relative_path: path of the code
:param show_button: whether to show "view code" button
    :return: paragraph with the node
"""
pagename = "_modules/" + relative_path[:-3]
header_classes = ["example-header"]
if show_button:
header_classes += ["example-header--with-button"]
paragraph = nodes.paragraph(relative_path, classes=header_classes)
paragraph += nodes.inline("", relative_path, classes=["example-title"])
if show_button:
pending_ref = viewcode_anchor(
reftarget=pagename,
refid="",
refdoc=env.docname,
classes=["example-header-button viewcode-button"],
)
pending_ref += nodes.inline("", _("View Source"))
paragraph += pending_ref
return paragraph
def doctree_read(app, doctree):
"""
Reads documentation tree for the application and register sources in the generated documentation.
:param app: application
:param doctree: documentation tree
    :return: None
"""
env = app.builder.env
if not hasattr(env, "_viewcode_modules"):
env._viewcode_modules = {}
if app.builder.name == "singlehtml":
return
for objnode in doctree.traverse(ExampleHeader):
filepath = objnode.get("filename")
relative_path = path.relpath(
filepath, path.commonprefix([app.config.exampleinclude_sourceroot, filepath])
)
modname = relative_path.replace("/", ".")[:-3]
show_button = register_source(app, env, modname)
onlynode = create_node(env, relative_path, show_button)
objnode.replace_self(onlynode)
def setup(app):
"""
Sets the plugin up and returns configuration of the plugin.
:param app: application.
    :return: JSON description of the configuration that is needed by the plugin.
"""
directives.register_directive("exampleinclude", ExampleInclude)
app.connect("doctree-read", doctree_read)
app.add_config_value("exampleinclude_sourceroot", None, "env")
if not airflow_theme_is_available:
# Sphinx airflow theme has its own styles.
app.add_css_file("exampleinclude.css")
return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True}
| 9,242 | 35.389764 | 109 | py |
airflow | airflow-main/docs/exts/substitution_extensions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import Any
from docutils import nodes
from docutils.nodes import Node, system_message
# No stub exists for docutils.parsers.rst.directives. See https://github.com/python/typeshed/issues/5755.
from docutils.parsers.rst import Directive, directives # type: ignore[attr-defined]
from docutils.parsers.rst.roles import code_role
from sphinx.application import Sphinx
from sphinx.transforms import SphinxTransform
from sphinx.transforms.post_transforms.code import HighlightLanguageTransform
LOGGER = logging.getLogger(__name__)
OriginalCodeBlock: Directive = directives._directives["code-block"]
_SUBSTITUTION_OPTION_NAME = "substitutions"
class SubstitutionCodeBlock(OriginalCodeBlock): # type: ignore
"""Similar to CodeBlock but replaces placeholders with variables."""
option_spec = OriginalCodeBlock.option_spec.copy()
option_spec[_SUBSTITUTION_OPTION_NAME] = directives.flag
def run(self) -> list:
"""Decorate code block so that SubstitutionCodeBlockTransform will notice it"""
[node] = super().run()
if _SUBSTITUTION_OPTION_NAME in self.options:
node.attributes["substitutions"] = True
return [node]
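# Illustrative .rst usage of the directive above (assumes a ``|version|`` substitution is defined
# for the project):
#
#   .. code-block:: bash
#      :substitutions:
#
#      pip install apache-airflow==|version|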
class SubstitutionCodeBlockTransform(SphinxTransform):
"""Substitute ``|variables|`` in code and code-block nodes"""
# Run before we highlight the code!
default_priority = HighlightLanguageTransform.default_priority - 1
def apply(self, **kwargs: Any) -> None:
def condition(node):
return isinstance(node, (nodes.literal_block, nodes.literal))
for node in self.document.traverse(condition):
if _SUBSTITUTION_OPTION_NAME not in node:
continue
# Some nodes don't have a direct document property, so walk up until we find it
document = node.document
parent = node.parent
while document is None:
parent = parent.parent
document = parent.document
substitution_defs = document.substitution_defs
for child in node.children:
old_child = child
for name, value in substitution_defs.items():
replacement = value.astext()
child = nodes.Text(child.replace(f"|{name}|", replacement))
node.replace(old_child, child)
# The highlighter checks this -- without this, it will refuse to apply highlighting
node.rawsource = node.astext()
def substitution_code_role(*args, **kwargs) -> tuple[list[Node], list[system_message]]:
"""Decorate an inline code so that SubstitutionCodeBlockTransform will notice it"""
[node], system_messages = code_role(*args, **kwargs)
node[_SUBSTITUTION_OPTION_NAME] = True
return [node], system_messages
substitution_code_role.options = { # type: ignore
"class": directives.class_option,
"language": directives.unchanged,
}
class AddSpacepadSubstReference(SphinxTransform):
"""
Add a custom ``|version-spacepad|`` replacement definition
Since this desired replacement text is all just whitespace, we can't use
    the normal RST to define this, so we instead have to create this definition
manually after docutils has parsed the source files.
"""
# Run as early as possible
default_priority = 1
def apply(self, **kwargs: Any) -> None:
substitution_defs = self.document.substitution_defs
version = substitution_defs["version"].astext()
pad = " " * len(version)
substitution_defs["version-spacepad"] = nodes.substitution_definition(version, pad)
...
def setup(app: Sphinx) -> dict:
"""Setup plugin"""
app.add_config_value("substitutions", [], "html")
directives.register_directive("code-block", SubstitutionCodeBlock)
app.add_role("subst-code", substitution_code_role)
app.add_post_transform(SubstitutionCodeBlockTransform)
app.add_post_transform(AddSpacepadSubstReference)
return {"parallel_write_safe": True, "parallel_read_safe": True}
| 4,893 | 36.937984 | 105 | py |
airflow | airflow-main/docs/exts/airflow_intersphinx.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import time
from typing import Any
from provider_yaml_utils import load_package_data
from sphinx.application import Sphinx
CURRENT_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
DOCS_DIR = os.path.join(ROOT_DIR, "docs")
DOCS_PROVIDER_DIR = os.path.join(ROOT_DIR, "docs")
def _create_init_py(app, config):
del app
# del config
intersphinx_mapping = getattr(config, "intersphinx_mapping", None) or {}
providers_mapping = _generate_provider_intersphinx_mapping()
intersphinx_mapping.update(providers_mapping)
config.intersphinx_mapping = intersphinx_mapping
def _generate_provider_intersphinx_mapping():
airflow_mapping = {}
current_version = "stable"
for provider in load_package_data():
package_name = provider["package-name"]
if os.environ.get("AIRFLOW_PACKAGE_NAME") == package_name:
continue
provider_base_url = f"/docs/{package_name}/{current_version}/"
doc_inventory = f"{DOCS_DIR}/_build/docs/{package_name}/{current_version}/objects.inv"
cache_inventory = f"{DOCS_DIR}/_inventory_cache/{package_name}/objects.inv"
# Skip adding the mapping if the path does not exist
if not os.path.exists(doc_inventory) and not os.path.exists(cache_inventory):
continue
airflow_mapping[package_name] = (
# base URI
provider_base_url,
(doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
)
for pkg_name in ["apache-airflow", "helm-chart"]:
if os.environ.get("AIRFLOW_PACKAGE_NAME") == pkg_name:
continue
doc_inventory = f"{DOCS_DIR}/_build/docs/{pkg_name}/{current_version}/objects.inv"
cache_inventory = f"{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv"
airflow_mapping[pkg_name] = (
# base URI
f"/docs/{pkg_name}/stable/",
(doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
)
for pkg_name in ["apache-airflow-providers", "docker-stack"]:
if os.environ.get("AIRFLOW_PACKAGE_NAME") == pkg_name:
continue
doc_inventory = f"{DOCS_DIR}/_build/docs/{pkg_name}/objects.inv"
cache_inventory = f"{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv"
airflow_mapping[pkg_name] = (
# base URI
f"/docs/{pkg_name}/",
(doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
)
return airflow_mapping
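# Illustrative shape of a single generated mapping entry (package name and paths are examples):
#   "apache-airflow-providers-http": (
#       "/docs/apache-airflow-providers-http/stable/",
#       ("<DOCS_DIR>/_inventory_cache/apache-airflow-providers-http/objects.inv",),
#   )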
def setup(app: Sphinx):
"""Sets the plugin up"""
app.connect("config-inited", _create_init_py)
return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True}
if __name__ == "__main__":
def main():
"""A simple application that displays the roles available for Airflow documentation."""
import concurrent.futures
import sys
from sphinx.ext.intersphinx import fetch_inventory_group
class _MockConfig:
intersphinx_timeout = None
intersphinx_cache_limit = 1
tls_verify = False
user_agent = None
class _MockApp:
srcdir = ""
config = _MockConfig()
def warn(self, msg: str) -> None:
"""Display warning"""
print(msg, file=sys.stderr)
def fetch_inventories(intersphinx_mapping) -> dict[str, Any]:
now = int(time.time())
cache: dict[Any, Any] = {}
with concurrent.futures.ThreadPoolExecutor() as pool:
for name, (uri, invs) in intersphinx_mapping.values():
pool.submit(fetch_inventory_group, name, uri, invs, cache, _MockApp(), now)
inv_dict = {}
for uri, (name, now, invdata) in cache.items():
del uri
del now
inv_dict[name] = invdata
return inv_dict
def domain_and_object_type_to_role(domain: str, object_type: str) -> str:
if domain == "py":
from sphinx.domains.python import PythonDomain
role_name = PythonDomain.object_types[object_type].roles[0]
elif domain == "std":
from sphinx.domains.std import StandardDomain
role_name = StandardDomain.object_types[object_type].roles[0]
else:
role_name = object_type
return role_name
def inspect_main(inv_data, name) -> None:
try:
for key in sorted(inv_data or {}):
for entry, _ in sorted(inv_data[key].items()):
domain, object_type = key.split(":")
role_name = domain_and_object_type_to_role(domain, object_type)
print(f":{role_name}:`{name}:{entry}`")
except ValueError as exc:
print(exc.args[0] % exc.args[1:])
except Exception as exc:
print(f"Unknown error: {exc!r}")
provider_mapping = _generate_provider_intersphinx_mapping()
for key, value in provider_mapping.copy().items():
provider_mapping[key] = (key, value)
inv_dict = fetch_inventories(provider_mapping)
for name, inv_data in inv_dict.items():
inspect_main(inv_data, name)
import logging
logging.basicConfig(level=logging.DEBUG)
main()
| 6,310 | 34.857955 | 95 | py |
airflow | airflow-main/docs/exts/extra_provider_files_with_substitutions.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from pathlib import Path
def fix_provider_references(app, exception):
"""Sphinx "build-finished" event handler."""
from sphinx.builders import html as builders
if exception or not isinstance(app.builder, builders.StandaloneHTMLBuilder):
return
# Replace `|version|` in the files that require manual substitution
for path in Path(app.outdir).rglob("*.html"):
if not path.exists():
continue
with open(path) as input_file:
content = input_file.readlines()
with open(path, "w") as output_file:
for line in content:
output_file.write(line.replace("|version|", app.config.version))
def setup(app):
"""Setup plugin"""
app.connect("build-finished", fix_provider_references)
return {
"parallel_write_safe": True,
"parallel_read_safe": True,
}
| 1,698 | 34.395833 | 80 | py |
airflow | airflow-main/docs/exts/docroles.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Document roles"""
from __future__ import annotations
#
#
from functools import partial
from docutils import nodes, utils
from sphinx.ext.autodoc.importer import import_module
from sphinx.ext.autodoc.mock import mock
class RoleException(Exception):
"""Exception for roles extension"""
def get_template_field(env, fullname) -> list[str]:
"""
Gets template fields for specific operator class.
:param env: env config
:param fullname: Full path to operator class.
For example: ``airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductSetOperator``
    :return: List of template fields
"""
modname, classname = fullname.rsplit(".", 1)
try:
with mock(env.config.autodoc_mock_imports):
mod = import_module(modname)
except ImportError:
raise RoleException(f"Error loading {modname} module.")
clazz = getattr(mod, classname)
if not clazz:
raise RoleException(f"Error finding {classname} class in {modname} module.")
template_fields = getattr(clazz, "template_fields")
if not template_fields:
raise RoleException(f"Could not find the template fields for {classname} class in {modname} module.")
return list(template_fields)
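# Illustrative call (the operator path is an example; returned fields depend on the class):
#   get_template_field(env, "airflow.operators.bash.BashOperator") -> ["bash_command", "env", ...]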
def template_field_role(
app,
typ,
rawtext,
text,
lineno,
inliner,
options=None,
content=None,
):
"""
A role that allows you to include a list of template fields in the middle of the text. This is especially
useful when writing guides describing how to use the operator.
The result is a list of fields where each field is shorted in the literal block.
Sample usage::
:template-fields:`airflow.operators.bash.BashOperator`
For further information look at:
    * `Creating reStructuredText Interpreted Text Roles
      <http://docutils.sourceforge.net/docs/howto/rst-roles.html>`__
"""
if options is None:
options = {}
if content is None:
content = []
text = utils.unescape(text)
try:
template_fields = get_template_field(app.env, text)
except RoleException as e:
msg = inliner.reporter.error(
f"invalid class name {text} \n{e}",
line=lineno,
)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
node = nodes.inline(rawtext=rawtext)
for i, field in enumerate(template_fields):
if i != 0:
node += nodes.Text(", ")
node += nodes.literal(field, "", nodes.Text(field))
return [node], []
def setup(app):
"""Sets the extension up"""
from docutils.parsers.rst import roles
roles.register_local_role("template-fields", partial(template_field_role, app))
return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True}
| 3,616 | 29.652542 | 109 | py |
airflow | airflow-main/docs/exts/__init__.py | # Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 838 | 43.157895 | 62 | py |
airflow | airflow-main/docs/exts/provider_yaml_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os
from glob import glob
from pathlib import Path
from typing import Any
import jsonschema
import yaml
ROOT_DIR = Path(__file__).parents[2].resolve()
PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json"
def _load_schema() -> dict[str, Any]:
with open(PROVIDER_DATA_SCHEMA_PATH) as schema_file:
content = json.load(schema_file)
return content
def _filepath_to_module(filepath: str):
return str(Path(filepath).relative_to(ROOT_DIR)).replace("/", ".")
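# Illustrative example (assuming the repository contains airflow/providers/http):
#   _filepath_to_module("<ROOT_DIR>/airflow/providers/http") -> "airflow.providers.http"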
def _filepath_to_system_tests(filepath: str):
return str(
ROOT_DIR
/ "tests"
/ "system"
/ "providers"
/ Path(filepath).relative_to(ROOT_DIR / "airflow" / "providers")
)
def get_provider_yaml_paths():
"""Returns list of provider.yaml files"""
return sorted(glob(f"{ROOT_DIR}/airflow/providers/**/provider.yaml", recursive=True))
def load_package_data() -> list[dict[str, Any]]:
"""
Load all data from providers files
:return: A list containing the contents of all provider.yaml files.
"""
schema = _load_schema()
result = []
for provider_yaml_path in get_provider_yaml_paths():
with open(provider_yaml_path) as yaml_file:
provider = yaml.safe_load(yaml_file)
try:
jsonschema.validate(provider, schema=schema)
except jsonschema.ValidationError:
raise Exception(f"Unable to parse: {provider_yaml_path}.")
if provider["suspended"]:
continue
provider_yaml_dir = os.path.dirname(provider_yaml_path)
provider["python-module"] = _filepath_to_module(provider_yaml_dir)
provider["package-dir"] = provider_yaml_dir
provider["system-tests-dir"] = _filepath_to_system_tests(provider_yaml_dir)
result.append(provider)
return result
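# Illustrative shape of one entry returned by load_package_data() (values are examples):
#   {"package-name": "apache-airflow-providers-http", ...,
#    "python-module": "airflow.providers.http",
#    "package-dir": "<ROOT_DIR>/airflow/providers/http",
#    "system-tests-dir": "<ROOT_DIR>/tests/system/providers/http"}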
| 2,674 | 32.4375 | 89 | py |
airflow | airflow-main/docs/exts/redirects.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Based on: https://github.com/sphinx-contrib/redirects"""
from __future__ import annotations
import os
from sphinx.builders import html as builders
from sphinx.util import logging
TEMPLATE = '<html><head><meta http-equiv="refresh" content="0; url={}"/></head></html>'
log = logging.getLogger(__name__)
def generate_redirects(app):
"""Generate redirects files."""
redirect_file_path = os.path.join(app.srcdir, app.config.redirects_file)
if not os.path.exists(redirect_file_path):
log.info("Could not found the redirect file: %s", redirect_file_path)
return
in_suffix = next(iter(app.config.source_suffix.keys()))
if not isinstance(app.builder, builders.StandaloneHTMLBuilder):
return
with open(redirect_file_path) as redirects:
for line in redirects.readlines():
# Skip empty line
if not line.strip():
continue
# Skip comments
if line.startswith("#"):
continue
from_path, _, to_path = line.rstrip().partition(" ")
log.debug("Redirecting '%s' to '%s'", from_path, to_path)
from_path = from_path.replace(in_suffix, ".html")
to_path = to_path.replace(in_suffix, ".html")
to_path_prefix = f"..{os.path.sep}" * (len(from_path.split(os.path.sep)) - 1)
to_path = to_path_prefix + to_path
log.debug("Resolved redirect '%s' to '%s'", from_path, to_path)
redirected_filename = os.path.join(app.builder.outdir, from_path)
redirected_directory = os.path.dirname(redirected_filename)
os.makedirs(redirected_directory, exist_ok=True)
with open(redirected_filename, "w") as f:
f.write(TEMPLATE.format(to_path))
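# Illustrative sketch of the redirects file format (assumed example paths, not executed):
# each non-empty, non-comment line holds "<old-path> <new-path>" relative to the source dir:
#
#   # old-path new-path
#   howto/secure-connections.rst howto/connection.rst
#
# For that line the builder writes a howto/secure-connections.html file containing a
# meta-refresh redirect pointing at the relocated connection page.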
def setup(app):
"""Setup plugin"""
app.add_config_value("redirects_file", "redirects", "env")
app.connect("builder-inited", generate_redirects)
return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True}
| 2,833 | 35.333333 | 90 | py |
airflow | airflow-main/docs/exts/providers_packages_ref.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from provider_yaml_utils import load_package_data
from sphinx.application import Sphinx
def _on_config_inited(app, config):
del app
jinja_context = getattr(config, "jinja_contexts", None) or {}
jinja_context["providers_ctx"] = {"providers": load_package_data()}
config.jinja_contexts = jinja_context
def setup(app: Sphinx):
"""Setup plugin"""
app.setup_extension("sphinx_jinja")
app.connect("config-inited", _on_config_inited)
app.add_crossref_type(
directivename="provider",
rolename="provider",
)
return {"parallel_read_safe": True, "parallel_write_safe": True}
| 1,447 | 34.317073 | 71 | py |
airflow | airflow-main/docs/exts/provider_init_hack.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Bugs in sphinx-autoapi related to metaclasses prevent us from upgrading to 1.3,
which has implicit namespace support. Until that time, we make the providers directory
look like a real package for building docs.
"""
from __future__ import annotations
import os
from sphinx.application import Sphinx
ROOT_PROJECT_DIR = os.path.abspath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
)
PROVIDER_INIT_FILE = os.path.join(ROOT_PROJECT_DIR, "airflow", "providers", "__init__.py")
def _create_init_py(app, config):
del app
del config
# This file is deleted by /docs/build_docs.py. If you are not using the script, the file will be
# deleted by pre-commit.
with open(PROVIDER_INIT_FILE, "w"):
pass
def setup(app: Sphinx):
"""
Sets the plugin up and returns configuration of the plugin.
:param app: application.
    :return: json description of the configuration that is needed by the plugin.
"""
app.connect("config-inited", _create_init_py)
return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True}
| 1,880 | 33.833333 | 100 | py |
airflow | airflow-main/docs/exts/docs_build/code_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from contextlib import suppress
from docs.exts.provider_yaml_utils import load_package_data
ROOT_PROJECT_DIR = os.path.abspath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir)
)
PROVIDER_INIT_FILE = os.path.join(ROOT_PROJECT_DIR, "airflow", "providers", "__init__.py")
DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs")
AIRFLOW_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow")
ALL_PROVIDER_YAMLS = load_package_data()
AIRFLOW_SITE_DIR: str = os.environ.get("AIRFLOW_SITE_DIRECTORY") or ""
PROCESS_TIMEOUT = 15 * 60
CONSOLE_WIDTH = 180
def prepare_code_snippet(file_path: str, line_no: int, context_lines_count: int = 5) -> str:
"""Prepares code snippet.
:param file_path: file path
:param line_no: line number
:param context_lines_count: number of lines of context.
"""
def guess_lexer_for_filename(filename):
from pygments.lexers import get_lexer_for_filename
from pygments.util import ClassNotFound
try:
lexer = get_lexer_for_filename(filename)
except ClassNotFound:
from pygments.lexers.special import TextLexer
lexer = TextLexer()
return lexer
with open(file_path) as text_file:
# Highlight code
code = text_file.read()
with suppress(ImportError):
import pygments
from pygments.formatters.terminal import TerminalFormatter
code = pygments.highlight(
code=code, formatter=TerminalFormatter(), lexer=guess_lexer_for_filename(file_path)
)
code_lines = code.split("\n")
# Prepend line number
code_lines = [f"{line_no:4} | {line}" for line_no, line in enumerate(code_lines, 1)]
        # Cut out the snippet
start_line_no = max(0, line_no - context_lines_count)
end_line_no = line_no + context_lines_count
code_lines = code_lines[start_line_no:end_line_no]
# Join lines
code = "\n".join(code_lines)
return code
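# Illustrative usage (assumed file and line, not executed):
#
#   snippet = prepare_code_snippet("docs/conf.py", 42)
#
# returns a short run of numbered lines around line 42, each rendered as
# "  42 | <source line>", with pygments colouring applied when the library is available.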
def pretty_format_path(path: str, start: str) -> str:
"""Formats path nicely."""
relpath = os.path.relpath(path, start)
if relpath == path:
return path
return f"{start}/{relpath}"
| 3,067 | 34.264368 | 99 | py |
airflow | airflow-main/docs/exts/docs_build/spelling_checks.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import re
from functools import total_ordering
from typing import NamedTuple
from rich.console import Console
from airflow.utils.code_utils import prepare_code_snippet
from docs.exts.docs_build.code_utils import CONSOLE_WIDTH
CURRENT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH)
@total_ordering
class SpellingError(NamedTuple):
"""Spelling errors found when building docs."""
file_path: str | None
line_no: int | None
spelling: str | None
suggestion: str | None
context_line: str | None
message: str
def __eq__(self, other):
left = (
self.file_path,
self.line_no,
self.spelling,
self.context_line,
self.message,
)
right = (
other.file_path,
other.line_no,
other.spelling,
other.context_line,
other.message,
)
return left == right
def __ne__(self, other):
return not self == other
def __lt__(self, other):
file_path_a = self.file_path or ""
file_path_b = other.file_path or ""
line_no_a = self.line_no or 0
line_no_b = other.line_no or 0
context_line_a = self.context_line or ""
context_line_b = other.context_line or ""
left = (file_path_a, line_no_a, context_line_a, self.spelling, self.message)
right = (
file_path_b,
line_no_b,
context_line_b,
other.spelling,
other.message,
)
return left < right
def parse_spelling_warnings(warning_text: str, docs_dir: str) -> list[SpellingError]:
"""
Parses warnings from Sphinx.
:param warning_text: warning to parse
:param docs_dir: documentation directory
:return: list of SpellingError.
"""
sphinx_spelling_errors = []
for sphinx_warning in warning_text.split("\n"):
if not sphinx_warning:
continue
warning_parts = None
match = re.search(r"(.*):(\w*):\s\((\w*)\)\s?(\w*)\s?(.*)", sphinx_warning)
if match:
warning_parts = match.groups()
if warning_parts and len(warning_parts) == 5:
try:
sphinx_spelling_errors.append(
SpellingError(
file_path=os.path.join(docs_dir, warning_parts[0]),
line_no=int(warning_parts[1]) if warning_parts[1] not in ("None", "") else None,
spelling=warning_parts[2],
suggestion=warning_parts[3] if warning_parts[3] else None,
context_line=warning_parts[4],
message=sphinx_warning,
)
)
except Exception:
# If an exception occurred while parsing the warning message, display the raw warning message.
sphinx_spelling_errors.append(
SpellingError(
file_path=None,
line_no=None,
spelling=None,
suggestion=None,
context_line=None,
message=sphinx_warning,
)
)
else:
sphinx_spelling_errors.append(
SpellingError(
file_path=None,
line_no=None,
spelling=None,
suggestion=None,
context_line=None,
message=sphinx_warning,
)
)
return sphinx_spelling_errors
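# Illustrative sketch of a warning line this parser expects (assumed values):
#
#   providers/index.rst:42: (scheduer) scheduler Some context line with the typo
#
# i.e. "<file>:<line>: (<misspelling>) <suggestion> <context>"; lines that do not match
# the regex are preserved as SpellingError entries carrying only the raw message.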
def display_spelling_error_summary(spelling_errors: dict[str, list[SpellingError]]) -> None:
"""Displays summary of Spelling errors"""
console.print()
console.print("[red]" + "#" * 30 + " Start spelling errors summary " + "#" * 30 + "[/]")
console.print()
for package_name, errors in sorted(spelling_errors.items()):
if package_name:
console.print("=" * 30, f" [info]{package_name}[/] ", "=" * 30)
else:
console.print("=" * 30, " [info]General[/] ", "=" * 30)
for warning_no, error in enumerate(sorted(errors), 1):
console.print("-" * 30, f"Error {warning_no:3}", "-" * 30)
_display_error(error)
console.print("=" * 100)
console.print()
msg = """
If there are spelling errors in the summary above, and the spelling is
correct, add the spelling to docs/spelling_wordlist.txt or use the
spelling directive.
Check https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html#private-dictionaries
for more details.
If there are no spelling errors in the summary above, there might be an
issue unrelated to spelling. Please review the traceback.
"""
console.print(msg)
console.print()
    console.print()
    console.print("[red]" + "#" * 30 + " End docs build errors summary " + "#" * 30 + "[/]")
    console.print()
def _display_error(error: SpellingError):
console.print(error.message)
console.print()
if error.file_path:
console.print(f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)}")
if error.spelling:
console.print(f"Incorrect Spelling: '{error.spelling}'")
if error.suggestion:
console.print(f"Suggested Spelling: '{error.suggestion}'")
if error.context_line:
console.print(f"Line with Error: '{error.context_line}'")
if error.file_path and not error.file_path.endswith("<unknown>") and error.line_no:
console.print(f"Line Number: {error.line_no}")
console.print(prepare_code_snippet(error.file_path, error.line_no))
| 6,720 | 34.75 | 110 | py |
airflow | airflow-main/docs/exts/docs_build/third_party_inventories.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
THIRD_PARTY_INDEXES = {
"boto3": "https://boto3.amazonaws.com/v1/documentation/api/latest",
"celery": "https://docs.celeryq.dev/en/stable/",
"docker": "https://docker-py.readthedocs.io/en/stable",
"hdfs": "https://hdfscli.readthedocs.io/en/latest",
"jinja2": "https://jinja.palletsprojects.com/en/2.11.x",
"mongodb": "https://pymongo.readthedocs.io/en/3.11.3",
"pandas": "https://pandas.pydata.org/pandas-docs/stable",
"python": "https://docs.python.org/3",
"requests": "https://requests.readthedocs.io/en/stable",
"sqlalchemy": "https://docs.sqlalchemy.org/en/latest",
"google-api-core": "https://googleapis.dev/python/google-api-core/latest",
"google-cloud-automl": "https://googleapis.dev/python/automl/latest",
"google-cloud-bigquery": "https://googleapis.dev/python/bigquery/latest",
"google-cloud-bigquery-datatransfer": "https://googleapis.dev/python/bigquerydatatransfer/latest",
"google-cloud-bigquery-storage": "https://googleapis.dev/python/bigquerystorage/latest",
"google-cloud-bigtable": "https://googleapis.dev/python/bigtable/latest",
"google-cloud-container": "https://googleapis.dev/python/container/latest",
"google-cloud-core": "https://googleapis.dev/python/google-cloud-core/latest",
"google-cloud-datacatalog": "https://googleapis.dev/python/datacatalog/latest",
"google-cloud-datastore": "https://googleapis.dev/python/datastore/latest",
"google-cloud-dlp": "https://googleapis.dev/python/dlp/latest",
"google-cloud-kms": "https://googleapis.dev/python/cloudkms/latest",
"google-cloud-language": "https://googleapis.dev/python/language/latest",
"google-cloud-monitoring": "https://googleapis.dev/python/monitoring/latest",
"google-cloud-pubsub": "https://googleapis.dev/python/pubsub/latest",
"google-cloud-redis": "https://googleapis.dev/python/redis/latest",
"google-cloud-spanner": "https://googleapis.dev/python/spanner/latest",
"google-cloud-speech": "https://googleapis.dev/python/speech/latest",
"google-cloud-storage": "https://googleapis.dev/python/storage/latest",
"google-cloud-tasks": "https://googleapis.dev/python/cloudtasks/latest",
"google-cloud-texttospeech": "https://googleapis.dev/python/texttospeech/latest",
"google-cloud-translate": "https://googleapis.dev/python/translation/latest",
"google-cloud-videointelligence": "https://googleapis.dev/python/videointelligence/latest",
"google-cloud-vision": "https://googleapis.dev/python/vision/latest",
}
| 3,353 | 59.981818 | 102 | py |
airflow | airflow-main/docs/exts/docs_build/errors.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from functools import total_ordering
from typing import NamedTuple
from rich.console import Console
from airflow.utils.code_utils import prepare_code_snippet
from docs.exts.docs_build.code_utils import CONSOLE_WIDTH # isort:skip (needed to workaround isort bug)
CURRENT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH)
@total_ordering
class DocBuildError(NamedTuple):
"""Errors found in docs build."""
file_path: str | None
line_no: int | None
message: str
def __eq__(self, other):
left = (self.file_path, self.line_no, self.message)
right = (other.file_path, other.line_no, other.message)
return left == right
def __ne__(self, other):
return not self == other
def __lt__(self, right):
file_path_a = self.file_path or ""
file_path_b = right.file_path or ""
line_no_a = self.line_no or 0
line_no_b = right.line_no or 0
left = (file_path_a, line_no_a, self.message)
right = (file_path_b, line_no_b, right.message)
return left < right
def display_errors_summary(build_errors: dict[str, list[DocBuildError]]) -> None:
"""Displays summary of errors"""
console.print()
console.print("[red]" + "#" * 30 + " Start docs build errors summary " + "#" * 30 + "[/]")
console.print()
for package_name, errors in build_errors.items():
if package_name:
console.print("=" * 30 + f" [info]{package_name}[/] " + "=" * 30)
else:
console.print("=" * 30, " [info]General[/] ", "=" * 30)
for warning_no, error in enumerate(sorted(errors), 1):
console.print("-" * 30, f"[red]Error {warning_no:3}[/]", "-" * 20)
console.print(error.message)
console.print()
if error.file_path and not error.file_path.endswith("<unknown>") and error.line_no:
console.print(
f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)} ({error.line_no})"
)
console.print()
console.print(prepare_code_snippet(error.file_path, error.line_no))
elif error.file_path:
console.print(f"File path: {error.file_path}")
console.print()
console.print("[red]" + "#" * 30 + " End docs build errors summary " + "#" * 30 + "[/]")
console.print()
def parse_sphinx_warnings(warning_text: str, docs_dir: str) -> list[DocBuildError]:
"""
Parses warnings from Sphinx.
:param warning_text: warning to parse
:param docs_dir: documentation directory
:return: list of DocBuildErrors.
"""
sphinx_build_errors = []
for sphinx_warning in warning_text.split("\n"):
if not sphinx_warning:
continue
warning_parts = sphinx_warning.split(":", 2)
if len(warning_parts) == 3:
try:
sphinx_build_errors.append(
DocBuildError(
file_path=os.path.join(docs_dir, warning_parts[0]),
line_no=int(warning_parts[1]),
message=warning_parts[2],
)
)
except Exception:
# If an exception occurred while parsing the warning message, display the raw warning message.
sphinx_build_errors.append(
DocBuildError(file_path=None, line_no=None, message=sphinx_warning)
)
else:
sphinx_build_errors.append(DocBuildError(file_path=None, line_no=None, message=sphinx_warning))
return sphinx_build_errors
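# Illustrative sketch of a warning line this parser expects (assumed values):
#
#   apache-airflow/howto/operator.rst:17: WARNING: undefined label: howto/operator:foo
#
# i.e. "<file>:<line>:<message>"; anything that does not split into those three parts is
# kept as a DocBuildError that carries only the raw warning text.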
| 4,610 | 37.747899 | 110 | py |
airflow | airflow-main/docs/exts/docs_build/lint_checks.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import ast
import os
import re
from glob import glob
from itertools import chain
from typing import Iterable
from docs.exts.docs_build.docs_builder import ALL_PROVIDER_YAMLS
from docs.exts.docs_build.errors import DocBuildError
ROOT_PROJECT_DIR = os.path.abspath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir)
)
ROOT_PACKAGE_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow")
DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs")
def find_existing_guide_operator_names(src_dir_pattern: str) -> set[str]:
"""
Find names of existing operators.
:return names of existing operators.
"""
operator_names = set()
paths = glob(src_dir_pattern, recursive=True)
for path in paths:
with open(path) as f:
operator_names |= set(re.findall(".. _howto/operator:(.+?):", f.read()))
return operator_names
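# Illustrative sketch (assumed guide content, not executed): the scan above collects
# anchors of the form
#
#   .. _howto/operator:BashOperator:
#
# so "BashOperator" would end up in the returned set of operator names.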
def extract_ast_class_def_by_name(ast_tree, class_name):
"""
Extracts class definition by name
:param ast_tree: AST tree
:param class_name: name of the class.
:return: class node found
"""
for node in ast.walk(ast_tree):
if isinstance(node, ast.ClassDef) and node.name == class_name:
return node
return None
def _generate_missing_guide_error(path, line_no, operator_name):
return DocBuildError(
file_path=path,
line_no=line_no,
message=(
f"Link to the guide is missing in operator's description: {operator_name}.\n"
f"Please add link to the guide to the description in the following form:\n"
f"\n"
f".. seealso::\n"
f" For more information on how to use this operator, take a look at the guide:\n"
f" :ref:`howto/operator:{operator_name}`\n"
),
)
def check_guide_links_in_operator_descriptions() -> list[DocBuildError]:
"""Check if there are links to guides in operator's descriptions."""
build_errors = []
build_errors.extend(
_check_missing_guide_references(
operator_names=find_existing_guide_operator_names(
f"{DOCS_DIR}/apache-airflow/howto/operator/**/*.rst"
),
python_module_paths=chain(
glob(f"{ROOT_PACKAGE_DIR}/operators/*.py"),
glob(f"{ROOT_PACKAGE_DIR}/sensors/*.py"),
),
)
)
for provider in ALL_PROVIDER_YAMLS:
operator_names = {
*find_existing_guide_operator_names(f"{DOCS_DIR}/{provider['package-name']}/operators/**/*.rst"),
*find_existing_guide_operator_names(f"{DOCS_DIR}/{provider['package-name']}/operators.rst"),
}
# Extract all potential python modules that can contain operators
python_module_paths = chain(
glob(f"{provider['package-dir']}/**/operators/*.py", recursive=True),
glob(f"{provider['package-dir']}/**/sensors/*.py", recursive=True),
glob(f"{provider['package-dir']}/**/transfers/*.py", recursive=True),
)
build_errors.extend(
_check_missing_guide_references(
operator_names=operator_names, python_module_paths=python_module_paths
)
)
return build_errors
def _check_missing_guide_references(operator_names, python_module_paths) -> list[DocBuildError]:
build_errors = []
for py_module_path in python_module_paths:
with open(py_module_path) as f:
py_content = f.read()
if "This module is deprecated" in py_content:
continue
for existing_operator in operator_names:
if f"class {existing_operator}" not in py_content:
continue
# This is a potential file with necessary class definition.
# To make sure it's a real Python class definition, we build AST tree
ast_tree = ast.parse(py_content)
class_def = extract_ast_class_def_by_name(ast_tree, existing_operator)
if class_def is None:
continue
docstring = ast.get_docstring(class_def)
if docstring:
if "This class is deprecated." in docstring:
continue
if f":ref:`howto/operator:{existing_operator}`" in docstring:
continue
build_errors.append(
_generate_missing_guide_error(py_module_path, class_def.lineno, existing_operator)
)
return build_errors
def assert_file_not_contains(
*, file_path: str, pattern: str, message: str | None = None
) -> DocBuildError | None:
"""
    Asserts that the file does not contain the pattern. Returns a DocBuildError if it does.
:param file_path: file
:param pattern: pattern
:param message: message to return
"""
return _extract_file_content(file_path, message, pattern, False)
def assert_file_contains(*, file_path: str, pattern: str, message: str | None = None) -> DocBuildError | None:
"""
    Asserts that the file does contain the pattern. Returns a DocBuildError if it does not.
:param file_path: file
:param pattern: pattern
:param message: message to return
"""
return _extract_file_content(file_path, message, pattern, True)
def _extract_file_content(file_path: str, message: str | None, pattern: str, expected_contain: bool):
if not message:
message = f"Pattern '{pattern}' could not be found in '{file_path}' file."
with open(file_path, "rb", 0) as doc_file:
pattern_compiled = re.compile(pattern)
found = False
for num, line in enumerate(doc_file, 1):
line_decode = line.decode()
result = re.search(pattern_compiled, line_decode)
if not expected_contain and result:
return DocBuildError(file_path=file_path, line_no=num, message=message)
elif expected_contain and result:
found = True
if expected_contain and not found:
return DocBuildError(file_path=file_path, line_no=None, message=message)
return None
def filter_file_list_by_pattern(file_paths: Iterable[str], pattern: str) -> list[str]:
"""
    Filters the file list to those whose content matches the pattern.
:param file_paths: file paths to check
:param pattern: pattern to match
:return: list of files matching the pattern
"""
output_paths = []
pattern_compiled = re.compile(pattern)
for file_path in file_paths:
with open(file_path, "rb", 0) as text_file:
text_file_content = text_file.read().decode()
if re.findall(pattern_compiled, text_file_content):
output_paths.append(file_path)
return output_paths
def find_modules(deprecated_only: bool = False) -> set[str]:
"""
Finds all modules.
:param deprecated_only: whether only deprecated modules should be found.
:return: set of all modules found
"""
file_paths = glob(f"{ROOT_PACKAGE_DIR}/**/*.py", recursive=True)
# Exclude __init__.py
file_paths = [f for f in file_paths if not f.endswith("__init__.py")]
if deprecated_only:
file_paths = filter_file_list_by_pattern(file_paths, r"This module is deprecated.")
# Make path relative
file_paths = [os.path.relpath(f, ROOT_PROJECT_DIR) for f in file_paths]
# Convert filename to module
modules_names = {file_path.rpartition(".")[0].replace("/", ".") for file_path in file_paths}
return modules_names
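# Illustrative sketch (assumed layout, not executed): a source file such as
# "airflow/operators/bash.py" is reported as the module "airflow.operators.bash";
# with deprecated_only=True only files containing "This module is deprecated." are kept.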
def check_exampleinclude_for_example_dags() -> list[DocBuildError]:
"""Checks all exampleincludes for example dags."""
all_docs_files = glob(f"{DOCS_DIR}/**/*.rst", recursive=True)
build_errors = []
for doc_file in all_docs_files:
build_error = assert_file_not_contains(
file_path=doc_file,
pattern=r"literalinclude::.+(?:example_dags|tests/system/)",
message=(
"literalinclude directive is prohibited for example DAGs. \n"
"You should use the exampleinclude directive to include example DAGs."
),
)
if build_error:
build_errors.append(build_error)
return build_errors
def check_enforce_code_block() -> list[DocBuildError]:
"""Checks all code:: blocks."""
all_docs_files = glob(f"{DOCS_DIR}/**/*.rst", recursive=True)
build_errors = []
for doc_file in all_docs_files:
build_error = assert_file_not_contains(
file_path=doc_file,
pattern=r"^.. code::",
message=(
"We recommend using the code-block directive instead of the code directive. "
"The code-block directive is more feature-full."
),
)
if build_error:
build_errors.append(build_error)
return build_errors
def find_example_dags(provider_dir):
system_tests_dir = provider_dir.replace(f"{ROOT_PACKAGE_DIR}/", "")
yield from glob(f"{provider_dir}/**/*example_dags", recursive=True)
yield from glob(f"{ROOT_PROJECT_DIR}/tests/system/{system_tests_dir}/*/", recursive=True)
def check_pypi_repository_in_provider_tocs() -> list[DocBuildError]:
"""Checks that each documentation for provider packages has a link to PyPI files in the TOC."""
build_errors = []
for provider in ALL_PROVIDER_YAMLS:
doc_file_path = f"{DOCS_DIR}/{provider['package-name']}/index.rst"
expected_text = f"PyPI Repository <https://pypi.org/project/{provider['package-name']}/>"
build_error = assert_file_contains(
file_path=doc_file_path,
pattern=re.escape(expected_text),
message=(
f"A link to the PyPI in table of contents is missing. Can you please add it?\n\n"
f" {expected_text}"
),
)
if build_error:
build_errors.append(build_error)
return build_errors
def run_all_check(disable_provider_checks: bool = False) -> list[DocBuildError]:
"""Run all checks from this module"""
general_errors = []
general_errors.extend(check_guide_links_in_operator_descriptions())
general_errors.extend(check_enforce_code_block())
general_errors.extend(check_exampleinclude_for_example_dags())
if not disable_provider_checks:
general_errors.extend(check_pypi_repository_in_provider_tocs())
return general_errors
| 11,214 | 36.135762 | 110 | py |
airflow | airflow-main/docs/exts/docs_build/helm_chart_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from pathlib import Path
import yaml
CHART_DIR = Path(__file__).resolve().parents[2] / "chart"
CHART_YAML_PATH = os.path.join(CHART_DIR, "Chart.yaml")
def chart_yaml() -> dict:
with open(CHART_YAML_PATH) as f:
return yaml.safe_load(f)
def chart_version() -> str:
return chart_yaml()["version"]
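# Illustrative usage (assumed chart metadata, not executed):
#
#   chart_version()  # e.g. "1.10.0", taken from the "version" key of chart/Chart.yaml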
| 1,147 | 31.8 | 62 | py |
airflow | airflow-main/docs/exts/docs_build/dev_index_generator.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import argparse
import os
import sys
from glob import glob
import jinja2
# isort:off (needed to workaround isort bug)
from docs.exts.provider_yaml_utils import load_package_data
# isort:on (needed to workaround isort bug)
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
BUILD_DIR = os.path.abspath(os.path.join(DOCS_DIR, "_build"))
ALL_PROVIDER_YAMLS = load_package_data()
def _get_jinja_env():
loader = jinja2.FileSystemLoader(CURRENT_DIR, followlinks=True)
env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined)
return env
def _render_template(template_name, **kwargs):
return _get_jinja_env().get_template(template_name).render(**kwargs)
def _render_content():
provider_packages = [
os.path.basename(os.path.dirname(p)) for p in glob(f"{BUILD_DIR}/docs/apache-airflow-providers-*/")
]
providers = []
for package_name in provider_packages:
try:
current_provider = next(
provider_yaml
for provider_yaml in ALL_PROVIDER_YAMLS
if provider_yaml["package-name"] == package_name
)
providers.append(current_provider)
except StopIteration:
print(f"WARNING! Could not find provider.yaml file for package: {package_name}")
content = _render_template(
"dev_index_template.html.jinja2", providers=sorted(providers, key=lambda k: k["package-name"])
)
return content
def generate_index(out_file: str) -> None:
"""
Generates an index for development documentation.
:param out_file: The path where the index should be stored
"""
content = _render_content()
with open(out_file, "w") as output_file:
output_file.write(content)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("outfile", nargs="?", type=argparse.FileType("w"), default=sys.stdout)
args = parser.parse_args()
args.outfile.write(_render_content())
| 2,890 | 33.011765 | 107 | py |
airflow | airflow-main/docs/exts/docs_build/package_filter.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import fnmatch
def process_package_filters(available_packages: list[str], package_filters: list[str] | None):
"""Filters the package list against a set of filters.
    A package is returned if it matches at least one filter. The function keeps the order of the packages.
"""
if not package_filters:
return available_packages
invalid_filters = [
f for f in package_filters if not any(fnmatch.fnmatch(p, f) for p in available_packages)
]
if invalid_filters:
raise SystemExit(
f"Some filters did not find any package: {invalid_filters}, Please check if they are correct."
)
return [p for p in available_packages if any(fnmatch.fnmatch(p, f) for f in package_filters)]
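# Illustrative usage (assumed package names, not executed):
#
#   process_package_filters(
#       ["apache-airflow", "apache-airflow-providers-amazon", "helm-chart"],
#       ["apache-airflow-providers-*"],
#   )
#   # -> ["apache-airflow-providers-amazon"]; a filter matching nothing raises SystemExit.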
| 1,562 | 39.076923 | 106 | py |
airflow | airflow-main/docs/exts/docs_build/docs_builder.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import re
import shlex
import shutil
from glob import glob
from subprocess import run
from rich.console import Console
from .code_utils import (
AIRFLOW_SITE_DIR,
ALL_PROVIDER_YAMLS,
CONSOLE_WIDTH,
DOCS_DIR,
PROCESS_TIMEOUT,
pretty_format_path,
)
from .errors import DocBuildError, parse_sphinx_warnings
from .helm_chart_utils import chart_version
from .spelling_checks import SpellingError, parse_spelling_warnings
console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH)
class AirflowDocsBuilder:
"""Documentation builder for Airflow."""
def __init__(self, package_name: str):
self.package_name = package_name
@property
def _doctree_dir(self) -> str:
return f"{DOCS_DIR}/_doctrees/docs/{self.package_name}"
@property
def _inventory_cache_dir(self) -> str:
return f"{DOCS_DIR}/_inventory_cache"
@property
def is_versioned(self):
"""Is current documentation package versioned?"""
# Disable versioning. This documentation does not apply to any released product and we can update
# it as needed, i.e. with each new package of providers.
return self.package_name not in ("apache-airflow-providers", "docker-stack")
@property
def _build_dir(self) -> str:
if self.is_versioned:
version = "stable"
return f"{DOCS_DIR}/_build/docs/{self.package_name}/{version}"
else:
return f"{DOCS_DIR}/_build/docs/{self.package_name}"
@property
def log_spelling_filename(self) -> str:
"""Log from spelling job."""
return os.path.join(self._build_dir, f"output-spelling-{self.package_name}.log")
@property
def log_spelling_output_dir(self) -> str:
"""Results from spelling job."""
return os.path.join(self._build_dir, f"output-spelling-results-{self.package_name}")
@property
def log_build_filename(self) -> str:
"""Log from build job."""
return os.path.join(self._build_dir, f"output-build-{self.package_name}.log")
@property
def log_build_warning_filename(self) -> str:
"""Warnings from build job."""
return os.path.join(self._build_dir, f"warning-build-{self.package_name}.log")
@property
def _current_version(self):
if not self.is_versioned:
raise Exception("This documentation package is not versioned")
if self.package_name == "apache-airflow":
from airflow.version import version as airflow_version
return airflow_version
if self.package_name.startswith("apache-airflow-providers-"):
provider = next(p for p in ALL_PROVIDER_YAMLS if p["package-name"] == self.package_name)
return provider["versions"][0]
if self.package_name == "helm-chart":
return chart_version()
        raise Exception(f"Unsupported package: {self.package_name}")
@property
def _publish_dir(self) -> str:
if self.is_versioned:
return f"docs-archive/{self.package_name}/{self._current_version}"
else:
return f"docs-archive/{self.package_name}"
@property
def _src_dir(self) -> str:
return f"{DOCS_DIR}/{self.package_name}"
def clean_files(self) -> None:
"""Cleanup all artifacts generated by previous builds."""
api_dir = os.path.join(self._src_dir, "_api")
shutil.rmtree(api_dir, ignore_errors=True)
shutil.rmtree(self._build_dir, ignore_errors=True)
os.makedirs(api_dir, exist_ok=True)
os.makedirs(self._build_dir, exist_ok=True)
def check_spelling(self, verbose: bool) -> list[SpellingError]:
"""
Checks spelling
:param verbose: whether to show output while running
:return: list of errors
"""
spelling_errors = []
os.makedirs(self._build_dir, exist_ok=True)
shutil.rmtree(self.log_spelling_output_dir, ignore_errors=True)
os.makedirs(self.log_spelling_output_dir, exist_ok=True)
build_cmd = [
"sphinx-build",
"-W", # turn warnings into errors
"--color", # do emit colored output
"-T", # show full traceback on exception
"-b", # builder to use
"spelling",
"-c",
DOCS_DIR,
"-d", # path for the cached environment and doctree files
self._doctree_dir,
self._src_dir, # path to documentation source files
self.log_spelling_output_dir,
]
env = os.environ.copy()
env["AIRFLOW_PACKAGE_NAME"] = self.package_name
if verbose:
console.print(
f"[info]{self.package_name:60}:[/] Executing cmd: ",
" ".join(shlex.quote(c) for c in build_cmd),
)
console.print(f"[info]{self.package_name:60}:[/] The output is hidden until an error occurs.")
with open(self.log_spelling_filename, "w") as output:
completed_proc = run(
build_cmd,
cwd=self._src_dir,
env=env,
stdout=output if not verbose else None,
stderr=output if not verbose else None,
timeout=PROCESS_TIMEOUT,
)
if completed_proc.returncode != 0:
spelling_errors.append(
SpellingError(
file_path=None,
line_no=None,
spelling=None,
suggestion=None,
context_line=None,
message=(
f"Sphinx spellcheck returned non-zero exit status: {completed_proc.returncode}."
),
)
)
warning_text = ""
for filepath in glob(f"{self.log_spelling_output_dir}/**/*.spelling", recursive=True):
with open(filepath) as spelling_file:
warning_text += spelling_file.read()
spelling_errors.extend(parse_spelling_warnings(warning_text, self._src_dir))
console.print(f"[info]{self.package_name:60}:[/] [red]Finished spell-checking with errors[/]")
else:
if spelling_errors:
console.print(
f"[info]{self.package_name:60}:[/] [yellow]Finished spell-checking with warnings[/]"
)
else:
console.print(
f"[info]{self.package_name:60}:[/] [green]Finished spell-checking successfully[/]"
)
return spelling_errors
def build_sphinx_docs(self, verbose: bool) -> list[DocBuildError]:
"""
Build Sphinx documentation.
:param verbose: whether to show output while running
:return: list of errors
"""
build_errors = []
os.makedirs(self._build_dir, exist_ok=True)
build_cmd = [
"sphinx-build",
"-T", # show full traceback on exception
"--color", # do emit colored output
"-b", # builder to use
"html",
"-d", # path for the cached environment and doctree files
self._doctree_dir,
"-c",
DOCS_DIR,
"-w", # write warnings (and errors) to given file
self.log_build_warning_filename,
self._src_dir,
self._build_dir, # path to output directory
]
env = os.environ.copy()
env["AIRFLOW_PACKAGE_NAME"] = self.package_name
if verbose:
console.print(
f"[info]{self.package_name:60}:[/] Executing cmd: ",
" ".join(shlex.quote(c) for c in build_cmd),
)
else:
console.print(
f"[info]{self.package_name:60}:[/] Running sphinx. "
f"The output is hidden until an error occurs."
)
with open(self.log_build_filename, "w") as output:
completed_proc = run(
build_cmd,
cwd=self._src_dir,
env=env,
stdout=output if not verbose else None,
stderr=output if not verbose else None,
timeout=PROCESS_TIMEOUT,
)
if completed_proc.returncode != 0:
build_errors.append(
DocBuildError(
file_path=None,
line_no=None,
message=f"Sphinx returned non-zero exit status: {completed_proc.returncode}.",
)
)
if os.path.isfile(self.log_build_warning_filename):
with open(self.log_build_warning_filename) as warning_file:
warning_text = warning_file.read()
# Remove 7-bit C1 ANSI escape sequences
warning_text = re.sub(r"\x1B[@-_][0-?]*[ -/]*[@-~]", "", warning_text)
build_errors.extend(parse_sphinx_warnings(warning_text, self._src_dir))
if build_errors:
console.print(f"[info]{self.package_name:60}:[/] [red]Finished docs building with errors[/]")
else:
console.print(f"[info]{self.package_name:60}:[/] [green]Finished docs building successfully[/]")
return build_errors
def publish(self, override_versioned: bool):
"""Copy documentation packages files to airflow-site repository."""
console.print(f"Publishing docs for {self.package_name}")
output_dir = os.path.join(AIRFLOW_SITE_DIR, self._publish_dir)
pretty_source = pretty_format_path(self._build_dir, os.getcwd())
pretty_target = pretty_format_path(output_dir, AIRFLOW_SITE_DIR)
console.print(f"Copy directory: {pretty_source} => {pretty_target}")
if os.path.exists(output_dir):
if self.is_versioned:
if override_versioned:
console.print(f"Overriding previously existing {output_dir}! ")
else:
console.print(
f"Skipping previously existing {output_dir}! "
f"Delete it manually if you want to regenerate it!"
)
console.print()
return
shutil.rmtree(output_dir)
shutil.copytree(self._build_dir, output_dir)
if self.is_versioned:
with open(os.path.join(output_dir, "..", "stable.txt"), "w") as stable_file:
stable_file.write(self._current_version)
console.print()
def get_available_providers_packages():
"""Get list of all available providers packages to build."""
return [provider["package-name"] for provider in ALL_PROVIDER_YAMLS if not provider.get("suspended")]
def get_available_packages():
"""Get list of all available packages to build."""
provider_package_names = get_available_providers_packages()
return [
"apache-airflow",
*provider_package_names,
"apache-airflow-providers",
"helm-chart",
"docker-stack",
]
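# Illustrative usage of the builder (assumed package name, not executed):
#
#   builder = AirflowDocsBuilder("apache-airflow")
#   builder.clean_files()
#   build_errors = builder.build_sphinx_docs(verbose=False)
#   spelling_errors = builder.check_spelling(verbose=False)
#
# Both build methods shell out to sphinx-build and return the parsed error lists.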
| 11,915 | 37.563107 | 108 | py |
airflow | airflow-main/docs/exts/docs_build/__init__.py | # Disable Flake8 because of all the sphinx imports
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 838 | 43.157895 | 62 | py |
airflow | airflow-main/docs/exts/docs_build/fetch_inventories.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import concurrent
import concurrent.futures
import datetime
import os
import shutil
import sys
import traceback
from itertools import repeat
from tempfile import NamedTemporaryFile
from typing import Iterator
import requests
import urllib3.exceptions
from requests.adapters import DEFAULT_POOLSIZE
from sphinx.util.inventory import InventoryFileReader
from airflow.utils.helpers import partition
from docs.exts.docs_build.docs_builder import get_available_providers_packages
from docs.exts.docs_build.third_party_inventories import THIRD_PARTY_INDEXES
CURRENT_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir, os.pardir))
DOCS_DIR = os.path.join(ROOT_DIR, "docs")
CACHE_DIR = os.path.join(DOCS_DIR, "_inventory_cache")
EXPIRATION_DATE_PATH = os.path.join(DOCS_DIR, "_inventory_cache", "expiration-date")
S3_DOC_URL = "http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com"
S3_DOC_URL_VERSIONED = S3_DOC_URL + "/docs/{package_name}/stable/objects.inv"
S3_DOC_URL_NON_VERSIONED = S3_DOC_URL + "/docs/{package_name}/objects.inv"
def _fetch_file(session: requests.Session, package_name: str, url: str, path: str) -> tuple[str, bool]:
"""
    Download a file, validate the Sphinx Inventory header, and return status information
    as a tuple of package name and success status (bool).
"""
try:
response = session.get(url, allow_redirects=True, stream=True)
except (requests.RequestException, urllib3.exceptions.HTTPError):
print(f"{package_name}: Failed to fetch inventory: {url}")
traceback.print_exc(file=sys.stderr)
return package_name, False
if not response.ok:
print(f"{package_name}: Failed to fetch inventory: {url}")
print(f"{package_name}: Failed with status: {response.status_code}", file=sys.stderr)
return package_name, False
if response.url != url:
print(f"{package_name}: {url} redirected to {response.url}")
with NamedTemporaryFile(suffix=package_name, mode="wb+") as tf:
for chunk in response.iter_content(chunk_size=4096):
tf.write(chunk)
tf.flush()
tf.seek(0, 0)
line = InventoryFileReader(tf).readline()
if not line.startswith("# Sphinx inventory version"):
print(f"{package_name}: Response contain unexpected Sphinx Inventory header: {line!r}.")
return package_name, False
tf.seek(0, 0)
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "wb") as f:
shutil.copyfileobj(tf, f)
print(f"{package_name}: Fetched inventory: {response.url}")
return package_name, True
def _is_outdated(path: str):
if not os.path.exists(path):
return True
delta = datetime.datetime.now() - datetime.datetime.fromtimestamp(os.path.getmtime(path))
return delta > datetime.timedelta(hours=12)
def fetch_inventories():
"""Fetch all inventories for Airflow documentation packages and store in cache."""
os.makedirs(os.path.dirname(CACHE_DIR), exist_ok=True)
to_download: list[tuple[str, str, str]] = []
for pkg_name in get_available_providers_packages():
to_download.append(
(
pkg_name,
S3_DOC_URL_VERSIONED.format(package_name=pkg_name),
f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
)
for pkg_name in ["apache-airflow", "helm-chart"]:
to_download.append(
(
pkg_name,
S3_DOC_URL_VERSIONED.format(package_name=pkg_name),
f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
)
for pkg_name in ["apache-airflow-providers", "docker-stack"]:
to_download.append(
(
pkg_name,
S3_DOC_URL_NON_VERSIONED.format(package_name=pkg_name),
f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
)
to_download.extend(
(
pkg_name,
f"{doc_url}/objects.inv",
f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
for pkg_name, doc_url in THIRD_PARTY_INDEXES.items()
)
to_download = [(pkg_name, url, path) for pkg_name, url, path in to_download if _is_outdated(path)]
if not to_download:
print("Nothing to do")
return []
print(f"To download {len(to_download)} inventorie(s)")
with requests.Session() as session, concurrent.futures.ThreadPoolExecutor(DEFAULT_POOLSIZE) as pool:
download_results: Iterator[tuple[str, bool]] = pool.map(
_fetch_file,
repeat(session, len(to_download)),
(pkg_name for pkg_name, _, _ in to_download),
(url for _, url, _ in to_download),
(path for _, _, path in to_download),
)
failed, success = partition(lambda d: d[1], download_results)
failed, success = list(failed), list(success)
print(f"Result: {len(success)} success, {len(failed)} failed")
if failed:
terminate = False
print("Failed packages:")
for pkg_no, (pkg_name, _) in enumerate(failed, start=1):
print(f"{pkg_no}. {pkg_name}")
if not terminate and not pkg_name.startswith("apache-airflow"):
                # Handles the case where a newly created Community Provider has not uploaded
                # its inventory yet: we terminate execution only if an error happens while
                # fetching third-party intersphinx inventories.
terminate = True
if terminate:
print("Terminate execution.")
raise SystemExit(1)
return [pkg_name for pkg_name, status in failed]
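# Illustrative behaviour note (not executed): fetch_inventories() downloads missing or
# stale objects.inv files into docs/_inventory_cache/<package>/objects.inv and returns
# the list of package names whose inventories could not be fetched.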
| 6,524 | 37.839286 | 109 | py |
airflow | airflow-main/docs/exts/docs_build/github_action_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from contextlib import contextmanager
@contextmanager
def with_group(title):
"""
If used in GitHub Action, creates an expandable group in the GitHub Action log.
Otherwise, display simple text groups.
For more information, see:
https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines
"""
if os.environ.get("GITHUB_ACTIONS", "false") != "true":
print("#" * 20, title, "#" * 20)
yield
return
print(f"::group::{title}")
print()
yield
print("\033[0m")
print("::endgroup::")
| 1,441 | 34.170732 | 125 | py |
airflow | airflow-main/docs/rtd-deprecation/conf.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
project = "Apache Airflow"
html_extra_path = ["404.html"]
| 880 | 39.045455 | 62 | py |
airflow | airflow-main/airflow/typing_compat.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module provides helper code to make type annotation within Airflow codebase easier."""
from __future__ import annotations
__all__ = [
"Literal",
"ParamSpec",
"Protocol",
"TypedDict",
"TypeGuard",
"runtime_checkable",
]
import sys
from typing import Protocol, TypedDict, runtime_checkable
# Literal in 3.8 is limited to one single argument, not e.g. "Literal[1, 2]".
if sys.version_info >= (3, 9):
from typing import Literal
else:
    from typing_extensions import Literal
if sys.version_info >= (3, 10):
from typing import ParamSpec, TypeGuard
else:
from typing_extensions import ParamSpec, TypeGuard
| 1,425 | 32.162791 | 94 | py |
airflow | airflow-main/airflow/plugins_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Manages all plugins."""
from __future__ import annotations
import importlib
import importlib.machinery
import importlib.util
import inspect
import logging
import os
import sys
import types
from typing import TYPE_CHECKING, Any, Iterable
try:
import importlib_metadata
except ImportError:
from importlib import metadata as importlib_metadata # type: ignore[no-redef]
from types import ModuleType
from airflow import settings
from airflow.utils.entry_points import entry_points_with_dist
from airflow.utils.file import find_path_from_directory
from airflow.utils.module_loading import qualname
if TYPE_CHECKING:
from airflow.hooks.base import BaseHook
from airflow.listeners.listener import ListenerManager
from airflow.timetables.base import Timetable
log = logging.getLogger(__name__)
import_errors: dict[str, str] = {}
plugins: list[AirflowPlugin] | None = None
# Plugin components to integrate as modules
registered_hooks: list[BaseHook] | None = None
macros_modules: list[Any] | None = None
executors_modules: list[Any] | None = None
# Plugin components to integrate directly
admin_views: list[Any] | None = None
flask_blueprints: list[Any] | None = None
menu_links: list[Any] | None = None
flask_appbuilder_views: list[Any] | None = None
flask_appbuilder_menu_links: list[Any] | None = None
global_operator_extra_links: list[Any] | None = None
operator_extra_links: list[Any] | None = None
registered_operator_link_classes: dict[str, type] | None = None
registered_ti_dep_classes: dict[str, type] | None = None
timetable_classes: dict[str, type[Timetable]] | None = None
"""
Mapping of class names to class of OperatorLinks registered by plugins.
Used by the DAG serialization code to only allow specific classes to be created
during deserialization.
"""
PLUGINS_ATTRIBUTES_TO_DUMP = {
"hooks",
"executors",
"macros",
"flask_blueprints",
"appbuilder_views",
"appbuilder_menu_items",
"global_operator_extra_links",
"operator_extra_links",
"ti_deps",
"timetables",
"source",
"listeners",
}
class AirflowPluginSource:
"""Class used to define an AirflowPluginSource."""
def __str__(self):
raise NotImplementedError
def __html__(self):
raise NotImplementedError
class PluginsDirectorySource(AirflowPluginSource):
"""Class used to define Plugins loaded from Plugins Directory."""
def __init__(self, path):
self.path = os.path.relpath(path, settings.PLUGINS_FOLDER)
def __str__(self):
return f"$PLUGINS_FOLDER/{self.path}"
def __html__(self):
return f"<em>$PLUGINS_FOLDER/</em>{self.path}"
class EntryPointSource(AirflowPluginSource):
"""Class used to define Plugins loaded from entrypoint."""
def __init__(self, entrypoint: importlib_metadata.EntryPoint, dist: importlib_metadata.Distribution):
self.dist = dist.metadata["Name"]
self.version = dist.version
self.entrypoint = str(entrypoint)
def __str__(self):
return f"{self.dist}=={self.version}: {self.entrypoint}"
def __html__(self):
return f"<em>{self.dist}=={self.version}:</em> {self.entrypoint}"
class AirflowPluginException(Exception):
"""Exception when loading plugin."""
class AirflowPlugin:
"""Class used to define AirflowPlugin."""
name: str | None = None
source: AirflowPluginSource | None = None
hooks: list[Any] = []
executors: list[Any] = []
macros: list[Any] = []
admin_views: list[Any] = []
flask_blueprints: list[Any] = []
menu_links: list[Any] = []
appbuilder_views: list[Any] = []
appbuilder_menu_items: list[Any] = []
# A list of global operator extra links that can redirect users to
# external systems. These extra links will be available on the
# task page in the form of buttons.
#
# Note: the global operator extra link can be overridden at each
# operator level.
global_operator_extra_links: list[Any] = []
# A list of operator extra links to override or add operator links
# to existing Airflow Operators.
# These extra links will be available on the task page in form of
# buttons.
operator_extra_links: list[Any] = []
ti_deps: list[Any] = []
# A list of timetable classes that can be used for DAG scheduling.
timetables: list[type[Timetable]] = []
listeners: list[ModuleType | object] = []
@classmethod
def validate(cls):
"""Validates that plugin has a name."""
if not cls.name:
raise AirflowPluginException("Your plugin needs a name.")
@classmethod
def on_load(cls, *args, **kwargs):
"""
        Executed when the plugin is loaded; this method is only called once during runtime.
:param args: If future arguments are passed in on call.
:param kwargs: If future arguments are passed in on call.
"""
def is_valid_plugin(plugin_obj):
"""
Check whether a potential object is a subclass of the AirflowPlugin class.
:param plugin_obj: potential subclass of AirflowPlugin
:return: Whether or not the obj is a valid subclass of
AirflowPlugin
"""
global plugins
if (
inspect.isclass(plugin_obj)
and issubclass(plugin_obj, AirflowPlugin)
and (plugin_obj is not AirflowPlugin)
):
plugin_obj.validate()
return plugin_obj not in plugins
return False
def register_plugin(plugin_instance):
"""
    Start plugin load and register it after successful initialization.
:param plugin_instance: subclass of AirflowPlugin
"""
global plugins
plugin_instance.on_load()
plugins.append(plugin_instance)
def load_entrypoint_plugins():
"""
    Load and register AirflowPlugin subclasses from entry points.
The entry_point group should be 'airflow.plugins'.
"""
global import_errors
log.debug("Loading plugins from entrypoints")
for entry_point, dist in entry_points_with_dist("airflow.plugins"):
log.debug("Importing entry_point plugin %s", entry_point.name)
try:
plugin_class = entry_point.load()
if not is_valid_plugin(plugin_class):
continue
plugin_instance = plugin_class()
plugin_instance.source = EntryPointSource(entry_point, dist)
register_plugin(plugin_instance)
except Exception as e:
log.exception("Failed to import plugin %s", entry_point.name)
import_errors[entry_point.module] = str(e)
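# Example (illustrative sketch): how a distribution might register a plugin under the
# "airflow.plugins" entry point group consumed above; the package and module names are
# hypothetical.
#
#     # setup.py of the hypothetical plugin distribution
#     from setuptools import setup
#
#     setup(
#         name="my-company-airflow-plugin",
#         entry_points={
#             "airflow.plugins": [
#                 "my_company_plugin = my_company.airflow_plugin:MyCompanyPlugin",
#             ],
#         },
#     )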
def load_plugins_from_plugin_directory():
"""Load and register Airflow Plugins from plugins directory."""
global import_errors
log.debug("Loading plugins from directory: %s", settings.PLUGINS_FOLDER)
for file_path in find_path_from_directory(settings.PLUGINS_FOLDER, ".airflowignore"):
if not os.path.isfile(file_path):
continue
mod_name, file_ext = os.path.splitext(os.path.split(file_path)[-1])
if file_ext != ".py":
continue
try:
loader = importlib.machinery.SourceFileLoader(mod_name, file_path)
spec = importlib.util.spec_from_loader(mod_name, loader)
mod = importlib.util.module_from_spec(spec)
sys.modules[spec.name] = mod
loader.exec_module(mod)
log.debug("Importing plugin module %s", file_path)
for mod_attr_value in (m for m in mod.__dict__.values() if is_valid_plugin(m)):
plugin_instance = mod_attr_value()
plugin_instance.source = PluginsDirectorySource(file_path)
register_plugin(plugin_instance)
except Exception as e:
log.exception("Failed to import plugin %s", file_path)
import_errors[file_path] = str(e)
def make_module(name: str, objects: list[Any]):
"""Creates new module."""
if not objects:
return None
log.debug("Creating module %s", name)
name = name.lower()
module = types.ModuleType(name)
module._name = name.split(".")[-1] # type: ignore
module._objects = objects # type: ignore
module.__dict__.update((o.__name__, o) for o in objects)
return module
def ensure_plugins_loaded():
"""
Load plugins from plugins directory and entrypoints.
Plugins are only loaded if they have not been previously loaded.
"""
from airflow.stats import Stats
global plugins, registered_hooks
if plugins is not None:
log.debug("Plugins are already loaded. Skipping.")
return
if not settings.PLUGINS_FOLDER:
raise ValueError("Plugins folder is not set")
log.debug("Loading plugins")
with Stats.timer() as timer:
plugins = []
registered_hooks = []
load_plugins_from_plugin_directory()
load_entrypoint_plugins()
# We don't do anything with these for now, but we want to keep track of
# them so we can integrate them in to the UI's Connection screens
for plugin in plugins:
registered_hooks.extend(plugin.hooks)
num_loaded = len(plugins)
if num_loaded > 0:
log.debug("Loading %d plugin(s) took %.2f seconds", num_loaded, timer.duration)
def initialize_web_ui_plugins():
"""Collect extension points for WEB UI."""
global plugins
global flask_blueprints
global flask_appbuilder_views
global flask_appbuilder_menu_links
if (
flask_blueprints is not None
and flask_appbuilder_views is not None
and flask_appbuilder_menu_links is not None
):
return
ensure_plugins_loaded()
if plugins is None:
raise AirflowPluginException("Can't load plugins.")
log.debug("Initialize Web UI plugin")
flask_blueprints = []
flask_appbuilder_views = []
flask_appbuilder_menu_links = []
for plugin in plugins:
flask_appbuilder_views.extend(plugin.appbuilder_views)
flask_appbuilder_menu_links.extend(plugin.appbuilder_menu_items)
flask_blueprints.extend([{"name": plugin.name, "blueprint": bp} for bp in plugin.flask_blueprints])
if (plugin.admin_views and not plugin.appbuilder_views) or (
plugin.menu_links and not plugin.appbuilder_menu_items
):
log.warning(
"Plugin '%s' may not be compatible with the current Airflow version. "
"Please contact the author of the plugin.",
plugin.name,
)
def initialize_ti_deps_plugins():
"""Create modules for loaded extension from custom task instance dependency rule plugins."""
global registered_ti_dep_classes
if registered_ti_dep_classes is not None:
return
ensure_plugins_loaded()
if plugins is None:
raise AirflowPluginException("Can't load plugins.")
log.debug("Initialize custom taskinstance deps plugins")
registered_ti_dep_classes = {}
for plugin in plugins:
registered_ti_dep_classes.update(
{qualname(ti_dep.__class__): ti_dep.__class__ for ti_dep in plugin.ti_deps}
)
def initialize_extra_operators_links_plugins():
"""Create modules for loaded extension from extra operators links plugins."""
global global_operator_extra_links
global operator_extra_links
global registered_operator_link_classes
if (
global_operator_extra_links is not None
and operator_extra_links is not None
and registered_operator_link_classes is not None
):
return
ensure_plugins_loaded()
if plugins is None:
raise AirflowPluginException("Can't load plugins.")
log.debug("Initialize extra operators links plugins")
global_operator_extra_links = []
operator_extra_links = []
registered_operator_link_classes = {}
for plugin in plugins:
global_operator_extra_links.extend(plugin.global_operator_extra_links)
operator_extra_links.extend(list(plugin.operator_extra_links))
registered_operator_link_classes.update(
{qualname(link.__class__): link.__class__ for link in plugin.operator_extra_links}
)
def initialize_timetables_plugins():
"""Collect timetable classes registered by plugins."""
global timetable_classes
if timetable_classes is not None:
return
ensure_plugins_loaded()
if plugins is None:
raise AirflowPluginException("Can't load plugins.")
log.debug("Initialize extra timetables plugins")
timetable_classes = {
qualname(timetable_class): timetable_class
for plugin in plugins
for timetable_class in plugin.timetables
}
def integrate_executor_plugins() -> None:
"""Integrate executor plugins to the context."""
global plugins
global executors_modules
if executors_modules is not None:
return
ensure_plugins_loaded()
if plugins is None:
raise AirflowPluginException("Can't load plugins.")
log.debug("Integrate executor plugins")
executors_modules = []
for plugin in plugins:
if plugin.name is None:
raise AirflowPluginException("Invalid plugin name")
plugin_name: str = plugin.name
executors_module = make_module("airflow.executors." + plugin_name, plugin.executors)
if executors_module:
executors_modules.append(executors_module)
sys.modules[executors_module.__name__] = executors_module
def integrate_macros_plugins() -> None:
"""Integrates macro plugins."""
global plugins
global macros_modules
from airflow import macros
if macros_modules is not None:
return
ensure_plugins_loaded()
if plugins is None:
raise AirflowPluginException("Can't load plugins.")
log.debug("Integrate DAG plugins")
macros_modules = []
for plugin in plugins:
if plugin.name is None:
raise AirflowPluginException("Invalid plugin name")
macros_module = make_module(f"airflow.macros.{plugin.name}", plugin.macros)
if macros_module:
macros_modules.append(macros_module)
sys.modules[macros_module.__name__] = macros_module
# Register the newly created module on airflow.macros such that it
# can be accessed when rendering templates.
setattr(macros, plugin.name, macros_module)
def integrate_listener_plugins(listener_manager: ListenerManager) -> None:
"""Add listeners from plugins."""
global plugins
ensure_plugins_loaded()
if plugins:
for plugin in plugins:
if plugin.name is None:
raise AirflowPluginException("Invalid plugin name")
for listener in plugin.listeners:
listener_manager.add_listener(listener)
def get_plugin_info(attrs_to_dump: Iterable[str] | None = None) -> list[dict[str, Any]]:
"""
Dump plugins attributes.
:param attrs_to_dump: A list of plugin attributes to dump
"""
ensure_plugins_loaded()
integrate_executor_plugins()
integrate_macros_plugins()
initialize_web_ui_plugins()
initialize_extra_operators_links_plugins()
if not attrs_to_dump:
attrs_to_dump = PLUGINS_ATTRIBUTES_TO_DUMP
plugins_info = []
if plugins:
for plugin in plugins:
info: dict[str, Any] = {"name": plugin.name}
for attr in attrs_to_dump:
if attr in ("global_operator_extra_links", "operator_extra_links"):
info[attr] = [f"<{qualname(d.__class__)} object>" for d in getattr(plugin, attr)]
elif attr in ("macros", "timetables", "hooks", "executors"):
info[attr] = [qualname(d) for d in getattr(plugin, attr)]
elif attr == "listeners":
# listeners are always modules
info[attr] = [d.__name__ for d in getattr(plugin, attr)]
elif attr == "appbuilder_views":
info[attr] = [
{**d, "view": qualname(d["view"].__class__) if "view" in d else None}
for d in getattr(plugin, attr)
]
elif attr == "flask_blueprints":
info[attr] = [
f"<{qualname(d.__class__)}: name={d.name!r} import_name={d.import_name!r}>"
for d in getattr(plugin, attr)
]
else:
info[attr] = getattr(plugin, attr)
plugins_info.append(info)
return plugins_info
| 17,366 | 30.691606 | 107 | py |
airflow | airflow-main/airflow/sentry.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Sentry Integration."""
from __future__ import annotations
import logging
from functools import wraps
from typing import TYPE_CHECKING
from airflow.configuration import conf
from airflow.executors.executor_loader import ExecutorLoader
from airflow.utils.session import find_session_idx, provide_session
from airflow.utils.state import TaskInstanceState
if TYPE_CHECKING:
from sqlalchemy.orm import Session
from airflow.models.taskinstance import TaskInstance
log = logging.getLogger(__name__)
class DummySentry:
"""Blank class for Sentry."""
def add_tagging(self, task_instance):
"""Blank function for tagging."""
def add_breadcrumbs(self, task_instance, session: Session | None = None):
"""Blank function for breadcrumbs."""
def enrich_errors(self, run):
"""Blank function for formatting a TaskInstance._run_raw_task."""
return run
def flush(self):
"""Blank function for flushing errors."""
Sentry: DummySentry = DummySentry()
if conf.getboolean("sentry", "sentry_on", fallback=False):
import sentry_sdk
# Verify blinker installation
from blinker import signal # noqa: F401
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.logging import ignore_logger
class ConfiguredSentry(DummySentry):
"""Configure Sentry SDK."""
SCOPE_DAG_RUN_TAGS = frozenset(("data_interval_end", "data_interval_start", "execution_date"))
SCOPE_TASK_INSTANCE_TAGS = frozenset(("task_id", "dag_id", "try_number"))
SCOPE_CRUMBS = frozenset(("task_id", "state", "operator", "duration"))
UNSUPPORTED_SENTRY_OPTIONS = frozenset(
(
"integrations",
"in_app_include",
"in_app_exclude",
"ignore_errors",
"before_breadcrumb",
)
)
def __init__(self):
"""Initialize the Sentry SDK."""
ignore_logger("airflow.task")
sentry_flask = FlaskIntegration()
# LoggingIntegration is set by default.
integrations = [sentry_flask]
executor_class, _ = ExecutorLoader.import_default_executor_cls()
if executor_class.supports_sentry:
from sentry_sdk.integrations.celery import CeleryIntegration
sentry_celery = CeleryIntegration()
integrations.append(sentry_celery)
dsn = None
sentry_config_opts = conf.getsection("sentry") or {}
if sentry_config_opts:
sentry_config_opts.pop("sentry_on")
old_way_dsn = sentry_config_opts.pop("sentry_dsn", None)
new_way_dsn = sentry_config_opts.pop("dsn", None)
# supported backward compatibility with old way dsn option
dsn = old_way_dsn or new_way_dsn
unsupported_options = self.UNSUPPORTED_SENTRY_OPTIONS.intersection(sentry_config_opts.keys())
if unsupported_options:
log.warning(
"There are unsupported options in [sentry] section: %s",
", ".join(unsupported_options),
)
sentry_config_opts["before_send"] = conf.getimport("sentry", "before_send", fallback=None)
sentry_config_opts["transport"] = conf.getimport("sentry", "transport", fallback=None)
if dsn:
sentry_sdk.init(dsn=dsn, integrations=integrations, **sentry_config_opts)
else:
# Setting up Sentry using environment variables.
log.debug("Defaulting to SENTRY_DSN in environment.")
sentry_sdk.init(integrations=integrations, **sentry_config_opts)
def add_tagging(self, task_instance):
"""Function to add tagging for a task_instance."""
dag_run = task_instance.dag_run
task = task_instance.task
with sentry_sdk.configure_scope() as scope:
for tag_name in self.SCOPE_TASK_INSTANCE_TAGS:
attribute = getattr(task_instance, tag_name)
scope.set_tag(tag_name, attribute)
for tag_name in self.SCOPE_DAG_RUN_TAGS:
attribute = getattr(dag_run, tag_name)
scope.set_tag(tag_name, attribute)
scope.set_tag("operator", task.__class__.__name__)
@provide_session
def add_breadcrumbs(
self,
task_instance: TaskInstance,
session: Session | None = None,
) -> None:
"""Function to add breadcrumbs inside of a task_instance."""
if session is None:
return
dr = task_instance.get_dagrun(session)
task_instances = dr.get_task_instances(
state={TaskInstanceState.SUCCESS, TaskInstanceState.FAILED},
session=session,
)
for ti in task_instances:
data = {}
for crumb_tag in self.SCOPE_CRUMBS:
data[crumb_tag] = getattr(ti, crumb_tag)
sentry_sdk.add_breadcrumb(category="completed_tasks", data=data, level="info")
def enrich_errors(self, func):
"""
Decorate errors.
Wrap TaskInstance._run_raw_task and LocalTaskJob._run_mini_scheduler_on_child_tasks
to support task specific tags and breadcrumbs.
"""
session_args_idx = find_session_idx(func)
@wraps(func)
def wrapper(_self, *args, **kwargs):
# Wrapping the _run_raw_task function with push_scope to contain
# tags and breadcrumbs to a specific Task Instance
try:
session = kwargs.get("session", args[session_args_idx])
except IndexError:
session = None
with sentry_sdk.push_scope():
try:
                        # If this is a LocalTaskJob, get the task instance from it
if hasattr(_self, "task_instance"):
task_instance = _self.task_instance
else:
task_instance = _self
self.add_tagging(task_instance)
self.add_breadcrumbs(task_instance, session=session)
return func(_self, *args, **kwargs)
except Exception as e:
sentry_sdk.capture_exception(e)
raise
return wrapper
def flush(self):
sentry_sdk.flush()
Sentry = ConfiguredSentry()
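    # Example (illustrative sketch): the [sentry] options read above, as they might
    # appear in airflow.cfg; the DSN is a placeholder value.
    #
    #     [sentry]
    #     sentry_on = True
    #     dsn = https://public-key@sentry.example.com/1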
| 7,548 | 36.745 | 109 | py |
airflow | airflow-main/airflow/__main__.py | #!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Main executable module."""
from __future__ import annotations
import os
import argcomplete
# The configuration module initializes and validates the conf object as a side effect the first
# time it is imported. If it is not imported before importing the settings module, the conf
# object will then be initted/validated as a side effect of it being imported in settings,
# however this can cause issues since those modules are very tightly coupled and can
# very easily cause import cycles in the conf init/validate code (since downstream code from
# those functions likely import settings).
# Therefore importing configuration early (as the first airflow import) avoids
# any possible import cycles with settings downstream.
from airflow import configuration
from airflow.cli import cli_parser
def main():
"""Main executable function."""
conf = configuration.conf
if conf.get("core", "security") == "kerberos":
os.environ["KRB5CCNAME"] = conf.get("kerberos", "ccache")
os.environ["KRB5_KTNAME"] = conf.get("kerberos", "keytab")
parser = cli_parser.get_parser()
argcomplete.autocomplete(parser)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main()
| 2,062 | 37.924528 | 95 | py |
airflow | airflow-main/airflow/settings.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import atexit
import functools
import json
import logging
import os
import sys
import warnings
from typing import TYPE_CHECKING, Any, Callable
import pendulum
import pluggy
import sqlalchemy
from sqlalchemy import create_engine, exc, text
from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session as SASession, scoped_session, sessionmaker
from sqlalchemy.pool import NullPool
from airflow import policies
from airflow.configuration import AIRFLOW_HOME, WEBSERVER_CONFIG, conf # NOQA F401
from airflow.exceptions import RemovedInAirflow3Warning
from airflow.executors import executor_constants
from airflow.logging_config import configure_logging
from airflow.utils.orm_event_handlers import setup_event_handlers
from airflow.utils.state import State
if TYPE_CHECKING:
from airflow.www.utils import UIAlert
log = logging.getLogger(__name__)
TIMEZONE = pendulum.tz.timezone("UTC")
try:
tz = conf.get_mandatory_value("core", "default_timezone")
if tz == "system":
TIMEZONE = pendulum.tz.local_timezone()
else:
TIMEZONE = pendulum.tz.timezone(tz)
except Exception:
pass
log.info("Configured default timezone %s", TIMEZONE)
HEADER = "\n".join(
[
r" ____________ _____________",
r" ____ |__( )_________ __/__ /________ __",
r"____ /| |_ /__ ___/_ /_ __ /_ __ \_ | /| / /",
r"___ ___ | / _ / _ __/ _ / / /_/ /_ |/ |/ /",
r" _/_/ |_/_/ /_/ /_/ /_/ \____/____/|__/",
]
)
LOGGING_LEVEL = logging.INFO
# the prefix to append to gunicorn worker processes after init
GUNICORN_WORKER_READY_PREFIX = "[ready] "
LOG_FORMAT = conf.get("logging", "log_format")
SIMPLE_LOG_FORMAT = conf.get("logging", "simple_log_format")
SQL_ALCHEMY_CONN: str | None = None
PLUGINS_FOLDER: str | None = None
LOGGING_CLASS_PATH: str | None = None
DONOT_MODIFY_HANDLERS: bool | None = None
DAGS_FOLDER: str = os.path.expanduser(conf.get_mandatory_value("core", "DAGS_FOLDER"))
engine: Engine
Session: Callable[..., SASession]
# The JSON library to use for DAG Serialization and De-Serialization
json = json
# Dictionary containing State and colors associated to each state to
# display on the Webserver
STATE_COLORS = {
"deferred": "mediumpurple",
"failed": "red",
"queued": "gray",
"removed": "lightgrey",
"restarting": "violet",
"running": "lime",
"scheduled": "tan",
"shutdown": "blue",
"skipped": "hotpink",
"success": "green",
"up_for_reschedule": "turquoise",
"up_for_retry": "gold",
"upstream_failed": "orange",
}
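# Example (illustrative sketch): because import_local_settings() below overrides module
# globals and initialize() then applies STATE_COLORS to State.state_color, a deployment
# can recolor the listed states from an airflow_local_settings.py module; the values
# shown here are hypothetical.
#
#     # airflow_local_settings.py
#     STATE_COLORS = {
#         "queued": "darkgray",
#         "running": "#01FF70",
#         "success": "#2ECC40",
#     }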
@functools.lru_cache(maxsize=None)
def _get_rich_console(file):
# Delay imports until we need it
import rich.console
return rich.console.Console(file=file)
def custom_show_warning(message, category, filename, lineno, file=None, line=None):
"""Custom function to print rich and visible warnings."""
# Delay imports until we need it
from rich.markup import escape
msg = f"[bold]{line}" if line else f"[bold][yellow]{filename}:{lineno}"
msg += f" {category.__name__}[/bold]: {escape(str(message))}[/yellow]"
write_console = _get_rich_console(file or sys.stderr)
write_console.print(msg, soft_wrap=True)
def replace_showwarning(replacement):
"""Replace ``warnings.showwarning``, returning the original.
This is useful since we want to "reset" the ``showwarning`` hook on exit to
avoid lazy-loading issues. If a warning is emitted after Python cleaned up
the import system, we would no longer be able to import ``rich``.
"""
original = warnings.showwarning
warnings.showwarning = replacement
return original
original_show_warning = replace_showwarning(custom_show_warning)
atexit.register(functools.partial(replace_showwarning, original_show_warning))
POLICY_PLUGIN_MANAGER: Any = None # type: ignore
def task_policy(task):
return POLICY_PLUGIN_MANAGER.hook.task_policy(task=task)
def dag_policy(dag):
return POLICY_PLUGIN_MANAGER.hook.dag_policy(dag=dag)
def task_instance_mutation_hook(task_instance):
return POLICY_PLUGIN_MANAGER.hook.task_instance_mutation_hook(task_instance=task_instance)
task_instance_mutation_hook.is_noop = True # type: ignore
def pod_mutation_hook(pod):
return POLICY_PLUGIN_MANAGER.hook.pod_mutation_hook(pod=pod)
def get_airflow_context_vars(context):
return POLICY_PLUGIN_MANAGER.hook.get_airflow_context_vars(context=context)
def get_dagbag_import_timeout(dag_file_path: str):
return POLICY_PLUGIN_MANAGER.hook.get_dagbag_import_timeout(dag_file_path=dag_file_path)
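# Example (illustrative sketch): the hooks above are usually backed by functions of the
# same name defined in airflow_local_settings.py (or registered through the
# "airflow.policy" entry point); this hypothetical task_policy rejects tasks that keep
# the default owner.
#
#     # airflow_local_settings.py
#     from airflow.exceptions import AirflowClusterPolicyViolation
#
#     def task_policy(task):
#         if task.owner == "airflow":
#             raise AirflowClusterPolicyViolation(
#                 f"Task {task.task_id} must set an explicit owner."
#             )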
def configure_policy_plugin_manager():
global POLICY_PLUGIN_MANAGER
POLICY_PLUGIN_MANAGER = pluggy.PluginManager(policies.local_settings_hookspec.project_name)
POLICY_PLUGIN_MANAGER.add_hookspecs(policies)
POLICY_PLUGIN_MANAGER.register(policies.DefaultPolicy)
def load_policy_plugins(pm: pluggy.PluginManager):
# We can't log duration etc here, as logging hasn't yet been configured!
pm.load_setuptools_entrypoints("airflow.policy")
def configure_vars():
"""Configure Global Variables from airflow.cfg."""
global SQL_ALCHEMY_CONN
global DAGS_FOLDER
global PLUGINS_FOLDER
global DONOT_MODIFY_HANDLERS
SQL_ALCHEMY_CONN = conf.get("database", "SQL_ALCHEMY_CONN")
DAGS_FOLDER = os.path.expanduser(conf.get("core", "DAGS_FOLDER"))
PLUGINS_FOLDER = conf.get("core", "plugins_folder", fallback=os.path.join(AIRFLOW_HOME, "plugins"))
# If donot_modify_handlers=True, we do not modify logging handlers in task_run command
# If the flag is set to False, we remove all handlers from the root logger
# and add all handlers from 'airflow.task' logger to the root Logger. This is done
# to get all the logs from the print & log statements in the DAG files before a task is run
# The handlers are restored after the task completes execution.
DONOT_MODIFY_HANDLERS = conf.getboolean("logging", "donot_modify_handlers", fallback=False)
def configure_orm(disable_connection_pool=False, pool_class=None):
"""Configure ORM using SQLAlchemy."""
from airflow.utils.log.secrets_masker import mask_secret
log.debug("Setting up DB connection pool (PID %s)", os.getpid())
global engine
global Session
engine_args = prepare_engine_args(disable_connection_pool, pool_class)
if conf.has_option("database", "sql_alchemy_connect_args"):
connect_args = conf.getimport("database", "sql_alchemy_connect_args")
else:
connect_args = {}
engine = create_engine(SQL_ALCHEMY_CONN, connect_args=connect_args, **engine_args, future=True)
mask_secret(engine.url.password)
setup_event_handlers(engine)
Session = scoped_session(
sessionmaker(
autocommit=False,
autoflush=False,
bind=engine,
expire_on_commit=False,
)
)
if engine.dialect.name == "mssql":
session = Session()
try:
result = session.execute(
sqlalchemy.text(
"SELECT is_read_committed_snapshot_on FROM sys.databases WHERE name=:database_name"
),
params={"database_name": engine.url.database},
)
data = result.fetchone()[0]
if data != 1:
log.critical("MSSQL database MUST have READ_COMMITTED_SNAPSHOT enabled.")
log.critical("The database %s has it disabled.", engine.url.database)
log.critical("This will cause random deadlocks, Refusing to start.")
log.critical(
"See https://airflow.apache.org/docs/apache-airflow/stable/howto/"
"set-up-database.html#setting-up-a-mssql-database"
)
raise Exception("MSSQL database MUST have READ_COMMITTED_SNAPSHOT enabled.")
finally:
session.close()
DEFAULT_ENGINE_ARGS = {
"postgresql": {
"executemany_mode": "values",
"executemany_values_page_size": 10000,
"executemany_batch_page_size": 2000,
},
}
def prepare_engine_args(disable_connection_pool=False, pool_class=None):
"""Prepare SQLAlchemy engine args."""
default_args = {}
for dialect, default in DEFAULT_ENGINE_ARGS.items():
if SQL_ALCHEMY_CONN.startswith(dialect):
default_args = default.copy()
break
engine_args: dict = conf.getjson(
"database", "sql_alchemy_engine_args", fallback=default_args
) # type: ignore
if pool_class:
# Don't use separate settings for size etc, only those from sql_alchemy_engine_args
engine_args["poolclass"] = pool_class
elif disable_connection_pool or not conf.getboolean("database", "SQL_ALCHEMY_POOL_ENABLED"):
engine_args["poolclass"] = NullPool
log.debug("settings.prepare_engine_args(): Using NullPool")
elif not SQL_ALCHEMY_CONN.startswith("sqlite"):
# Pool size engine args not supported by sqlite.
# If no config value is defined for the pool size, select a reasonable value.
# 0 means no limit, which could lead to exceeding the Database connection limit.
pool_size = conf.getint("database", "SQL_ALCHEMY_POOL_SIZE", fallback=5)
# The maximum overflow size of the pool.
# When the number of checked-out connections reaches the size set in pool_size,
# additional connections will be returned up to this limit.
# When those additional connections are returned to the pool, they are disconnected and discarded.
# It follows then that the total number of simultaneous connections
# the pool will allow is pool_size + max_overflow,
# and the total number of "sleeping" connections the pool will allow is pool_size.
# max_overflow can be set to -1 to indicate no overflow limit;
# no limit will be placed on the total number
# of concurrent connections. Defaults to 10.
max_overflow = conf.getint("database", "SQL_ALCHEMY_MAX_OVERFLOW", fallback=10)
# The DB server already has a value for wait_timeout (number of seconds after
# which an idle sleeping connection should be killed). Since other DBs may
# co-exist on the same server, SQLAlchemy should set its
# pool_recycle to an equal or smaller value.
pool_recycle = conf.getint("database", "SQL_ALCHEMY_POOL_RECYCLE", fallback=1800)
# Check connection at the start of each connection pool checkout.
# Typically, this is a simple statement like "SELECT 1", but may also make use
# of some DBAPI-specific method to test the connection for liveness.
# More information here:
# https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic
pool_pre_ping = conf.getboolean("database", "SQL_ALCHEMY_POOL_PRE_PING", fallback=True)
log.debug(
"settings.prepare_engine_args(): Using pool settings. pool_size=%d, max_overflow=%d, "
"pool_recycle=%d, pid=%d",
pool_size,
max_overflow,
pool_recycle,
os.getpid(),
)
engine_args["pool_size"] = pool_size
engine_args["pool_recycle"] = pool_recycle
engine_args["pool_pre_ping"] = pool_pre_ping
engine_args["max_overflow"] = max_overflow
# The default isolation level for MySQL (REPEATABLE READ) can introduce inconsistencies when
# running multiple schedulers, as repeated queries on the same session may read from stale snapshots.
# 'READ COMMITTED' is the default value for PostgreSQL.
# More information here:
# https://dev.mysql.com/doc/refman/8.0/en/innodb-transaction-isolation-levels.html"
# Similarly MSSQL default isolation level should be set to READ COMMITTED.
# We also make sure that READ_COMMITTED_SNAPSHOT option is on, in order to avoid deadlocks when
# Select queries are running. This is by default enforced during init/upgrade. More information:
# https://docs.microsoft.com/en-us/sql/t-sql/statements/set-transaction-isolation-level-transact-sql
if SQL_ALCHEMY_CONN.startswith(("mysql", "mssql")):
engine_args["isolation_level"] = "READ COMMITTED"
# Allow the user to specify an encoding for their DB otherwise default
# to utf-8 so jobs & users with non-latin1 characters can still use us.
engine_args["encoding"] = conf.get("database", "SQL_ENGINE_ENCODING", fallback="utf-8")
return engine_args
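# Example (illustrative sketch): the pool-related values read above map to [database]
# options in airflow.cfg; a hypothetical configuration could look like this.
#
#     [database]
#     sql_alchemy_pool_enabled = True
#     sql_alchemy_pool_size = 10
#     sql_alchemy_max_overflow = 20
#     sql_alchemy_pool_recycle = 1800
#     sql_alchemy_pool_pre_ping = True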
def dispose_orm():
"""Properly close pooled database connections."""
log.debug("Disposing DB connection pool (PID %s)", os.getpid())
global engine
global Session
if Session is not None: # type: ignore[truthy-function]
Session.remove()
Session = None
if engine:
engine.dispose()
engine = None
def reconfigure_orm(disable_connection_pool=False, pool_class=None):
"""Properly close database connections and re-configure ORM."""
dispose_orm()
configure_orm(disable_connection_pool=disable_connection_pool, pool_class=pool_class)
def configure_adapters():
"""Register Adapters and DB Converters."""
from pendulum import DateTime as Pendulum
if SQL_ALCHEMY_CONN.startswith("sqlite"):
from sqlite3 import register_adapter
register_adapter(Pendulum, lambda val: val.isoformat(" "))
if SQL_ALCHEMY_CONN.startswith("mysql"):
try:
import MySQLdb.converters
MySQLdb.converters.conversions[Pendulum] = MySQLdb.converters.DateTime2literal
except ImportError:
pass
try:
import pymysql.converters
pymysql.converters.conversions[Pendulum] = pymysql.converters.escape_datetime
except ImportError:
pass
def validate_session():
"""Validate ORM Session."""
global engine
worker_precheck = conf.getboolean("celery", "worker_precheck", fallback=False)
if not worker_precheck:
return True
else:
check_session = sessionmaker(bind=engine)
session = check_session()
try:
session.execute(text("select 1"))
conn_status = True
except exc.DBAPIError as err:
log.error(err)
conn_status = False
session.close()
return conn_status
def configure_action_logging() -> None:
"""Any additional configuration (register callback) for airflow.utils.action_loggers module."""
def prepare_syspath():
"""Ensure certain subfolders of AIRFLOW_HOME are on the classpath."""
if DAGS_FOLDER not in sys.path:
sys.path.append(DAGS_FOLDER)
# Add ./config/ for loading custom log parsers etc, or
# airflow_local_settings etc.
config_path = os.path.join(AIRFLOW_HOME, "config")
if config_path not in sys.path:
sys.path.append(config_path)
if PLUGINS_FOLDER not in sys.path:
sys.path.append(PLUGINS_FOLDER)
def get_session_lifetime_config():
"""Gets session timeout configs and handles outdated configs gracefully."""
session_lifetime_minutes = conf.get("webserver", "session_lifetime_minutes", fallback=None)
session_lifetime_days = conf.get("webserver", "session_lifetime_days", fallback=None)
uses_deprecated_lifetime_configs = session_lifetime_days or conf.get(
"webserver", "force_log_out_after", fallback=None
)
minutes_per_day = 24 * 60
default_lifetime_minutes = "43200"
if uses_deprecated_lifetime_configs and session_lifetime_minutes == default_lifetime_minutes:
warnings.warn(
"`session_lifetime_days` option from `[webserver]` section has been "
"renamed to `session_lifetime_minutes`. The new option allows to configure "
"session lifetime in minutes. The `force_log_out_after` option has been removed "
"from `[webserver]` section. Please update your configuration.",
category=RemovedInAirflow3Warning,
)
if session_lifetime_days:
session_lifetime_minutes = minutes_per_day * int(session_lifetime_days)
if not session_lifetime_minutes:
session_lifetime_days = 30
session_lifetime_minutes = minutes_per_day * session_lifetime_days
logging.debug("User session lifetime is set to %s minutes.", session_lifetime_minutes)
return int(session_lifetime_minutes)
def import_local_settings():
"""Import airflow_local_settings.py files to allow overriding any configs in settings.py file."""
try:
import airflow_local_settings
if hasattr(airflow_local_settings, "__all__"):
names = list(airflow_local_settings.__all__)
else:
names = list(filter(lambda n: not n.startswith("__"), airflow_local_settings.__dict__.keys()))
if "policy" in names and "task_policy" not in names:
warnings.warn(
"Using `policy` in airflow_local_settings.py is deprecated. "
"Please rename your `policy` to `task_policy`.",
RemovedInAirflow3Warning,
stacklevel=2,
)
setattr(airflow_local_settings, "task_policy", airflow_local_settings.policy)
names.remove("policy")
plugin_functions = policies.make_plugin_from_local_settings(
POLICY_PLUGIN_MANAGER, airflow_local_settings, names
)
for name in names:
# If we have already handled a function by adding it to the plugin, then don't clobber the global
# function
if name in plugin_functions:
continue
globals()[name] = getattr(airflow_local_settings, name)
if POLICY_PLUGIN_MANAGER.hook.task_instance_mutation_hook.get_hookimpls():
task_instance_mutation_hook.is_noop = False
log.info("Loaded airflow_local_settings from %s .", airflow_local_settings.__file__)
except ModuleNotFoundError as e:
if e.name == "airflow_local_settings":
log.debug("No airflow_local_settings to import.", exc_info=True)
else:
log.critical(
"Failed to import airflow_local_settings due to a transitive module not found error.",
exc_info=True,
)
raise
except ImportError:
log.critical("Failed to import airflow_local_settings.", exc_info=True)
raise
def initialize():
"""Initialize Airflow with all the settings from this file."""
configure_vars()
prepare_syspath()
configure_policy_plugin_manager()
# Load policy plugins _before_ importing airflow_local_settings, as Pluggy uses LIFO and we want anything
    # in airflow_local_settings to take precedence
load_policy_plugins(POLICY_PLUGIN_MANAGER)
import_local_settings()
global LOGGING_CLASS_PATH
LOGGING_CLASS_PATH = configure_logging()
State.state_color.update(STATE_COLORS)
configure_adapters()
# The webservers import this file from models.py with the default settings.
configure_orm()
configure_action_logging()
# Ensure we close DB connections at scheduler and gunicorn worker terminations
atexit.register(dispose_orm)
# Const stuff
KILOBYTE = 1024
MEGABYTE = KILOBYTE * KILOBYTE
WEB_COLORS = {"LIGHTBLUE": "#4d9de0", "LIGHTORANGE": "#FF9933"}
# Updating serialized DAG can not be faster than a minimum interval to reduce database
# write rate.
MIN_SERIALIZED_DAG_UPDATE_INTERVAL = conf.getint("core", "min_serialized_dag_update_interval", fallback=30)
# If set to True, serialized DAGs is compressed before writing to DB,
COMPRESS_SERIALIZED_DAGS = conf.getboolean("core", "compress_serialized_dags", fallback=False)
# Fetching serialized DAG can not be faster than a minimum interval to reduce database
# read rate. This config controls when your DAGs are updated in the Webserver
MIN_SERIALIZED_DAG_FETCH_INTERVAL = conf.getint("core", "min_serialized_dag_fetch_interval", fallback=10)
CAN_FORK = hasattr(os, "fork")
EXECUTE_TASKS_NEW_PYTHON_INTERPRETER = not CAN_FORK or conf.getboolean(
"core",
"execute_tasks_new_python_interpreter",
fallback=False,
)
ALLOW_FUTURE_EXEC_DATES = conf.getboolean("scheduler", "allow_trigger_in_future", fallback=False)
# Whether or not to check each dagrun against defined SLAs
CHECK_SLAS = conf.getboolean("core", "check_slas", fallback=True)
USE_JOB_SCHEDULE = conf.getboolean("scheduler", "use_job_schedule", fallback=True)
# By default Airflow plugins are lazily-loaded (only loaded when required). Set it to False,
# if you want to load plugins whenever 'airflow' is invoked via cli or loaded from module.
LAZY_LOAD_PLUGINS = conf.getboolean("core", "lazy_load_plugins", fallback=True)
# By default Airflow providers are lazily-discovered (discovery and imports happen only when required).
# Set it to False, if you want to discover providers whenever 'airflow' is invoked via cli or
# loaded from module.
LAZY_LOAD_PROVIDERS = conf.getboolean("core", "lazy_discover_providers", fallback=True)
# Determines if the executor utilizes Kubernetes
IS_K8S_OR_K8SCELERY_EXECUTOR = conf.get("core", "EXECUTOR") in {
executor_constants.KUBERNETES_EXECUTOR,
executor_constants.CELERY_KUBERNETES_EXECUTOR,
executor_constants.LOCAL_KUBERNETES_EXECUTOR,
}
IS_K8S_EXECUTOR_POD = bool(os.environ.get("AIRFLOW_IS_K8S_EXECUTOR_POD", ""))
"""Will be True if running in kubernetes executor pod."""
HIDE_SENSITIVE_VAR_CONN_FIELDS = conf.getboolean("core", "hide_sensitive_var_conn_fields")
# By default this is off, but is automatically configured on when running task
# instances
MASK_SECRETS_IN_LOGS = False
# Display alerts on the dashboard
# Useful for warning about setup issues or announcing changes to end users
# List of UIAlerts, which allows for specifying the message, category, and roles the
# message should be shown to. For example:
# from airflow.www.utils import UIAlert
#
# DASHBOARD_UIALERTS = [
# UIAlert("Welcome to Airflow"), # All users
# UIAlert("Airflow update happening next week", roles=["User"]), # Only users with the User role
# # A flash message with html:
# UIAlert('Visit <a href="http://airflow.apache.org">airflow.apache.org</a>', html=True),
# ]
#
DASHBOARD_UIALERTS: list[UIAlert] = []
# Prefix used to identify tables holding data moved during migration.
AIRFLOW_MOVED_TABLE_PREFIX = "_airflow_moved"
DAEMON_UMASK: str = conf.get("core", "daemon_umask", fallback="0o077")
# AIP-44: internal_api (experimental)
# This feature is not complete yet, so we disable it by default.
_ENABLE_AIP_44 = os.environ.get("AIRFLOW_ENABLE_AIP_44", "false").lower() in {"true", "t", "yes", "y", "1"}
| 23,573 | 37.207455 | 109 | py |
airflow | airflow-main/airflow/exceptions.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Note: Any AirflowException raised is expected to cause the TaskInstance
# to be marked in an ERROR state
"""Exceptions used by Airflow."""
from __future__ import annotations
import datetime
import warnings
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, NamedTuple, Sized
if TYPE_CHECKING:
from airflow.models import DAG, DagRun
class AirflowException(Exception):
"""
Base class for all Airflow's errors.
Each custom exception should be derived from this class.
"""
status_code = HTTPStatus.INTERNAL_SERVER_ERROR
class AirflowBadRequest(AirflowException):
"""Raise when the application or server cannot handle the request."""
status_code = HTTPStatus.BAD_REQUEST
class AirflowNotFoundException(AirflowException):
"""Raise when the requested object/resource is not available in the system."""
status_code = HTTPStatus.NOT_FOUND
class AirflowConfigException(AirflowException):
"""Raise when there is configuration problem."""
class AirflowSensorTimeout(AirflowException):
"""Raise when there is a timeout on sensor polling."""
class AirflowRescheduleException(AirflowException):
"""
Raise when the task should be re-scheduled at a later time.
:param reschedule_date: The date when the task should be rescheduled
"""
def __init__(self, reschedule_date):
super().__init__()
self.reschedule_date = reschedule_date
class InvalidStatsNameException(AirflowException):
"""Raise when name of the stats is invalid."""
class AirflowTaskTimeout(AirflowException):
"""Raise when the task execution times-out."""
class AirflowWebServerTimeout(AirflowException):
"""Raise when the web server times out."""
class AirflowSkipException(AirflowException):
"""Raise when the task should be skipped."""
class AirflowFailException(AirflowException):
"""Raise when the task should be failed without retrying."""
class AirflowOptionalProviderFeatureException(AirflowException):
"""Raise by providers when imports are missing for optional provider features."""
class XComNotFound(AirflowException):
"""Raise when an XCom reference is being resolved against a non-existent XCom."""
def __init__(self, dag_id: str, task_id: str, key: str) -> None:
super().__init__()
self.dag_id = dag_id
self.task_id = task_id
self.key = key
def __str__(self) -> str:
return f'XComArg result from {self.task_id} at {self.dag_id} with key="{self.key}" is not found!'
class UnmappableOperator(AirflowException):
"""Raise when an operator is not implemented to be mappable."""
class XComForMappingNotPushed(AirflowException):
"""Raise when a mapped downstream's dependency fails to push XCom for task mapping."""
def __str__(self) -> str:
return "did not push XCom for task mapping"
class UnmappableXComTypePushed(AirflowException):
"""Raise when an unmappable type is pushed as a mapped downstream's dependency."""
def __init__(self, value: Any, *values: Any) -> None:
super().__init__(value, *values)
def __str__(self) -> str:
typename = type(self.args[0]).__qualname__
for arg in self.args[1:]:
typename = f"{typename}[{type(arg).__qualname__}]"
return f"unmappable return type {typename!r}"
class UnmappableXComLengthPushed(AirflowException):
"""Raise when the pushed value is too large to map as a downstream's dependency."""
def __init__(self, value: Sized, max_length: int) -> None:
super().__init__(value)
self.value = value
self.max_length = max_length
def __str__(self) -> str:
return f"unmappable return value length: {len(self.value)} > {self.max_length}"
class AirflowDagCycleException(AirflowException):
"""Raise when there is a cycle in DAG definition."""
class AirflowDagDuplicatedIdException(AirflowException):
"""Raise when a DAG's ID is already used by another DAG."""
def __init__(self, dag_id: str, incoming: str, existing: str) -> None:
super().__init__(dag_id, incoming, existing)
self.dag_id = dag_id
self.incoming = incoming
self.existing = existing
def __str__(self) -> str:
return f"Ignoring DAG {self.dag_id} from {self.incoming} - also found in {self.existing}"
class AirflowDagInconsistent(AirflowException):
"""Raise when a DAG has inconsistent attributes."""
class AirflowClusterPolicyViolation(AirflowException):
"""Raise when there is a violation of a Cluster Policy in DAG definition."""
class AirflowClusterPolicySkipDag(AirflowException):
"""Raise when skipping dag is needed in Cluster Policy."""
class AirflowClusterPolicyError(AirflowException):
"""
Raise when there is an error in Cluster Policy,
except AirflowClusterPolicyViolation and AirflowClusterPolicySkipDag.
"""
class AirflowTimetableInvalid(AirflowException):
"""Raise when a DAG has an invalid timetable."""
class DagNotFound(AirflowNotFoundException):
"""Raise when a DAG is not available in the system."""
class DagCodeNotFound(AirflowNotFoundException):
"""Raise when a DAG code is not available in the system."""
class DagRunNotFound(AirflowNotFoundException):
"""Raise when a DAG Run is not available in the system."""
class DagRunAlreadyExists(AirflowBadRequest):
"""Raise when creating a DAG run for DAG which already has DAG run entry."""
def __init__(self, dag_run: DagRun, execution_date: datetime.datetime, run_id: str) -> None:
super().__init__(
f"A DAG Run already exists for DAG {dag_run.dag_id} at {execution_date} with run id {run_id}"
)
self.dag_run = dag_run
class DagFileExists(AirflowBadRequest):
"""Raise when a DAG ID is still in DagBag i.e., DAG file is in DAG folder."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
warnings.warn("DagFileExists is deprecated and will be removed.", DeprecationWarning, stacklevel=2)
class DagInvalidTriggerRule(AirflowException):
"""Raise when a dag has 'fail_stop' enabled yet has a non-default trigger rule."""
@classmethod
def check(cls, dag: DAG | None, trigger_rule: str):
from airflow.models.abstractoperator import DEFAULT_TRIGGER_RULE
if dag is not None and dag.fail_stop and trigger_rule != DEFAULT_TRIGGER_RULE:
raise cls()
def __str__(self) -> str:
from airflow.models.abstractoperator import DEFAULT_TRIGGER_RULE
return f"A 'fail-stop' dag can only have {DEFAULT_TRIGGER_RULE} trigger rule"
class DuplicateTaskIdFound(AirflowException):
"""Raise when a Task with duplicate task_id is defined in the same DAG."""
class TaskAlreadyInTaskGroup(AirflowException):
"""Raise when a Task cannot be added to a TaskGroup since it already belongs to another TaskGroup."""
def __init__(self, task_id: str, existing_group_id: str | None, new_group_id: str) -> None:
super().__init__(task_id, new_group_id)
self.task_id = task_id
self.existing_group_id = existing_group_id
self.new_group_id = new_group_id
def __str__(self) -> str:
if self.existing_group_id is None:
existing_group = "the DAG's root group"
else:
existing_group = f"group {self.existing_group_id!r}"
return f"cannot add {self.task_id!r} to {self.new_group_id!r} (already in {existing_group})"
class SerializationError(AirflowException):
"""A problem occurred when trying to serialize something."""
class ParamValidationError(AirflowException):
"""Raise when DAG params is invalid."""
class TaskNotFound(AirflowNotFoundException):
"""Raise when a Task is not available in the system."""
class TaskInstanceNotFound(AirflowNotFoundException):
"""Raise when a task instance is not available in the system."""
class PoolNotFound(AirflowNotFoundException):
"""Raise when a Pool is not available in the system."""
class NoAvailablePoolSlot(AirflowException):
"""Raise when there is not enough slots in pool."""
class DagConcurrencyLimitReached(AirflowException):
"""Raise when DAG max_active_tasks limit is reached."""
class TaskConcurrencyLimitReached(AirflowException):
"""Raise when task max_active_tasks limit is reached."""
class BackfillUnfinished(AirflowException):
"""
Raises when not all tasks succeed in backfill.
:param message: The human-readable description of the exception
:param ti_status: The information about all task statuses
"""
def __init__(self, message, ti_status):
super().__init__(message)
self.ti_status = ti_status
class FileSyntaxError(NamedTuple):
"""Information about a single error in a file."""
line_no: int | None
message: str
def __str__(self):
return f"{self.message}. Line number: s{str(self.line_no)},"
class AirflowFileParseException(AirflowException):
"""
Raises when connection or variable file can not be parsed.
:param msg: The human-readable description of the exception
:param file_path: A processed file that contains errors
:param parse_errors: File syntax errors
"""
def __init__(self, msg: str, file_path: str, parse_errors: list[FileSyntaxError]) -> None:
super().__init__(msg)
self.msg = msg
self.file_path = file_path
self.parse_errors = parse_errors
def __str__(self):
from airflow.utils.code_utils import prepare_code_snippet
from airflow.utils.platform import is_tty
result = f"{self.msg}\nFilename: {self.file_path}\n\n"
for error_no, parse_error in enumerate(self.parse_errors, 1):
result += "=" * 20 + f" Parse error {error_no:3} " + "=" * 20 + "\n"
result += f"{parse_error.message}\n"
if parse_error.line_no:
result += f"Line number: {parse_error.line_no}\n"
if parse_error.line_no and is_tty():
result += "\n" + prepare_code_snippet(self.file_path, parse_error.line_no) + "\n"
return result
class ConnectionNotUnique(AirflowException):
"""Raise when multiple values are found for the same connection ID."""
class TaskDeferred(BaseException):
"""
Signal an operator moving to deferred state.
Special exception raised to signal that the operator it was raised from
wishes to defer until a trigger fires.
"""
def __init__(
self,
*,
trigger,
method_name: str,
kwargs: dict[str, Any] | None = None,
timeout: datetime.timedelta | None = None,
):
super().__init__()
self.trigger = trigger
self.method_name = method_name
self.kwargs = kwargs
self.timeout = timeout
# Check timeout type at runtime
if self.timeout is not None and not hasattr(self.timeout, "total_seconds"):
raise ValueError("Timeout value must be a timedelta")
def __repr__(self) -> str:
return f"<TaskDeferred trigger={self.trigger} method={self.method_name}>"
class TaskDeferralError(AirflowException):
"""Raised when a task failed during deferral for some reason."""
class PodMutationHookException(AirflowException):
"""Raised when exception happens during Pod Mutation Hook execution."""
class PodReconciliationError(AirflowException):
"""Raised when an error is encountered while trying to merge pod configs."""
class RemovedInAirflow3Warning(DeprecationWarning):
"""Issued for usage of deprecated features that will be removed in Airflow3."""
deprecated_since: str | None = None
"Indicates the airflow version that started raising this deprecation warning"
class AirflowProviderDeprecationWarning(DeprecationWarning):
"""Issued for usage of deprecated features of Airflow provider."""
deprecated_provider_since: str | None = None
"Indicates the provider version that started raising this deprecation warning"
class DeserializingResultError(ValueError):
"""Raised when an error is encountered while a pickling library deserializes a pickle file."""
def __str__(self):
return (
"Error deserializing result. Note that result deserialization "
"is not supported across major Python versions. Cause: " + str(self.__cause__)
)
| 13,236 | 31.443627 | 107 | py |
airflow | airflow-main/airflow/logging_config.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import warnings
from logging.config import dictConfig
from airflow.configuration import conf
from airflow.exceptions import AirflowConfigException
from airflow.utils.module_loading import import_string
log = logging.getLogger(__name__)
def configure_logging():
"""Configure & Validate Airflow Logging."""
logging_class_path = ""
try:
logging_class_path = conf.get("logging", "logging_config_class")
except AirflowConfigException:
log.debug("Could not find key logging_config_class in config")
if logging_class_path:
try:
logging_config = import_string(logging_class_path)
# Make sure that the variable is in scope
if not isinstance(logging_config, dict):
raise ValueError("Logging Config should be of dict type")
log.info("Successfully imported user-defined logging config from %s", logging_class_path)
except Exception as err:
# Import default logging configurations.
raise ImportError(f"Unable to load custom logging from {logging_class_path} due to {err}")
else:
logging_class_path = "airflow.config_templates.airflow_local_settings.DEFAULT_LOGGING_CONFIG"
logging_config = import_string(logging_class_path)
log.debug("Unable to load custom logging, using default config instead")
try:
# Ensure that the password masking filter is applied to the 'task' handler
# no matter what the user did.
if "filters" in logging_config and "mask_secrets" in logging_config["filters"]:
# But if they replace the logging config _entirely_, don't try to set this, it won't work
task_handler_config = logging_config["handlers"]["task"]
task_handler_config.setdefault("filters", [])
if "mask_secrets" not in task_handler_config["filters"]:
task_handler_config["filters"].append("mask_secrets")
# Try to init logging
dictConfig(logging_config)
except (ValueError, KeyError) as e:
log.error("Unable to load the config, contains a configuration error.")
# When there is an error in the config, escalate the exception
# otherwise Airflow would silently fall back on the default config
raise e
validate_logging_config(logging_config)
return logging_class_path
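# --- Illustrative sketch (added for exposition; the module path below is hypothetical) ---
# A custom logging config is referenced by dotted path, e.g. in airflow.cfg:
#
#   [logging]
#   logging_config_class = my_company.log_config.LOGGING_CONFIG
#
# and is typically built by copying the shipped defaults and adjusting them:
def _example_custom_logging_config() -> dict:
    """Illustrative only; returns a dict suitable for logging_config_class."""
    from copy import deepcopy

    from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG

    config = deepcopy(DEFAULT_LOGGING_CONFIG)
    # e.g. make task logs more verbose than the default
    config["loggers"]["airflow.task"]["level"] = "DEBUG"
    return config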
def validate_logging_config(logging_config):
"""Validate the provided Logging Config."""
# Now lets validate the other logging-related settings
task_log_reader = conf.get("logging", "task_log_reader")
logger = logging.getLogger("airflow.task")
def _get_handler(name):
return next((h for h in logger.handlers if h.name == name), None)
if _get_handler(task_log_reader) is None:
# Check for pre 1.10 setting that might be in deployed airflow.cfg files
if task_log_reader == "file.task" and _get_handler("task"):
warnings.warn(
f"task_log_reader setting in [logging] has a deprecated value of {task_log_reader!r}, "
"but no handler with this name was found. Please update your config to use task. "
"Running config has been adjusted to match",
DeprecationWarning,
)
conf.set("logging", "task_log_reader", "task")
else:
raise AirflowConfigException(
f"Configured task_log_reader {task_log_reader!r} was not a handler of "
f"the 'airflow.task' logger."
)
| 4,377 | 40.301887 | 103 | py |
airflow | airflow-main/airflow/stats.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import socket
from typing import TYPE_CHECKING, Callable
from airflow.configuration import conf
from airflow.metrics import datadog_logger, otel_logger, statsd_logger
from airflow.metrics.base_stats_logger import NoStatsLogger, StatsLogger
log = logging.getLogger(__name__)
class _Stats(type):
factory: Callable
instance: StatsLogger | NoStatsLogger | None = None
def __getattr__(cls, name: str) -> str:
if not cls.instance:
try:
cls.instance = cls.factory()
except (socket.gaierror, ImportError) as e:
log.error("Could not configure StatsClient: %s, using NoStatsLogger instead.", e)
cls.instance = NoStatsLogger()
return getattr(cls.instance, name)
def __init__(cls, *args, **kwargs) -> None:
super().__init__(cls)
if not hasattr(cls.__class__, "factory"):
is_datadog_enabled_defined = conf.has_option("metrics", "statsd_datadog_enabled")
if is_datadog_enabled_defined and conf.getboolean("metrics", "statsd_datadog_enabled"):
cls.__class__.factory = datadog_logger.get_dogstatsd_logger
elif conf.getboolean("metrics", "statsd_on"):
cls.__class__.factory = statsd_logger.get_statsd_logger
elif conf.getboolean("metrics", "otel_on"):
cls.__class__.factory = otel_logger.get_otel_logger
else:
cls.__class__.factory = NoStatsLogger
@classmethod
def get_constant_tags(cls) -> list[str]:
"""Get constant DataDog tags to add to all stats."""
tags: list[str] = []
tags_in_string = conf.get("metrics", "statsd_datadog_tags", fallback=None)
if tags_in_string is None or tags_in_string == "":
return tags
else:
for key_value in tags_in_string.split(","):
tags.append(key_value)
return tags
if TYPE_CHECKING:
Stats: StatsLogger
else:
class Stats(metaclass=_Stats):
"""Empty class for Stats - we use metaclass to inject the right one."""
| 2,936 | 37.644737 | 99 | py |
airflow | airflow-main/airflow/configuration.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import functools
import json
import logging
import multiprocessing
import os
import pathlib
import shlex
import stat
import subprocess
import sys
import warnings
from base64 import b64encode
from collections import OrderedDict
# Ignored Mypy on configparser because it thinks the configparser module has no _UNSET attribute
from configparser import _UNSET, ConfigParser, NoOptionError, NoSectionError # type: ignore
from contextlib import contextmanager, suppress
from json.decoder import JSONDecodeError
from typing import IO, Any, Dict, Iterable, Pattern, Set, Tuple, Union
from urllib.parse import urlsplit
import re2
from typing_extensions import overload
from airflow.auth.managers.base_auth_manager import BaseAuthManager
from airflow.exceptions import AirflowConfigException
from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH, BaseSecretsBackend
from airflow.utils import yaml
from airflow.utils.module_loading import import_string
from airflow.utils.weight_rule import WeightRule
log = logging.getLogger(__name__)
# show Airflow's deprecation warnings
if not sys.warnoptions:
warnings.filterwarnings(action="default", category=DeprecationWarning, module="airflow")
warnings.filterwarnings(action="default", category=PendingDeprecationWarning, module="airflow")
_SQLITE3_VERSION_PATTERN = re2.compile(r"(?P<version>^\d+(?:\.\d+)*)\D?.*$")
ConfigType = Union[str, int, float, bool]
ConfigOptionsDictType = Dict[str, ConfigType]
ConfigSectionSourcesType = Dict[str, Union[str, Tuple[str, str]]]
ConfigSourcesType = Dict[str, ConfigSectionSourcesType]
ENV_VAR_PREFIX = "AIRFLOW__"
def _parse_sqlite_version(s: str) -> tuple[int, ...]:
match = _SQLITE3_VERSION_PATTERN.match(s)
if match is None:
return ()
return tuple(int(p) for p in match.group("version").split("."))
@overload
def expand_env_var(env_var: None) -> None:
...
@overload
def expand_env_var(env_var: str) -> str:
...
def expand_env_var(env_var: str | None) -> str | None:
"""
Expands (potentially nested) env vars.
Repeat and apply `expandvars` and `expanduser` until
interpolation stops having any effect.
"""
if not env_var:
return env_var
while True:
interpolated = os.path.expanduser(os.path.expandvars(str(env_var)))
if interpolated == env_var:
return interpolated
else:
env_var = interpolated
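# --- Illustrative sketch (added for exposition; the variable names are hypothetical) ---
# expand_env_var keeps applying expandvars/expanduser until the value stops changing,
# so nested references resolve fully:
def _example_expand_env_var() -> str:
    """Illustrative only."""
    os.environ["EXAMPLE_BASE"] = "~/airflow"
    os.environ["EXAMPLE_DAGS"] = "$EXAMPLE_BASE/dags"
    # resolves over successive passes to something like "/home/<user>/airflow/dags/repo"
    return expand_env_var("$EXAMPLE_DAGS/repo")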
def run_command(command: str) -> str:
"""Runs command and returns stdout."""
process = subprocess.Popen(
shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True
)
output, stderr = (stream.decode(sys.getdefaultencoding(), "ignore") for stream in process.communicate())
if process.returncode != 0:
raise AirflowConfigException(
f"Cannot execute {command}. Error code is: {process.returncode}. "
f"Output: {output}, Stderr: {stderr}"
)
return output
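# --- Illustrative note (added for exposition; the secret path below is hypothetical) ---
# run_command() backs the "{section}__{name}_cmd" pattern for sensitive options, e.g.
#
#   [database]
#   sql_alchemy_conn_cmd = cat /run/secrets/airflow_db_conn
#
# in airflow.cfg, or AIRFLOW__DATABASE__SQL_ALCHEMY_CONN_CMD in the environment.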
def _get_config_value_from_secret_backend(config_key: str) -> str | None:
"""Get Config option values from Secret Backend."""
try:
secrets_client = get_custom_secret_backend()
if not secrets_client:
return None
return secrets_client.get_config(config_key)
except Exception as e:
raise AirflowConfigException(
"Cannot retrieve config from alternative secrets backend. "
"Make sure it is configured properly and that the Backend "
"is accessible.\n"
f"{e}"
)
def _default_config_file_path(file_name: str) -> str:
templates_dir = os.path.join(os.path.dirname(__file__), "config_templates")
return os.path.join(templates_dir, file_name)
def default_config_yaml() -> dict[str, Any]:
"""
Read Airflow configs from YAML file.
:return: Python dictionary containing configs & their info
"""
with open(_default_config_file_path("config.yml")) as config_file:
return yaml.safe_load(config_file)
class AirflowConfigParser(ConfigParser):
"""Custom Airflow Configparser supporting defaults and deprecated options."""
# These configuration elements can be fetched as the stdout of commands
# following the "{section}__{name}_cmd" pattern, the idea behind this
# is to not store password on boxes in text files.
# These configs can also be fetched from Secrets backend
# following the "{section}__{name}__secret" pattern
@functools.cached_property
def sensitive_config_values(self) -> Set[tuple[str, str]]: # noqa: UP006
default_config = default_config_yaml()
flattened = {
(s, k): item for s, s_c in default_config.items() for k, item in s_c.get("options").items()
}
sensitive = {(section, key) for (section, key), v in flattened.items() if v.get("sensitive") is True}
depr_option = {self.deprecated_options[x][:-1] for x in sensitive if x in self.deprecated_options}
depr_section = {
(self.deprecated_sections[s][0], k) for s, k in sensitive if s in self.deprecated_sections
}
sensitive.update(depr_section, depr_option)
return sensitive
# A mapping of (new section, new option) -> (old section, old option, since_version).
# When reading new option, the old option will be checked to see if it exists. If it does a
# DeprecationWarning will be issued and the old option will be used instead
deprecated_options: dict[tuple[str, str], tuple[str, str, str]] = {
("celery", "worker_precheck"): ("core", "worker_precheck", "2.0.0"),
("logging", "interleave_timestamp_parser"): ("core", "interleave_timestamp_parser", "2.6.1"),
("logging", "base_log_folder"): ("core", "base_log_folder", "2.0.0"),
("logging", "remote_logging"): ("core", "remote_logging", "2.0.0"),
("logging", "remote_log_conn_id"): ("core", "remote_log_conn_id", "2.0.0"),
("logging", "remote_base_log_folder"): ("core", "remote_base_log_folder", "2.0.0"),
("logging", "encrypt_s3_logs"): ("core", "encrypt_s3_logs", "2.0.0"),
("logging", "logging_level"): ("core", "logging_level", "2.0.0"),
("logging", "fab_logging_level"): ("core", "fab_logging_level", "2.0.0"),
("logging", "logging_config_class"): ("core", "logging_config_class", "2.0.0"),
("logging", "colored_console_log"): ("core", "colored_console_log", "2.0.0"),
("logging", "colored_log_format"): ("core", "colored_log_format", "2.0.0"),
("logging", "colored_formatter_class"): ("core", "colored_formatter_class", "2.0.0"),
("logging", "log_format"): ("core", "log_format", "2.0.0"),
("logging", "simple_log_format"): ("core", "simple_log_format", "2.0.0"),
("logging", "task_log_prefix_template"): ("core", "task_log_prefix_template", "2.0.0"),
("logging", "log_filename_template"): ("core", "log_filename_template", "2.0.0"),
("logging", "log_processor_filename_template"): ("core", "log_processor_filename_template", "2.0.0"),
("logging", "dag_processor_manager_log_location"): (
"core",
"dag_processor_manager_log_location",
"2.0.0",
),
("logging", "task_log_reader"): ("core", "task_log_reader", "2.0.0"),
("metrics", "metrics_allow_list"): ("metrics", "statsd_allow_list", "2.6.0"),
("metrics", "metrics_block_list"): ("metrics", "statsd_block_list", "2.6.0"),
("metrics", "statsd_on"): ("scheduler", "statsd_on", "2.0.0"),
("metrics", "statsd_host"): ("scheduler", "statsd_host", "2.0.0"),
("metrics", "statsd_port"): ("scheduler", "statsd_port", "2.0.0"),
("metrics", "statsd_prefix"): ("scheduler", "statsd_prefix", "2.0.0"),
("metrics", "statsd_allow_list"): ("scheduler", "statsd_allow_list", "2.0.0"),
("metrics", "stat_name_handler"): ("scheduler", "stat_name_handler", "2.0.0"),
("metrics", "statsd_datadog_enabled"): ("scheduler", "statsd_datadog_enabled", "2.0.0"),
("metrics", "statsd_datadog_tags"): ("scheduler", "statsd_datadog_tags", "2.0.0"),
("metrics", "statsd_datadog_metrics_tags"): ("scheduler", "statsd_datadog_metrics_tags", "2.6.0"),
("metrics", "statsd_custom_client_path"): ("scheduler", "statsd_custom_client_path", "2.0.0"),
("scheduler", "parsing_processes"): ("scheduler", "max_threads", "1.10.14"),
("scheduler", "scheduler_idle_sleep_time"): ("scheduler", "processor_poll_interval", "2.2.0"),
("operators", "default_queue"): ("celery", "default_queue", "2.1.0"),
("core", "hide_sensitive_var_conn_fields"): ("admin", "hide_sensitive_variable_fields", "2.1.0"),
("core", "sensitive_var_conn_names"): ("admin", "sensitive_variable_fields", "2.1.0"),
("core", "default_pool_task_slot_count"): ("core", "non_pooled_task_slot_count", "1.10.4"),
("core", "max_active_tasks_per_dag"): ("core", "dag_concurrency", "2.2.0"),
("logging", "worker_log_server_port"): ("celery", "worker_log_server_port", "2.2.0"),
("api", "access_control_allow_origins"): ("api", "access_control_allow_origin", "2.2.0"),
("api", "auth_backends"): ("api", "auth_backend", "2.3.0"),
("database", "sql_alchemy_conn"): ("core", "sql_alchemy_conn", "2.3.0"),
("database", "sql_engine_encoding"): ("core", "sql_engine_encoding", "2.3.0"),
("database", "sql_engine_collation_for_ids"): ("core", "sql_engine_collation_for_ids", "2.3.0"),
("database", "sql_alchemy_pool_enabled"): ("core", "sql_alchemy_pool_enabled", "2.3.0"),
("database", "sql_alchemy_pool_size"): ("core", "sql_alchemy_pool_size", "2.3.0"),
("database", "sql_alchemy_max_overflow"): ("core", "sql_alchemy_max_overflow", "2.3.0"),
("database", "sql_alchemy_pool_recycle"): ("core", "sql_alchemy_pool_recycle", "2.3.0"),
("database", "sql_alchemy_pool_pre_ping"): ("core", "sql_alchemy_pool_pre_ping", "2.3.0"),
("database", "sql_alchemy_schema"): ("core", "sql_alchemy_schema", "2.3.0"),
("database", "sql_alchemy_connect_args"): ("core", "sql_alchemy_connect_args", "2.3.0"),
("database", "load_default_connections"): ("core", "load_default_connections", "2.3.0"),
("database", "max_db_retries"): ("core", "max_db_retries", "2.3.0"),
("scheduler", "parsing_cleanup_interval"): ("scheduler", "deactivate_stale_dags_interval", "2.5.0"),
("scheduler", "task_queued_timeout_check_interval"): (
"kubernetes_executor",
"worker_pods_pending_timeout_check_interval",
"2.6.0",
),
}
# A mapping of new configurations to a list of old configurations for when one configuration
# deprecates more than one other deprecation. The deprecation logic for these configurations
# is defined in SchedulerJobRunner.
many_to_one_deprecated_options: dict[tuple[str, str], list[tuple[str, str, str]]] = {
("scheduler", "task_queued_timeout"): [
("celery", "stalled_task_timeout", "2.6.0"),
("celery", "task_adoption_timeout", "2.6.0"),
("kubernetes_executor", "worker_pods_pending_timeout", "2.6.0"),
]
}
# A mapping of new section -> (old section, since_version).
deprecated_sections: dict[str, tuple[str, str]] = {"kubernetes_executor": ("kubernetes", "2.5.0")}
# Now build the inverse so we can go from old_section/old_key to new_section/new_key
# if someone tries to retrieve it based on old_section/old_key
@functools.cached_property
def inversed_deprecated_options(self):
return {(sec, name): key for key, (sec, name, ver) in self.deprecated_options.items()}
@functools.cached_property
def inversed_deprecated_sections(self):
return {
old_section: new_section for new_section, (old_section, ver) in self.deprecated_sections.items()
}
# A mapping of old default values that we want to change and warn the user
# about. Mapping of section -> setting -> { old, replace, by_version }
deprecated_values: dict[str, dict[str, tuple[Pattern, str, str]]] = {
"core": {
"hostname_callable": (re2.compile(r":"), r".", "2.1"),
},
"webserver": {
"navbar_color": (re2.compile(r"(?i)\A#007A87\z"), "#fff", "2.1"),
"dag_default_view": (re2.compile(r"^tree$"), "grid", "3.0"),
},
"email": {
"email_backend": (
re2.compile(r"^airflow\.contrib\.utils\.sendgrid\.send_email$"),
r"airflow.providers.sendgrid.utils.emailer.send_email",
"2.1",
),
},
"logging": {
"log_filename_template": (
re2.compile(re2.escape("{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log")),
"XX-set-after-default-config-loaded-XX",
"3.0",
),
},
"api": {
"auth_backends": (
re2.compile(r"^airflow\.api\.auth\.backend\.deny_all$|^$"),
"airflow.api.auth.backend.session",
"3.0",
),
},
"elasticsearch": {
"log_id_template": (
re2.compile("^" + re2.escape("{dag_id}-{task_id}-{execution_date}-{try_number}") + "$"),
"{dag_id}-{task_id}-{run_id}-{map_index}-{try_number}",
"3.0",
)
},
}
_available_logging_levels = ["CRITICAL", "FATAL", "ERROR", "WARN", "WARNING", "INFO", "DEBUG"]
enums_options = {
("core", "default_task_weight_rule"): sorted(WeightRule.all_weight_rules()),
("core", "dag_ignore_file_syntax"): ["regexp", "glob"],
("core", "mp_start_method"): multiprocessing.get_all_start_methods(),
("scheduler", "file_parsing_sort_mode"): ["modified_time", "random_seeded_by_host", "alphabetical"],
("logging", "logging_level"): _available_logging_levels,
("logging", "fab_logging_level"): _available_logging_levels,
# celery_logging_level can be empty, which uses logging_level as fallback
("logging", "celery_logging_level"): _available_logging_levels + [""],
("webserver", "analytical_tool"): ["google_analytics", "metarouter", "segment", ""],
}
upgraded_values: dict[tuple[str, str], str]
"""Mapping of (section,option) to the old value that was upgraded"""
# This method transforms option names on every read, get, or set operation.
# This changes from the default behaviour of ConfigParser from lower-casing
# to instead be case-preserving
def optionxform(self, optionstr: str) -> str:
return optionstr
def __init__(self, default_config: str | None = None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.upgraded_values = {}
self.airflow_defaults = ConfigParser(*args, **kwargs)
if default_config is not None:
self.airflow_defaults.read_string(default_config)
# Set the upgrade value based on the current loaded default
default = self.airflow_defaults.get("logging", "log_filename_template", fallback=None)
if default:
replacement = self.deprecated_values["logging"]["log_filename_template"]
self.deprecated_values["logging"]["log_filename_template"] = (
replacement[0],
default,
replacement[2],
)
else:
# In case of tests it might not exist
with suppress(KeyError):
del self.deprecated_values["logging"]["log_filename_template"]
else:
with suppress(KeyError):
del self.deprecated_values["logging"]["log_filename_template"]
self.is_validated = False
self._suppress_future_warnings = False
def validate(self):
self._validate_sqlite3_version()
self._validate_enums()
for section, replacement in self.deprecated_values.items():
for name, info in replacement.items():
old, new, version = info
current_value = self.get(section, name, fallback="")
if self._using_old_value(old, current_value):
self.upgraded_values[(section, name)] = current_value
new_value = old.sub(new, current_value)
self._update_env_var(section=section, name=name, new_value=new_value)
self._create_future_warning(
name=name,
section=section,
current_value=current_value,
new_value=new_value,
version=version,
)
self._upgrade_auth_backends()
self._upgrade_postgres_metastore_conn()
self.is_validated = True
def _upgrade_auth_backends(self):
"""
Ensure a custom auth_backends setting contains session.
This is required by the UI for ajax queries.
"""
old_value = self.get("api", "auth_backends", fallback="")
if old_value in ("airflow.api.auth.backend.default", ""):
# handled by deprecated_values
pass
elif old_value.find("airflow.api.auth.backend.session") == -1:
new_value = old_value + ",airflow.api.auth.backend.session"
self._update_env_var(section="api", name="auth_backends", new_value=new_value)
self.upgraded_values[("api", "auth_backends")] = old_value
# if the old value is set via env var, we need to wipe it
# otherwise, it'll "win" over our adjusted value
old_env_var = self._env_var_name("api", "auth_backend")
os.environ.pop(old_env_var, None)
warnings.warn(
"The auth_backends setting in [api] has had airflow.api.auth.backend.session added "
"in the running config, which is needed by the UI. Please update your config before "
"Apache Airflow 3.0.",
FutureWarning,
)
def _upgrade_postgres_metastore_conn(self):
"""
        Upgrade the metastore connection URL scheme.
As of SQLAlchemy 1.4, schemes `postgres+psycopg2` and `postgres`
must be replaced with `postgresql`.
"""
section, key = "database", "sql_alchemy_conn"
old_value = self.get(section, key, _extra_stacklevel=1)
bad_schemes = ["postgres+psycopg2", "postgres"]
good_scheme = "postgresql"
parsed = urlsplit(old_value)
if parsed.scheme in bad_schemes:
warnings.warn(
f"Bad scheme in Airflow configuration core > sql_alchemy_conn: `{parsed.scheme}`. "
"As of SQLAlchemy 1.4 (adopted in Airflow 2.3) this is no longer supported. You must "
f"change to `{good_scheme}` before the next Airflow release.",
FutureWarning,
)
self.upgraded_values[(section, key)] = old_value
new_value = re2.sub("^" + re2.escape(f"{parsed.scheme}://"), f"{good_scheme}://", old_value)
self._update_env_var(section=section, name=key, new_value=new_value)
# if the old value is set via env var, we need to wipe it
# otherwise, it'll "win" over our adjusted value
old_env_var = self._env_var_name("core", key)
os.environ.pop(old_env_var, None)
def _validate_enums(self):
"""Validate that enum type config has an accepted value."""
for (section_key, option_key), enum_options in self.enums_options.items():
if self.has_option(section_key, option_key):
value = self.get(section_key, option_key)
if value not in enum_options:
raise AirflowConfigException(
f"`[{section_key}] {option_key}` should not be "
f"{value!r}. Possible values: {', '.join(enum_options)}."
)
def _validate_sqlite3_version(self):
"""Validate SQLite version.
Some features in storing rendered fields require SQLite >= 3.15.0.
"""
if "sqlite" not in self.get("database", "sql_alchemy_conn"):
return
import sqlite3
min_sqlite_version = (3, 15, 0)
if _parse_sqlite_version(sqlite3.sqlite_version) >= min_sqlite_version:
return
from airflow.utils.docs import get_docs_url
min_sqlite_version_str = ".".join(str(s) for s in min_sqlite_version)
raise AirflowConfigException(
f"error: SQLite C library too old (< {min_sqlite_version_str}). "
f"See {get_docs_url('howto/set-up-database.html#setting-up-a-sqlite-database')}"
)
def _using_old_value(self, old: Pattern, current_value: str) -> bool:
return old.search(current_value) is not None
def _update_env_var(self, section: str, name: str, new_value: str):
env_var = self._env_var_name(section, name)
# Set it as an env var so that any subprocesses keep the same override!
os.environ[env_var] = new_value
@staticmethod
def _create_future_warning(name: str, section: str, current_value: Any, new_value: Any, version: str):
warnings.warn(
f"The {name!r} setting in [{section}] has the old default value of {current_value!r}. "
f"This value has been changed to {new_value!r} in the running config, but "
f"please update your config before Apache Airflow {version}.",
FutureWarning,
)
def _env_var_name(self, section: str, key: str) -> str:
return f"{ENV_VAR_PREFIX}{section.replace('.', '_').upper()}__{key.upper()}"
def _get_env_var_option(self, section: str, key: str):
# must have format AIRFLOW__{SECTION}__{KEY} (note double underscore)
env_var = self._env_var_name(section, key)
if env_var in os.environ:
return expand_env_var(os.environ[env_var])
# alternatively AIRFLOW__{SECTION}__{KEY}_CMD (for a command)
env_var_cmd = env_var + "_CMD"
if env_var_cmd in os.environ:
# if this is a valid command key...
if (section, key) in self.sensitive_config_values:
return run_command(os.environ[env_var_cmd])
# alternatively AIRFLOW__{SECTION}__{KEY}_SECRET (to get from Secrets Backend)
env_var_secret_path = env_var + "_SECRET"
if env_var_secret_path in os.environ:
# if this is a valid secret path...
if (section, key) in self.sensitive_config_values:
return _get_config_value_from_secret_backend(os.environ[env_var_secret_path])
return None
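    # Resolution example (added for exposition): for ("database", "sql_alchemy_conn") the
    # lookups above are, in order, AIRFLOW__DATABASE__SQL_ALCHEMY_CONN, then
    # AIRFLOW__DATABASE__SQL_ALCHEMY_CONN_CMD (run as a command), then
    # AIRFLOW__DATABASE__SQL_ALCHEMY_CONN_SECRET (resolved via the secrets backend).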
def _get_cmd_option(self, section: str, key: str):
fallback_key = key + "_cmd"
if (section, key) in self.sensitive_config_values:
if super().has_option(section, fallback_key):
command = super().get(section, fallback_key)
return run_command(command)
return None
def _get_cmd_option_from_config_sources(
self, config_sources: ConfigSourcesType, section: str, key: str
) -> str | None:
fallback_key = key + "_cmd"
if (section, key) in self.sensitive_config_values:
section_dict = config_sources.get(section)
if section_dict is not None:
command_value = section_dict.get(fallback_key)
if command_value is not None:
if isinstance(command_value, str):
command = command_value
else:
command = command_value[0]
return run_command(command)
return None
def _get_secret_option(self, section: str, key: str) -> str | None:
"""Get Config option values from Secret Backend."""
fallback_key = key + "_secret"
if (section, key) in self.sensitive_config_values:
if super().has_option(section, fallback_key):
secrets_path = super().get(section, fallback_key)
return _get_config_value_from_secret_backend(secrets_path)
return None
def _get_secret_option_from_config_sources(
self, config_sources: ConfigSourcesType, section: str, key: str
) -> str | None:
fallback_key = key + "_secret"
if (section, key) in self.sensitive_config_values:
section_dict = config_sources.get(section)
if section_dict is not None:
secrets_path_value = section_dict.get(fallback_key)
if secrets_path_value is not None:
if isinstance(secrets_path_value, str):
secrets_path = secrets_path_value
else:
secrets_path = secrets_path_value[0]
return _get_config_value_from_secret_backend(secrets_path)
return None
def get_mandatory_value(self, section: str, key: str, **kwargs) -> str:
value = self.get(section, key, _extra_stacklevel=1, **kwargs)
if value is None:
raise ValueError(f"The value {section}/{key} should be set!")
return value
@overload # type: ignore[override]
def get(self, section: str, key: str, fallback: str = ..., **kwargs) -> str: # type: ignore[override]
...
@overload # type: ignore[override]
def get(self, section: str, key: str, **kwargs) -> str | None: # type: ignore[override]
...
def get( # type: ignore[override, misc]
self,
section: str,
key: str,
_extra_stacklevel: int = 0,
**kwargs,
) -> str | None:
section = str(section).lower()
key = str(key).lower()
warning_emitted = False
deprecated_section: str | None
deprecated_key: str | None
# For when we rename whole sections
if section in self.inversed_deprecated_sections:
deprecated_section, deprecated_key = (section, key)
section = self.inversed_deprecated_sections[section]
if not self._suppress_future_warnings:
warnings.warn(
f"The config section [{deprecated_section}] has been renamed to "
f"[{section}]. Please update your `conf.get*` call to use the new name",
FutureWarning,
stacklevel=2 + _extra_stacklevel,
)
# Don't warn about individual rename if the whole section is renamed
warning_emitted = True
elif (section, key) in self.inversed_deprecated_options:
# Handle using deprecated section/key instead of the new section/key
new_section, new_key = self.inversed_deprecated_options[(section, key)]
if not self._suppress_future_warnings and not warning_emitted:
warnings.warn(
f"section/key [{section}/{key}] has been deprecated, you should use"
f"[{new_section}/{new_key}] instead. Please update your `conf.get*` call to use the "
"new name",
FutureWarning,
stacklevel=2 + _extra_stacklevel,
)
warning_emitted = True
deprecated_section, deprecated_key = section, key
section, key = (new_section, new_key)
elif section in self.deprecated_sections:
# When accessing the new section name, make sure we check under the old config name
deprecated_key = key
deprecated_section = self.deprecated_sections[section][0]
else:
deprecated_section, deprecated_key, _ = self.deprecated_options.get(
(section, key), (None, None, None)
)
# first check environment variables
option = self._get_environment_variables(
deprecated_key,
deprecated_section,
key,
section,
issue_warning=not warning_emitted,
extra_stacklevel=_extra_stacklevel,
)
if option is not None:
return option
# ...then the config file
option = self._get_option_from_config_file(
deprecated_key,
deprecated_section,
key,
kwargs,
section,
issue_warning=not warning_emitted,
extra_stacklevel=_extra_stacklevel,
)
if option is not None:
return option
# ...then commands
option = self._get_option_from_commands(
deprecated_key,
deprecated_section,
key,
section,
issue_warning=not warning_emitted,
extra_stacklevel=_extra_stacklevel,
)
if option is not None:
return option
# ...then from secret backends
option = self._get_option_from_secrets(
deprecated_key,
deprecated_section,
key,
section,
issue_warning=not warning_emitted,
extra_stacklevel=_extra_stacklevel,
)
if option is not None:
return option
# ...then the default config
if self.airflow_defaults.has_option(section, key) or "fallback" in kwargs:
return expand_env_var(self.airflow_defaults.get(section, key, **kwargs))
log.warning("section/key [%s/%s] not found in config", section, key)
raise AirflowConfigException(f"section/key [{section}/{key}] not found in config")
def _get_option_from_secrets(
self,
deprecated_key: str | None,
deprecated_section: str | None,
key: str,
section: str,
issue_warning: bool = True,
extra_stacklevel: int = 0,
) -> str | None:
option = self._get_secret_option(section, key)
if option:
return option
if deprecated_section and deprecated_key:
with self.suppress_future_warnings():
option = self._get_secret_option(deprecated_section, deprecated_key)
if option:
if issue_warning:
self._warn_deprecate(section, key, deprecated_section, deprecated_key, extra_stacklevel)
return option
return None
def _get_option_from_commands(
self,
deprecated_key: str | None,
deprecated_section: str | None,
key: str,
section: str,
issue_warning: bool = True,
extra_stacklevel: int = 0,
) -> str | None:
option = self._get_cmd_option(section, key)
if option:
return option
if deprecated_section and deprecated_key:
with self.suppress_future_warnings():
option = self._get_cmd_option(deprecated_section, deprecated_key)
if option:
if issue_warning:
self._warn_deprecate(section, key, deprecated_section, deprecated_key, extra_stacklevel)
return option
return None
def _get_option_from_config_file(
self,
deprecated_key: str | None,
deprecated_section: str | None,
key: str,
kwargs: dict[str, Any],
section: str,
issue_warning: bool = True,
extra_stacklevel: int = 0,
) -> str | None:
if super().has_option(section, key):
# Use the parent's methods to get the actual config here to be able to
# separate the config from default config.
return expand_env_var(super().get(section, key, **kwargs))
if deprecated_section and deprecated_key:
if super().has_option(deprecated_section, deprecated_key):
if issue_warning:
self._warn_deprecate(section, key, deprecated_section, deprecated_key, extra_stacklevel)
with self.suppress_future_warnings():
return expand_env_var(super().get(deprecated_section, deprecated_key, **kwargs))
return None
def _get_environment_variables(
self,
deprecated_key: str | None,
deprecated_section: str | None,
key: str,
section: str,
issue_warning: bool = True,
extra_stacklevel: int = 0,
) -> str | None:
option = self._get_env_var_option(section, key)
if option is not None:
return option
if deprecated_section and deprecated_key:
with self.suppress_future_warnings():
option = self._get_env_var_option(deprecated_section, deprecated_key)
if option is not None:
if issue_warning:
self._warn_deprecate(section, key, deprecated_section, deprecated_key, extra_stacklevel)
return option
return None
def getboolean(self, section: str, key: str, **kwargs) -> bool: # type: ignore[override]
val = str(self.get(section, key, _extra_stacklevel=1, **kwargs)).lower().strip()
if "#" in val:
val = val.split("#")[0].strip()
if val in ("t", "true", "1"):
return True
elif val in ("f", "false", "0"):
return False
else:
raise AirflowConfigException(
f'Failed to convert value to bool. Please check "{key}" key in "{section}" section. '
f'Current value: "{val}".'
)
def getint(self, section: str, key: str, **kwargs) -> int: # type: ignore[override]
val = self.get(section, key, _extra_stacklevel=1, **kwargs)
if val is None:
raise AirflowConfigException(
f"Failed to convert value None to int. "
f'Please check "{key}" key in "{section}" section is set.'
)
try:
return int(val)
except ValueError:
raise AirflowConfigException(
f'Failed to convert value to int. Please check "{key}" key in "{section}" section. '
f'Current value: "{val}".'
)
def getfloat(self, section: str, key: str, **kwargs) -> float: # type: ignore[override]
val = self.get(section, key, _extra_stacklevel=1, **kwargs)
if val is None:
raise AirflowConfigException(
f"Failed to convert value None to float. "
f'Please check "{key}" key in "{section}" section is set.'
)
try:
return float(val)
except ValueError:
raise AirflowConfigException(
f'Failed to convert value to float. Please check "{key}" key in "{section}" section. '
f'Current value: "{val}".'
)
def getimport(self, section: str, key: str, **kwargs) -> Any:
"""
        Reads options, imports the fully qualified name, and returns the object.
In case of failure, it throws an exception with the key and section names
:return: The object or None, if the option is empty
"""
full_qualified_path = conf.get(section=section, key=key, **kwargs)
if not full_qualified_path:
return None
try:
return import_string(full_qualified_path)
except ImportError as e:
log.error(e)
raise AirflowConfigException(
f'The object could not be loaded. Please check "{key}" key in "{section}" section. '
f'Current value: "{full_qualified_path}".'
)
def getjson(
self, section: str, key: str, fallback=_UNSET, **kwargs
) -> dict | list | str | int | float | None:
"""
Return a config value parsed from a JSON string.
``fallback`` is *not* JSON parsed but used verbatim when no config value is given.
"""
# get always returns the fallback value as a string, so for this if
# someone gives us an object we want to keep that
default = _UNSET
if fallback is not _UNSET:
default = fallback
fallback = _UNSET
try:
data = self.get(section=section, key=key, fallback=fallback, _extra_stacklevel=1, **kwargs)
except (NoSectionError, NoOptionError):
return default
if not data:
return default if default is not _UNSET else None
try:
return json.loads(data)
except JSONDecodeError as e:
raise AirflowConfigException(f"Unable to parse [{section}] {key!r} as valid json") from e
def gettimedelta(
self, section: str, key: str, fallback: Any = None, **kwargs
) -> datetime.timedelta | None:
"""
        Gets the config value for the given section and key, and converts it into a datetime.timedelta object.
If the key is missing, then it is considered as `None`.
:param section: the section from the config
:param key: the key defined in the given section
:param fallback: fallback value when no config value is given, defaults to None
:raises AirflowConfigException: raised because ValueError or OverflowError
:return: datetime.timedelta(seconds=<config_value>) or None
"""
val = self.get(section, key, fallback=fallback, _extra_stacklevel=1, **kwargs)
if val:
# the given value must be convertible to integer
try:
int_val = int(val)
except ValueError:
raise AirflowConfigException(
f'Failed to convert value to int. Please check "{key}" key in "{section}" section. '
f'Current value: "{val}".'
)
try:
return datetime.timedelta(seconds=int_val)
except OverflowError as err:
raise AirflowConfigException(
f"Failed to convert value to timedelta in `seconds`. "
f"{err}. "
f'Please check "{key}" key in "{section}" section. Current value: "{val}".'
)
return fallback
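    # Example (added for exposition): a config value of "300" yields
    # datetime.timedelta(seconds=300); a missing key returns ``fallback`` unchanged.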
def read(
self,
filenames: (str | bytes | os.PathLike | Iterable[str | bytes | os.PathLike]),
encoding=None,
):
super().read(filenames=filenames, encoding=encoding)
# The RawConfigParser defines "Mapping" from abc.collections is not subscriptable - so we have
# to use Dict here.
def read_dict( # type: ignore[override]
self, dictionary: dict[str, dict[str, Any]], source: str = "<dict>"
):
super().read_dict(dictionary=dictionary, source=source)
def has_option(self, section: str, option: str) -> bool:
try:
# Using self.get() to avoid reimplementing the priority order
# of config variables (env, config, cmd, defaults)
# UNSET to avoid logging a warning about missing values
self.get(section, option, fallback=_UNSET, _extra_stacklevel=1)
return True
except (NoOptionError, NoSectionError):
return False
def remove_option(self, section: str, option: str, remove_default: bool = True):
"""
Remove an option if it exists in config from a file or default config.
        If both configs have the same option, this removes the option
        from both unless remove_default=False.
"""
if super().has_option(section, option):
super().remove_option(section, option)
if self.airflow_defaults.has_option(section, option) and remove_default:
self.airflow_defaults.remove_option(section, option)
def getsection(self, section: str) -> ConfigOptionsDictType | None:
"""
Returns the section as a dict.
Values are converted to int, float, bool as required.
:param section: section from the config
"""
if not self.has_section(section) and not self.airflow_defaults.has_section(section):
return None
if self.airflow_defaults.has_section(section):
_section: ConfigOptionsDictType = OrderedDict(self.airflow_defaults.items(section))
else:
_section = OrderedDict()
if self.has_section(section):
_section.update(OrderedDict(self.items(section)))
section_prefix = self._env_var_name(section, "")
for env_var in sorted(os.environ.keys()):
if env_var.startswith(section_prefix):
key = env_var.replace(section_prefix, "")
if key.endswith("_CMD"):
key = key[:-4]
key = key.lower()
_section[key] = self._get_env_var_option(section, key)
for key, val in _section.items():
if val is None:
raise AirflowConfigException(
f"Failed to convert value automatically. "
f'Please check "{key}" key in "{section}" section is set.'
)
try:
_section[key] = int(val)
except ValueError:
try:
_section[key] = float(val)
except ValueError:
if isinstance(val, str) and val.lower() in ("t", "true"):
_section[key] = True
elif isinstance(val, str) and val.lower() in ("f", "false"):
_section[key] = False
return _section
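    # Coercion example (added for exposition): getsection() turns raw strings into typed
    # values, e.g. "16" -> 16, "2.5" -> 2.5, "True"/"t" -> True, "False"/"f" -> False;
    # anything else is kept as a string.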
def write( # type: ignore[override]
self, fp: IO, space_around_delimiters: bool = True, section: str | None = None
) -> None:
# This is based on the configparser.RawConfigParser.write method code to add support for
# reading options from environment variables.
# Various type ignores below deal with less-than-perfect RawConfigParser superclass typing
if space_around_delimiters:
delimiter = f" {self._delimiters[0]} " # type: ignore[attr-defined]
else:
delimiter = self._delimiters[0] # type: ignore[attr-defined]
if self._defaults: # type: ignore
self._write_section( # type: ignore[attr-defined]
fp, self.default_section, self._defaults.items(), delimiter # type: ignore[attr-defined]
)
sections = (
{section: dict(self.getsection(section))} # type: ignore[arg-type]
if section
else self._sections # type: ignore[attr-defined]
)
for sect in sections:
item_section: ConfigOptionsDictType = self.getsection(sect) # type: ignore[assignment]
self._write_section(fp, sect, item_section.items(), delimiter) # type: ignore[attr-defined]
def as_dict(
self,
display_source: bool = False,
display_sensitive: bool = False,
raw: bool = False,
include_env: bool = True,
include_cmds: bool = True,
include_secret: bool = True,
) -> ConfigSourcesType:
"""
Returns the current configuration as an OrderedDict of OrderedDicts.
When materializing current configuration Airflow defaults are
materialized along with user set configs. If any of the `include_*`
options are False then the result of calling command or secret key
configs do not override Airflow defaults and instead are passed through.
        To then prevent Airflow defaults from overwriting user-set
        command or secret key configs, we filter out bare sensitive_config_values
        that are set to Airflow defaults when command or secret key configs
        produce different values.
:param display_source: If False, the option value is returned. If True,
a tuple of (option_value, source) is returned. Source is either
'airflow.cfg', 'default', 'env var', or 'cmd'.
:param display_sensitive: If True, the values of options set by env
vars and bash commands will be displayed. If False, those options
are shown as '< hidden >'
:param raw: Should the values be output as interpolated values, or the
"raw" form that can be fed back in to ConfigParser
:param include_env: Should the value of configuration from AIRFLOW__
environment variables be included or not
:param include_cmds: Should the result of calling any *_cmd config be
set (True, default), or should the _cmd options be left as the
command to run (False)
:param include_secret: Should the result of calling any *_secret config be
set (True, default), or should the _secret options be left as the
path to get the secret from (False)
:return: Dictionary, where the key is the name of the section and the content is
the dictionary with the name of the parameter and its value.
"""
if not display_sensitive:
            # We want to hide the sensitive values in the appropriate methods,
            # since env values coming from cmds or secrets can be read in the _include_envs method
if not all([include_env, include_cmds, include_secret]):
raise ValueError(
"If display_sensitive is false, then include_env, "
"include_cmds, include_secret must all be set as True"
)
config_sources: ConfigSourcesType = {}
configs = [
("default", self.airflow_defaults),
("airflow.cfg", self),
]
self._replace_config_with_display_sources(
config_sources,
configs,
display_source,
raw,
self.deprecated_options,
include_cmds=include_cmds,
include_env=include_env,
include_secret=include_secret,
)
# add env vars and overwrite because they have priority
if include_env:
self._include_envs(config_sources, display_sensitive, display_source, raw)
else:
self._filter_by_source(config_sources, display_source, self._get_env_var_option)
# add bash commands
if include_cmds:
self._include_commands(config_sources, display_sensitive, display_source, raw)
else:
self._filter_by_source(config_sources, display_source, self._get_cmd_option)
# add config from secret backends
if include_secret:
self._include_secrets(config_sources, display_sensitive, display_source, raw)
else:
self._filter_by_source(config_sources, display_source, self._get_secret_option)
if not display_sensitive:
# This ensures the ones from config file is hidden too
# if they are not provided through env, cmd and secret
hidden = "< hidden >"
for section, key in self.sensitive_config_values:
if not config_sources.get(section):
continue
if config_sources[section].get(key, None):
if display_source:
source = config_sources[section][key][1]
config_sources[section][key] = (hidden, source)
else:
config_sources[section][key] = hidden
return config_sources
def _include_secrets(
self,
config_sources: ConfigSourcesType,
display_sensitive: bool,
display_source: bool,
raw: bool,
):
for section, key in self.sensitive_config_values:
value: str | None = self._get_secret_option_from_config_sources(config_sources, section, key)
if value:
if not display_sensitive:
value = "< hidden >"
if display_source:
opt: str | tuple[str, str] = (value, "secret")
elif raw:
opt = value.replace("%", "%%")
else:
opt = value
config_sources.setdefault(section, OrderedDict()).update({key: opt})
del config_sources[section][key + "_secret"]
def _include_commands(
self,
config_sources: ConfigSourcesType,
display_sensitive: bool,
display_source: bool,
raw: bool,
):
for section, key in self.sensitive_config_values:
opt = self._get_cmd_option_from_config_sources(config_sources, section, key)
if not opt:
continue
opt_to_set: str | tuple[str, str] | None = opt
if not display_sensitive:
opt_to_set = "< hidden >"
if display_source:
opt_to_set = (str(opt_to_set), "cmd")
elif raw:
opt_to_set = str(opt_to_set).replace("%", "%%")
if opt_to_set is not None:
dict_to_update: dict[str, str | tuple[str, str]] = {key: opt_to_set}
config_sources.setdefault(section, OrderedDict()).update(dict_to_update)
del config_sources[section][key + "_cmd"]
def _include_envs(
self,
config_sources: ConfigSourcesType,
display_sensitive: bool,
display_source: bool,
raw: bool,
):
for env_var in [
os_environment for os_environment in os.environ if os_environment.startswith(ENV_VAR_PREFIX)
]:
try:
_, section, key = env_var.split("__", 2)
opt = self._get_env_var_option(section, key)
except ValueError:
continue
if opt is None:
log.warning("Ignoring unknown env var '%s'", env_var)
continue
if not display_sensitive and env_var != self._env_var_name("core", "unit_test_mode"):
# Don't hide cmd/secret values here
if not env_var.lower().endswith("cmd") and not env_var.lower().endswith("secret"):
if (section, key) in self.sensitive_config_values:
opt = "< hidden >"
elif raw:
opt = opt.replace("%", "%%")
if display_source:
opt = (opt, "env var")
section = section.lower()
            # if we lower the key for the kubernetes_environment_variables section,
            # then we won't be able to set any Airflow environment
            # variables. Airflow only parses environment variables that start
            # with AIRFLOW_. Therefore, we need to make it a special case.
if section != "kubernetes_environment_variables":
key = key.lower()
config_sources.setdefault(section, OrderedDict()).update({key: opt})
def _filter_by_source(
self,
config_sources: ConfigSourcesType,
display_source: bool,
getter_func,
):
"""
        Deletes default config values from the current configuration
        (an OrderedDict of OrderedDicts) if they would conflict with special sensitive_config_values.
This is necessary because bare configs take precedence over the command
or secret key equivalents so if the current running config is
materialized with Airflow defaults they in turn override user set
command or secret key configs.
:param config_sources: The current configuration to operate on
:param display_source: If False, configuration options contain raw
values. If True, options are a tuple of (option_value, source).
Source is either 'airflow.cfg', 'default', 'env var', or 'cmd'.
:param getter_func: A callback function that gets the user configured
override value for a particular sensitive_config_values config.
:return: None, the given config_sources is filtered if necessary,
otherwise untouched.
"""
for section, key in self.sensitive_config_values:
# Don't bother if we don't have section / key
if section not in config_sources or key not in config_sources[section]:
continue
# Check that there is something to override defaults
try:
getter_opt = getter_func(section, key)
except ValueError:
continue
if not getter_opt:
continue
# Check to see that there is a default value
if not self.airflow_defaults.has_option(section, key):
continue
# Check to see if bare setting is the same as defaults
if display_source:
# when display_source = true, we know that the config_sources contains tuple
opt, source = config_sources[section][key] # type: ignore
else:
opt = config_sources[section][key]
if opt == self.airflow_defaults.get(section, key):
del config_sources[section][key]
@staticmethod
def _replace_config_with_display_sources(
config_sources: ConfigSourcesType,
configs: Iterable[tuple[str, ConfigParser]],
display_source: bool,
raw: bool,
deprecated_options: dict[tuple[str, str], tuple[str, str, str]],
include_env: bool,
include_cmds: bool,
include_secret: bool,
):
for source_name, config in configs:
for section in config.sections():
AirflowConfigParser._replace_section_config_with_display_sources(
config,
config_sources,
display_source,
raw,
section,
source_name,
deprecated_options,
configs,
include_env=include_env,
include_cmds=include_cmds,
include_secret=include_secret,
)
@staticmethod
def _deprecated_value_is_set_in_config(
deprecated_section: str,
deprecated_key: str,
configs: Iterable[tuple[str, ConfigParser]],
) -> bool:
for config_type, config in configs:
if config_type == "default":
continue
try:
deprecated_section_array = config.items(section=deprecated_section, raw=True)
for key_candidate, _ in deprecated_section_array:
if key_candidate == deprecated_key:
return True
except NoSectionError:
pass
return False
@staticmethod
def _deprecated_variable_is_set(deprecated_section: str, deprecated_key: str) -> bool:
return (
os.environ.get(f"{ENV_VAR_PREFIX}{deprecated_section.upper()}__{deprecated_key.upper()}")
is not None
)
@staticmethod
def _deprecated_command_is_set_in_config(
deprecated_section: str, deprecated_key: str, configs: Iterable[tuple[str, ConfigParser]]
) -> bool:
return AirflowConfigParser._deprecated_value_is_set_in_config(
deprecated_section=deprecated_section, deprecated_key=deprecated_key + "_cmd", configs=configs
)
@staticmethod
def _deprecated_variable_command_is_set(deprecated_section: str, deprecated_key: str) -> bool:
return (
os.environ.get(f"{ENV_VAR_PREFIX}{deprecated_section.upper()}__{deprecated_key.upper()}_CMD")
is not None
)
@staticmethod
def _deprecated_secret_is_set_in_config(
deprecated_section: str, deprecated_key: str, configs: Iterable[tuple[str, ConfigParser]]
) -> bool:
return AirflowConfigParser._deprecated_value_is_set_in_config(
deprecated_section=deprecated_section, deprecated_key=deprecated_key + "_secret", configs=configs
)
@staticmethod
def _deprecated_variable_secret_is_set(deprecated_section: str, deprecated_key: str) -> bool:
return (
os.environ.get(f"{ENV_VAR_PREFIX}{deprecated_section.upper()}__{deprecated_key.upper()}_SECRET")
is not None
)
@contextmanager
def suppress_future_warnings(self):
suppress_future_warnings = self._suppress_future_warnings
self._suppress_future_warnings = True
yield self
self._suppress_future_warnings = suppress_future_warnings
@staticmethod
def _replace_section_config_with_display_sources(
config: ConfigParser,
config_sources: ConfigSourcesType,
display_source: bool,
raw: bool,
section: str,
source_name: str,
deprecated_options: dict[tuple[str, str], tuple[str, str, str]],
configs: Iterable[tuple[str, ConfigParser]],
include_env: bool,
include_cmds: bool,
include_secret: bool,
):
sect = config_sources.setdefault(section, OrderedDict())
if isinstance(config, AirflowConfigParser):
with config.suppress_future_warnings():
items = config.items(section=section, raw=raw)
else:
items = config.items(section=section, raw=raw)
for k, val in items:
deprecated_section, deprecated_key, _ = deprecated_options.get((section, k), (None, None, None))
if deprecated_section and deprecated_key:
if source_name == "default":
# If deprecated entry has some non-default value set for any of the sources requested,
# We should NOT set default for the new entry (because it will override anything
# coming from the deprecated ones)
if AirflowConfigParser._deprecated_value_is_set_in_config(
deprecated_section, deprecated_key, configs
):
continue
if include_env and AirflowConfigParser._deprecated_variable_is_set(
deprecated_section, deprecated_key
):
continue
if include_cmds and (
AirflowConfigParser._deprecated_variable_command_is_set(
deprecated_section, deprecated_key
)
or AirflowConfigParser._deprecated_command_is_set_in_config(
deprecated_section, deprecated_key, configs
)
):
continue
if include_secret and (
AirflowConfigParser._deprecated_variable_secret_is_set(
deprecated_section, deprecated_key
)
or AirflowConfigParser._deprecated_secret_is_set_in_config(
deprecated_section, deprecated_key, configs
)
):
continue
if display_source:
sect[k] = (val, source_name)
else:
sect[k] = val
def load_test_config(self):
"""
Load the unit test configuration.
Note: this is not reversible.
"""
# remove all sections, falling back to defaults
for section in self.sections():
self.remove_section(section)
# then read test config
path = _default_config_file_path("default_test.cfg")
log.info("Reading default test configuration from %s", path)
self.read_string(_parameterized_config_from_template("default_test.cfg"))
# then read any "custom" test settings
log.info("Reading test configuration from %s", TEST_CONFIG_FILE)
self.read(TEST_CONFIG_FILE)
@staticmethod
def _warn_deprecate(
section: str, key: str, deprecated_section: str, deprecated_name: str, extra_stacklevel: int
):
if section == deprecated_section:
warnings.warn(
f"The {deprecated_name} option in [{section}] has been renamed to {key} - "
f"the old setting has been used, but please update your config.",
DeprecationWarning,
stacklevel=4 + extra_stacklevel,
)
else:
warnings.warn(
f"The {deprecated_name} option in [{deprecated_section}] has been moved to the {key} option "
f"in [{section}] - the old setting has been used, but please update your config.",
DeprecationWarning,
stacklevel=4 + extra_stacklevel,
)
def __getstate__(self):
return {
name: getattr(self, name)
for name in [
"_sections",
"is_validated",
"airflow_defaults",
]
}
def __setstate__(self, state):
self.__init__()
config = state.pop("_sections")
self.read_dict(config)
self.__dict__.update(state)
def get_airflow_home() -> str:
"""Get path to Airflow Home."""
return expand_env_var(os.environ.get("AIRFLOW_HOME", "~/airflow"))
def get_airflow_config(airflow_home) -> str:
"""Get Path to airflow.cfg path."""
airflow_config_var = os.environ.get("AIRFLOW_CONFIG")
if airflow_config_var is None:
return os.path.join(airflow_home, "airflow.cfg")
return expand_env_var(airflow_config_var)
def _parameterized_config_from_template(filename) -> str:
TEMPLATE_START = "# ----------------------- TEMPLATE BEGINS HERE -----------------------\n"
path = _default_config_file_path(filename)
with open(path) as fh:
for line in fh:
if line != TEMPLATE_START:
continue
return parameterized_config(fh.read().strip())
raise RuntimeError(f"Template marker not found in {path!r}")
def parameterized_config(template) -> str:
"""
Generates configuration from provided template & variables defined in current scope.
:param template: a config content templated with {{variables}}
"""
all_vars = {k: v for d in [globals(), locals()] for k, v in d.items()}
return template.format(**all_vars)
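# --- Illustrative note (added for exposition) ---
# The shipped config templates use str.format-style placeholders resolved from this
# module's globals/locals, e.g. a template line such as ``dags_folder = {AIRFLOW_HOME}/dags``
# is rendered with the module-level AIRFLOW_HOME value.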
def get_airflow_test_config(airflow_home) -> str:
"""Get path to unittests.cfg."""
if "AIRFLOW_TEST_CONFIG" not in os.environ:
return os.path.join(airflow_home, "unittests.cfg")
# It will never return None
return expand_env_var(os.environ["AIRFLOW_TEST_CONFIG"]) # type: ignore[return-value]
def _generate_fernet_key() -> str:
from cryptography.fernet import Fernet
return Fernet.generate_key().decode()
def initialize_config() -> AirflowConfigParser:
"""
Load the Airflow config files.
Called for you automatically as part of the Airflow boot process.
"""
global FERNET_KEY, AIRFLOW_HOME, WEBSERVER_CONFIG
default_config = _parameterized_config_from_template("default_airflow.cfg")
local_conf = AirflowConfigParser(default_config=default_config)
if local_conf.getboolean("core", "unit_test_mode"):
# Load test config only
if not os.path.isfile(TEST_CONFIG_FILE):
from cryptography.fernet import Fernet
log.info("Creating new Airflow config file for unit tests in: %s", TEST_CONFIG_FILE)
pathlib.Path(AIRFLOW_HOME).mkdir(parents=True, exist_ok=True)
FERNET_KEY = Fernet.generate_key().decode()
with open(TEST_CONFIG_FILE, "w") as file:
cfg = _parameterized_config_from_template("default_test.cfg")
file.write(cfg)
make_group_other_inaccessible(TEST_CONFIG_FILE)
local_conf.load_test_config()
else:
# Load normal config
if not os.path.isfile(AIRFLOW_CONFIG):
from cryptography.fernet import Fernet
log.info("Creating new Airflow config file in: %s", AIRFLOW_CONFIG)
pathlib.Path(AIRFLOW_HOME).mkdir(parents=True, exist_ok=True)
FERNET_KEY = Fernet.generate_key().decode()
with open(AIRFLOW_CONFIG, "w") as file:
file.write(default_config)
make_group_other_inaccessible(AIRFLOW_CONFIG)
log.info("Reading the config from %s", AIRFLOW_CONFIG)
local_conf.read(AIRFLOW_CONFIG)
if local_conf.has_option("core", "AIRFLOW_HOME"):
msg = (
"Specifying both AIRFLOW_HOME environment variable and airflow_home "
"in the config file is deprecated. Please use only the AIRFLOW_HOME "
"environment variable and remove the config file entry."
)
if "AIRFLOW_HOME" in os.environ:
warnings.warn(msg, category=DeprecationWarning)
elif local_conf.get("core", "airflow_home") == AIRFLOW_HOME:
warnings.warn(
"Specifying airflow_home in the config file is deprecated. As you "
"have left it at the default value you should remove the setting "
"from your airflow.cfg and suffer no change in behaviour.",
category=DeprecationWarning,
)
else:
            # The config file sets a non-default airflow_home and the environment variable is
            # not set, so honour the config value but warn about the deprecation
AIRFLOW_HOME = local_conf.get("core", "airflow_home") # type: ignore[assignment]
warnings.warn(msg, category=DeprecationWarning)
# They _might_ have set unit_test_mode in the airflow.cfg, we still
# want to respect that and then load the unittests.cfg
if local_conf.getboolean("core", "unit_test_mode"):
local_conf.load_test_config()
WEBSERVER_CONFIG = local_conf.get("webserver", "config_file")
if not os.path.isfile(WEBSERVER_CONFIG):
import shutil
log.info("Creating new FAB webserver config file in: %s", WEBSERVER_CONFIG)
shutil.copy(_default_config_file_path("default_webserver_config.py"), WEBSERVER_CONFIG)
return local_conf
def make_group_other_inaccessible(file_path: str):
try:
permissions = os.stat(file_path)
os.chmod(file_path, permissions.st_mode & (stat.S_IRUSR | stat.S_IWUSR))
except Exception as e:
log.warning(
"Could not change permissions of config file to be group/other inaccessible. "
"Continuing with original permissions:",
e,
)
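# Example of the permission masking above (hypothetical starting mode): keeping only
# stat.S_IRUSR | stat.S_IWUSR drops every group/other bit, so e.g. 0o644 becomes 0o600.
#
#   >>> oct(0o644 & (stat.S_IRUSR | stat.S_IWUSR))
#   '0o600'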
# Historical convenience functions to access config entries
def load_test_config():
"""Historical load_test_config."""
warnings.warn(
"Accessing configuration method 'load_test_config' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.load_test_config'",
DeprecationWarning,
stacklevel=2,
)
conf.load_test_config()
def get(*args, **kwargs) -> ConfigType | None:
"""Historical get."""
warnings.warn(
"Accessing configuration method 'get' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.get'",
DeprecationWarning,
stacklevel=2,
)
return conf.get(*args, **kwargs)
def getboolean(*args, **kwargs) -> bool:
"""Historical getboolean."""
warnings.warn(
"Accessing configuration method 'getboolean' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getboolean'",
DeprecationWarning,
stacklevel=2,
)
return conf.getboolean(*args, **kwargs)
def getfloat(*args, **kwargs) -> float:
"""Historical getfloat."""
warnings.warn(
"Accessing configuration method 'getfloat' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getfloat'",
DeprecationWarning,
stacklevel=2,
)
return conf.getfloat(*args, **kwargs)
def getint(*args, **kwargs) -> int:
"""Historical getint."""
warnings.warn(
"Accessing configuration method 'getint' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getint'",
DeprecationWarning,
stacklevel=2,
)
return conf.getint(*args, **kwargs)
def getsection(*args, **kwargs) -> ConfigOptionsDictType | None:
"""Historical getsection."""
warnings.warn(
"Accessing configuration method 'getsection' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getsection'",
DeprecationWarning,
stacklevel=2,
)
return conf.getsection(*args, **kwargs)
def has_option(*args, **kwargs) -> bool:
"""Historical has_option."""
warnings.warn(
"Accessing configuration method 'has_option' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.has_option'",
DeprecationWarning,
stacklevel=2,
)
return conf.has_option(*args, **kwargs)
def remove_option(*args, **kwargs) -> bool:
"""Historical remove_option."""
warnings.warn(
"Accessing configuration method 'remove_option' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.remove_option'",
DeprecationWarning,
stacklevel=2,
)
return conf.remove_option(*args, **kwargs)
def as_dict(*args, **kwargs) -> ConfigSourcesType:
"""Historical as_dict."""
warnings.warn(
"Accessing configuration method 'as_dict' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.as_dict'",
DeprecationWarning,
stacklevel=2,
)
return conf.as_dict(*args, **kwargs)
def set(*args, **kwargs) -> None:
"""Historical set."""
warnings.warn(
"Accessing configuration method 'set' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.set'",
DeprecationWarning,
stacklevel=2,
)
conf.set(*args, **kwargs)
def ensure_secrets_loaded() -> list[BaseSecretsBackend]:
"""
Ensure that all secrets backends are loaded.
If the secrets_backend_list contains only 2 default backends, reload it.
"""
# Check if the secrets_backend_list contains only 2 default backends
if len(secrets_backend_list) == 2:
return initialize_secrets_backends()
return secrets_backend_list
def get_custom_secret_backend() -> BaseSecretsBackend | None:
"""Get Secret Backend if defined in airflow.cfg."""
secrets_backend_cls = conf.getimport(section="secrets", key="backend")
if not secrets_backend_cls:
return None
try:
backend_kwargs = conf.getjson(section="secrets", key="backend_kwargs")
if not backend_kwargs:
backend_kwargs = {}
elif not isinstance(backend_kwargs, dict):
raise ValueError("not a dict")
except AirflowConfigException:
log.warning("Failed to parse [secrets] backend_kwargs as JSON, defaulting to no kwargs.")
backend_kwargs = {}
except ValueError:
log.warning("Failed to parse [secrets] backend_kwargs into a dict, defaulting to no kwargs.")
backend_kwargs = {}
return secrets_backend_cls(**backend_kwargs)
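# Illustrative airflow.cfg snippet read by get_custom_secret_backend() (the class path and
# kwargs form one possible configuration, shown purely as an example):
#
#   [secrets]
#   backend = airflow.providers.hashicorp.secrets.vault.VaultBackend
#   backend_kwargs = {"url": "http://127.0.0.1:8200", "connections_path": "connections"}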
def initialize_secrets_backends() -> list[BaseSecretsBackend]:
"""
Initialize secrets backend.
* import secrets backend classes
* instantiate them and return them in a list
"""
backend_list = []
custom_secret_backend = get_custom_secret_backend()
if custom_secret_backend is not None:
backend_list.append(custom_secret_backend)
for class_name in DEFAULT_SECRETS_SEARCH_PATH:
secrets_backend_cls = import_string(class_name)
backend_list.append(secrets_backend_cls())
return backend_list
def initialize_auth_manager() -> BaseAuthManager:
"""
Initialize auth manager.
* import user manager class
* instantiate it and return it
"""
auth_manager_cls = conf.getimport(section="core", key="auth_manager")
if not auth_manager_cls:
raise AirflowConfigException(
"No auth manager defined in the config. "
"Please specify one using section/key [core/auth_manager]."
)
return auth_manager_cls()
@functools.lru_cache(maxsize=None)
def _DEFAULT_CONFIG() -> str:
path = _default_config_file_path("default_airflow.cfg")
with open(path) as fh:
return fh.read()
@functools.lru_cache(maxsize=None)
def _TEST_CONFIG() -> str:
path = _default_config_file_path("default_test.cfg")
with open(path) as fh:
return fh.read()
_deprecated = {
"DEFAULT_CONFIG": _DEFAULT_CONFIG,
"TEST_CONFIG": _TEST_CONFIG,
"TEST_CONFIG_FILE_PATH": functools.partial(_default_config_file_path, "default_test.cfg"),
"DEFAULT_CONFIG_FILE_PATH": functools.partial(_default_config_file_path, "default_airflow.cfg"),
}
def __getattr__(name):
if name in _deprecated:
warnings.warn(
f"{__name__}.{name} is deprecated and will be removed in future",
DeprecationWarning,
stacklevel=2,
)
return _deprecated[name]()
raise AttributeError(f"module {__name__} has no attribute {name}")
# Setting AIRFLOW_HOME and AIRFLOW_CONFIG from environment variables, using
# "~/airflow" and "$AIRFLOW_HOME/airflow.cfg" respectively as defaults.
AIRFLOW_HOME = get_airflow_home()
AIRFLOW_CONFIG = get_airflow_config(AIRFLOW_HOME)
# Set up dags folder for unit tests
# this directory won't exist if users install via pip
_TEST_DAGS_FOLDER = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "tests", "dags"
)
if os.path.exists(_TEST_DAGS_FOLDER):
TEST_DAGS_FOLDER = _TEST_DAGS_FOLDER
else:
TEST_DAGS_FOLDER = os.path.join(AIRFLOW_HOME, "dags")
# Set up plugins folder for unit tests
_TEST_PLUGINS_FOLDER = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "tests", "plugins"
)
if os.path.exists(_TEST_PLUGINS_FOLDER):
TEST_PLUGINS_FOLDER = _TEST_PLUGINS_FOLDER
else:
TEST_PLUGINS_FOLDER = os.path.join(AIRFLOW_HOME, "plugins")
TEST_CONFIG_FILE = get_airflow_test_config(AIRFLOW_HOME)
SECRET_KEY = b64encode(os.urandom(16)).decode("utf-8")
FERNET_KEY = "" # Set only if needed when generating a new file
WEBSERVER_CONFIG = "" # Set by initialize_config
conf = initialize_config()
secrets_backend_list = initialize_secrets_backends()
auth_manager = initialize_auth_manager()
conf.validate()
| 76,033 | 40.525942 | 109 | py |
airflow | airflow-main/airflow/version.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
# Compat -- somethings access `airflow.version.version` directly
from airflow import __version__ as version
__all__ = ["version"]
| 954 | 38.791667 | 64 | py |
airflow | airflow-main/airflow/providers_manager.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Manages all providers."""
from __future__ import annotations
import fnmatch
import functools
import json
import logging
import os
import sys
import warnings
from collections import OrderedDict
from dataclasses import dataclass
from functools import wraps
from time import perf_counter
from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, TypeVar, cast
from packaging.utils import canonicalize_name
from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.typing_compat import Literal
from airflow.utils import yaml
from airflow.utils.entry_points import entry_points_with_dist
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.module_loading import import_string
from airflow.utils.singleton import Singleton
log = logging.getLogger(__name__)
if sys.version_info >= (3, 9):
from importlib.resources import files as resource_files
else:
from importlib_resources import files as resource_files
MIN_PROVIDER_VERSIONS = {
"apache-airflow-providers-celery": "2.1.0",
}
def _ensure_prefix_for_placeholders(field_behaviors: dict[str, Any], conn_type: str):
"""
Verify the correct placeholder prefix.
    If the given field_behaviors dict contains a `placeholders` node, and there
are placeholders for extra fields (i.e. anything other than the built-in conn
attrs), and if those extra fields are unprefixed, then add the prefix.
The reason we need to do this is, all custom conn fields live in the same dictionary,
so we need to namespace them with a prefix internally. But for user convenience,
and consistency between the `get_ui_field_behaviour` method and the extra dict itself,
we allow users to supply the unprefixed name.
"""
conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
def ensure_prefix(field):
if field not in conn_attrs and not field.startswith("extra__"):
return f"extra__{conn_type}__{field}"
else:
return field
if "placeholders" in field_behaviors:
placeholders = field_behaviors["placeholders"]
field_behaviors["placeholders"] = {ensure_prefix(k): v for k, v in placeholders.items()}
return field_behaviors
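# Illustrative example (hypothetical conn_type "my_conn"): built-in connection attributes keep
# their names while extra-field placeholders get the "extra__<conn_type>__" prefix.
#
#   >>> _ensure_prefix_for_placeholders(
#   ...     {"placeholders": {"host": "example.com", "api_token": "token"}}, "my_conn"
#   ... )
#   {'placeholders': {'host': 'example.com', 'extra__my_conn__api_token': 'token'}}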
if TYPE_CHECKING:
from airflow.decorators.base import TaskDecorator
from airflow.hooks.base import BaseHook
class LazyDictWithCache(MutableMapping):
"""
Lazy-loaded cached dictionary.
    Dictionary which, if the value stored under a key is a callable, invokes that callable on
    first access of the key, then returns and caches the result in its place.
"""
__slots__ = ["_resolved", "_raw_dict"]
def __init__(self, *args, **kw):
self._resolved = set()
self._raw_dict = dict(*args, **kw)
def __setitem__(self, key, value):
self._raw_dict.__setitem__(key, value)
def __getitem__(self, key):
value = self._raw_dict.__getitem__(key)
if key not in self._resolved and callable(value):
# exchange callable with result of calling it -- but only once! allow resolver to return a
# callable itself
value = value()
self._resolved.add(key)
self._raw_dict.__setitem__(key, value)
return value
def __delitem__(self, key):
self._raw_dict.__delitem__(key)
try:
self._resolved.remove(key)
except KeyError:
pass
def __iter__(self):
return iter(self._raw_dict)
def __len__(self):
return len(self._raw_dict)
def __contains__(self, key):
return key in self._raw_dict
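# Illustrative usage (made-up values): a callable stored as a value is invoked once, on first
# access of its key, and the returned result replaces it in the cache.
#
#   >>> d = LazyDictWithCache()
#   >>> d["answer"] = lambda: 42   # nothing happens yet
#   >>> d["answer"]                # resolved and cached on first access
#   42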
def _read_schema_from_resources_or_local_file(filename: str) -> dict:
try:
with resource_files("airflow").joinpath(filename).open("rb") as f:
schema = json.load(f)
except FileNotFoundError:
import pathlib
with (pathlib.Path(__file__).parent / filename).open("rb") as f:
schema = json.load(f)
return schema
def _create_provider_info_schema_validator():
"""Creates JSON schema validator from the provider_info.schema.json."""
import jsonschema
schema = _read_schema_from_resources_or_local_file("provider_info.schema.json")
cls = jsonschema.validators.validator_for(schema)
validator = cls(schema)
return validator
def _create_customized_form_field_behaviours_schema_validator():
"""Creates JSON schema validator from the customized_form_field_behaviours.schema.json."""
import jsonschema
schema = _read_schema_from_resources_or_local_file("customized_form_field_behaviours.schema.json")
cls = jsonschema.validators.validator_for(schema)
validator = cls(schema)
return validator
def _check_builtin_provider_prefix(provider_package: str, class_name: str) -> bool:
if provider_package.startswith("apache-airflow"):
provider_path = provider_package[len("apache-") :].replace("-", ".")
if not class_name.startswith(provider_path):
log.warning(
"Coherence check failed when importing '%s' from '%s' package. It should start with '%s'",
class_name,
provider_package,
provider_path,
)
return False
return True
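# Illustrative example (a real provider used purely as an example): for builtin providers the
# class path must start with the package-derived prefix, here "airflow.providers.http".
#
#   >>> _check_builtin_provider_prefix(
#   ...     "apache-airflow-providers-http", "airflow.providers.http.hooks.http.HttpHook"
#   ... )
#   True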
@dataclass
class ProviderInfo:
"""
Provider information.
:param version: version string
:param data: dictionary with information about the provider
    :param package_or_source: whether the provider comes from source files or a PyPI package. When installed
        from sources we suppress provider import errors.
"""
version: str
data: dict
package_or_source: Literal["source"] | Literal["package"]
def __post_init__(self):
if self.package_or_source not in ("source", "package"):
raise ValueError(
f"Received {self.package_or_source!r} for `package_or_source`. "
"Must be either 'package' or 'source'."
)
self.is_source = self.package_or_source == "source"
class HookClassProvider(NamedTuple):
"""Hook class and Provider it comes from."""
hook_class_name: str
package_name: str
class TriggerInfo(NamedTuple):
"""Trigger class and provider it comes from."""
trigger_class_name: str
package_name: str
integration_name: str
class HookInfo(NamedTuple):
"""Hook information."""
hook_class_name: str
connection_id_attribute_name: str
package_name: str
hook_name: str
connection_type: str
connection_testable: bool
class ConnectionFormWidgetInfo(NamedTuple):
"""Connection Form Widget information."""
hook_class_name: str
package_name: str
field: Any
field_name: str
is_sensitive: bool
T = TypeVar("T", bound=Callable)
logger = logging.getLogger(__name__)
def log_debug_import_from_sources(class_name, e, provider_package):
"""Log debug imports from sources."""
log.debug(
"Optional feature disabled on exception when importing '%s' from '%s' package",
class_name,
provider_package,
exc_info=e,
)
def log_optional_feature_disabled(class_name, e, provider_package):
"""Log optional feature disabled."""
log.debug(
"Optional feature disabled on exception when importing '%s' from '%s' package",
class_name,
provider_package,
exc_info=e,
)
log.info(
"Optional provider feature disabled when importing '%s' from '%s' package",
class_name,
provider_package,
)
def log_import_warning(class_name, e, provider_package):
"""Log import warning."""
log.warning(
"Exception when importing '%s' from '%s' package",
class_name,
provider_package,
exc_info=e,
)
# This is a temporary measure until all community providers add AirflowOptionalProviderFeatureException
# where they have optional features. We are going to add tests in our CI to catch all such cases and will
# fix them, but until then all "known unhandled optional feature errors" from community providers
# should be added here
KNOWN_UNHANDLED_OPTIONAL_FEATURE_ERRORS = [("apache-airflow-providers-google", "No module named 'paramiko'")]
def _correctness_check(
provider_package: str, class_name: str, provider_info: ProviderInfo
) -> type[BaseHook] | None:
"""
Performs coherence check on provider classes.
For apache-airflow providers - it checks if it starts with appropriate package. For all providers
it tries to import the provider - checking that there are no exceptions during importing.
It logs appropriate warning in case it detects any problems.
:param provider_package: name of the provider package
:param class_name: name of the class to import
    :return: the class if the class is OK, None otherwise.
"""
if not _check_builtin_provider_prefix(provider_package, class_name):
return None
try:
imported_class = import_string(class_name)
except AirflowOptionalProviderFeatureException as e:
# When the provider class raises AirflowOptionalProviderFeatureException
# this is an expected case when only some classes in provider are
# available. We just log debug level here and print info message in logs so that
# the user is aware of it
log_optional_feature_disabled(class_name, e, provider_package)
return None
except ImportError as e:
if provider_info.is_source:
# When we have providers from sources, then we just turn all import logs to debug logs
# As this is pretty expected that you have a number of dependencies not installed
# (we always have all providers from sources until we split providers to separate repo)
log_debug_import_from_sources(class_name, e, provider_package)
return None
if "No module named 'airflow.providers." in e.msg:
# handle cases where another provider is missing. This can only happen if
# there is an optional feature, so we log debug and print information about it
log_optional_feature_disabled(class_name, e, provider_package)
return None
for known_error in KNOWN_UNHANDLED_OPTIONAL_FEATURE_ERRORS:
# Until we convert all providers to use AirflowOptionalProviderFeatureException
# we assume any problem with importing another "provider" is because this is an
# optional feature, so we log debug and print information about it
if known_error[0] == provider_package and known_error[1] in e.msg:
log_optional_feature_disabled(class_name, e, provider_package)
return None
# But when we have no idea - we print warning to logs
log_import_warning(class_name, e, provider_package)
return None
except Exception as e:
log_import_warning(class_name, e, provider_package)
return None
return imported_class
# We want to have better control over initialization of parameters and be able to debug and test it
# So we add our own decorator
def provider_info_cache(cache_name: str) -> Callable[[T], T]:
"""
Decorate and cache provider info.
    Decorator factory that creates a decorator which caches initialization of the provider's parameters.
:param cache_name: Name of the cache
"""
def provider_info_cache_decorator(func: T):
@wraps(func)
def wrapped_function(*args, **kwargs):
providers_manager_instance = args[0]
if cache_name in providers_manager_instance._initialized_cache:
return
start_time = perf_counter()
logger.debug("Initializing Providers Manager[%s]", cache_name)
func(*args, **kwargs)
providers_manager_instance._initialized_cache[cache_name] = True
logger.debug(
"Initialization of Providers Manager[%s] took %.2f seconds",
cache_name,
perf_counter() - start_time,
)
return cast(T, wrapped_function)
return provider_info_cache_decorator
class ProvidersManager(LoggingMixin, metaclass=Singleton):
"""
Manages all provider packages.
This is a Singleton class. The first time it is
instantiated, it discovers all available providers in installed packages and
local source folders (if airflow is run from sources).
"""
resource_version = "0"
def __init__(self):
"""Initializes the manager."""
super().__init__()
self._initialized_cache: dict[str, bool] = {}
# Keeps dict of providers keyed by module name
self._provider_dict: dict[str, ProviderInfo] = {}
# Keeps dict of hooks keyed by connection type
self._hooks_dict: dict[str, HookInfo] = {}
self._taskflow_decorators: dict[str, Callable] = LazyDictWithCache()
# keeps mapping between connection_types and hook class, package they come from
self._hook_provider_dict: dict[str, HookClassProvider] = {}
# Keeps dict of hooks keyed by connection type. They are lazy evaluated at access time
self._hooks_lazy_dict: LazyDictWithCache[str, HookInfo | Callable] = LazyDictWithCache()
# Keeps methods that should be used to add custom widgets tuple of keyed by name of the extra field
self._connection_form_widgets: dict[str, ConnectionFormWidgetInfo] = {}
# Customizations for javascript fields are kept here
self._field_behaviours: dict[str, dict] = {}
self._extra_link_class_name_set: set[str] = set()
self._logging_class_name_set: set[str] = set()
self._secrets_backend_class_name_set: set[str] = set()
self._executor_class_name_set: set[str] = set()
self._provider_configs: dict[str, dict[str, Any]] = {}
self._api_auth_backend_module_names: set[str] = set()
self._trigger_info_set: set[TriggerInfo] = set()
self._provider_schema_validator = _create_provider_info_schema_validator()
self._customized_form_fields_schema_validator = (
_create_customized_form_field_behaviours_schema_validator()
)
@provider_info_cache("list")
def initialize_providers_list(self):
"""Lazy initialization of providers list."""
# Local source folders are loaded first. They should take precedence over the package ones for
        # development purposes. In production, provider.yaml files are not present in the "airflow" directory,
        # so there is no risk we are going to override a package provider accidentally. This can only happen
        # in case of local development.
self._discover_all_airflow_builtin_providers_from_local_sources()
self._discover_all_providers_from_packages()
self._verify_all_providers_all_compatible()
self._provider_dict = OrderedDict(sorted(self._provider_dict.items()))
def _verify_all_providers_all_compatible(self):
from packaging import version as packaging_version
for provider_id, info in self._provider_dict.items():
min_version = MIN_PROVIDER_VERSIONS.get(provider_id)
if min_version:
if packaging_version.parse(min_version) > packaging_version.parse(info.version):
log.warning(
"The package %s is not compatible with this version of Airflow. "
"The package has version %s but the minimum supported version "
"of the package is %s",
provider_id,
info.version,
min_version,
)
@provider_info_cache("hooks")
def initialize_providers_hooks(self):
"""Lazy initialization of providers hooks."""
self.initialize_providers_list()
self._discover_hooks()
self._hook_provider_dict = OrderedDict(sorted(self._hook_provider_dict.items()))
@provider_info_cache("taskflow_decorators")
def initialize_providers_taskflow_decorator(self):
"""Lazy initialization of providers hooks."""
self.initialize_providers_list()
self._discover_taskflow_decorators()
@provider_info_cache("extra_links")
def initialize_providers_extra_links(self):
"""Lazy initialization of providers extra links."""
self.initialize_providers_list()
self._discover_extra_links()
@provider_info_cache("logging")
def initialize_providers_logging(self):
"""Lazy initialization of providers logging information."""
self.initialize_providers_list()
self._discover_logging()
@provider_info_cache("secrets_backends")
def initialize_providers_secrets_backends(self):
"""Lazy initialization of providers secrets_backends information."""
self.initialize_providers_list()
self._discover_secrets_backends()
@provider_info_cache("executors")
def initialize_providers_executors(self):
"""Lazy initialization of providers executors information."""
self.initialize_providers_list()
self._discover_executors()
@provider_info_cache("config")
def initialize_providers_configuration(self):
"""Lazy initialization of providers configuration information."""
self.initialize_providers_list()
self._discover_config()
# Now update conf with the new provider configuration from providers
from airflow.configuration import conf
conf.load_provider_configuration()
@provider_info_cache("auth_backends")
def initialize_providers_auth_backends(self):
"""Lazy initialization of providers API auth_backends information."""
self.initialize_providers_list()
self._discover_auth_backends()
def _discover_all_providers_from_packages(self) -> None:
"""
Discover all providers by scanning packages installed.
The list of providers should be returned via the 'apache_airflow_provider'
entrypoint as a dictionary conforming to the 'airflow/provider_info.schema.json'
schema. Note that the schema is different at runtime than provider.yaml.schema.json.
The development version of provider schema is more strict and changes together with
the code. The runtime version is more relaxed (allows for additional properties)
and verifies only the subset of fields that are needed at runtime.
"""
for entry_point, dist in entry_points_with_dist("apache_airflow_provider"):
package_name = canonicalize_name(dist.metadata["name"])
if package_name in self._provider_dict:
continue
log.debug("Loading %s from package %s", entry_point, package_name)
version = dist.version
provider_info = entry_point.load()()
self._provider_schema_validator.validate(provider_info)
provider_info_package_name = provider_info["package-name"]
if package_name != provider_info_package_name:
raise Exception(
f"The package '{package_name}' from setuptools and "
f"{provider_info_package_name} do not match. Please make sure they are aligned"
)
if package_name not in self._provider_dict:
self._provider_dict[package_name] = ProviderInfo(version, provider_info, "package")
else:
log.warning(
"The provider for package '%s' could not be registered from because providers for that "
"package name have already been registered",
package_name,
)
def _discover_all_airflow_builtin_providers_from_local_sources(self) -> None:
"""
Finds all built-in airflow providers if airflow is run from the local sources.
It finds `provider.yaml` files for all such providers and registers the providers using those.
        This 'provider.yaml' scanning takes precedence over scanning installed packages:
        if you have both sources and packages installed, the providers will be loaded from
the "airflow" sources rather than from the packages.
"""
try:
import airflow.providers
except ImportError:
log.info("You have no providers installed.")
return
try:
seen = set()
for path in airflow.providers.__path__: # type: ignore[attr-defined]
# The same path can appear in the __path__ twice, under non-normalized paths (ie.
# /path/to/repo/airflow/providers and /path/to/repo/./airflow/providers)
path = os.path.realpath(path)
if path in seen:
continue
seen.add(path)
self._add_provider_info_from_local_source_files_on_path(path)
except Exception as e:
log.warning("Error when loading 'provider.yaml' files from airflow sources: %s", e)
def _add_provider_info_from_local_source_files_on_path(self, path) -> None:
"""
Finds all the provider.yaml files in the directory specified.
:param path: path where to look for provider.yaml files
"""
root_path = path
for folder, subdirs, files in os.walk(path, topdown=True):
for filename in fnmatch.filter(files, "provider.yaml"):
package_name = "apache-airflow-providers" + folder[len(root_path) :].replace(os.sep, "-")
self._add_provider_info_from_local_source_file(os.path.join(folder, filename), package_name)
subdirs[:] = []
def _add_provider_info_from_local_source_file(self, path, package_name) -> None:
"""
Parses found provider.yaml file and adds found provider to the dictionary.
:param path: full file path of the provider.yaml file
:param package_name: name of the package
"""
try:
log.debug("Loading %s from %s", package_name, path)
with open(path) as provider_yaml_file:
provider_info = yaml.safe_load(provider_yaml_file)
self._provider_schema_validator.validate(provider_info)
version = provider_info["versions"][0]
if package_name not in self._provider_dict:
self._provider_dict[package_name] = ProviderInfo(version, provider_info, "source")
else:
log.warning(
"The providers for package '%s' could not be registered because providers for that "
"package name have already been registered",
package_name,
)
except Exception as e:
log.warning("Error when loading '%s'", path, exc_info=e)
def _discover_hooks_from_connection_types(
self,
hook_class_names_registered: set[str],
already_registered_warning_connection_types: set[str],
package_name: str,
provider: ProviderInfo,
):
"""
Discover hooks from the "connection-types" property.
        This is the newer, preferred method that replaces discovery from hook-class-names, as it
        allows lazy importing of individual Hook classes when they are first accessed.
        The "connection-types" entries keep information about both the connection type and the class
        name, so we can discover all connection types without importing the classes.
:param hook_class_names_registered: set of registered hook class names for this provider
:param already_registered_warning_connection_types: set of connections for which warning should be
printed in logs as they were already registered before
        :param package_name: name of the provider package
        :param provider: class that keeps information about version and details of the provider
        :return: True if the provider defines any "connection-types", False otherwise
"""
provider_uses_connection_types = False
connection_types = provider.data.get("connection-types")
if connection_types:
for connection_type_dict in connection_types:
connection_type = connection_type_dict["connection-type"]
hook_class_name = connection_type_dict["hook-class-name"]
hook_class_names_registered.add(hook_class_name)
already_registered = self._hook_provider_dict.get(connection_type)
if already_registered:
if already_registered.package_name != package_name:
already_registered_warning_connection_types.add(connection_type)
else:
log.warning(
"The connection type '%s' is already registered in the"
" package '%s' with different class names: '%s' and '%s'. ",
connection_type,
package_name,
already_registered.hook_class_name,
hook_class_name,
)
else:
self._hook_provider_dict[connection_type] = HookClassProvider(
hook_class_name=hook_class_name, package_name=package_name
)
# Defer importing hook to access time by setting import hook method as dict value
self._hooks_lazy_dict[connection_type] = functools.partial(
self._import_hook,
connection_type=connection_type,
provider_info=provider,
)
provider_uses_connection_types = True
return provider_uses_connection_types
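    # Illustrative shape of the "connection-types" provider metadata consumed above (an excerpt
    # modelled on the postgres provider; treat it as an example, not a specification):
    #
    #   connection-types:
    #     - connection-type: postgres
    #       hook-class-name: airflow.providers.postgres.hooks.postgres.PostgresHook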
def _discover_hooks_from_hook_class_names(
self,
hook_class_names_registered: set[str],
already_registered_warning_connection_types: set[str],
package_name: str,
provider: ProviderInfo,
provider_uses_connection_types: bool,
):
"""
Discover hooks from "hook-class-names' property.
This property is deprecated but we should support it in Airflow 2.
The hook-class-names array contained just Hook names without connection type,
therefore we need to import all those classes immediately to know which connection types
are supported. This makes it impossible to selectively only import those hooks that are used.
:param already_registered_warning_connection_types: list of connection hooks that we should warn
about when finished discovery
:param package_name: name of the provider package
:param provider: class that keeps information about version and details of the provider
:param provider_uses_connection_types: determines whether the provider uses "connection-types" new
form of passing connection types
:return:
"""
hook_class_names = provider.data.get("hook-class-names")
if hook_class_names:
for hook_class_name in hook_class_names:
if hook_class_name in hook_class_names_registered:
# Silently ignore the hook class - it's already marked for lazy-import by
# connection-types discovery
continue
hook_info = self._import_hook(
connection_type=None,
provider_info=provider,
hook_class_name=hook_class_name,
package_name=package_name,
)
if not hook_info:
                    # Problem when importing the class - we ignore it. A log line was written at import time
continue
already_registered = self._hook_provider_dict.get(hook_info.connection_type)
if already_registered:
if already_registered.package_name != package_name:
already_registered_warning_connection_types.add(hook_info.connection_type)
else:
if already_registered.hook_class_name != hook_class_name:
log.warning(
"The hook connection type '%s' is registered twice in the"
" package '%s' with different class names: '%s' and '%s'. "
" Please fix it!",
hook_info.connection_type,
package_name,
already_registered.hook_class_name,
hook_class_name,
)
else:
self._hook_provider_dict[hook_info.connection_type] = HookClassProvider(
hook_class_name=hook_class_name, package_name=package_name
)
self._hooks_lazy_dict[hook_info.connection_type] = hook_info
if not provider_uses_connection_types:
warnings.warn(
f"The provider {package_name} uses `hook-class-names` "
"property in provider-info and has no `connection-types` one. "
"The 'hook-class-names' property has been deprecated in favour "
"of 'connection-types' in Airflow 2.2. Use **both** in case you want to "
"have backwards compatibility with Airflow < 2.2",
DeprecationWarning,
)
for already_registered_connection_type in already_registered_warning_connection_types:
log.warning(
"The connection_type '%s' has been already registered by provider '%s.'",
already_registered_connection_type,
self._hook_provider_dict[already_registered_connection_type].package_name,
)
def _discover_hooks(self) -> None:
"""Retrieve all connections defined in the providers via Hooks."""
for package_name, provider in self._provider_dict.items():
duplicated_connection_types: set[str] = set()
hook_class_names_registered: set[str] = set()
provider_uses_connection_types = self._discover_hooks_from_connection_types(
hook_class_names_registered, duplicated_connection_types, package_name, provider
)
self._discover_hooks_from_hook_class_names(
hook_class_names_registered,
duplicated_connection_types,
package_name,
provider,
provider_uses_connection_types,
)
self._hook_provider_dict = OrderedDict(sorted(self._hook_provider_dict.items()))
@provider_info_cache("import_all_hooks")
def _import_info_from_all_hooks(self):
"""Force-import all hooks and initialize the connections/fields."""
# Retrieve all hooks to make sure that all of them are imported
_ = list(self._hooks_lazy_dict.values())
self._field_behaviours = OrderedDict(sorted(self._field_behaviours.items()))
# Widgets for connection forms are currently used in two places:
        # 1. In the UI Connections form, where they are expected in the same order as defined in the Hook.
        # 2. In the CLI command `airflow providers widgets`, where they are expected in alphabetical order.
        # It is not possible to recover the original ordering after sorting, which is the main reason
        # the sorting was moved to the CLI part:
# self._connection_form_widgets = OrderedDict(sorted(self._connection_form_widgets.items()))
def _discover_taskflow_decorators(self) -> None:
for name, info in self._provider_dict.items():
for taskflow_decorator in info.data.get("task-decorators", []):
self._add_taskflow_decorator(
taskflow_decorator["name"], taskflow_decorator["class-name"], name
)
def _add_taskflow_decorator(self, name, decorator_class_name: str, provider_package: str) -> None:
if not _check_builtin_provider_prefix(provider_package, decorator_class_name):
return
if name in self._taskflow_decorators:
try:
existing = self._taskflow_decorators[name]
other_name = f"{existing.__module__}.{existing.__name__}"
except Exception:
# If problem importing, then get the value from the functools.partial
other_name = self._taskflow_decorators._raw_dict[name].args[0] # type: ignore[attr-defined]
log.warning(
"The taskflow decorator '%s' has been already registered (by %s).",
name,
other_name,
)
return
self._taskflow_decorators[name] = functools.partial(import_string, decorator_class_name)
@staticmethod
def _get_attr(obj: Any, attr_name: str):
"""Retrieve attributes of an object, or warn if not found."""
if not hasattr(obj, attr_name):
log.warning("The object '%s' is missing %s attribute and cannot be registered", obj, attr_name)
return None
return getattr(obj, attr_name)
def _import_hook(
self,
connection_type: str | None,
provider_info: ProviderInfo,
hook_class_name: str | None = None,
package_name: str | None = None,
) -> HookInfo | None:
"""
Import hook and retrieve hook information.
        Either connection_type (for lazy loading) or hook_class_name must be set - but not both.
        Only needs package_name if hook_class_name is passed (for lazy loading, package_name
        is retrieved from _hook_provider_dict together with hook_class_name).
:param connection_type: type of the connection
:param hook_class_name: name of the hook class
        :param package_name: provider package - only needed in case connection_type is missing
        :param provider_info: information about the provider the hook comes from
        :return: HookInfo if the hook was imported successfully, None otherwise
"""
from wtforms import BooleanField, IntegerField, PasswordField, StringField
if connection_type is None and hook_class_name is None:
raise ValueError("Either connection_type or hook_class_name must be set")
if connection_type is not None and hook_class_name is not None:
raise ValueError(
f"Both connection_type ({connection_type} and "
f"hook_class_name {hook_class_name} are set. Only one should be set!"
)
if connection_type is not None:
class_provider = self._hook_provider_dict[connection_type]
package_name = class_provider.package_name
hook_class_name = class_provider.hook_class_name
else:
if not hook_class_name:
raise ValueError("Either connection_type or hook_class_name must be set")
if not package_name:
raise ValueError(
f"Provider package name is not set when hook_class_name ({hook_class_name}) is used"
)
allowed_field_classes = [IntegerField, PasswordField, StringField, BooleanField]
hook_class = _correctness_check(package_name, hook_class_name, provider_info)
if hook_class is None:
return None
try:
module, class_name = hook_class_name.rsplit(".", maxsplit=1)
# Do not use attr here. We want to check only direct class fields not those
# inherited from parent hook. This way we add form fields only once for the whole
# hierarchy and we add it only from the parent hook that provides those!
if "get_connection_form_widgets" in hook_class.__dict__:
widgets = hook_class.get_connection_form_widgets()
if widgets:
for widget in widgets.values():
if widget.field_class not in allowed_field_classes:
log.warning(
"The hook_class '%s' uses field of unsupported class '%s'. "
"Only '%s' field classes are supported",
hook_class_name,
widget.field_class,
allowed_field_classes,
)
return None
self._add_widgets(package_name, hook_class, widgets)
if "get_ui_field_behaviour" in hook_class.__dict__:
field_behaviours = hook_class.get_ui_field_behaviour()
if field_behaviours:
self._add_customized_fields(package_name, hook_class, field_behaviours)
except Exception as e:
log.warning(
"Exception when importing '%s' from '%s' package: %s",
hook_class_name,
package_name,
e,
)
return None
hook_connection_type = self._get_attr(hook_class, "conn_type")
if connection_type:
if hook_connection_type != connection_type:
log.warning(
"Inconsistency! The hook class '%s' declares connection type '%s'"
" but it is added by provider '%s' as connection_type '%s' in provider info. "
"This should be fixed!",
hook_class,
hook_connection_type,
package_name,
connection_type,
)
connection_type = hook_connection_type
connection_id_attribute_name: str = self._get_attr(hook_class, "conn_name_attr")
hook_name: str = self._get_attr(hook_class, "hook_name")
if not connection_type or not connection_id_attribute_name or not hook_name:
log.warning(
"The hook misses one of the key attributes: "
"conn_type: %s, conn_id_attribute_name: %s, hook_name: %s",
connection_type,
connection_id_attribute_name,
hook_name,
)
return None
return HookInfo(
hook_class_name=hook_class_name,
connection_id_attribute_name=connection_id_attribute_name,
package_name=package_name,
hook_name=hook_name,
connection_type=connection_type,
connection_testable=hasattr(hook_class, "test_connection"),
)
def _add_widgets(self, package_name: str, hook_class: type, widgets: dict[str, Any]):
conn_type = hook_class.conn_type # type: ignore
for field_identifier, field in widgets.items():
if field_identifier.startswith("extra__"):
prefixed_field_name = field_identifier
else:
prefixed_field_name = f"extra__{conn_type}__{field_identifier}"
if prefixed_field_name in self._connection_form_widgets:
log.warning(
"The field %s from class %s has already been added by another provider. Ignoring it.",
field_identifier,
hook_class.__name__,
)
# In case of inherited hooks this might be happening several times
continue
self._connection_form_widgets[prefixed_field_name] = ConnectionFormWidgetInfo(
hook_class.__name__,
package_name,
field,
field_identifier,
hasattr(field.field_class.widget, "input_type")
and field.field_class.widget.input_type == "password",
)
def _add_customized_fields(self, package_name: str, hook_class: type, customized_fields: dict):
try:
connection_type = getattr(hook_class, "conn_type")
self._customized_form_fields_schema_validator.validate(customized_fields)
if connection_type:
customized_fields = _ensure_prefix_for_placeholders(customized_fields, connection_type)
if connection_type in self._field_behaviours:
log.warning(
"The connection_type %s from package %s and class %s has already been added "
"by another provider. Ignoring it.",
connection_type,
package_name,
hook_class.__name__,
)
return
self._field_behaviours[connection_type] = customized_fields
except Exception as e:
log.warning(
"Error when loading customized fields from package '%s' hook class '%s': %s",
package_name,
hook_class.__name__,
e,
)
def _discover_extra_links(self) -> None:
"""Retrieves all extra links defined in the providers."""
for provider_package, provider in self._provider_dict.items():
if provider.data.get("extra-links"):
for extra_link_class_name in provider.data["extra-links"]:
if _correctness_check(provider_package, extra_link_class_name, provider):
self._extra_link_class_name_set.add(extra_link_class_name)
def _discover_logging(self) -> None:
"""Retrieve all logging defined in the providers."""
for provider_package, provider in self._provider_dict.items():
if provider.data.get("logging"):
for logging_class_name in provider.data["logging"]:
if _correctness_check(provider_package, logging_class_name, provider):
self._logging_class_name_set.add(logging_class_name)
def _discover_secrets_backends(self) -> None:
"""Retrieve all secrets backends defined in the providers."""
for provider_package, provider in self._provider_dict.items():
if provider.data.get("secrets-backends"):
for secrets_backends_class_name in provider.data["secrets-backends"]:
if _correctness_check(provider_package, secrets_backends_class_name, provider):
self._secrets_backend_class_name_set.add(secrets_backends_class_name)
def _discover_auth_backends(self) -> None:
"""Retrieve all API auth backends defined in the providers."""
for provider_package, provider in self._provider_dict.items():
if provider.data.get("auth-backends"):
for auth_backend_module_name in provider.data["auth-backends"]:
if _correctness_check(provider_package, auth_backend_module_name + ".init_app", provider):
self._api_auth_backend_module_names.add(auth_backend_module_name)
def _discover_executors(self) -> None:
"""Retrieve all executors defined in the providers."""
for provider_package, provider in self._provider_dict.items():
if provider.data.get("executors"):
for executors_class_name in provider.data["executors"]:
if _correctness_check(provider_package, executors_class_name, provider):
self._executor_class_name_set.add(executors_class_name)
def _discover_config(self) -> None:
"""Retrieve all configs defined in the providers."""
for provider_package, provider in self._provider_dict.items():
if provider.data.get("config"):
self._provider_configs[provider_package] = provider.data.get("config")
@provider_info_cache("triggers")
def initialize_providers_triggers(self):
"""Initialization of providers triggers."""
self.initialize_providers_list()
for provider_package, provider in self._provider_dict.items():
for trigger in provider.data.get("triggers", []):
for trigger_class_name in trigger.get("python-modules"):
self._trigger_info_set.add(
TriggerInfo(
package_name=provider_package,
trigger_class_name=trigger_class_name,
integration_name=trigger.get("integration-name", ""),
)
)
@property
def trigger(self) -> list[TriggerInfo]:
"""Returns information about available providers trigger class."""
self.initialize_providers_triggers()
return sorted(self._trigger_info_set, key=lambda x: x.package_name)
@property
def providers(self) -> dict[str, ProviderInfo]:
"""Returns information about available providers."""
self.initialize_providers_list()
return self._provider_dict
@property
def hooks(self) -> MutableMapping[str, HookInfo | None]:
"""
Return dictionary of connection_type-to-hook mapping.
        Note that the dict can contain None values if a discovered hook cannot be imported!
"""
self.initialize_providers_hooks()
# When we return hooks here it will only be used to retrieve hook information
return self._hooks_lazy_dict
@property
def taskflow_decorators(self) -> dict[str, TaskDecorator]:
self.initialize_providers_taskflow_decorator()
return self._taskflow_decorators
@property
def extra_links_class_names(self) -> list[str]:
"""Returns set of extra link class names."""
self.initialize_providers_extra_links()
return sorted(self._extra_link_class_name_set)
@property
def connection_form_widgets(self) -> dict[str, ConnectionFormWidgetInfo]:
"""
Returns widgets for connection forms.
        Dictionary keys are in the same order as they are defined in the Hook.
"""
self.initialize_providers_hooks()
self._import_info_from_all_hooks()
return self._connection_form_widgets
@property
def field_behaviours(self) -> dict[str, dict]:
"""Returns dictionary with field behaviours for connection types."""
self.initialize_providers_hooks()
self._import_info_from_all_hooks()
return self._field_behaviours
@property
def logging_class_names(self) -> list[str]:
"""Returns set of log task handlers class names."""
self.initialize_providers_logging()
return sorted(self._logging_class_name_set)
@property
def secrets_backend_class_names(self) -> list[str]:
"""Returns set of secret backend class names."""
self.initialize_providers_secrets_backends()
return sorted(self._secrets_backend_class_name_set)
@property
def auth_backend_module_names(self) -> list[str]:
"""Returns set of API auth backend class names."""
self.initialize_providers_auth_backends()
return sorted(self._api_auth_backend_module_names)
@property
def executor_class_names(self) -> list[str]:
self.initialize_providers_executors()
return sorted(self._executor_class_name_set)
@property
def provider_configs(self) -> list[tuple[str, dict[str, Any]]]:
self.initialize_providers_configuration()
return sorted(self._provider_configs.items(), key=lambda x: x[0])
@property
def already_initialized_provider_configs(self) -> list[tuple[str, dict[str, Any]]]:
return sorted(self._provider_configs.items(), key=lambda x: x[0])
| 48,184 | 43.165903 | 110 | py |
airflow | airflow-main/airflow/policies.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
import pluggy
local_settings_hookspec = pluggy.HookspecMarker("airflow.policy")
hookimpl = pluggy.HookimplMarker("airflow.policy")
__all__: list[str] = ["hookimpl"]
if TYPE_CHECKING:
from airflow.models.baseoperator import BaseOperator
from airflow.models.dag import DAG
from airflow.models.taskinstance import TaskInstance
@local_settings_hookspec
def task_policy(task: BaseOperator) -> None:
"""
This policy setting allows altering tasks after they are loaded in the DagBag.
    It allows administrators to rewire some of the task's parameters. Alternatively you can raise
``AirflowClusterPolicyViolation`` exception to stop DAG from being executed.
Here are a few examples of how this can be useful:
* You could enforce a specific queue (say the ``spark`` queue) for tasks using the ``SparkOperator`` to
make sure that these tasks get wired to the right workers
* You could enforce a task timeout policy, making sure that no tasks run for more than 48 hours
:param task: task to be mutated
"""
@local_settings_hookspec
def dag_policy(dag: DAG) -> None:
"""
This policy setting allows altering DAGs after they are loaded in the DagBag.
    It allows administrators to rewire some of the DAG's parameters.
Alternatively you can raise ``AirflowClusterPolicyViolation`` exception
to stop DAG from being executed.
Here are a few examples of how this can be useful:
* You could enforce default user for DAGs
* Check if every DAG has configured tags
:param dag: dag to be mutated
"""
@local_settings_hookspec
def task_instance_mutation_hook(task_instance: TaskInstance) -> None:
"""
This setting allows altering task instances before being queued by the Airflow scheduler.
This could be used, for instance, to modify the task instance during retries.
:param task_instance: task instance to be mutated
"""
@local_settings_hookspec
def pod_mutation_hook(pod) -> None:
"""
Mutate pod before scheduling.
This setting allows altering ``kubernetes.client.models.V1Pod`` object before they are passed to the
Kubernetes client for scheduling.
This could be used, for instance, to add sidecar or init containers to every worker pod launched by
KubernetesExecutor or KubernetesPodOperator.
"""
@local_settings_hookspec(firstresult=True)
def get_airflow_context_vars(context) -> dict[str, str]: # type: ignore[empty-body]
"""
Inject airflow context vars into default airflow context vars.
This setting allows getting the airflow context vars, which are key value pairs. They are then injected
    to default airflow context vars, which in the end are available as environment variables when running
    tasks. dag_id, task_id, execution_date, dag_run_id and try_number are reserved keys.
:param context: The context for the task_instance of interest.
"""
@local_settings_hookspec(firstresult=True)
def get_dagbag_import_timeout(dag_file_path: str) -> int | float: # type: ignore[empty-body]
"""
This setting allows for dynamic control of the DAG file parsing timeout based on the DAG file path.
It is useful when there are a few DAG files requiring longer parsing times, while others do not.
You can control them separately instead of having one value for all DAG files.
If the return value is less than or equal to 0, it means no timeout during the DAG parsing.
"""
class DefaultPolicy:
"""Default implementations of the policy functions.
:meta private:
"""
# Default implementations of the policy functions
@staticmethod
@hookimpl
def get_dagbag_import_timeout(dag_file_path: str):
from airflow.configuration import conf
return conf.getfloat("core", "DAGBAG_IMPORT_TIMEOUT")
@staticmethod
@hookimpl
def get_airflow_context_vars(context):
return {}
def make_plugin_from_local_settings(pm: pluggy.PluginManager, module, names: list[str]):
"""
Turn the functions from airflow_local_settings module into a custom/local plugin.
    Allows plugin-registered functions to co-operate with pluggy/setuptools
entrypoint plugins of the same methods.
    Airflow local settings will "win" (i.e. they have the final say) as they are the last plugin
registered.
:meta private:
"""
import inspect
import textwrap
import attr
hook_methods = set()
def _make_shim_fn(name, desired_sig, target):
# Functions defined in airflow_local_settings are called by positional parameters, so the names don't
# have to match what we define in the "template" policy.
#
# However Pluggy validates the names match (and will raise an error if they don't!)
#
# To maintain compat, if we detect the names don't match, we will wrap it with a dynamically created
# shim function that looks somewhat like this:
#
# def dag_policy_name_mismatch_shim(dag):
# airflow_local_settings.dag_policy(dag)
#
codestr = textwrap.dedent(
f"""
def {name}_name_mismatch_shim{str(desired_sig)}:
return __target({' ,'.join(desired_sig.parameters)})
"""
)
code = compile(codestr, "<policy-shim>", "single")
scope = {"__target": target}
exec(code, scope, scope)
return scope[f"{name}_name_mismatch_shim"]
@attr.define(frozen=True)
class AirflowLocalSettingsPolicy:
hook_methods: tuple[str, ...]
__name__ = "AirflowLocalSettingsPolicy"
def __dir__(self):
return self.hook_methods
for name in names:
if not hasattr(pm.hook, name):
continue
policy = getattr(module, name)
if not policy:
continue
local_sig = inspect.signature(policy)
policy_sig = inspect.signature(globals()[name])
# We only care if names/order/number of parameters match, not type hints
if local_sig.parameters.keys() != policy_sig.parameters.keys():
policy = _make_shim_fn(name, policy_sig, target=policy)
setattr(AirflowLocalSettingsPolicy, name, staticmethod(hookimpl(policy, specname=name)))
hook_methods.add(name)
if hook_methods:
pm.register(AirflowLocalSettingsPolicy(hook_methods=tuple(hook_methods)))
return hook_methods
| 7,274 | 32.995327 | 109 | py |
airflow | airflow-main/airflow/templates.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import jinja2.nativetypes
import jinja2.sandbox
class _AirflowEnvironmentMixin:
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.filters.update(FILTERS)
def is_safe_attribute(self, obj, attr, value):
"""
Allow access to ``_`` prefix vars (but not ``__``).
Unlike the stock SandboxedEnvironment, we allow access to "private" attributes (ones starting with
``_``) whilst still blocking internal or truly private attributes (``__`` prefixed ones).
"""
return not jinja2.sandbox.is_internal_attribute(obj, attr)
class NativeEnvironment(_AirflowEnvironmentMixin, jinja2.nativetypes.NativeEnvironment):
"""NativeEnvironment for Airflow task templates."""
class SandboxedEnvironment(_AirflowEnvironmentMixin, jinja2.sandbox.SandboxedEnvironment):
"""SandboxedEnvironment for Airflow task templates."""
def ds_filter(value: datetime.date | datetime.time | None) -> str | None:
"""Date filter."""
if value is None:
return None
return value.strftime("%Y-%m-%d")
def ds_nodash_filter(value: datetime.date | datetime.time | None) -> str | None:
"""Date filter without dashes."""
if value is None:
return None
return value.strftime("%Y%m%d")
def ts_filter(value: datetime.date | datetime.time | None) -> str | None:
"""Timestamp filter."""
if value is None:
return None
return value.isoformat()
def ts_nodash_filter(value: datetime.date | datetime.time | None) -> str | None:
"""Timestamp filter without dashes."""
if value is None:
return None
return value.strftime("%Y%m%dT%H%M%S")
def ts_nodash_with_tz_filter(value: datetime.date | datetime.time | None) -> str | None:
"""Timestamp filter with timezone."""
if value is None:
return None
return value.isoformat().replace("-", "").replace(":", "")
FILTERS = {
"ds": ds_filter,
"ds_nodash": ds_nodash_filter,
"ts": ts_filter,
"ts_nodash": ts_nodash_filter,
"ts_nodash_with_tz": ts_nodash_with_tz_filter,
}
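# A minimal sketch of how these filters behave once installed by the mixin
# above; the template variable name ``logical_date`` and the datetime value
# are illustrative only.
def _example_filter_rendering() -> str:
    env = SandboxedEnvironment()
    template = env.from_string("{{ logical_date | ds }} / {{ logical_date | ts_nodash }}")
    # Renders to "2023-01-02 / 20230102T030405"
    return template.render(logical_date=datetime.datetime(2023, 1, 2, 3, 4, 5))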
| 2,918 | 30.728261 | 106 | py |
airflow | airflow-main/airflow/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Init setup.
Authentication is implemented using flask_login and different environments can
implement their own login mechanisms by providing an `airflow_login` module
in their PYTHONPATH. airflow_login should be based off the `airflow.www.login`
isort:skip_file
"""
from __future__ import annotations
__version__ = "2.7.0.dev0"
# flake8: noqa: F401
import os
import sys
from typing import Callable
if os.environ.get("_AIRFLOW_PATCH_GEVENT"):
    # If you are using gevent and start airflow webserver, you might want to run gevent monkeypatching
    # as one of the first things when Airflow is started. This allows gevent to patch networking and other
# system libraries to make them gevent-compatible before anything else patches them (for example boto)
from gevent.monkey import patch_all
patch_all()
# The configuration module initializes and validates the conf object as a side effect the first
# time it is imported. If it is not imported before importing the settings module, the conf
# object will then be initted/validated as a side effect of it being imported in settings,
# however this can cause issues since those modules are very tightly coupled and can
# very easily cause import cycles in the conf init/validate code (since downstream code from
# those functions likely import settings).
# configuration is therefore initted early here, simply by importing it.
from airflow import configuration
from airflow import settings
__all__ = ["__version__", "login", "DAG", "PY36", "PY37", "PY38", "PY39", "PY310", "XComArg"]
# Make `airflow` a namespace package, supporting installing
# airflow.providers.* in different locations (i.e. one in site, and one in user
# lib.)
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
# Perform side-effects unless someone has explicitly opted out before import
# WARNING: DO NOT USE THIS UNLESS YOU REALLY KNOW WHAT YOU'RE DOING.
# This environment variable prevents proper initialization, and things like
# configs, logging, the ORM, etc. will be broken. It is only useful if you only
# access certain trivial constants and free functions (e.g. `__version__`).
if not os.environ.get("_AIRFLOW__AS_LIBRARY", None):
settings.initialize()
login: Callable | None = None
PY36 = sys.version_info >= (3, 6)
PY37 = sys.version_info >= (3, 7)
PY38 = sys.version_info >= (3, 8)
PY39 = sys.version_info >= (3, 9)
PY310 = sys.version_info >= (3, 10)
PY311 = sys.version_info >= (3, 11)
# Things to lazy import in form {local_name: ('target_module', 'target_name')}
__lazy_imports: dict[str, tuple[str, str]] = {
"DAG": (".models.dag", "DAG"),
"Dataset": (".datasets", "Dataset"),
"XComArg": (".models.xcom_arg", "XComArg"),
"AirflowException": (".exceptions", "AirflowException"),
"version": (".version", ""),
}
def __getattr__(name: str):
# PEP-562: Lazy loaded attributes on python modules
module_path, attr_name = __lazy_imports.get(name, ("", ""))
if not module_path:
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
import importlib
mod = importlib.import_module(module_path, __name__)
if attr_name:
val = getattr(mod, attr_name)
else:
val = mod
# Store for next time
globals()[name] = val
return val
if not settings.LAZY_LOAD_PLUGINS:
from airflow import plugins_manager
plugins_manager.ensure_plugins_loaded()
if not settings.LAZY_LOAD_PROVIDERS:
from airflow import providers_manager
manager = providers_manager.ProvidersManager()
manager.initialize_providers_list()
manager.initialize_providers_hooks()
manager.initialize_providers_extra_links()
# This is never executed, but tricks static analyzers (PyDev, PyCharm)
# into knowing the types of these symbols, and what
# they contain.
STATICA_HACK = True
globals()["kcah_acitats"[::-1].upper()] = False
if STATICA_HACK: # pragma: no cover
from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.exceptions import AirflowException
from airflow.models.dataset import Dataset
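# A small self-contained sketch of the PEP-562 pattern used by __getattr__
# above, built on a throwaway module; the module name "lazy_demo" and the
# "sqrt" entry are illustrative only.
def _example_lazy_module() -> float:
    import importlib
    import types

    lazy = {"sqrt": ("math", "sqrt")}
    mod = types.ModuleType("lazy_demo")

    def __getattr__(name: str):
        if name not in lazy:
            raise AttributeError(f"module {mod.__name__!r} has no attribute {name!r}")
        module_path, attr_name = lazy[name]
        value = getattr(importlib.import_module(module_path), attr_name)
        setattr(mod, name, value)  # cache so later lookups bypass __getattr__
        return value

    mod.__getattr__ = __getattr__
    return mod.sqrt(4)  # resolves lazily to math.sqrt -> 2.0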
| 4,922 | 36.015038 | 106 | py |
airflow | airflow-main/airflow/providers/jenkins/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-jenkins:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,532 | 35.5 | 116 | py |
airflow | airflow-main/airflow/providers/jenkins/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/jenkins/operators/jenkins_job_trigger.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import ast
import json
import socket
import time
from typing import Any, Iterable, Mapping, Sequence, Union
from urllib.error import HTTPError, URLError
import jenkins
from jenkins import Jenkins, JenkinsException
from requests import Request
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.jenkins.hooks.jenkins import JenkinsHook
JenkinsRequest = Mapping[str, Any]
ParamType = Union[str, dict, list, None]
def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> JenkinsRequest | None:
"""Create a Jenkins request from a raw request.
    We need the headers in addition to the response body in order to get
    the location from them. This function uses ``jenkins_request`` from
python-jenkins with just the return call changed.
:param jenkins_server: The server to query
:param req: The request to execute
:return: Dict containing the response body (key body)
and the headers coming along (headers)
"""
try:
response = jenkins_server.jenkins_request(req)
response_body = response.content
response_headers = response.headers
if response_body is None:
raise jenkins.EmptyResponseException(
f"Error communicating with server[{jenkins_server.server}]: empty response"
)
return {"body": response_body.decode("utf-8"), "headers": response_headers}
except HTTPError as e:
        # Jenkins's funky authentication means it's nigh impossible to distinguish errors.
if e.code in [401, 403, 500]:
raise JenkinsException(f"Error in request. Possibly authentication failed [{e.code}]: {e.reason}")
elif e.code == 404:
raise jenkins.NotFoundException("Requested item could not be found")
else:
raise
except socket.timeout as e:
raise jenkins.TimeoutException(f"Error in request: {e}")
except URLError as e:
raise JenkinsException(f"Error in request: {e.reason}")
return None
class JenkinsJobTriggerOperator(BaseOperator):
"""Trigger a Jenkins Job and monitor its execution.
    This operator depends on the python-jenkins library version >= 0.4.15 to
communicate with the Jenkins server. You'll also need to configure a Jenkins
connection in the connections screen.
:param jenkins_connection_id: The jenkins connection to use for this job
:param job_name: The name of the job to trigger
:param parameters: The parameters block provided to jenkins for use in
the API call when triggering a build. (templated)
:param sleep_time: How long will the operator sleep between each status
request for the job (min 1, default 10)
:param max_try_before_job_appears: The maximum number of requests to make
        while waiting for the job to appear on the Jenkins server (default 10)
:param allowed_jenkins_states: Iterable of allowed result jenkins states, default is ``['SUCCESS']``
"""
template_fields: Sequence[str] = ("parameters",)
template_ext: Sequence[str] = (".json",)
ui_color = "#f9ec86"
def __init__(
self,
*,
jenkins_connection_id: str,
job_name: str,
parameters: ParamType = None,
sleep_time: int = 10,
max_try_before_job_appears: int = 10,
allowed_jenkins_states: Iterable[str] | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.job_name = job_name
self.parameters = parameters
self.sleep_time = max(sleep_time, 1)
self.jenkins_connection_id = jenkins_connection_id
self.max_try_before_job_appears = max_try_before_job_appears
self.allowed_jenkins_states = list(allowed_jenkins_states) if allowed_jenkins_states else ["SUCCESS"]
def build_job(self, jenkins_server: Jenkins, params: ParamType = None) -> JenkinsRequest | None:
"""Trigger a build job.
This returns a dict with 2 keys ``body`` and ``headers``. ``headers``
contains also a dict-like object which can be queried to get the
location to poll in the queue.
:param jenkins_server: The jenkins server where the job should be triggered
:param params: The parameters block to provide to jenkins API call.
:return: Dict containing the response body (key body)
and the headers coming along (headers)
"""
# Since params can be either JSON string, dictionary, or list,
# check type and pass to build_job_url
if params and isinstance(params, str):
params = ast.literal_eval(params)
request = Request(method="POST", url=jenkins_server.build_job_url(self.job_name, params, None))
return jenkins_request_with_headers(jenkins_server, request)
def poll_job_in_queue(self, location: str, jenkins_server: Jenkins) -> int:
"""Poll the jenkins queue until the job is executed.
When we trigger a job through an API call, the job is first put in the
queue without having a build number assigned. We have to wait until the
job exits the queue to know its build number.
To do so, we add ``/api/json`` (or ``/api/xml``) to the location
returned by the ``build_job`` call, and poll this file. When an
        ``executable`` block appears in the response, the job execution has
        started, and the field ``number`` contains the build number.
:param location: Location to poll, returned in the header of the build_job call
:param jenkins_server: The jenkins server to poll
:return: The build_number corresponding to the triggered job
"""
try_count = 0
location += "/api/json"
# TODO Use get_queue_info instead
# once it will be available in python-jenkins (v > 0.4.15)
self.log.info("Polling jenkins queue at the url %s", location)
while try_count < self.max_try_before_job_appears:
try:
location_answer = jenkins_request_with_headers(
jenkins_server, Request(method="POST", url=location)
)
# we don't want to fail the operator, this will continue to poll
            # until max_try_before_job_appears is reached
except (HTTPError, JenkinsException):
self.log.warning("polling failed, retrying", exc_info=True)
try_count += 1
time.sleep(self.sleep_time)
continue
if location_answer is not None:
json_response = json.loads(location_answer["body"])
if (
"executable" in json_response
and json_response["executable"] is not None
and "number" in json_response["executable"]
):
build_number = json_response["executable"]["number"]
self.log.info("Job executed on Jenkins side with the build number %s", build_number)
return build_number
try_count += 1
time.sleep(self.sleep_time)
raise AirflowException(
f"The job hasn't been executed after polling the queue {self.max_try_before_job_appears} times"
)
def get_hook(self) -> JenkinsHook:
"""Instantiate the Jenkins hook."""
return JenkinsHook(self.jenkins_connection_id)
def execute(self, context: Mapping[Any, Any]) -> str | None:
self.log.info(
"Triggering the job %s on the jenkins : %s with the parameters : %s",
self.job_name,
self.jenkins_connection_id,
self.parameters,
)
jenkins_server = self.get_hook().get_jenkins_server()
jenkins_response = self.build_job(jenkins_server, self.parameters)
if jenkins_response:
build_number = self.poll_job_in_queue(jenkins_response["headers"]["Location"], jenkins_server)
time.sleep(self.sleep_time)
keep_polling_job = True
build_info = None
while keep_polling_job:
try:
build_info = jenkins_server.get_build_info(name=self.job_name, number=build_number)
if build_info["result"] is not None:
keep_polling_job = False
# Check if job ended with not allowed state.
if build_info["result"] not in self.allowed_jenkins_states:
raise AirflowException(
f"Jenkins job failed, final state : {build_info['result']}. "
f"Find more information on job url : {build_info['url']}"
)
else:
self.log.info("Waiting for job to complete : %s , build %s", self.job_name, build_number)
time.sleep(self.sleep_time)
except jenkins.NotFoundException as err:
raise AirflowException(f"Jenkins job status check failed. Final error was: {err.resp.status}")
except jenkins.JenkinsException as err:
raise AirflowException(
f"Jenkins call failed with error : {err}, if you have parameters "
"double check them, jenkins sends back "
"this exception for unknown parameters"
"You can also check logs for more details on this exception "
"(jenkins_url/log/rss)"
)
if build_info:
# If we can we return the url of the job
# for later use (like retrieving an artifact)
return build_info["url"]
return None
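# A minimal usage sketch for this operator; the connection id "jenkins_default",
# the job name "build-app" and the build parameters are hypothetical.
def _example_jenkins_trigger_dag():
    import pendulum

    from airflow import DAG

    with DAG(
        dag_id="jenkins_trigger_example",
        start_date=pendulum.datetime(2023, 1, 1, tz="UTC"),
        schedule=None,
        catchup=False,
    ) as dag:
        JenkinsJobTriggerOperator(
            task_id="trigger_build",
            jenkins_connection_id="jenkins_default",
            job_name="build-app",
            parameters={"BRANCH": "main"},
            sleep_time=30,
        )
    return dag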
| 10,533 | 43.447257 | 110 | py |
airflow | airflow-main/airflow/providers/jenkins/hooks/jenkins.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from functools import wraps
from typing import Any
import jenkins
from airflow.hooks.base import BaseHook
def _ensure_prefixes(conn_type):
"""
Deprecated.
Remove when provider min airflow version >= 2.5.0 since
this is handled by provider manager from that version.
"""
def dec(func):
@wraps(func)
def inner():
field_behaviors = func()
conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
def _ensure_prefix(field):
if field not in conn_attrs and not field.startswith("extra__"):
return f"extra__{conn_type}__{field}"
else:
return field
if "placeholders" in field_behaviors:
placeholders = field_behaviors["placeholders"]
field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
return field_behaviors
return inner
return dec
class JenkinsHook(BaseHook):
"""Hook to manage connection to jenkins server."""
conn_name_attr = "conn_id"
default_conn_name = "jenkins_default"
conn_type = "jenkins"
hook_name = "Jenkins"
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_babel import lazy_gettext
from wtforms import BooleanField
return {
"use_https": BooleanField(
label=lazy_gettext("Use Https"),
description="Specifies whether to use https scheme. Defaults to http",
),
}
@staticmethod
@_ensure_prefixes(conn_type="jenkins")
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "extra"],
"relabeling": {},
"placeholders": {
"login": "Login for the Jenkins service you would like to connect to",
"password": "Password for the Jenkins service you would like to connect too",
"host": "Host for your Jenkins server. Should NOT contain scheme (http:// or https://)",
"port": "Specify a port number",
},
}
def __init__(self, conn_id: str = default_conn_name) -> None:
super().__init__()
connection = self.get_connection(conn_id)
self.connection = connection
connection_prefix = "http"
# connection.extra contains info about using https (true) or http (false)
if connection.extra_dejson.get("use_https"):
connection_prefix = "https"
url = f"{connection_prefix}://{connection.host}:{connection.port}/{connection.schema}"
self.log.info("Trying to connect to %s", url)
self.jenkins_server = jenkins.Jenkins(url, connection.login, connection.password)
def get_jenkins_server(self) -> jenkins.Jenkins:
"""Get jenkins server."""
return self.jenkins_server
def get_latest_build_number(self, job_name) -> int:
self.log.info("Build number not specified, getting latest build info from Jenkins")
job_info = self.jenkins_server.get_job_info(job_name)
return job_info["lastBuild"]["number"]
def get_build_result(self, job_name: str, build_number) -> str:
build_info = self.jenkins_server.get_build_info(job_name, build_number)
return build_info["result"]
def get_build_building_state(self, job_name: str, build_number: int | None) -> bool:
if not build_number:
build_number_to_check = self.get_latest_build_number(job_name)
else:
build_number_to_check = build_number
self.log.info("Getting build info for %s build number: #%s", job_name, build_number_to_check)
build_info = self.jenkins_server.get_build_info(job_name, build_number_to_check)
building = build_info["building"]
return building
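# A short sketch of direct hook usage (the connection id and job name are
# hypothetical); this mirrors what JenkinsBuildSensor does internally.
def _example_check_latest_build(job_name: str = "build-app") -> str:
    hook = JenkinsHook(conn_id="jenkins_default")
    build_number = hook.get_latest_build_number(job_name)
    if hook.get_build_building_state(job_name, build_number):
        return "BUILDING"
    return hook.get_build_result(job_name, build_number)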
| 4,843 | 36.550388 | 105 | py |
airflow | airflow-main/airflow/providers/jenkins/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/jenkins/sensors/jenkins.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Iterable
from airflow import AirflowException
from airflow.providers.jenkins.hooks.jenkins import JenkinsHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class JenkinsBuildSensor(BaseSensorOperator):
"""Monitor a Jenkins job and pass when it is finished building.
This is regardless of the build outcome.
:param jenkins_connection_id: The jenkins connection to use for this job
:param job_name: The name of the job to check
    :param build_number: Build number to check - if None, the latest build will be used
    :param target_states: Iterable of build results that cause the sensor to succeed; any other
        finished result raises an exception. Defaults to ``["SUCCESS", "FAILED"]``
"""
def __init__(
self,
*,
jenkins_connection_id: str,
job_name: str,
build_number: int | None = None,
target_states: Iterable[str] | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.job_name = job_name
self.build_number = build_number
self.jenkins_connection_id = jenkins_connection_id
self.target_states = target_states or ["SUCCESS", "FAILED"]
def poke(self, context: Context) -> bool:
self.log.info("Poking jenkins job %s", self.job_name)
hook = JenkinsHook(self.jenkins_connection_id)
build_number = self.build_number or hook.get_latest_build_number(self.job_name)
is_building = hook.get_build_building_state(self.job_name, build_number)
if is_building:
self.log.info("Build still ongoing!")
return False
build_result = hook.get_build_result(self.job_name, build_number)
self.log.info("Build is finished, result is %s", "build_result")
if build_result in self.target_states:
return True
else:
raise AirflowException(
f"Build {build_number} finished with a result {build_result}, "
f"which does not meet the target state {self.target_states}."
)
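# A minimal usage sketch (connection id, job name and target states are
# hypothetical); the sensor re-pokes while the build is running and succeeds
# once the result is in ``target_states``.
def _example_build_sensor() -> JenkinsBuildSensor:
    return JenkinsBuildSensor(
        task_id="wait_for_build",
        jenkins_connection_id="jenkins_default",
        job_name="build-app",
        target_states=["SUCCESS"],
        poke_interval=60,
    )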
| 2,802 | 36.878378 | 87 | py |
airflow | airflow-main/airflow/providers/jenkins/sensors/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/slack/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "7.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-slack:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,530 | 35.452381 | 114 | py |
airflow | airflow-main/airflow/providers/slack/transfers/sql_to_slack.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
from pandas import DataFrame
from tabulate import tabulate
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.models import BaseOperator
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.slack.hooks.slack import SlackHook
from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook
from airflow.providers.slack.utils import parse_filename
if TYPE_CHECKING:
from airflow.utils.context import Context
class BaseSqlToSlackOperator(BaseOperator):
"""
Operator implements base sql methods for SQL to Slack Transfer operators.
:param sql: The SQL query to be executed
:param sql_conn_id: reference to a specific DB-API Connection.
:param sql_hook_params: Extra config params to be passed to the underlying hook.
Should match the desired hook constructor params.
:param parameters: The parameters to pass to the SQL query.
"""
def __init__(
self,
*,
sql: str,
sql_conn_id: str,
sql_hook_params: dict | None = None,
parameters: Iterable | Mapping | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.sql_conn_id = sql_conn_id
self.sql_hook_params = sql_hook_params
self.sql = sql
self.parameters = parameters
def _get_hook(self) -> DbApiHook:
self.log.debug("Get connection for %s", self.sql_conn_id)
conn = BaseHook.get_connection(self.sql_conn_id)
hook = conn.get_hook(hook_params=self.sql_hook_params)
if not callable(getattr(hook, "get_pandas_df", None)):
raise AirflowException(
"This hook is not supported. The hook class must have get_pandas_df method."
)
return hook
def _get_query_results(self) -> DataFrame:
sql_hook = self._get_hook()
self.log.info("Running SQL query: %s", self.sql)
df = sql_hook.get_pandas_df(self.sql, parameters=self.parameters)
return df
class SqlToSlackOperator(BaseSqlToSlackOperator):
"""
Executes an SQL statement in a given SQL connection and sends the results to Slack.
The results of the query are rendered into the 'slack_message' parameter as a Pandas
dataframe using a JINJA variable called '{{ results_df }}'. The 'results_df' variable
name can be changed by specifying a different 'results_df_name' parameter. The Tabulate
library is added to the JINJA environment as a filter to allow the dataframe to be
rendered nicely. For example, set 'slack_message' to {{ results_df |
tabulate(tablefmt="pretty", headers="keys") }} to send the results to Slack as an ascii
rendered table.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:SqlToSlackOperator`
:param sql: The SQL query to be executed (templated)
:param slack_message: The templated Slack message to send with the data returned from the SQL connection.
You can use the default JINJA variable {{ results_df }} to access the pandas dataframe containing the
SQL results
:param sql_conn_id: reference to a specific database.
:param sql_hook_params: Extra config params to be passed to the underlying hook.
Should match the desired hook constructor params.
:param slack_conn_id: The connection id for Slack.
    :param slack_webhook_token: The token to use to authenticate to Slack. If this is not provided, the
        token has to be stored in the 'password' field of the 'slack_conn_id' connection.
    :param slack_channel: The channel to send the message to. Overrides the default from the Slack connection.
:param results_df_name: The name of the JINJA template's dataframe variable, default is 'results_df'
:param parameters: The parameters to pass to the SQL query
"""
template_fields: Sequence[str] = ("sql", "slack_message")
template_ext: Sequence[str] = (".sql", ".jinja", ".j2")
template_fields_renderers = {"sql": "sql", "slack_message": "jinja"}
times_rendered = 0
def __init__(
self,
*,
sql: str,
sql_conn_id: str,
sql_hook_params: dict | None = None,
slack_conn_id: str | None = None,
slack_webhook_token: str | None = None,
slack_channel: str | None = None,
slack_message: str,
results_df_name: str = "results_df",
parameters: Iterable | Mapping | None = None,
**kwargs,
) -> None:
super().__init__(
sql=sql, sql_conn_id=sql_conn_id, sql_hook_params=sql_hook_params, parameters=parameters, **kwargs
)
self.slack_conn_id = slack_conn_id
self.slack_webhook_token = slack_webhook_token
self.slack_channel = slack_channel
self.slack_message = slack_message
self.results_df_name = results_df_name
self.kwargs = kwargs
if not self.slack_conn_id and not self.slack_webhook_token:
raise AirflowException(
"SqlToSlackOperator requires either a `slack_conn_id` or a `slack_webhook_token` argument"
)
def _render_and_send_slack_message(self, context, df) -> None:
# Put the dataframe into the context and render the JINJA template fields
context[self.results_df_name] = df
self.render_template_fields(context)
slack_hook = self._get_slack_hook()
self.log.info("Sending slack message: %s", self.slack_message)
slack_hook.send(text=self.slack_message, channel=self.slack_channel)
def _get_slack_hook(self) -> SlackWebhookHook:
return SlackWebhookHook(
slack_webhook_conn_id=self.slack_conn_id, webhook_token=self.slack_webhook_token
)
def render_template_fields(self, context, jinja_env=None) -> None:
# If this is the first render of the template fields, exclude slack_message from rendering since
# the SQL results haven't been retrieved yet.
if self.times_rendered == 0:
fields_to_render: Iterable[str] = filter(lambda x: x != "slack_message", self.template_fields)
else:
fields_to_render = self.template_fields
if not jinja_env:
jinja_env = self.get_template_env()
# Add the tabulate library into the JINJA environment
jinja_env.filters["tabulate"] = tabulate
self._do_render_template_fields(self, fields_to_render, context, jinja_env, set())
self.times_rendered += 1
def execute(self, context: Context) -> None:
if not isinstance(self.sql, str):
raise AirflowException("Expected 'sql' parameter should be a string.")
if self.sql is None or self.sql.strip() == "":
raise AirflowException("Expected 'sql' parameter is missing.")
if self.slack_message is None or self.slack_message.strip() == "":
raise AirflowException("Expected 'slack_message' parameter is missing.")
df = self._get_query_results()
self._render_and_send_slack_message(context, df)
self.log.debug("Finished sending SQL data to Slack")
class SqlToSlackApiFileOperator(BaseSqlToSlackOperator):
"""
Executes an SQL statement in a given SQL connection and sends the results to Slack API as file.
:param sql: The SQL query to be executed
:param sql_conn_id: reference to a specific DB-API Connection.
:param slack_conn_id: :ref:`Slack API Connection <howto/connection:slack>`.
:param slack_filename: Filename for display in slack.
        Should contain a supported file extension, as listed in ``SUPPORTED_FILE_FORMATS``.
It is also possible to set compression in extension:
``filename.csv.gzip``, ``filename.json.zip``, etc.
:param sql_hook_params: Extra config params to be passed to the underlying hook.
Should match the desired hook constructor params.
:param parameters: The parameters to pass to the SQL query.
:param slack_channels: Comma-separated list of channel names or IDs where the file will be shared.
        If this parameter is omitted, the file will be sent to the workspace.
:param slack_initial_comment: The message text introducing the file in specified ``slack_channels``.
:param slack_title: Title of file.
:param df_kwargs: Keyword arguments forwarded to ``pandas.DataFrame.to_{format}()`` method.
Example:
.. code-block:: python
SqlToSlackApiFileOperator(
task_id="sql_to_slack",
sql="SELECT 1 a, 2 b, 3 c",
sql_conn_id="sql-connection",
slack_conn_id="slack-api-connection",
slack_filename="awesome.json.gz",
slack_channels="#random,#general",
slack_initial_comment="Awesome load to compressed multiline JSON.",
df_kwargs={
"orient": "records",
"lines": True,
},
)
"""
template_fields: Sequence[str] = (
"sql",
"slack_channels",
"slack_filename",
"slack_initial_comment",
"slack_title",
)
template_ext: Sequence[str] = (".sql", ".jinja", ".j2")
template_fields_renderers = {"sql": "sql", "slack_message": "jinja"}
SUPPORTED_FILE_FORMATS: Sequence[str] = ("csv", "json", "html")
def __init__(
self,
*,
sql: str,
sql_conn_id: str,
sql_hook_params: dict | None = None,
parameters: Iterable | Mapping | None = None,
slack_conn_id: str,
slack_filename: str,
slack_channels: str | Sequence[str] | None = None,
slack_initial_comment: str | None = None,
slack_title: str | None = None,
df_kwargs: dict | None = None,
**kwargs,
):
super().__init__(
sql=sql, sql_conn_id=sql_conn_id, sql_hook_params=sql_hook_params, parameters=parameters, **kwargs
)
self.slack_conn_id = slack_conn_id
self.slack_filename = slack_filename
self.slack_channels = slack_channels
self.slack_initial_comment = slack_initial_comment
self.slack_title = slack_title
self.df_kwargs = df_kwargs or {}
def execute(self, context: Context) -> None:
# Parse file format from filename
output_file_format, _ = parse_filename(
filename=self.slack_filename,
supported_file_formats=self.SUPPORTED_FILE_FORMATS,
)
slack_hook = SlackHook(slack_conn_id=self.slack_conn_id)
with NamedTemporaryFile(mode="w+", suffix=f"_{self.slack_filename}") as fp:
            # tempfile.NamedTemporaryFile is used only to create and remove the temporary file;
            # pandas will open the file itself in the correct mode depending on the file type.
            # We close the file descriptor here to avoid accidentally writing anything to it.
fp.close()
output_file_name = fp.name
output_file_format = output_file_format.upper()
df_result = self._get_query_results()
if output_file_format == "CSV":
df_result.to_csv(output_file_name, **self.df_kwargs)
elif output_file_format == "JSON":
df_result.to_json(output_file_name, **self.df_kwargs)
elif output_file_format == "HTML":
df_result.to_html(output_file_name, **self.df_kwargs)
else:
                # This error is not expected to happen. It is only possible if
                # SUPPORTED_FILE_FORMATS is extended without an actual implementation for the new format.
raise AirflowException(f"Unexpected output file format: {output_file_format}")
slack_hook.send_file(
channels=self.slack_channels,
file=output_file_name,
filename=self.slack_filename,
initial_comment=self.slack_initial_comment,
title=self.slack_title,
)
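# A minimal sketch of SqlToSlackOperator using the ``tabulate`` filter described
# in its docstring; the connection ids, channel and query are hypothetical.
def _example_sql_to_slack() -> SqlToSlackOperator:
    return SqlToSlackOperator(
        task_id="daily_report_to_slack",
        sql="SELECT state, COUNT(*) AS cnt FROM task_instance GROUP BY state",
        sql_conn_id="sql-connection",
        slack_conn_id="slack-webhook-connection",
        slack_channel="#data-reports",
        slack_message="Task states:\n{{ results_df | tabulate(tablefmt='pretty', headers='keys') }}",
    )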
| 12,888 | 41.537954 | 110 | py |
airflow | airflow-main/airflow/providers/slack/transfers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/slack/notifications/slack_notifier.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.slack.notifications.slack`."""
from __future__ import annotations
import warnings
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.slack.notifications.slack import SlackNotifier # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.slack.notifications.slack`",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
| 1,243 | 40.466667 | 95 | py |
airflow | airflow-main/airflow/providers/slack/notifications/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/slack/notifications/slack.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
from functools import cached_property
from typing import Sequence
from airflow.exceptions import AirflowOptionalProviderFeatureException
try:
from airflow.notifications.basenotifier import BaseNotifier
except ImportError:
raise AirflowOptionalProviderFeatureException(
"Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
)
from airflow.providers.slack.hooks.slack import SlackHook
ICON_URL: str = "https://raw.githubusercontent.com/apache/airflow/2.5.0/airflow/www/static/pin_100.png"
class SlackNotifier(BaseNotifier):
"""
Slack BaseNotifier.
:param slack_conn_id: Slack API token (https://api.slack.com/web).
:param text: The content of the message
:param channel: The channel to send the message to. Optional
:param username: The username to send the message as. Optional
:param icon_url: The icon to use for the message. Optional
:param attachments: A list of attachments to send with the message. Optional
:param blocks: A list of blocks to send with the message. Optional
"""
template_fields = ("text", "channel", "username", "attachments", "blocks")
def __init__(
self,
*,
slack_conn_id: str = "slack_api_default",
text: str = "This is a default message",
channel: str = "#general",
username: str = "Airflow",
icon_url: str = ICON_URL,
attachments: Sequence = (),
blocks: Sequence = (),
):
super().__init__()
self.slack_conn_id = slack_conn_id
self.text = text
self.channel = channel
self.username = username
self.icon_url = icon_url
self.attachments = attachments
self.blocks = blocks
@cached_property
def hook(self) -> SlackHook:
"""Slack Hook."""
return SlackHook(slack_conn_id=self.slack_conn_id)
def notify(self, context):
"""Send a message to a Slack Channel."""
api_call_params = {
"channel": self.channel,
"username": self.username,
"text": self.text,
"icon_url": self.icon_url,
"attachments": json.dumps(self.attachments),
"blocks": json.dumps(self.blocks),
}
self.hook.call("chat.postMessage", json=api_call_params)
send_slack_notification = SlackNotifier
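# A short sketch of wiring the notifier into a DAG-level callback; the
# connection id, channel and message text are hypothetical.
def _example_dag_with_notifier():
    import pendulum

    from airflow import DAG

    return DAG(
        dag_id="slack_notifier_example",
        start_date=pendulum.datetime(2023, 1, 1, tz="UTC"),
        schedule=None,
        catchup=False,
        on_failure_callback=send_slack_notification(
            slack_conn_id="slack_api_default",
            text="The DAG {{ dag.dag_id }} failed",
            channel="#alerts",
        ),
    )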
| 3,201 | 33.804348 | 103 | py |
airflow | airflow-main/airflow/providers/slack/operators/slack_webhook.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from functools import cached_property
from typing import TYPE_CHECKING, Sequence
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.models import BaseOperator
from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class SlackWebhookOperator(BaseOperator):
"""
This operator allows you to post messages to Slack using Incoming Webhooks.
.. note::
You cannot override the default channel (chosen by the user who installed your app),
username, or icon when you're using Incoming Webhooks to post messages.
Instead, these values will always inherit from the associated Slack App configuration
(`link <https://api.slack.com/messaging/webhooks#advanced_message_formatting>`_).
It is possible to change this values only in `Legacy Slack Integration Incoming Webhook
<https://api.slack.com/legacy/custom-integrations/messaging/webhooks#legacy-customizations>`_.
.. warning::
This operator could take Slack Webhook Token from ``webhook_token``
as well as from :ref:`Slack Incoming Webhook connection <howto/connection:slack-incoming-webhook>`.
        However, providing ``webhook_token`` is not secure and this attribute
        will be removed in a future version of the provider.
:param slack_webhook_conn_id: :ref:`Slack Incoming Webhook <howto/connection:slack>`
connection id that has Incoming Webhook token in the password field.
:param message: The formatted text of the message to be published.
If ``blocks`` are included, this will become the fallback text used in notifications.
:param attachments: The attachments to send on Slack. Should be a list of
dictionaries representing Slack attachments.
:param blocks: The blocks to send on Slack. Should be a list of
dictionaries representing Slack blocks.
:param channel: The channel the message should be posted to
:param username: The username to post to slack with
:param icon_emoji: The emoji to use as icon for the user posting to Slack
:param icon_url: The icon image URL string to use in place of the default icon.
:param link_names: Whether or not to find and link channel and usernames in your
message
:param proxy: Proxy to use to make the Slack webhook call
:param webhook_token: (deprecated) Slack Incoming Webhook token.
Please use ``slack_webhook_conn_id`` instead.
"""
template_fields: Sequence[str] = (
"webhook_token",
"message",
"attachments",
"blocks",
"channel",
"username",
"proxy",
)
def __init__(
self,
*,
slack_webhook_conn_id: str | None = None,
webhook_token: str | None = None,
message: str = "",
attachments: list | None = None,
blocks: list | None = None,
channel: str | None = None,
username: str | None = None,
icon_emoji: str | None = None,
icon_url: str | None = None,
link_names: bool = False,
proxy: str | None = None,
**kwargs,
) -> None:
http_conn_id = kwargs.pop("http_conn_id", None)
if http_conn_id:
warnings.warn(
"Parameter `http_conn_id` is deprecated. Please use `slack_webhook_conn_id` instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
if slack_webhook_conn_id:
raise AirflowException("You cannot provide both `slack_webhook_conn_id` and `http_conn_id`.")
slack_webhook_conn_id = http_conn_id
# Compatibility with previous version of operator which based on SimpleHttpOperator.
        # Users might have passed these arguments previously, however they were never passed to SlackWebhookHook.
        # We remove these arguments if found in ``kwargs`` and notify users if any were found.
deprecated_class_attrs = []
for deprecated_attr in (
"endpoint",
"method",
"data",
"headers",
"response_check",
"response_filter",
"extra_options",
"log_response",
"auth_type",
"tcp_keep_alive",
"tcp_keep_alive_idle",
"tcp_keep_alive_count",
"tcp_keep_alive_interval",
):
if deprecated_attr in kwargs:
deprecated_class_attrs.append(deprecated_attr)
kwargs.pop(deprecated_attr)
if deprecated_class_attrs:
warnings.warn(
f"Provide {','.join(repr(a) for a in deprecated_class_attrs)} is deprecated "
f"and as has no affect, please remove it from {self.__class__.__name__} "
"constructor attributes otherwise in future version of provider it might cause an issue.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
super().__init__(**kwargs)
self.slack_webhook_conn_id = slack_webhook_conn_id
self.webhook_token = webhook_token
self.proxy = proxy
self.message = message
self.attachments = attachments
self.blocks = blocks
self.channel = channel
self.username = username
self.icon_emoji = icon_emoji
self.icon_url = icon_url
self.link_names = link_names
@cached_property
def hook(self) -> SlackWebhookHook:
"""Create and return an SlackWebhookHook (cached)."""
return SlackWebhookHook(
slack_webhook_conn_id=self.slack_webhook_conn_id,
proxy=self.proxy,
            # Deprecated. SlackWebhookHook will notify the user if a non-empty ``webhook_token`` is provided.
webhook_token=self.webhook_token,
)
def execute(self, context: Context) -> None:
"""Call the SlackWebhookHook to post the provided Slack message."""
self.hook.send(
text=self.message,
attachments=self.attachments,
blocks=self.blocks,
            # Parameters below are used for compatibility with the previous version of the operator
            # and warn the user if they are set.
# Legacy Integration Parameters
channel=self.channel,
username=self.username,
icon_emoji=self.icon_emoji,
icon_url=self.icon_url,
            # Unused parameters; warn the user if any is not None
link_names=self.link_names,
)
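# A minimal usage sketch; the connection id and message below are hypothetical.
# Per the docstring above, channel/username/icon overrides only apply to legacy
# Slack integrations.
def _example_webhook_notification() -> SlackWebhookOperator:
    return SlackWebhookOperator(
        task_id="notify_slack",
        slack_webhook_conn_id="slack_webhook_default",
        message="Pipeline finished for {{ ds }}",
        blocks=[
            {
                "type": "section",
                "text": {"type": "mrkdwn", "text": "Pipeline finished for *{{ ds }}*"},
            }
        ],
    )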
| 7,402 | 41.302857 | 110 | py |
airflow | airflow-main/airflow/providers/slack/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/slack/operators/slack.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import warnings
from functools import cached_property
from typing import Any, Sequence
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.models import BaseOperator
from airflow.providers.slack.hooks.slack import SlackHook
from airflow.utils.log.secrets_masker import mask_secret
class SlackAPIOperator(BaseOperator):
"""Base Slack Operator class.
Only one of ``slack_conn_id`` or ``token`` is required.
:param slack_conn_id: :ref:`Slack API Connection <howto/connection:slack>`
which its password is Slack API token. Optional
:param token: Slack API token (https://api.slack.com/web). Optional
:param method: The Slack API Method to Call (https://api.slack.com/methods). Optional
:param api_params: API Method call parameters (https://api.slack.com/methods). Optional
:param client_args: Slack Hook parameters. Optional. Check airflow.providers.slack.hooks.SlackHook
"""
def __init__(
self,
*,
slack_conn_id: str | None = None,
token: str | None = None,
method: str | None = None,
api_params: dict | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
if token:
mask_secret(token)
self.token = token
self.slack_conn_id = slack_conn_id
self.method = method
self.api_params = api_params
@cached_property
def hook(self) -> SlackHook:
"""Slack Hook."""
return SlackHook(token=self.token, slack_conn_id=self.slack_conn_id)
def construct_api_call_params(self) -> Any:
"""API call parameters used by the execute function.
Allows templating on the source fields of the ``api_call_params`` dict
before construction.
Child classes should override this. Each SlackAPIOperator child class is
responsible for having function set ``self.api_call_params`` with a dict
of API call parameters (https://api.slack.com/methods)
"""
raise NotImplementedError(
"SlackAPIOperator should not be used directly. Chose one of the subclasses instead"
)
def execute(self, **kwargs):
if not self.api_params:
self.construct_api_call_params()
self.hook.call(self.method, json=self.api_params)
class SlackAPIPostOperator(SlackAPIOperator):
"""Post messages to a Slack channel.
.. code-block:: python
slack = SlackAPIPostOperator(
task_id="post_hello",
dag=dag,
token="...",
text="hello there!",
channel="#random",
)
:param channel: channel in which to post message on slack name (#general) or
ID (C12318391). (templated)
:param username: Username that airflow will be posting to Slack as. (templated)
:param text: message to send to slack. (templated)
:param icon_url: URL to icon used for this message
:param attachments: extra formatting details. (templated)
See https://api.slack.com/docs/attachments
:param blocks: extra block layouts. (templated)
See https://api.slack.com/reference/block-kit/blocks
"""
template_fields: Sequence[str] = ("username", "text", "attachments", "blocks", "channel")
ui_color = "#FFBA40"
def __init__(
self,
channel: str = "#general",
username: str = "Airflow",
text: str = (
"No message has been set.\n"
"Here is a cat video instead\n"
"https://www.youtube.com/watch?v=J---aiyznGQ"
),
icon_url: str = (
"https://raw.githubusercontent.com/apache/airflow/main/airflow/www/static/pin_100.png"
),
attachments: list | None = None,
blocks: list | None = None,
**kwargs,
) -> None:
self.method = "chat.postMessage"
self.channel = channel
self.username = username
self.text = text
self.icon_url = icon_url
self.attachments = attachments or []
self.blocks = blocks or []
super().__init__(method=self.method, **kwargs)
def construct_api_call_params(self) -> Any:
self.api_params = {
"channel": self.channel,
"username": self.username,
"text": self.text,
"icon_url": self.icon_url,
"attachments": json.dumps(self.attachments),
"blocks": json.dumps(self.blocks),
}
class SlackAPIFileOperator(SlackAPIOperator):
"""Send a file to a Slack channel.
.. code-block:: python
# Send file with filename and filetype
slack_operator_file = SlackAPIFileOperator(
task_id="slack_file_upload_1",
dag=dag,
slack_conn_id="slack",
channels="#general,#random",
initial_comment="Hello World!",
filename="/files/dags/test.txt",
filetype="txt",
)
# Send file content
slack_operator_file_content = SlackAPIFileOperator(
task_id="slack_file_upload_2",
dag=dag,
slack_conn_id="slack",
channels="#general",
initial_comment="Hello World!",
content="file content in txt",
)
:param channels: Comma-separated list of channel names or IDs where the file will be shared.
        If this argument is set to None, the file will be sent to the associated workspace. (templated)
:param initial_comment: message to send to slack. (templated)
:param filename: name of the file (templated)
:param filetype: slack filetype. (templated) See: https://api.slack.com/types/file#file_types
:param content: file content. (templated)
:param title: title of file. (templated)
    :param channel: (deprecated) channel in which to send the file, as a Slack name
"""
template_fields: Sequence[str] = (
"channels",
"initial_comment",
"filename",
"filetype",
"content",
"title",
)
ui_color = "#44BEDF"
def __init__(
self,
channels: str | Sequence[str] | None = None,
initial_comment: str | None = None,
filename: str | None = None,
filetype: str | None = None,
content: str | None = None,
title: str | None = None,
channel: str | None = None,
**kwargs,
) -> None:
if channel:
            warnings.warn(
                "Argument `channel` is deprecated and will be removed in a future release. "
                "Please use `channels` instead.",
                AirflowProviderDeprecationWarning,
                stacklevel=2,
            )
if channels:
raise ValueError(f"Cannot set both arguments: channel={channel!r} and channels={channels!r}.")
channels = channel
self.channels = channels
self.initial_comment = initial_comment
self.filename = filename
self.filetype = filetype
self.content = content
self.title = title
super().__init__(method="files.upload", **kwargs)
def execute(self, **kwargs):
self.hook.send_file(
channels=self.channels,
            # For historical reasons, SlackAPIFileOperator uses `filename` as a reference to the file
file=self.filename,
content=self.content,
initial_comment=self.initial_comment,
title=self.title,
)
| 8,268 | 34.187234 | 110 | py |
airflow | airflow-main/airflow/providers/slack/hooks/slack_webhook.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import warnings
from functools import cached_property, wraps
from typing import TYPE_CHECKING, Any, Callable
from urllib.parse import urlsplit
from slack_sdk import WebhookClient
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
from airflow.models import Connection
from airflow.providers.slack.utils import ConnectionExtraConfig
from airflow.utils.log.secrets_masker import mask_secret
if TYPE_CHECKING:
from slack_sdk.http_retry import RetryHandler
DEFAULT_SLACK_WEBHOOK_ENDPOINT = "https://hooks.slack.com/services"
LEGACY_INTEGRATION_PARAMS = ("channel", "username", "icon_emoji", "icon_url")
def check_webhook_response(func: Callable) -> Callable:
    """Function decorator that checks the WebhookResponse and raises an error if the status code != 200."""
@wraps(func)
def wrapper(*args, **kwargs) -> Callable:
resp = func(*args, **kwargs)
if resp.status_code != 200:
raise AirflowException(
f"Response body: {resp.body!r}, Status Code: {resp.status_code}. "
"See: https://api.slack.com/messaging/webhooks#handling_errors"
)
return resp
return wrapper
def _ensure_prefixes(conn_type):
# TODO: Remove when provider min airflow version >= 2.5.0 since
# this is handled by provider manager from that version.
def dec(func):
@wraps(func)
def inner(cls):
field_behaviors = func(cls)
conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
def _ensure_prefix(field):
if field not in conn_attrs and not field.startswith("extra__"):
return f"extra__{conn_type}__{field}"
else:
return field
if "placeholders" in field_behaviors:
placeholders = field_behaviors["placeholders"]
field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
return field_behaviors
return inner
return dec
class SlackWebhookHook(BaseHook):
"""
    This class provides a thin wrapper around the ``slack_sdk.WebhookClient``.
This hook allows you to post messages to Slack by using Incoming Webhooks.
.. seealso::
- :ref:`Slack Incoming Webhook connection <howto/connection:slack-incoming-webhook>`
- https://api.slack.com/messaging/webhooks
- https://slack.dev/python-slack-sdk/webhook/index.html
.. note::
You cannot override the default channel (chosen by the user who installed your app),
username, or icon when you're using Incoming Webhooks to post messages.
Instead, these values will always inherit from the associated Slack App configuration
(`link <https://api.slack.com/messaging/webhooks#advanced_message_formatting>`_).
        It is possible to change these values only in `Legacy Slack Integration Incoming Webhook
<https://api.slack.com/legacy/custom-integrations/messaging/webhooks#legacy-customizations>`_.
.. warning::
        This hook is intended to be used with a `Slack Incoming Webhook` connection
        and might not work correctly with a `Slack API` connection.
Examples:
.. code-block:: python
# Create hook
hook = SlackWebhookHook(slack_webhook_conn_id="slack_default")
# Post message in Slack channel by JSON formatted message
# See: https://api.slack.com/messaging/webhooks#posting_with_webhooks
hook.send_dict({"text": "Hello world!"})
# Post simple message in Slack channel
hook.send_text("Hello world!")
# Use ``slack_sdk.WebhookClient``
hook.client.send(text="Hello world!")
:param slack_webhook_conn_id: Slack Incoming Webhook connection id
that has Incoming Webhook token in the password field.
    :param timeout: The maximum number of seconds the client will wait to connect
        and receive a response from Slack. If not set, the default WebhookClient value will be used.
:param proxy: Proxy to make the Slack Incoming Webhook call.
:param retry_handlers: List of handlers to customize retry logic in ``slack_sdk.WebhookClient``.
    :param webhook_token: (deprecated) Slack Incoming Webhook token.
        Use the Slack Incoming Webhook connection password field instead.
"""
conn_name_attr = "slack_webhook_conn_id"
default_conn_name = "slack_default"
conn_type = "slackwebhook"
hook_name = "Slack Incoming Webhook"
def __init__(
self,
slack_webhook_conn_id: str | None = None,
webhook_token: str | None = None,
timeout: int | None = None,
proxy: str | None = None,
retry_handlers: list[RetryHandler] | None = None,
**kwargs,
):
super().__init__()
http_conn_id = kwargs.pop("http_conn_id", None)
if http_conn_id:
warnings.warn(
"Parameter `http_conn_id` is deprecated. Please use `slack_webhook_conn_id` instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
if slack_webhook_conn_id:
raise AirflowException("You cannot provide both `slack_webhook_conn_id` and `http_conn_id`.")
slack_webhook_conn_id = http_conn_id
if not slack_webhook_conn_id and not webhook_token:
raise AirflowException("Either `slack_webhook_conn_id` or `webhook_token` should be provided.")
if webhook_token:
mask_secret(webhook_token)
            warnings.warn(
                "Providing `webhook_token` as a hook argument is deprecated for security reasons "
                "and will be removed in a future release. "
                "Please specify it in the `Slack Incoming Webhook` connection.",
                AirflowProviderDeprecationWarning,
                stacklevel=2,
            )
if not slack_webhook_conn_id:
            warnings.warn(
                "You have not set parameter `slack_webhook_conn_id`. Currently the "
                "`Slack Incoming Webhook` connection id is optional, but in a future release it will "
                "be mandatory.",
                FutureWarning,
                stacklevel=2,
            )
self.slack_webhook_conn_id = slack_webhook_conn_id
self.timeout = timeout
self.proxy = proxy
self.retry_handlers = retry_handlers
self._webhook_token = webhook_token
# Compatibility with previous version of SlackWebhookHook
deprecated_class_attrs = []
for deprecated_attr in (
"message",
"attachments",
"blocks",
"channel",
"username",
"icon_emoji",
"icon_url",
"link_names",
):
if deprecated_attr in kwargs:
deprecated_class_attrs.append(deprecated_attr)
setattr(self, deprecated_attr, kwargs.pop(deprecated_attr))
if deprecated_attr == "message":
                    # Slack Webhook Post Request does not expect `message` as a field, so we also set
                    # the "text" attribute, which is checked by SlackWebhookHook._resolve_argument
self.text = getattr(self, deprecated_attr)
elif deprecated_attr == "link_names":
                    warnings.warn(
                        "`link_names` has no effect. If you want to mention a user see: "
                        "https://api.slack.com/reference/surfaces/formatting#mentioning-users",
                        UserWarning,
                        stacklevel=2,
                    )
if deprecated_class_attrs:
            warnings.warn(
                f"Providing {', '.join(repr(a) for a in deprecated_class_attrs)} as hook argument(s) "
                f"is deprecated and will be removed in a future release. "
                f"Please specify these attributes in the `{self.__class__.__name__}.send` method instead.",
                AirflowProviderDeprecationWarning,
                stacklevel=2,
            )
self.extra_client_args = kwargs
@cached_property
def client(self) -> WebhookClient:
"""Get the underlying slack_sdk.webhook.WebhookClient (cached)."""
return WebhookClient(**self._get_conn_params())
def get_conn(self) -> WebhookClient:
"""Get the underlying slack_sdk.webhook.WebhookClient (cached)."""
return self.client
@cached_property
def webhook_token(self) -> str:
"""Return Slack Webhook Token URL."""
        warnings.warn(
            "`SlackWebhookHook.webhook_token` property is deprecated "
            "and will be removed in a future release.",
            AirflowProviderDeprecationWarning,
            stacklevel=2,
        )
return self._get_conn_params()["url"]
def _get_conn_params(self) -> dict[str, Any]:
"""Fetch connection params as a dict and merge it with hook parameters."""
default_schema, _, default_host = DEFAULT_SLACK_WEBHOOK_ENDPOINT.partition("://")
if self.slack_webhook_conn_id:
conn = self.get_connection(self.slack_webhook_conn_id)
else:
# If slack_webhook_conn_id not specified, then use connection with default schema and host
conn = Connection(
conn_id=None, conn_type=self.conn_type, host=default_schema, password=default_host
)
extra_config = ConnectionExtraConfig(
conn_type=self.conn_type,
conn_id=conn.conn_id,
extra=conn.extra_dejson,
)
conn_params: dict[str, Any] = {"retry_handlers": self.retry_handlers}
webhook_token = None
if self._webhook_token:
self.log.debug("Retrieving Slack Webhook Token from hook attribute.")
webhook_token = self._webhook_token
elif conn.conn_id:
if conn.password:
self.log.debug(
"Retrieving Slack Webhook Token from Connection ID %r password.",
self.slack_webhook_conn_id,
)
webhook_token = conn.password
webhook_token = webhook_token or ""
if not webhook_token and not conn.host:
raise AirflowException("Cannot get token: No valid Slack token nor valid Connection ID supplied.")
elif webhook_token and "://" in webhook_token:
self.log.debug("Retrieving Slack Webhook Token URL from webhook token.")
url = webhook_token
else:
self.log.debug("Constructing Slack Webhook Token URL.")
if conn.host and "://" in conn.host:
base_url = conn.host
else:
schema = conn.schema if conn.schema else default_schema
host = conn.host if conn.host else default_host
base_url = f"{schema}://{host}"
base_url = base_url.rstrip("/")
if not webhook_token:
parsed_token = (urlsplit(base_url).path or "").strip("/")
if base_url == DEFAULT_SLACK_WEBHOOK_ENDPOINT or not parsed_token:
# Raise an error in case of password not specified and
# 1. Result of constructing base_url equal https://hooks.slack.com/services
# 2. Empty url path, e.g. if base_url = https://hooks.slack.com
raise AirflowException(
"Cannot get token: No valid Slack token nor valid Connection ID supplied."
)
mask_secret(parsed_token)
                warnings.warn(
                    f"Found Slack Webhook Token URL in Connection {conn.conn_id!r} `host` "
                    "and the `password` field is empty. This behaviour is deprecated, could expose "
                    "your token in the UI, and will be removed in a future release.",
                    AirflowProviderDeprecationWarning,
                    stacklevel=2,
                )
url = (base_url.rstrip("/") + "/" + webhook_token.lstrip("/")).rstrip("/")
conn_params["url"] = url
# Merge Hook parameters with Connection config
conn_params.update(
{
"timeout": self.timeout or extra_config.getint("timeout", default=None),
"proxy": self.proxy or extra_config.get("proxy", default=None),
}
)
# Add additional client args
conn_params.update(self.extra_client_args)
if "logger" not in conn_params:
conn_params["logger"] = self.log
return {k: v for k, v in conn_params.items() if v is not None}
def _resolve_argument(self, name: str, value):
"""
Resolve message parameters.
.. note::
            This method exists for compatibility: it merges instance class attributes with
            method arguments and will no longer be required once assigning class attributes
            to the message is completely removed.
"""
if value is None and name in (
"text",
"attachments",
"blocks",
"channel",
"username",
"icon_emoji",
"icon_url",
"link_names",
):
return getattr(self, name, None)
return value
@check_webhook_response
def send_dict(self, body: dict[str, Any] | str, *, headers: dict[str, str] | None = None):
"""
Performs a Slack Incoming Webhook request with given JSON data block.
:param body: JSON data structure, expected dict or JSON-string.
:param headers: Request headers for this request.
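        A short sketch (the payload below is illustrative):
        .. code-block:: python
            hook = SlackWebhookHook(slack_webhook_conn_id="slack_default")
            hook.send_dict(
                {
                    "text": "Build finished",
                    "blocks": [{"type": "section", "text": {"type": "mrkdwn", "text": "*Build finished*"}}],
                }
            )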
"""
if isinstance(body, str):
try:
body = json.loads(body)
except json.JSONDecodeError as err:
raise AirflowException(
f"Body expected valid JSON string, got {body!r}. Original error:\n * {err}"
) from None
if not isinstance(body, dict):
raise TypeError(f"Body expected dictionary, got {type(body).__name__}.")
if any(legacy_attr in body for legacy_attr in ("channel", "username", "icon_emoji", "icon_url")):
            warnings.warn(
                "You cannot override the default channel (chosen by the user who installed your app), "
                "username, or icon when you're using Incoming Webhooks to post messages. "
                "Instead, these values will always inherit from the associated Slack app configuration. "
                "See: https://api.slack.com/messaging/webhooks#advanced_message_formatting. "
                "It is possible to change these values only in Legacy Slack Integration Incoming Webhook: "
                "https://api.slack.com/legacy/custom-integrations/messaging/webhooks#legacy-customizations",
                UserWarning,
                stacklevel=2,
            )
return self.client.send_dict(body, headers=headers)
def send(
self,
*,
text: str | None = None,
attachments: list[dict[str, Any]] | None = None,
blocks: list[dict[str, Any]] | None = None,
response_type: str | None = None,
replace_original: bool | None = None,
delete_original: bool | None = None,
unfurl_links: bool | None = None,
unfurl_media: bool | None = None,
headers: dict[str, str] | None = None,
**kwargs,
):
"""
Performs a Slack Incoming Webhook request with given arguments.
:param text: The text message
(even when having blocks, setting this as well is recommended as it works as fallback).
:param attachments: A collection of attachments.
:param blocks: A collection of Block Kit UI components.
:param response_type: The type of message (either 'in_channel' or 'ephemeral').
:param replace_original: True if you use this option for response_url requests.
:param delete_original: True if you use this option for response_url requests.
:param unfurl_links: Option to indicate whether text url should unfurl.
:param unfurl_media: Option to indicate whether media url should unfurl.
:param headers: Request headers for this request.
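        A short sketch (the message content is illustrative; see the class-level note about values
        that cannot be overridden for Incoming Webhooks):
        .. code-block:: python
            hook = SlackWebhookHook(slack_webhook_conn_id="slack_default")
            hook.send(
                text="Task failed",
                blocks=[{"type": "section", "text": {"type": "mrkdwn", "text": ":red_circle: Task failed"}}],
                unfurl_links=False,
            )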
"""
body = {
"text": self._resolve_argument("text", text),
"attachments": self._resolve_argument("attachments", attachments),
"blocks": self._resolve_argument("blocks", blocks),
"response_type": response_type,
"replace_original": replace_original,
"delete_original": delete_original,
"unfurl_links": unfurl_links,
"unfurl_media": unfurl_media,
# Legacy Integration Parameters
**{lip: self._resolve_argument(lip, kwargs.pop(lip, None)) for lip in LEGACY_INTEGRATION_PARAMS},
}
if kwargs:
            warnings.warn(
                f"Found unexpected keyword-argument(s) {', '.join(repr(k) for k in kwargs)} "
                "in the `send` method. These argument(s) have no effect.",
                UserWarning,
                stacklevel=2,
            )
body = {k: v for k, v in body.items() if v is not None}
return self.send_dict(body=body, headers=headers)
def send_text(
self,
text: str,
*,
unfurl_links: bool | None = None,
unfurl_media: bool | None = None,
headers: dict[str, str] | None = None,
):
"""
Performs a Slack Incoming Webhook request with given text.
:param text: The text message.
:param unfurl_links: Option to indicate whether text url should unfurl.
:param unfurl_media: Option to indicate whether media url should unfurl.
:param headers: Request headers for this request.
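        A short sketch (the message and URL are illustrative):
        .. code-block:: python
            hook = SlackWebhookHook(slack_webhook_conn_id="slack_default")
            hook.send_text("Deploy finished: https://example.com/build/42", unfurl_links=False)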
"""
return self.send(text=text, unfurl_links=unfurl_links, unfurl_media=unfurl_media, headers=headers)
@classmethod
def get_connection_form_widgets(cls) -> dict[str, Any]:
"""Returns dictionary of widgets to be added for the hook to handle extra values."""
from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import IntegerField, StringField
from wtforms.validators import NumberRange, Optional
return {
"timeout": IntegerField(
lazy_gettext("Timeout"),
widget=BS3TextFieldWidget(),
validators=[Optional(), NumberRange(min=1)],
description="Optional. The maximum number of seconds the client will wait to connect "
"and receive a response from Slack Incoming Webhook.",
),
"proxy": StringField(
lazy_gettext("Proxy"),
widget=BS3TextFieldWidget(),
description="Optional. Proxy to make the Slack Incoming Webhook call.",
),
}
@classmethod
@_ensure_prefixes(conn_type="slackwebhook")
def get_ui_field_behaviour(cls) -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["login", "port", "extra"],
"relabeling": {
"host": "Slack Webhook Endpoint",
"password": "Webhook Token",
},
"placeholders": {
"schema": "https",
"host": "hooks.slack.com/services",
"password": "T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX",
"timeout": "30",
"proxy": "http://localhost:9000",
},
}
def execute(self) -> None:
"""
        Execute the Slack Incoming Webhook call.
        .. note::
            This method exists for compatibility with previous versions of the operator,
            where the Slack Incoming Webhook message was constructed from class attributes
            rather than passed as method arguments.
"""
        warnings.warn(
            "`SlackWebhookHook.execute` method is deprecated and will be removed in a future release. "
            "Please use `SlackWebhookHook.send` or `SlackWebhookHook.send_dict` or "
            "`SlackWebhookHook.send_text` methods instead.",
            AirflowProviderDeprecationWarning,
            stacklevel=2,
        )
self.send()
| 21,222 | 41.276892 | 110 | py |
airflow | airflow-main/airflow/providers/slack/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/slack/hooks/slack.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import warnings
from functools import cached_property, wraps
from pathlib import Path
from typing import TYPE_CHECKING, Any, Sequence
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
from airflow.providers.slack.utils import ConnectionExtraConfig
from airflow.utils.helpers import exactly_one
from airflow.utils.log.secrets_masker import mask_secret
if TYPE_CHECKING:
from slack_sdk.http_retry import RetryHandler
from slack_sdk.web.slack_response import SlackResponse
def _ensure_prefixes(conn_type):
# TODO: Remove when provider min airflow version >= 2.5.0 since
# this is handled by provider manager from that version.
def dec(func):
@wraps(func)
def inner(cls):
field_behaviors = func(cls)
conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
def _ensure_prefix(field):
if field not in conn_attrs and not field.startswith("extra__"):
return f"extra__{conn_type}__{field}"
else:
return field
if "placeholders" in field_behaviors:
placeholders = field_behaviors["placeholders"]
field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
return field_behaviors
return inner
return dec
class SlackHook(BaseHook):
"""
Creates a Slack API Connection to be used for calls.
    This class provides a thin wrapper around the ``slack_sdk.WebClient``.
.. seealso::
- :ref:`Slack API connection <howto/connection:slack>`
- https://api.slack.com/messaging
- https://slack.dev/python-slack-sdk/web/index.html
.. warning::
        This hook is intended to be used with a `Slack API` connection
        and might not work correctly with `Slack Incoming Webhook` and `HTTP` connections.
    Takes both a Slack API token directly and a connection that has a Slack API token. If both are
    supplied, the Slack API token will be used. Also exposes the rest of the slack_sdk.WebClient args.
Examples:
.. code-block:: python
# Create hook
slack_hook = SlackHook(slack_conn_id="slack_api_default")
# Call generic API with parameters (errors are handled by hook)
# For more details check https://api.slack.com/methods/chat.postMessage
slack_hook.call("chat.postMessage", json={"channel": "#random", "text": "Hello world!"})
# Call method from Slack SDK (you have to handle errors yourself)
# For more details check https://slack.dev/python-slack-sdk/web/index.html#messaging
slack_hook.client.chat_postMessage(channel="#random", text="Hello world!")
:param slack_conn_id: :ref:`Slack connection id <howto/connection:slack>`
that has Slack API token in the password field.
:param timeout: The maximum number of seconds the client will wait to connect
        and receive a response from Slack. If not set, the default WebClient value will be used.
    :param base_url: A string representing the Slack API base URL.
        If not set, the default WebClient BASE_URL will be used (``https://www.slack.com/api/``).
:param proxy: Proxy to make the Slack API call.
:param retry_handlers: List of handlers to customize retry logic in ``slack_sdk.WebClient``.
:param token: (deprecated) Slack API Token.
"""
conn_name_attr = "slack_conn_id"
default_conn_name = "slack_api_default"
conn_type = "slack"
hook_name = "Slack API"
def __init__(
self,
token: str | None = None,
slack_conn_id: str | None = None,
base_url: str | None = None,
timeout: int | None = None,
proxy: str | None = None,
retry_handlers: list[RetryHandler] | None = None,
**extra_client_args: Any,
) -> None:
if not token and not slack_conn_id:
raise AirflowException("Either `slack_conn_id` or `token` should be provided.")
if token:
mask_secret(token)
            warnings.warn(
                "Providing token as a hook argument is deprecated for security reasons and will be "
                "removed in a future release. Please specify the token in the `Slack API` connection.",
                AirflowProviderDeprecationWarning,
                stacklevel=2,
            )
if not slack_conn_id:
                warnings.warn(
                    "You have not set parameter `slack_conn_id`. Currently the `Slack API` connection id "
                    "is optional, but in a future release it will be mandatory.",
                    FutureWarning,
                    stacklevel=2,
                )
super().__init__()
self._token = token
self.slack_conn_id = slack_conn_id
self.base_url = base_url
self.timeout = timeout
self.proxy = proxy
self.retry_handlers = retry_handlers
self.extra_client_args = extra_client_args
if self.extra_client_args.pop("use_session", None) is not None:
warnings.warn("`use_session` has no affect in slack_sdk.WebClient.", UserWarning, stacklevel=2)
@cached_property
def client(self) -> WebClient:
"""Get the underlying slack_sdk.WebClient (cached)."""
return WebClient(**self._get_conn_params())
def get_conn(self) -> WebClient:
"""Get the underlying slack_sdk.WebClient (cached)."""
return self.client
def _get_conn_params(self) -> dict[str, Any]:
"""Fetch connection params as a dict and merge it with hook parameters."""
conn = self.get_connection(self.slack_conn_id) if self.slack_conn_id else None
conn_params: dict[str, Any] = {"retry_handlers": self.retry_handlers}
if self._token:
conn_params["token"] = self._token
elif conn:
if not conn.password:
raise AirflowNotFoundException(
f"Connection ID {self.slack_conn_id!r} does not contain password (Slack API Token)."
)
conn_params["token"] = conn.password
extra_config = ConnectionExtraConfig(
conn_type=self.conn_type,
conn_id=conn.conn_id if conn else None,
extra=conn.extra_dejson if conn else {},
)
# Merge Hook parameters with Connection config
conn_params.update(
{
"timeout": self.timeout or extra_config.getint("timeout", default=None),
"base_url": self.base_url or extra_config.get("base_url", default=None),
"proxy": self.proxy or extra_config.get("proxy", default=None),
}
)
# Add additional client args
conn_params.update(self.extra_client_args)
if "logger" not in conn_params:
conn_params["logger"] = self.log
return {k: v for k, v in conn_params.items() if v is not None}
@cached_property
def token(self) -> str:
        warnings.warn(
            "`SlackHook.token` property is deprecated and will be removed in a future release.",
            AirflowProviderDeprecationWarning,
            stacklevel=2,
        )
return self._get_conn_params()["token"]
def __get_token(self, token: Any, slack_conn_id: Any) -> str:
        warnings.warn(
            "`SlackHook.__get_token` method is deprecated and will be removed in a future release.",
            AirflowProviderDeprecationWarning,
            stacklevel=2,
        )
if token is not None:
return token
if slack_conn_id is not None:
conn = self.get_connection(slack_conn_id)
if not getattr(conn, "password", None):
raise AirflowException("Missing token(password) in Slack connection")
return conn.password
raise AirflowException("Cannot get token: No valid Slack token nor slack_conn_id supplied.")
def call(self, api_method: str, **kwargs) -> SlackResponse:
"""
Calls Slack WebClient `WebClient.api_call` with given arguments.
:param api_method: The target Slack API method. e.g. 'chat.postMessage'. Required.
:param http_verb: HTTP Verb. Optional (defaults to 'POST')
:param files: Files to multipart upload. e.g. {imageORfile: file_objectORfile_path}
:param data: The body to attach to the request. If a dictionary is provided,
form-encoding will take place. Optional.
:param params: The URL parameters to append to the URL. Optional.
:param json: JSON for the body to attach to the request. Optional.
:return: The server's response to an HTTP request. Data from the response can be
accessed like a dict. If the response included 'next_cursor' it can be
iterated on to execute subsequent requests.
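        A sketch of cursor-based pagination by iterating over the response (the method name and
        parameters below are illustrative):
        .. code-block:: python
            hook = SlackHook(slack_conn_id="slack_api_default")
            for page in hook.call("conversations.list", params={"limit": 200}):
                for channel in page.get("channels", []):
                    print(channel["name"])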
"""
return self.client.api_call(api_method, **kwargs)
def send_file(
self,
*,
channels: str | Sequence[str] | None = None,
file: str | Path | None = None,
content: str | None = None,
filename: str | None = None,
filetype: str | None = None,
initial_comment: str | None = None,
title: str | None = None,
) -> SlackResponse:
"""
Create or upload an existing file.
:param channels: Comma-separated list of channel names or IDs where the file will be shared.
            If omitted, the file will be sent to the workspace.
:param file: Path to file which need to be sent.
        :param content: File contents. If omitted, you must provide a ``file``.
:param filename: Displayed filename.
:param filetype: A file type identifier.
:param initial_comment: The message text introducing the file in specified ``channels``.
:param title: Title of file.
.. seealso::
- `Slack API files.upload method <https://api.slack.com/methods/files.upload>`_
- `File types <https://api.slack.com/types/file#file_types>`_
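        A short sketch (channels, paths and contents are illustrative):
        .. code-block:: python
            hook = SlackHook(slack_conn_id="slack_api_default")
            # Upload an existing file from disk
            hook.send_file(channels="#random", file="/files/dags/test.txt", initial_comment="Hello!")
            # Or upload in-memory content
            hook.send_file(channels="#random", content="file content", filename="test.txt")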
"""
if not exactly_one(file, content):
raise ValueError("Either `file` or `content` must be provided, not both.")
elif file:
file = Path(file)
with open(file, "rb") as fp:
if not filename:
filename = file.name
return self.client.files_upload(
file=fp,
filename=filename,
filetype=filetype,
initial_comment=initial_comment,
title=title,
channels=channels,
)
return self.client.files_upload(
content=content,
filename=filename,
filetype=filetype,
initial_comment=initial_comment,
title=title,
channels=channels,
)
def test_connection(self):
"""Tests the Slack API connection.
.. seealso::
https://api.slack.com/methods/auth.test
"""
try:
response = self.call("auth.test")
response.validate()
except SlackApiError as e:
return False, str(e)
except Exception as e:
return False, f"Unknown error occurred while testing connection: {e}"
if isinstance(response.data, bytes):
            # If response data is binary then return a simple message
return True, f"Connection successfully tested (url: {response.api_url})."
try:
return True, json.dumps(response.data)
except TypeError:
return True, str(response)
@classmethod
def get_connection_form_widgets(cls) -> dict[str, Any]:
"""Returns dictionary of widgets to be added for the hook to handle extra values."""
from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import IntegerField, StringField
from wtforms.validators import NumberRange, Optional
return {
"timeout": IntegerField(
lazy_gettext("Timeout"),
widget=BS3TextFieldWidget(),
validators=[Optional(strip_whitespace=True), NumberRange(min=1)],
description="Optional. The maximum number of seconds the client will wait to connect "
"and receive a response from Slack API.",
),
"base_url": StringField(
lazy_gettext("Base URL"),
widget=BS3TextFieldWidget(),
description="Optional. A string representing the Slack API base URL.",
),
"proxy": StringField(
lazy_gettext("Proxy"),
widget=BS3TextFieldWidget(),
description="Optional. Proxy to make the Slack API call.",
),
}
@classmethod
@_ensure_prefixes(conn_type="slack")
def get_ui_field_behaviour(cls) -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["login", "port", "host", "schema", "extra"],
"relabeling": {
"password": "Slack API Token",
},
"placeholders": {
"password": "xoxb-1234567890123-09876543210987-AbCdEfGhIjKlMnOpQrStUvWx",
"timeout": "30",
"base_url": "https://www.slack.com/api/",
"proxy": "http://localhost:9000",
},
}
| 14,482 | 39.119114 | 108 | py |
airflow | airflow-main/airflow/providers/slack/utils/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from typing import Any, Sequence
from airflow.utils.types import NOTSET
class ConnectionExtraConfig:
    """Helper class for reading fields from Connection Extra.
:param conn_type: Hook connection type.
    :param conn_id: Connection ID used for appropriate error messages.
:param extra: Connection extra dictionary.
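    Example (values are illustrative):
    .. code-block:: python
        extra_config = ConnectionExtraConfig(
            conn_type="slack",
            conn_id="slack_api_default",
            extra={"timeout": "30"},
        )
        timeout = extra_config.getint("timeout", default=None)  # -> 30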
"""
def __init__(self, conn_type: str, conn_id: str | None = None, extra: dict[str, Any] | None = None):
super().__init__()
self.conn_type = conn_type
self.conn_id = conn_id
self.extra = extra or {}
def get(self, field, default: Any = NOTSET):
"""Get specified field from Connection Extra.
:param field: Connection extra field name.
:param default: If specified then use as default value if field not present in Connection Extra.
"""
backcompat_key = f"extra__{self.conn_type}__{field}"
if self.extra.get(field) not in (None, ""):
if self.extra.get(backcompat_key) not in (None, ""):
warnings.warn(
f"Conflicting params `{field}` and `{backcompat_key}` found in extras for conn "
f"{self.conn_id}. Using value for `{field}`. Please ensure this is the correct value "
f"and remove the backcompat key `{backcompat_key}`."
)
return self.extra[field]
elif backcompat_key in self.extra and self.extra[backcompat_key] not in (None, ""):
            # Additional non-empty validation is required for connections created in the UI
            # in Airflow 2.2. In these connections a key-value pair is always present for all
            # prefixed extras even if the user does not fill these fields.
            # In addition, fields from `wtforms.IntegerField` might contain a None value.
# E.g.: `{'extra__slackwebhook__proxy': '', 'extra__slackwebhook__timeout': None}`
# From Airflow 2.3, using the prefix is no longer required.
return self.extra[backcompat_key]
else:
if default is NOTSET:
raise KeyError(
f"Couldn't find {backcompat_key!r} or {field!r} "
f"in Connection ({self.conn_id!r}) Extra and no default value specified."
)
return default
def getint(self, field, default: Any = NOTSET) -> Any:
"""Get specified field from Connection Extra and evaluate as integer.
:param field: Connection extra field name.
:param default: If specified then use as default value if field not present in Connection Extra.
"""
value = self.get(field=field, default=default)
if value != default:
value = int(value)
return value
def parse_filename(
filename: str, supported_file_formats: Sequence[str], fallback: str | None = None
) -> tuple[str, str | None]:
"""
Parse filetype and compression from given filename.
:param filename: filename to parse.
:param supported_file_formats: list of supported file extensions.
:param fallback: fallback to given file format.
:returns: filetype and compression (if specified)
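    Examples (filenames are illustrative):
    .. code-block:: python
        parse_filename("data.csv.gz", ["csv", "json"])  # -> ("csv", "gz")
        parse_filename("data.csv", ["csv", "json"])  # -> ("csv", None)
        parse_filename("data.bin", ["csv", "json"], fallback="csv")  # -> ("csv", None)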
"""
if not filename:
raise ValueError("Expected 'filename' parameter is missing.")
if fallback and fallback not in supported_file_formats:
raise ValueError(f"Invalid fallback value {fallback!r}, expected one of {supported_file_formats}.")
parts = filename.rsplit(".", 2)
try:
if len(parts) == 1:
raise ValueError(f"No file extension specified in filename {filename!r}.")
if parts[-1] in supported_file_formats:
return parts[-1], None
elif len(parts) == 2:
raise ValueError(
f"Unsupported file format {parts[-1]!r}, expected one of {supported_file_formats}."
)
else:
if parts[-2] not in supported_file_formats:
raise ValueError(
f"Unsupported file format '{parts[-2]}.{parts[-1]}', "
f"expected one of {supported_file_formats} with compression extension."
)
return parts[-2], parts[-1]
except ValueError as ex:
if fallback:
return fallback, None
raise ex from None
| 5,132 | 42.134454 | 107 | py |
airflow | airflow-main/airflow/providers/hashicorp/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.4.2"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-hashicorp:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,534 | 35.547619 | 118 | py |
airflow | airflow-main/airflow/providers/hashicorp/secrets/vault.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Objects relating to sourcing connections & variables from Hashicorp Vault."""
from __future__ import annotations
import warnings
from typing import TYPE_CHECKING
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.hashicorp._internal_client.vault_client import _VaultClient
from airflow.secrets import BaseSecretsBackend
from airflow.utils.log.logging_mixin import LoggingMixin
class VaultBackend(BaseSecretsBackend, LoggingMixin):
"""
Retrieves Connections and Variables from Hashicorp Vault.
Configurable via ``airflow.cfg`` as follows:
.. code-block:: ini
[secrets]
backend = airflow.providers.hashicorp.secrets.vault.VaultBackend
backend_kwargs = {
"connections_path": "connections",
"url": "http://127.0.0.1:8200",
"mount_point": "airflow"
}
For example, if your keys are under ``connections`` path in ``airflow`` mount_point, this
would be accessible if you provide ``{"connections_path": "connections"}`` and request
conn_id ``smtp_default``.
:param connections_path: Specifies the path of the secret to read to get Connections.
(default: 'connections'). If set to None (null), requests for connections will not be sent to Vault.
:param variables_path: Specifies the path of the secret to read to get Variable.
(default: 'variables'). If set to None (null), requests for variables will not be sent to Vault.
:param config_path: Specifies the path of the secret to read Airflow Configurations
(default: 'config'). If set to None (null), requests for configurations will not be sent to Vault.
:param url: Base URL for the Vault instance being addressed.
:param auth_type: Authentication Type for Vault. Default is ``token``. Available values are:
('approle', 'aws_iam', 'azure', 'github', 'gcp', 'kubernetes', 'ldap', 'radius', 'token', 'userpass')
:param auth_mount_point: It can be used to define mount_point for authentication chosen
Default depends on the authentication method used.
:param mount_point: The "path" the secret engine was mounted on. Default is "secret". Note that
this mount_point is not used for authentication if authentication is done via a
different engine. If set to None, the mount secret should be provided as a prefix for each
variable/connection_id. For authentication mount_points see, auth_mount_point.
:param kv_engine_version: Select the version of the engine to run (``1`` or ``2``, default: ``2``).
:param token: Authentication token to include in requests sent to Vault.
(for ``token`` and ``github`` auth_type)
:param token_path: path to file containing authentication token to include in requests sent to Vault
(for ``token`` and ``github`` auth_type).
:param username: Username for Authentication (for ``ldap`` and ``userpass`` auth_type).
:param password: Password for Authentication (for ``ldap`` and ``userpass`` auth_type).
    :param key_id: Key ID for Authentication (for ``aws_iam`` and ``azure`` auth_type).
:param secret_id: Secret ID for Authentication (for ``approle``, ``aws_iam`` and ``azure`` auth_types).
:param role_id: Role ID for Authentication (for ``approle``, ``aws_iam`` auth_types).
:param kubernetes_role: Role for Authentication (for ``kubernetes`` auth_type).
:param kubernetes_jwt_path: Path for kubernetes jwt token (for ``kubernetes`` auth_type, default:
``/var/run/secrets/kubernetes.io/serviceaccount/token``).
:param gcp_key_path: Path to Google Cloud Service Account key file (JSON) (for ``gcp`` auth_type).
Mutually exclusive with gcp_keyfile_dict.
:param gcp_keyfile_dict: Dictionary of keyfile parameters. (for ``gcp`` auth_type).
Mutually exclusive with gcp_key_path.
:param gcp_scopes: Comma-separated string containing OAuth2 scopes (for ``gcp`` auth_type).
:param azure_tenant_id: The tenant id for the Azure Active Directory (for ``azure`` auth_type).
:param azure_resource: The configured URL for the application registered in Azure Active Directory
(for ``azure`` auth_type).
:param radius_host: Host for radius (for ``radius`` auth_type).
:param radius_secret: Secret for radius (for ``radius`` auth_type).
:param radius_port: Port for radius (for ``radius`` auth_type).
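    A minimal usage sketch (the URL, token and ids below are placeholders; normally the backend is
    instantiated by Airflow itself from the ``[secrets]`` configuration shown above):
    .. code-block:: python
        backend = VaultBackend(
            connections_path="connections",
            mount_point="airflow",
            url="http://127.0.0.1:8200",
            token="s.XXXXXXXX",
        )
        smtp_conn = backend.get_connection("smtp_default")
        my_var = backend.get_variable("my_var")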
"""
def __init__(
self,
connections_path: str = "connections",
variables_path: str = "variables",
config_path: str = "config",
url: str | None = None,
auth_type: str = "token",
auth_mount_point: str | None = None,
mount_point: str | None = "secret",
kv_engine_version: int = 2,
token: str | None = None,
token_path: str | None = None,
username: str | None = None,
password: str | None = None,
key_id: str | None = None,
secret_id: str | None = None,
role_id: str | None = None,
kubernetes_role: str | None = None,
kubernetes_jwt_path: str = "/var/run/secrets/kubernetes.io/serviceaccount/token",
gcp_key_path: str | None = None,
gcp_keyfile_dict: dict | None = None,
gcp_scopes: str | None = None,
azure_tenant_id: str | None = None,
azure_resource: str | None = None,
radius_host: str | None = None,
radius_secret: str | None = None,
radius_port: int | None = None,
**kwargs,
):
super().__init__()
if connections_path is not None:
self.connections_path = connections_path.rstrip("/")
else:
self.connections_path = connections_path
if variables_path is not None:
self.variables_path = variables_path.rstrip("/")
else:
self.variables_path = variables_path
if config_path is not None:
self.config_path = config_path.rstrip("/")
else:
self.config_path = config_path
self.mount_point = mount_point
self.kv_engine_version = kv_engine_version
self.vault_client = _VaultClient(
url=url,
auth_type=auth_type,
auth_mount_point=auth_mount_point,
mount_point=mount_point,
kv_engine_version=kv_engine_version,
token=token,
token_path=token_path,
username=username,
password=password,
key_id=key_id,
secret_id=secret_id,
role_id=role_id,
kubernetes_role=kubernetes_role,
kubernetes_jwt_path=kubernetes_jwt_path,
gcp_key_path=gcp_key_path,
gcp_keyfile_dict=gcp_keyfile_dict,
gcp_scopes=gcp_scopes,
azure_tenant_id=azure_tenant_id,
azure_resource=azure_resource,
radius_host=radius_host,
radius_secret=radius_secret,
radius_port=radius_port,
**kwargs,
)
def _parse_path(self, secret_path: str) -> tuple[str | None, str | None]:
if not self.mount_point:
split_secret_path = secret_path.split("/", 1)
if len(split_secret_path) < 2:
return None, None
return split_secret_path[0], split_secret_path[1]
else:
return "", secret_path
def get_response(self, conn_id: str) -> dict | None:
"""
Get data from Vault.
        :return: The data from the Vault path if it exists
"""
mount_point, conn_key = self._parse_path(conn_id)
if self.connections_path is None or conn_key is None:
return None
if self.connections_path == "":
secret_path = conn_key
else:
secret_path = self.build_path(self.connections_path, conn_key)
return self.vault_client.get_secret(
secret_path=(mount_point + "/" if mount_point else "") + secret_path
)
def get_conn_uri(self, conn_id: str) -> str | None:
"""
Get serialized representation of connection.
:param conn_id: The connection id
:return: The connection uri retrieved from the secret
"""
# Since VaultBackend implements `get_connection`, `get_conn_uri` is not used. So we
# don't need to implement (or direct users to use) method `get_conn_value` instead
warnings.warn(
f"Method `{self.__class__.__name__}.get_conn_uri` is deprecated and will be removed "
"in a future release.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
response = self.get_response(conn_id)
return response.get("conn_uri") if response else None
# Make sure connection is imported this way for type checking, otherwise when importing
# the backend it will get a circular dependency and fail
if TYPE_CHECKING:
from airflow.models.connection import Connection
def get_connection(self, conn_id: str) -> Connection | None:
"""
Get connection from Vault as secret.
        Prioritize conn_uri if it exists; if not, fall back to normal Connection creation.
:return: A Connection object constructed from Vault data
"""
# The Connection needs to be locally imported because otherwise we get into cyclic import
# problems when instantiating the backend during configuration
from airflow.models.connection import Connection
response = self.get_response(conn_id)
if response is None:
return None
uri = response.get("conn_uri")
if uri:
return Connection(conn_id, uri=uri)
return Connection(conn_id, **response)
def get_variable(self, key: str) -> str | None:
"""
Get Airflow Variable.
:param key: Variable Key
:return: Variable Value retrieved from the vault
"""
mount_point, variable_key = self._parse_path(key)
if self.variables_path is None or variable_key is None:
return None
if self.variables_path == "":
secret_path = variable_key
else:
secret_path = self.build_path(self.variables_path, variable_key)
response = self.vault_client.get_secret(
secret_path=(mount_point + "/" if mount_point else "") + secret_path
)
return response.get("value") if response else None
def get_config(self, key: str) -> str | None:
"""
Get Airflow Configuration.
:param key: Configuration Option Key
:return: Configuration Option Value retrieved from the vault
"""
mount_point, config_key = self._parse_path(key)
if self.config_path is None or config_key is None:
return None
if self.config_path == "":
secret_path = config_key
else:
secret_path = self.build_path(self.config_path, config_key)
response = self.vault_client.get_secret(
secret_path=(mount_point + "/" if mount_point else "") + secret_path
)
return response.get("value") if response else None
| 11,992 | 43.583643 | 109 | py |
airflow | airflow-main/airflow/providers/hashicorp/secrets/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/hashicorp/_internal_client/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/hashicorp/_internal_client/vault_client.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from functools import cached_property
import hvac
from hvac.api.auth_methods import Kubernetes
from hvac.exceptions import InvalidPath, VaultError
from requests import Response, Session
from requests.adapters import HTTPAdapter
from urllib3.util import Retry
from airflow.utils.log.logging_mixin import LoggingMixin
DEFAULT_KUBERNETES_JWT_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/token"
DEFAULT_KV_ENGINE_VERSION = 2
VALID_KV_VERSIONS: list[int] = [1, 2]
VALID_AUTH_TYPES: list[str] = [
"approle",
"aws_iam",
"azure",
"github",
"gcp",
"kubernetes",
"ldap",
"radius",
"token",
"userpass",
]
class _VaultClient(LoggingMixin):
"""
    Retrieves an authenticated client from Hashicorp Vault.
    This is a purely internal class promoting authentication code reuse between the Hook and the
    SecretsBackend; it should not be used directly in Airflow DAGs. Use VaultBackend for backend
    integration, and VaultHook if you want to communicate with Vault using a standard Airflow
    Connection definition.
:param url: Base URL for the Vault instance being addressed.
:param auth_type: Authentication Type for Vault. Default is ``token``. Available values are in
('approle', 'aws_iam', 'azure', 'github', 'gcp', 'kubernetes', 'ldap', 'radius', 'token', 'userpass')
:param auth_mount_point: It can be used to define mount_point for authentication chosen
Default depends on the authentication method used.
:param mount_point: The "path" the secret engine was mounted on. Default is "secret". Note that
this mount_point is not used for authentication if authentication is done via a
different engine. For authentication mount_points see, auth_mount_point.
:param kv_engine_version: Selects the version of the engine to run (``1`` or ``2``, default: ``2``).
:param token: Authentication token to include in requests sent to Vault
(for ``token`` and ``github`` auth_type).
:param token_path: path to file containing authentication token to include in requests sent to Vault
(for ``token`` and ``github`` auth_type).
:param username: Username for Authentication (for ``ldap`` and ``userpass`` auth_types).
:param password: Password for Authentication (for ``ldap`` and ``userpass`` auth_types).
    :param key_id: Key ID for Authentication (for ``aws_iam`` and ``azure`` auth_type).
:param secret_id: Secret ID for Authentication (for ``approle``, ``aws_iam`` and ``azure`` auth_types).
:param role_id: Role ID for Authentication (for ``approle``, ``aws_iam`` auth_types).
:param kubernetes_role: Role for Authentication (for ``kubernetes`` auth_type).
:param kubernetes_jwt_path: Path for kubernetes jwt token (for ``kubernetes`` auth_type, default:
``/var/run/secrets/kubernetes.io/serviceaccount/token``).
:param gcp_key_path: Path to Google Cloud Service Account key file (JSON) (for ``gcp`` auth_type).
Mutually exclusive with gcp_keyfile_dict
:param gcp_keyfile_dict: Dictionary of keyfile parameters. (for ``gcp`` auth_type).
Mutually exclusive with gcp_key_path
:param gcp_scopes: Comma-separated string containing OAuth2 scopes (for ``gcp`` auth_type).
:param azure_tenant_id: The tenant id for the Azure Active Directory (for ``azure`` auth_type).
:param azure_resource: The configured URL for the application registered in Azure Active Directory
(for ``azure`` auth_type).
:param radius_host: Host for radius (for ``radius`` auth_type).
:param radius_secret: Secret for radius (for ``radius`` auth_type).
:param radius_port: Port for radius (for ``radius`` auth_type).
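    Internal usage sketch (URL, token and secret path are placeholders; in DAGs prefer VaultBackend
    or VaultHook instead of this class):
    .. code-block:: python
        client = _VaultClient(url="http://127.0.0.1:8200", auth_type="token", token="s.XXXXXXXX")
        secret = client.get_secret(secret_path="connections/smtp_default")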
"""
def __init__(
self,
url: str | None = None,
auth_type: str = "token",
auth_mount_point: str | None = None,
mount_point: str | None = "secret",
kv_engine_version: int | None = None,
token: str | None = None,
token_path: str | None = None,
username: str | None = None,
password: str | None = None,
key_id: str | None = None,
secret_id: str | None = None,
role_id: str | None = None,
kubernetes_role: str | None = None,
kubernetes_jwt_path: str | None = "/var/run/secrets/kubernetes.io/serviceaccount/token",
gcp_key_path: str | None = None,
gcp_keyfile_dict: dict | None = None,
gcp_scopes: str | None = None,
azure_tenant_id: str | None = None,
azure_resource: str | None = None,
radius_host: str | None = None,
radius_secret: str | None = None,
radius_port: int | None = None,
**kwargs,
):
super().__init__()
if kv_engine_version and kv_engine_version not in VALID_KV_VERSIONS:
raise VaultError(
f"The version is not supported: {kv_engine_version}. "
f"It should be one of {VALID_KV_VERSIONS}"
)
if auth_type not in VALID_AUTH_TYPES:
raise VaultError(
f"The auth_type is not supported: {auth_type}. It should be one of {VALID_AUTH_TYPES}"
)
if auth_type == "token" and not token and not token_path:
raise VaultError("The 'token' authentication type requires 'token' or 'token_path'")
if auth_type == "github" and not token and not token_path:
raise VaultError("The 'github' authentication type requires 'token' or 'token_path'")
if auth_type == "approle" and not role_id:
raise VaultError("The 'approle' authentication type requires 'role_id'")
if auth_type == "kubernetes":
if not kubernetes_role:
raise VaultError("The 'kubernetes' authentication type requires 'kubernetes_role'")
if not kubernetes_jwt_path:
raise VaultError("The 'kubernetes' authentication type requires 'kubernetes_jwt_path'")
if auth_type == "azure":
if not azure_resource:
raise VaultError("The 'azure' authentication type requires 'azure_resource'")
if not azure_tenant_id:
raise VaultError("The 'azure' authentication type requires 'azure_tenant_id'")
if auth_type == "radius":
if not radius_host:
raise VaultError("The 'radius' authentication type requires 'radius_host'")
if not radius_secret:
raise VaultError("The 'radius' authentication type requires 'radius_secret'")
self.kv_engine_version = kv_engine_version if kv_engine_version else 2
self.url = url
self.auth_type = auth_type
self.kwargs = kwargs
self.token = token
self.token_path = token_path
self.auth_mount_point = auth_mount_point
self.mount_point = mount_point
self.username = username
self.password = password
self.key_id = key_id
self.secret_id = secret_id
self.role_id = role_id
self.kubernetes_role = kubernetes_role
self.kubernetes_jwt_path = kubernetes_jwt_path
self.gcp_key_path = gcp_key_path
self.gcp_keyfile_dict = gcp_keyfile_dict
self.gcp_scopes = gcp_scopes
self.azure_tenant_id = azure_tenant_id
self.azure_resource = azure_resource
self.radius_host = radius_host
self.radius_secret = radius_secret
self.radius_port = radius_port
@property
def client(self):
"""
Checks that it is still authenticated to Vault and invalidates the cache if this is not the case.
:return: Vault Client
"""
if not self._client.is_authenticated():
# Invalidate the cache:
# https://github.com/pydanny/cached-property#invalidating-the-cache
self.__dict__.pop("_client", None)
return self._client
@cached_property
def _client(self) -> hvac.Client:
"""
Return an authenticated Hashicorp Vault client.
:return: Vault Client
"""
if "session" not in self.kwargs:
            # If no session object is provided, create one with retries as per the hvac documentation:
# https://hvac.readthedocs.io/en/stable/advanced_usage.html#retrying-failed-requests
adapter = HTTPAdapter(
max_retries=Retry(
total=3,
backoff_factor=0.1,
status_forcelist=[412, 500, 502, 503],
raise_on_status=False,
)
)
session = Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
self.kwargs["session"] = session
_client = hvac.Client(url=self.url, **self.kwargs)
if self.auth_type == "approle":
self._auth_approle(_client)
elif self.auth_type == "aws_iam":
self._auth_aws_iam(_client)
elif self.auth_type == "azure":
self._auth_azure(_client)
elif self.auth_type == "gcp":
self._auth_gcp(_client)
elif self.auth_type == "github":
self._auth_github(_client)
elif self.auth_type == "kubernetes":
self._auth_kubernetes(_client)
elif self.auth_type == "ldap":
self._auth_ldap(_client)
elif self.auth_type == "radius":
self._auth_radius(_client)
elif self.auth_type == "token":
self._set_token(_client)
elif self.auth_type == "userpass":
self._auth_userpass(_client)
else:
raise VaultError(f"Authentication type '{self.auth_type}' not supported")
if _client.is_authenticated():
return _client
else:
raise VaultError("Vault Authentication Error!")
def _auth_userpass(self, _client: hvac.Client) -> None:
if self.auth_mount_point:
_client.auth.userpass.login(
username=self.username, password=self.password, mount_point=self.auth_mount_point
)
else:
_client.auth.userpass.login(username=self.username, password=self.password)
def _auth_radius(self, _client: hvac.Client) -> None:
if self.auth_mount_point:
_client.auth.radius.configure(
host=self.radius_host,
secret=self.radius_secret,
port=self.radius_port,
mount_point=self.auth_mount_point,
)
else:
_client.auth.radius.configure(
host=self.radius_host, secret=self.radius_secret, port=self.radius_port
)
def _auth_ldap(self, _client: hvac.Client) -> None:
if self.auth_mount_point:
_client.auth.ldap.login(
username=self.username, password=self.password, mount_point=self.auth_mount_point
)
else:
_client.auth.ldap.login(username=self.username, password=self.password)
def _auth_kubernetes(self, _client: hvac.Client) -> None:
if not self.kubernetes_jwt_path:
raise VaultError("The kubernetes_jwt_path should be set here. This should not happen.")
with open(self.kubernetes_jwt_path) as f:
jwt = f.read().strip()
if self.auth_mount_point:
Kubernetes(_client.adapter).login(
role=self.kubernetes_role, jwt=jwt, mount_point=self.auth_mount_point
)
else:
Kubernetes(_client.adapter).login(role=self.kubernetes_role, jwt=jwt)
def _auth_github(self, _client: hvac.Client) -> None:
if self.auth_mount_point:
_client.auth.github.login(token=self.token, mount_point=self.auth_mount_point)
else:
_client.auth.github.login(token=self.token)
def _auth_gcp(self, _client: hvac.Client) -> None:
from airflow.providers.google.cloud.utils.credentials_provider import (
_get_scopes,
get_credentials_and_project_id,
)
scopes = _get_scopes(self.gcp_scopes)
credentials, _ = get_credentials_and_project_id(
key_path=self.gcp_key_path, keyfile_dict=self.gcp_keyfile_dict, scopes=scopes
)
if self.auth_mount_point:
_client.auth.gcp.configure(credentials=credentials, mount_point=self.auth_mount_point)
else:
_client.auth.gcp.configure(credentials=credentials)
def _auth_azure(self, _client: hvac.Client) -> None:
if self.auth_mount_point:
_client.auth.azure.configure(
tenant_id=self.azure_tenant_id,
resource=self.azure_resource,
client_id=self.key_id,
client_secret=self.secret_id,
mount_point=self.auth_mount_point,
)
else:
_client.auth.azure.configure(
tenant_id=self.azure_tenant_id,
resource=self.azure_resource,
client_id=self.key_id,
client_secret=self.secret_id,
)
def _auth_aws_iam(self, _client: hvac.Client) -> None:
if self.auth_mount_point:
_client.auth.aws.iam_login(
access_key=self.key_id,
secret_key=self.secret_id,
role=self.role_id,
mount_point=self.auth_mount_point,
)
else:
_client.auth.aws.iam_login(access_key=self.key_id, secret_key=self.secret_id, role=self.role_id)
def _auth_approle(self, _client: hvac.Client) -> None:
if self.auth_mount_point:
_client.auth.approle.login(
role_id=self.role_id, secret_id=self.secret_id, mount_point=self.auth_mount_point
)
else:
_client.auth.approle.login(role_id=self.role_id, secret_id=self.secret_id)
def _set_token(self, _client: hvac.Client) -> None:
if self.token_path:
with open(self.token_path) as f:
_client.token = f.read().strip()
else:
_client.token = self.token
def _parse_secret_path(self, secret_path: str) -> tuple[str, str]:
if not self.mount_point:
split_secret_path = secret_path.split("/", 1)
if len(split_secret_path) < 2:
raise InvalidPath
return split_secret_path[0], split_secret_path[1]
else:
return self.mount_point, secret_path
def get_secret(self, secret_path: str, secret_version: int | None = None) -> dict | None:
"""
Get secret value from the KV engine.
:param secret_path: The path of the secret.
:param secret_version: Specifies the version of Secret to return. If not set, the latest
version is returned. (Can only be used in case of version 2 of KV).
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v1.html
and https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
:return: secret stored in the vault as a dictionary
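
        Example (a sketch only; ``vault_client`` is an already-constructed ``_VaultClient``
        and the secret path is illustrative):

        .. code-block:: python

            secret = vault_client.get_secret(secret_path="connections/smtp_default")
            password = secret.get("password") if secret else None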
"""
mount_point = None
try:
mount_point, secret_path = self._parse_secret_path(secret_path)
if self.kv_engine_version == 1:
if secret_version:
raise VaultError("Secret version can only be used with version 2 of the KV engine")
response = self.client.secrets.kv.v1.read_secret(path=secret_path, mount_point=mount_point)
else:
response = self.client.secrets.kv.v2.read_secret_version(
path=secret_path, mount_point=mount_point, version=secret_version
)
except InvalidPath:
self.log.debug("Secret not found %s with mount point %s", secret_path, mount_point)
return None
return_data = response["data"] if self.kv_engine_version == 1 else response["data"]["data"]
return return_data
def get_secret_metadata(self, secret_path: str) -> dict | None:
"""
Reads secret metadata (including versions) from the engine. It is only valid for KV version 2.
:param secret_path: The path of the secret.
:return: secret metadata. This is a Dict containing metadata for the secret.
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
"""
if self.kv_engine_version == 1:
raise VaultError("Metadata might only be used with version 2 of the KV engine.")
mount_point = None
try:
mount_point, secret_path = self._parse_secret_path(secret_path)
return self.client.secrets.kv.v2.read_secret_metadata(path=secret_path, mount_point=mount_point)
except InvalidPath:
self.log.debug("Secret not found %s with mount point %s", secret_path, mount_point)
return None
def get_secret_including_metadata(
self, secret_path: str, secret_version: int | None = None
) -> dict | None:
"""
Reads secret including metadata. It is only valid for KV version 2.
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
:param secret_path: The path of the secret.
:param secret_version: Specifies the version of Secret to return. If not set, the latest
version is returned. (Can only be used in case of version 2 of KV).
:return: The key info. This is a Dict with "data" mapping keeping secret
and "metadata" mapping keeping metadata of the secret.
"""
if self.kv_engine_version == 1:
raise VaultError("Metadata might only be used with version 2 of the KV engine.")
mount_point = None
try:
mount_point, secret_path = self._parse_secret_path(secret_path)
return self.client.secrets.kv.v2.read_secret_version(
path=secret_path, mount_point=mount_point, version=secret_version
)
except InvalidPath:
self.log.debug(
"Secret not found %s with mount point %s and version %s",
secret_path,
mount_point,
secret_version,
)
return None
def create_or_update_secret(
self, secret_path: str, secret: dict, method: str | None = None, cas: int | None = None
) -> Response:
"""
Creates or updates secret.
:param secret_path: The path of the secret.
:param secret: Secret to create or update for the path specified
:param method: Optional parameter to explicitly request a POST (create) or PUT (update) request to
the selected kv secret engine. If no argument is provided for this parameter, hvac attempts to
intelligently determine which method is appropriate. Only valid for KV engine version 1
:param cas: Set the "cas" value to use a Check-And-Set operation. If not set the write will be
allowed. If set to 0 a write will only be allowed if the key doesn't exist.
If the index is non-zero the write will only be allowed if the key's current version
matches the version specified in the cas parameter. Only valid for KV engine version 2.
:return: The response of the create_or_update_secret request.
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v1.html
and https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
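
        Example (a sketch only; the path and payload are illustrative):

        .. code-block:: python

            vault_client.create_or_update_secret(
                secret_path="connections/smtp_default",
                secret={"user": "airflow", "password": "s3cret"},
            )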
"""
if self.kv_engine_version == 2 and method:
raise VaultError("The method parameter is only valid for version 1")
if self.kv_engine_version == 1 and cas:
raise VaultError("The cas parameter is only valid for version 2")
mount_point, secret_path = self._parse_secret_path(secret_path)
if self.kv_engine_version == 1:
response = self.client.secrets.kv.v1.create_or_update_secret(
secret_path=secret_path, secret=secret, mount_point=mount_point, method=method
)
else:
response = self.client.secrets.kv.v2.create_or_update_secret(
secret_path=secret_path, secret=secret, mount_point=mount_point, cas=cas
)
return response
| 21,223 | 44.061571 | 109 | py |
airflow | airflow-main/airflow/providers/hashicorp/hooks/vault.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for HashiCorp Vault."""
from __future__ import annotations
import json
import warnings
from typing import Any
import hvac
from hvac.exceptions import VaultError
from requests import Response
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
from airflow.providers.hashicorp._internal_client.vault_client import (
DEFAULT_KUBERNETES_JWT_PATH,
DEFAULT_KV_ENGINE_VERSION,
_VaultClient,
)
from airflow.utils.helpers import merge_dicts
class VaultHook(BaseHook):
"""
Hook to Interact with HashiCorp Vault KeyValue Secret engine.
HashiCorp hvac documentation:
* https://hvac.readthedocs.io/en/stable/
You connect to the host specified as host in the connection. The login/password from the connection
are used as credentials usually and you can specify different authentication parameters
via init params or via corresponding extras in the connection.
The mount point should be placed as a path in the URL - similarly to Vault's URL schema:
    This indicates the "path" the secret engine is mounted on. Default if not specified is "secret".
Note that this ``mount_point`` is not used for authentication if authentication is done via a
    different engine. Each engine uses its own engine-specific authentication mount_point.
The extras in the connection are named the same as the parameters ('kv_engine_version', 'auth_type', ...).
You can also use gcp_keyfile_dict extra to pass json-formatted dict in case of 'gcp' authentication.
The URL schemas supported are "vault", "http" (using http to connect to the vault) or
"vaults" and "https" (using https to connect to the vault).
Example URL:
.. code-block::
vault://user:password@host:port/mount_point?kv_engine_version=1&auth_type=github
Login/Password are used as credentials:
* approle: login -> role_id, password -> secret_id
* github: password -> token
* token: password -> token
* aws_iam: login -> key_id, password -> secret_id
* azure: login -> client_id, password -> client_secret
* ldap: login -> username, password -> password
* userpass: login -> username, password -> password
* radius: password -> radius_secret
:param vault_conn_id: The id of the connection to use
:param auth_type: Authentication Type for the Vault. Default is ``token``. Available values are:
('approle', 'github', 'gcp', 'kubernetes', 'ldap', 'token', 'userpass')
:param auth_mount_point: It can be used to define mount_point for authentication chosen
Default depends on the authentication method used.
:param kv_engine_version: Select the version of the engine to run (``1`` or ``2``). Defaults to
version defined in connection or ``2`` if not defined in connection.
:param role_id: Role ID for ``aws_iam`` Authentication.
:param kubernetes_role: Role for Authentication (for ``kubernetes`` auth_type)
:param kubernetes_jwt_path: Path for kubernetes jwt token (for ``kubernetes`` auth_type, default:
``/var/run/secrets/kubernetes.io/serviceaccount/token``)
:param token_path: path to file containing authentication token to include in requests sent to Vault
(for ``token`` and ``github`` auth_type).
:param gcp_key_path: Path to Google Cloud Service Account key file (JSON) (for ``gcp`` auth_type)
Mutually exclusive with gcp_keyfile_dict
:param gcp_scopes: Comma-separated string containing OAuth2 scopes (for ``gcp`` auth_type)
:param azure_tenant_id: The tenant id for the Azure Active Directory (for ``azure`` auth_type)
:param azure_resource: The configured URL for the application registered in Azure Active Directory
(for ``azure`` auth_type)
:param radius_host: Host for radius (for ``radius`` auth_type)
:param radius_port: Port for radius (for ``radius`` auth_type)
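
    A minimal usage sketch (assumes a ``vault_default`` connection is defined and points at a
    reachable Vault server; the secret path is illustrative):

    .. code-block:: python

        hook = VaultHook(vault_conn_id="vault_default")
        smtp_secret = hook.get_secret(secret_path="connections/smtp_default")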
"""
conn_name_attr = "vault_conn_id"
default_conn_name = "vault_default"
conn_type = "vault"
hook_name = "Hashicorp Vault"
def __init__(
self,
vault_conn_id: str = default_conn_name,
auth_type: str | None = None,
auth_mount_point: str | None = None,
kv_engine_version: int | None = None,
role_id: str | None = None,
kubernetes_role: str | None = None,
kubernetes_jwt_path: str | None = None,
token_path: str | None = None,
gcp_key_path: str | None = None,
gcp_scopes: str | None = None,
azure_tenant_id: str | None = None,
azure_resource: str | None = None,
radius_host: str | None = None,
radius_port: int | None = None,
**kwargs,
):
super().__init__()
self.connection = self.get_connection(vault_conn_id)
if not auth_type:
auth_type = self.connection.extra_dejson.get("auth_type") or "token"
if not auth_mount_point:
auth_mount_point = self.connection.extra_dejson.get("auth_mount_point")
if not kv_engine_version:
conn_version = self.connection.extra_dejson.get("kv_engine_version")
try:
kv_engine_version = int(conn_version) if conn_version else DEFAULT_KV_ENGINE_VERSION
except ValueError:
raise VaultError(f"The version is not an int: {conn_version}. ")
client_kwargs = self.connection.extra_dejson.get("client_kwargs", {})
if kwargs:
client_kwargs = merge_dicts(client_kwargs, kwargs)
if auth_type == "approle":
if role_id:
warnings.warn(
"""The usage of role_id for AppRole authentication has been deprecated.
Please use connection login.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
elif self.connection.extra_dejson.get("role_id"):
role_id = self.connection.extra_dejson.get("role_id")
warnings.warn(
"""The usage of role_id in connection extra for AppRole authentication has been
deprecated. Please use connection login.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
elif self.connection.login:
role_id = self.connection.login
if auth_type == "aws_iam":
if not role_id:
role_id = self.connection.extra_dejson.get("role_id")
azure_resource, azure_tenant_id = (
self._get_azure_parameters_from_connection(azure_resource, azure_tenant_id)
if auth_type == "azure"
else (None, None)
)
gcp_key_path, gcp_keyfile_dict, gcp_scopes = (
self._get_gcp_parameters_from_connection(gcp_key_path, gcp_scopes)
if auth_type == "gcp"
else (None, None, None)
)
kubernetes_jwt_path, kubernetes_role = (
self._get_kubernetes_parameters_from_connection(kubernetes_jwt_path, kubernetes_role)
if auth_type == "kubernetes"
else (None, None)
)
radius_host, radius_port = (
self._get_radius_parameters_from_connection(radius_host, radius_port)
if auth_type == "radius"
else (None, None)
)
if self.connection.extra_dejson.get("use_tls") is not None:
if bool(self.connection.extra_dejson.get("use_tls")):
conn_protocol = "https"
else:
conn_protocol = "http"
else:
if self.connection.conn_type == "vault":
conn_protocol = "http"
elif self.connection.conn_type == "vaults":
conn_protocol = "https"
elif self.connection.conn_type == "http":
conn_protocol = "http"
elif self.connection.conn_type == "https":
conn_protocol = "https"
else:
raise VaultError("The url schema must be one of ['http', 'https', 'vault', 'vaults' ]")
url = f"{conn_protocol}://{self.connection.host}"
if self.connection.port:
url += f":{self.connection.port}"
        # Schema is really the path in the Connection definition. This is pretty confusing because of the URL schema.
mount_point = self.connection.schema if self.connection.schema else "secret"
client_kwargs.update(
url=url,
auth_type=auth_type,
auth_mount_point=auth_mount_point,
mount_point=mount_point,
kv_engine_version=kv_engine_version,
token=self.connection.password,
token_path=token_path,
username=self.connection.login,
password=self.connection.password,
key_id=self.connection.login,
secret_id=self.connection.password,
role_id=role_id,
kubernetes_role=kubernetes_role,
kubernetes_jwt_path=kubernetes_jwt_path,
gcp_key_path=gcp_key_path,
gcp_keyfile_dict=gcp_keyfile_dict,
gcp_scopes=gcp_scopes,
azure_tenant_id=azure_tenant_id,
azure_resource=azure_resource,
radius_host=radius_host,
radius_secret=self.connection.password,
radius_port=radius_port,
)
self.vault_client = _VaultClient(**client_kwargs)
def _get_kubernetes_parameters_from_connection(
self, kubernetes_jwt_path: str | None, kubernetes_role: str | None
) -> tuple[str, str | None]:
if not kubernetes_jwt_path:
kubernetes_jwt_path = self.connection.extra_dejson.get("kubernetes_jwt_path")
if not kubernetes_jwt_path:
kubernetes_jwt_path = DEFAULT_KUBERNETES_JWT_PATH
if not kubernetes_role:
kubernetes_role = self.connection.extra_dejson.get("kubernetes_role")
return kubernetes_jwt_path, kubernetes_role
def _get_gcp_parameters_from_connection(
self,
gcp_key_path: str | None,
gcp_scopes: str | None,
) -> tuple[str | None, dict | None, str | None]:
if not gcp_scopes:
gcp_scopes = self.connection.extra_dejson.get("gcp_scopes")
if not gcp_key_path:
gcp_key_path = self.connection.extra_dejson.get("gcp_key_path")
string_keyfile_dict = self.connection.extra_dejson.get("gcp_keyfile_dict")
gcp_keyfile_dict = json.loads(string_keyfile_dict) if string_keyfile_dict else None
return gcp_key_path, gcp_keyfile_dict, gcp_scopes
def _get_azure_parameters_from_connection(
self, azure_resource: str | None, azure_tenant_id: str | None
) -> tuple[str | None, str | None]:
if not azure_tenant_id:
azure_tenant_id = self.connection.extra_dejson.get("azure_tenant_id")
if not azure_resource:
azure_resource = self.connection.extra_dejson.get("azure_resource")
return azure_resource, azure_tenant_id
def _get_radius_parameters_from_connection(
self, radius_host: str | None, radius_port: int | None
) -> tuple[str | None, int | None]:
if not radius_port:
radius_port_str = self.connection.extra_dejson.get("radius_port")
if radius_port_str:
try:
radius_port = int(radius_port_str)
except ValueError:
raise VaultError(f"Radius port was wrong: {radius_port_str}")
if not radius_host:
radius_host = self.connection.extra_dejson.get("radius_host")
return radius_host, radius_port
def get_conn(self) -> hvac.Client:
"""
Retrieves connection to Vault.
:return: connection used.
"""
return self.vault_client.client
def get_secret(self, secret_path: str, secret_version: int | None = None) -> dict | None:
"""
Get secret value from the engine.
:param secret_path: Path of the secret
:param secret_version: Optional version of key to read - can only be used in case of version 2 of KV
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v1.html
and https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
:return: secret stored in the vault as a dictionary
"""
return self.vault_client.get_secret(secret_path=secret_path, secret_version=secret_version)
def get_secret_metadata(self, secret_path: str) -> dict | None:
"""
Reads secret metadata (including versions) from the engine. It is only valid for KV version 2.
:param secret_path: Path to read from
:return: secret metadata. This is a Dict containing metadata for the secret.
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
"""
return self.vault_client.get_secret_metadata(secret_path=secret_path)
def get_secret_including_metadata(
self, secret_path: str, secret_version: int | None = None
) -> dict | None:
"""
Reads secret including metadata. It is only valid for KV version 2.
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
:param secret_path: Path of the secret
:param secret_version: Optional version of key to read - can only be used in case of version 2 of KV
:return: key info. This is a Dict with "data" mapping keeping secret
and "metadata" mapping keeping metadata of the secret.
"""
return self.vault_client.get_secret_including_metadata(
secret_path=secret_path, secret_version=secret_version
)
def create_or_update_secret(
self, secret_path: str, secret: dict, method: str | None = None, cas: int | None = None
) -> Response:
"""
Creates or updates secret.
:param secret_path: Path to read from
:param secret: Secret to create or update for the path specified
:param method: Optional parameter to explicitly request a POST (create) or PUT (update) request to
the selected kv secret engine. If no argument is provided for this parameter, hvac attempts to
intelligently determine which method is appropriate. Only valid for KV engine version 1
:param cas: Set the "cas" value to use a Check-And-Set operation. If not set the write will be
allowed. If set to 0 a write will only be allowed if the key doesn't exist.
If the index is non-zero the write will only be allowed if the key's current version
matches the version specified in the cas parameter. Only valid for KV engine version 2.
:return: The response of the create_or_update_secret request.
See https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v1.html
and https://hvac.readthedocs.io/en/stable/usage/secrets_engines/kv_v2.html for details.
"""
return self.vault_client.create_or_update_secret(
secret_path=secret_path, secret=secret, method=method, cas=cas
)
@classmethod
def get_connection_form_widgets(cls) -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import BooleanField, IntegerField, StringField
from wtforms.validators import NumberRange, Optional, any_of
return {
"auth_type": StringField(lazy_gettext("Auth type"), widget=BS3TextFieldWidget()),
"auth_mount_point": StringField(lazy_gettext("Auth mount point"), widget=BS3TextFieldWidget()),
"kv_engine_version": IntegerField(
lazy_gettext("KV engine version"),
validators=[any_of([1, 2])],
widget=BS3TextFieldWidget(),
description="Must be 1 or 2.",
default=DEFAULT_KV_ENGINE_VERSION,
),
"role_id": StringField(lazy_gettext("Role ID (deprecated)"), widget=BS3TextFieldWidget()),
"kubernetes_role": StringField(lazy_gettext("Kubernetes role"), widget=BS3TextFieldWidget()),
"kubernetes_jwt_path": StringField(
lazy_gettext("Kubernetes jwt path"), widget=BS3TextFieldWidget()
),
"token_path": StringField(lazy_gettext("Token path"), widget=BS3TextFieldWidget()),
"gcp_key_path": StringField(lazy_gettext("GCP key path"), widget=BS3TextFieldWidget()),
"gcp_scopes": StringField(lazy_gettext("GCP scopes"), widget=BS3TextFieldWidget()),
"azure_tenant_id": StringField(lazy_gettext("Azure tenant ID"), widget=BS3TextFieldWidget()),
"azure_resource": StringField(lazy_gettext("Azure resource"), widget=BS3TextFieldWidget()),
"radius_host": StringField(lazy_gettext("Radius host"), widget=BS3TextFieldWidget()),
"radius_port": IntegerField(
lazy_gettext("Radius port"),
widget=BS3TextFieldWidget(),
validators=[Optional(), NumberRange(min=0)],
),
"use_tls": BooleanField(lazy_gettext("Use TLS"), default=True),
}
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["extra"],
"relabeling": {},
}
def test_connection(self) -> tuple[bool, str]:
"""Test Vault connectivity from UI."""
try:
self.get_conn()
return True, "Connection successfully tested"
except Exception as e:
return False, str(e)
| 18,752 | 43.756563 | 110 | py |
airflow | airflow-main/airflow/providers/hashicorp/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/mongo/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-mongo:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,530 | 35.452381 | 114 | py |
airflow | airflow-main/airflow/providers/mongo/hooks/mongo.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Mongo DB."""
from __future__ import annotations
from ssl import CERT_NONE
from types import TracebackType
from typing import Any, overload
from urllib.parse import quote_plus, urlunsplit
import pymongo
from pymongo import MongoClient, ReplaceOne
from airflow.hooks.base import BaseHook
from airflow.typing_compat import Literal
class MongoHook(BaseHook):
"""
Interact with Mongo. This hook uses the Mongo conn_id.
PyMongo Wrapper to Interact With Mongo Database
Mongo Connection Documentation
https://docs.mongodb.com/manual/reference/connection-string/index.html
You can specify connection string options in extra field of your connection
https://docs.mongodb.com/manual/reference/connection-string/index.html#connection-string-options
    If you want to use a DNS seedlist, set `srv` to True.
ex.
{"srv": true, "replicaSet": "test", "ssl": true, "connectTimeoutMS": 30000}
:param mongo_conn_id: The :ref:`Mongo connection id <howto/connection:mongo>` to use
when connecting to MongoDB.
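
    A minimal usage sketch (assumes a ``mongo_default`` connection pointing at a reachable
    MongoDB instance; the collection name and query are illustrative):

    .. code-block:: python

        with MongoHook(conn_id="mongo_default") as hook:
            doc = hook.find("my_collection", {"status": "active"}, find_one=True)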
"""
conn_name_attr = "conn_id"
default_conn_name = "mongo_default"
conn_type = "mongo"
hook_name = "MongoDB"
def __init__(self, conn_id: str = default_conn_name, *args, **kwargs) -> None:
super().__init__()
self.mongo_conn_id = conn_id
self.connection = self.get_connection(conn_id)
self.extras = self.connection.extra_dejson.copy()
self.client: MongoClient | None = None
self.uri = self._create_uri()
def __enter__(self):
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
if self.client is not None:
self.client.close()
self.client = None
def get_conn(self) -> MongoClient:
"""Fetches PyMongo Client."""
if self.client is not None:
return self.client
# Mongo Connection Options dict that is unpacked when passed to MongoClient
options = self.extras
# If we are using SSL disable requiring certs from specific hostname
if options.get("ssl", False):
if pymongo.__version__ >= "4.0.0":
# In pymongo 4.0.0+ `tlsAllowInvalidCertificates=True`
# replaces `ssl_cert_reqs=CERT_NONE`
options.update({"tlsAllowInvalidCertificates": True})
else:
options.update({"ssl_cert_reqs": CERT_NONE})
self.client = MongoClient(self.uri, **options)
return self.client
def _create_uri(self) -> str:
"""
Create URI string from the given credentials.
:return: URI string.
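
        For example, a connection with login ``user``, password ``pass``, host ``mongo``,
        port ``27017`` and schema ``admin`` yields ``mongodb://user:pass@mongo:27017/admin``
        (or a ``mongodb+srv://`` URI when the ``srv`` extra is set to true).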
"""
srv = self.extras.pop("srv", False)
scheme = "mongodb+srv" if srv else "mongodb"
login = self.connection.login
password = self.connection.password
netloc = self.connection.host
if login is not None and password is not None:
netloc = f"{quote_plus(login)}:{quote_plus(password)}@{netloc}"
if self.connection.port:
netloc = f"{netloc}:{self.connection.port}"
path = f"/{self.connection.schema}"
return urlunsplit((scheme, netloc, path, "", ""))
def get_collection(
self, mongo_collection: str, mongo_db: str | None = None
) -> pymongo.collection.Collection:
"""
Fetches a mongo collection object for querying.
Uses connection schema as DB unless specified.
"""
mongo_db = mongo_db or self.connection.schema
mongo_conn: MongoClient = self.get_conn()
return mongo_conn.get_database(mongo_db).get_collection(mongo_collection)
def aggregate(
self, mongo_collection: str, aggregate_query: list, mongo_db: str | None = None, **kwargs
) -> pymongo.command_cursor.CommandCursor:
"""
Runs an aggregation pipeline and returns the results.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.aggregate
https://pymongo.readthedocs.io/en/stable/examples/aggregation.html
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.aggregate(aggregate_query, **kwargs)
@overload
def find(
self,
mongo_collection: str,
query: dict,
find_one: Literal[False],
mongo_db: str | None = None,
projection: list | dict | None = None,
**kwargs,
) -> pymongo.cursor.Cursor:
...
@overload
def find(
self,
mongo_collection: str,
query: dict,
find_one: Literal[True],
mongo_db: str | None = None,
projection: list | dict | None = None,
**kwargs,
) -> Any | None:
...
def find(
self,
mongo_collection: str,
query: dict,
find_one: bool = False,
mongo_db: str | None = None,
projection: list | dict | None = None,
**kwargs,
) -> pymongo.cursor.Cursor | Any | None:
"""
Runs a mongo find query and returns the results.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.find
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
if find_one:
return collection.find_one(query, projection, **kwargs)
else:
return collection.find(query, projection, **kwargs)
def insert_one(
self, mongo_collection: str, doc: dict, mongo_db: str | None = None, **kwargs
) -> pymongo.results.InsertOneResult:
"""
Inserts a single document into a mongo collection.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.insert_one
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.insert_one(doc, **kwargs)
def insert_many(
self, mongo_collection: str, docs: dict, mongo_db: str | None = None, **kwargs
) -> pymongo.results.InsertManyResult:
"""
Inserts many docs into a mongo collection.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.insert_many
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.insert_many(docs, **kwargs)
def update_one(
self,
mongo_collection: str,
filter_doc: dict,
update_doc: dict,
mongo_db: str | None = None,
**kwargs,
) -> pymongo.results.UpdateResult:
"""
Updates a single document in a mongo collection.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.update_one
:param mongo_collection: The name of the collection to update.
:param filter_doc: A query that matches the documents to update.
:param update_doc: The modifications to apply.
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.update_one(filter_doc, update_doc, **kwargs)
def update_many(
self,
mongo_collection: str,
filter_doc: dict,
update_doc: dict,
mongo_db: str | None = None,
**kwargs,
) -> pymongo.results.UpdateResult:
"""
Updates one or more documents in a mongo collection.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.update_many
:param mongo_collection: The name of the collection to update.
:param filter_doc: A query that matches the documents to update.
:param update_doc: The modifications to apply.
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.update_many(filter_doc, update_doc, **kwargs)
def replace_one(
self,
mongo_collection: str,
doc: dict,
filter_doc: dict | None = None,
mongo_db: str | None = None,
**kwargs,
) -> pymongo.results.UpdateResult:
"""
Replaces a single document in a mongo collection.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.replace_one
.. note::
If no ``filter_doc`` is given, it is assumed that the replacement
            document contains the ``_id`` field, which is then used as the filter.
:param mongo_collection: The name of the collection to update.
:param doc: The new document.
:param filter_doc: A query that matches the documents to replace.
Can be omitted; then the _id field from doc will be used.
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
if not filter_doc:
filter_doc = {"_id": doc["_id"]}
return collection.replace_one(filter_doc, doc, **kwargs)
def replace_many(
self,
mongo_collection: str,
docs: list[dict],
filter_docs: list[dict] | None = None,
mongo_db: str | None = None,
upsert: bool = False,
collation: pymongo.collation.Collation | None = None,
**kwargs,
) -> pymongo.results.BulkWriteResult:
"""
Replaces many documents in a mongo collection.
Uses bulk_write with multiple ReplaceOne operations
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.bulk_write
.. note::
            If no ``filter_docs`` are given, it is assumed that all replacement documents
            contain the ``_id`` field, which is then used as the filter for each document.
:param mongo_collection: The name of the collection to update.
:param docs: The new documents.
:param filter_docs: A list of queries that match the documents to replace.
            Can be omitted; then the ``_id`` fields from the documents in ``docs`` will be used.
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
:param upsert: If ``True``, perform an insert if no documents
match the filters for the replace operation.
:param collation: An instance of
:class:`~pymongo.collation.Collation`. This option is only
supported on MongoDB 3.4 and above.
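
        Example (a sketch only; ``hook`` is an existing ``MongoHook`` and the documents are
        illustrative):

        .. code-block:: python

            hook.replace_many(
                mongo_collection="users",
                docs=[{"_id": 1, "name": "a"}, {"_id": 2, "name": "b"}],
                upsert=True,
            )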
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
if not filter_docs:
filter_docs = [{"_id": doc["_id"]} for doc in docs]
requests = [
ReplaceOne(filter_docs[i], docs[i], upsert=upsert, collation=collation) for i in range(len(docs))
]
return collection.bulk_write(requests, **kwargs)
def delete_one(
self, mongo_collection: str, filter_doc: dict, mongo_db: str | None = None, **kwargs
) -> pymongo.results.DeleteResult:
"""
Deletes a single document in a mongo collection.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.delete_one
:param mongo_collection: The name of the collection to delete from.
:param filter_doc: A query that matches the document to delete.
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.delete_one(filter_doc, **kwargs)
def delete_many(
self, mongo_collection: str, filter_doc: dict, mongo_db: str | None = None, **kwargs
) -> pymongo.results.DeleteResult:
"""
Deletes one or more documents in a mongo collection.
https://pymongo.readthedocs.io/en/stable/api/pymongo/collection.html#pymongo.collection.Collection.delete_many
:param mongo_collection: The name of the collection to delete from.
:param filter_doc: A query that matches the documents to delete.
:param mongo_db: The name of the database to use.
Can be omitted; then the database from the connection string is used.
"""
collection = self.get_collection(mongo_collection, mongo_db=mongo_db)
return collection.delete_many(filter_doc, **kwargs)
| 13,774 | 36.636612 | 118 | py |
airflow | airflow-main/airflow/providers/mongo/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/mongo/sensors/mongo.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.providers.mongo.hooks.mongo import MongoHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class MongoSensor(BaseSensorOperator):
"""
Checks for the existence of a document which matches the given query in MongoDB.
.. code-block:: python
mongo_sensor = MongoSensor(
collection="coll",
query={"key": "value"},
mongo_conn_id="mongo_default",
mongo_db="admin",
task_id="mongo_sensor",
)
:param collection: Target MongoDB collection.
:param query: The query to find the target document.
:param mongo_conn_id: The :ref:`Mongo connection id <howto/connection:mongo>` to use
when connecting to MongoDB.
:param mongo_db: Target MongoDB name.
"""
template_fields: Sequence[str] = ("collection", "query")
def __init__(
self, *, collection: str, query: dict, mongo_conn_id: str = "mongo_default", mongo_db=None, **kwargs
) -> None:
super().__init__(**kwargs)
self.mongo_conn_id = mongo_conn_id
self.collection = collection
self.query = query
self.mongo_db = mongo_db
def poke(self, context: Context) -> bool:
self.log.info(
"Sensor check existence of the document that matches the following query: %s", self.query
)
hook = MongoHook(self.mongo_conn_id)
return hook.find(self.collection, self.query, mongo_db=self.mongo_db, find_one=True) is not None
| 2,422 | 35.164179 | 108 | py |
airflow | airflow-main/airflow/providers/mongo/sensors/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/postgres/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "5.5.2"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-postgres:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/postgres/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/postgres/operators/postgres.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from typing import Mapping, Sequence
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
class PostgresOperator(SQLExecuteQueryOperator):
"""
Executes sql code in a specific Postgres database.
This class is deprecated.
Please use :class:`airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
:param sql: the SQL code to be executed as a single string, or
a list of str (sql statements), or a reference to a template file.
Template references are recognized by str ending in '.sql'
:param postgres_conn_id: The :ref:`postgres conn id <howto/connection:postgres>`
reference to a specific postgres database.
:param autocommit: if True, each command is automatically committed.
(default value: False)
:param parameters: (optional) the parameters to render the SQL query with.
:param database: name of database which overwrite defined one in connection
:param runtime_parameters: a mapping of runtime params added to the final sql being executed.
For example, you could set the schema via `{"search_path": "CUSTOM_SCHEMA"}`.
Deprecated - use `hook_params={'options': '-c <connection_options>'}` instead.
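
    A migration sketch (connection id, task id and SQL are illustrative):

    .. code-block:: python

        from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

        run_query = SQLExecuteQueryOperator(
            task_id="run_query",
            conn_id="postgres_default",
            sql="SELECT 1",
            hook_params={"schema": "mydb", "options": "-c search_path=my_schema"},
        )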
"""
template_fields: Sequence[str] = ("sql",)
template_fields_renderers = {"sql": "postgresql"}
template_ext: Sequence[str] = (".sql",)
ui_color = "#ededed"
def __init__(
self,
*,
postgres_conn_id: str = "postgres_default",
database: str | None = None,
runtime_parameters: Mapping | None = None,
**kwargs,
) -> None:
if database is not None:
hook_params = kwargs.pop("hook_params", {})
kwargs["hook_params"] = {"schema": database, **hook_params}
if runtime_parameters:
warnings.warn(
"""`runtime_parameters` is deprecated.
Please use `hook_params={'options': '-c <connection_options>}`.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
hook_params = kwargs.pop("hook_params", {})
options = " ".join(f"-c {param}={val}" for param, val in runtime_parameters.items())
kwargs["hook_params"] = {"options": options, **hook_params}
super().__init__(conn_id=postgres_conn_id, **kwargs)
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
Also, you can provide `hook_params={'schema': <database>}`.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
| 3,591 | 41.258824 | 97 | py |
airflow | airflow-main/airflow/providers/postgres/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/postgres/hooks/postgres.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import warnings
from contextlib import closing
from copy import deepcopy
from typing import Any, Iterable, Union
import psycopg2
import psycopg2.extensions
import psycopg2.extras
from psycopg2.extensions import connection
from psycopg2.extras import DictCursor, NamedTupleCursor, RealDictCursor
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.models.connection import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
CursorType = Union[DictCursor, RealDictCursor, NamedTupleCursor]
class PostgresHook(DbApiHook):
"""Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
Also you can choose cursor as ``{"cursor": "dictcursor"}``. Refer to the
psycopg2.extras for more details.
Note: For Redshift, use keepalives_idle in the extra connection parameters
and set it to less than 300 seconds.
Note: For AWS IAM authentication, use iam in the extra connection parameters
and set it to true. Leave the password field empty. This will use the
"aws_default" connection to get the temporary token unless you override
in extras.
extras example: ``{"iam":true, "aws_conn_id":"my_aws_conn"}``
For Redshift, also use redshift in the extra connection parameters and
set it to true. The cluster-identifier is extracted from the beginning of
the host field, so is optional. It can however be overridden in the extra field.
extras example: ``{"iam":true, "redshift":true, "cluster-identifier": "my_cluster_id"}``
:param postgres_conn_id: The :ref:`postgres conn id <howto/connection:postgres>`
reference to a specific postgres database.
:param options: Optional. Specifies command-line options to send to the server
at connection start. For example, setting this to ``-c search_path=myschema``
sets the session's value of the ``search_path`` to ``myschema``.
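
    A minimal usage sketch (assumes a ``postgres_default`` connection; the options and query
    are illustrative):

    .. code-block:: python

        hook = PostgresHook(postgres_conn_id="postgres_default", options="-c search_path=my_schema")
        rows = hook.get_records("SELECT 1")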
"""
conn_name_attr = "postgres_conn_id"
default_conn_name = "postgres_default"
conn_type = "postgres"
hook_name = "Postgres"
supports_autocommit = True
def __init__(self, *args, options: str | None = None, **kwargs) -> None:
if "schema" in kwargs:
warnings.warn(
'The "schema" arg has been renamed to "database" as it contained the database name.'
'Please use "database" to set the database name.',
AirflowProviderDeprecationWarning,
stacklevel=2,
)
kwargs["database"] = kwargs["schema"]
super().__init__(*args, **kwargs)
self.connection: Connection | None = kwargs.pop("connection", None)
self.conn: connection = None
self.database: str | None = kwargs.pop("database", None)
self.options = options
@property
def schema(self):
warnings.warn(
'The "schema" variable has been renamed to "database" as it contained the database name.'
'Please use "database" to get the database name.',
AirflowProviderDeprecationWarning,
stacklevel=2,
)
return self.database
@schema.setter
def schema(self, value):
warnings.warn(
'The "schema" variable has been renamed to "database" as it contained the database name.'
'Please use "database" to set the database name.',
AirflowProviderDeprecationWarning,
stacklevel=2,
)
self.database = value
def _get_cursor(self, raw_cursor: str) -> CursorType:
_cursor = raw_cursor.lower()
if _cursor == "dictcursor":
return psycopg2.extras.DictCursor
if _cursor == "realdictcursor":
return psycopg2.extras.RealDictCursor
if _cursor == "namedtuplecursor":
return psycopg2.extras.NamedTupleCursor
raise ValueError(f"Invalid cursor passed {_cursor}")
def get_conn(self) -> connection:
"""Establishes a connection to a postgres database."""
conn_id = getattr(self, self.conn_name_attr)
conn = deepcopy(self.connection or self.get_connection(conn_id))
# check for authentication via AWS IAM
if conn.extra_dejson.get("iam", False):
conn.login, conn.password, conn.port = self.get_iam_token(conn)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=self.database or conn.schema,
port=conn.port,
)
raw_cursor = conn.extra_dejson.get("cursor", False)
if raw_cursor:
conn_args["cursor_factory"] = self._get_cursor(raw_cursor)
if self.options:
conn_args["options"] = self.options
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name not in [
"iam",
"redshift",
"cursor",
"cluster-identifier",
"aws_conn_id",
]:
conn_args[arg_name] = arg_val
self.conn = psycopg2.connect(**conn_args)
return self.conn
def copy_expert(self, sql: str, filename: str) -> None:
"""Executes SQL using psycopg2's ``copy_expert`` method.
Necessary to execute COPY command without access to a superuser.
Note: if this method is called with a "COPY FROM" statement and
the specified input file does not exist, it creates an empty
file and no data is loaded, but the operation succeeds.
So if users want to be aware when the input file does not exist,
they have to check its existence by themselves.
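
        Example (a sketch only; the table and file names are illustrative):

        .. code-block:: python

            hook.copy_expert("COPY my_table FROM STDIN WITH CSV HEADER", "/tmp/my_table.csv")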
"""
self.log.info("Running copy expert: %s, filename: %s", sql, filename)
if not os.path.isfile(filename):
with open(filename, "w"):
pass
with open(filename, "r+") as file:
with closing(self.get_conn()) as conn:
with closing(conn.cursor()) as cur:
cur.copy_expert(sql, file)
file.truncate(file.tell())
conn.commit()
def get_uri(self) -> str:
"""Extract the URI from the connection.
:return: the extracted uri.
"""
conn = self.get_connection(getattr(self, self.conn_name_attr))
conn.schema = self.database or conn.schema
uri = conn.get_uri().replace("postgres://", "postgresql://")
return uri
def bulk_load(self, table: str, tmp_file: str) -> None:
"""Loads a tab-delimited file into a database table."""
self.copy_expert(f"COPY {table} FROM STDIN", tmp_file)
def bulk_dump(self, table: str, tmp_file: str) -> None:
"""Dumps a database table into a tab-delimited file."""
self.copy_expert(f"COPY {table} TO STDOUT", tmp_file)
@staticmethod
def _serialize_cell(cell: object, conn: connection | None = None) -> Any:
"""Serialize a cell.
PostgreSQL adapts all arguments to the ``execute()`` method internally,
hence we return the cell without any conversion.
See http://initd.org/psycopg/docs/advanced.html#adapting-new-types for
more information.
:param cell: The cell to insert into the table
:param conn: The database connection
:return: The cell
"""
return cell
def get_iam_token(self, conn: Connection) -> tuple[str, str, int]:
"""Get the IAM token.
This uses AWSHook to retrieve a temporary password to connect to
        Postgres or Redshift. If the connection does not provide a port, the
        default is used (5432 for Postgres, 5439 for Redshift).
"""
try:
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
except ImportError:
from airflow.exceptions import AirflowException
raise AirflowException(
"apache-airflow-providers-amazon not installed, run: "
"pip install 'apache-airflow-providers-postgres[amazon]'."
)
aws_conn_id = conn.extra_dejson.get("aws_conn_id", "aws_default")
login = conn.login
if conn.extra_dejson.get("redshift", False):
port = conn.port or 5439
            # Pull the cluster-identifier from the beginning of the Redshift URL
# ex. my-cluster.ccdre4hpd39h.us-east-1.redshift.amazonaws.com returns my-cluster
cluster_identifier = conn.extra_dejson.get("cluster-identifier", conn.host.split(".")[0])
redshift_client = AwsBaseHook(aws_conn_id=aws_conn_id, client_type="redshift").conn
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/redshift.html#Redshift.Client.get_cluster_credentials
cluster_creds = redshift_client.get_cluster_credentials(
DbUser=login,
DbName=self.database or conn.schema,
ClusterIdentifier=cluster_identifier,
AutoCreate=False,
)
token = cluster_creds["DbPassword"]
login = cluster_creds["DbUser"]
else:
port = conn.port or 5432
rds_client = AwsBaseHook(aws_conn_id=aws_conn_id, client_type="rds").conn
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/rds.html#RDS.Client.generate_db_auth_token
token = rds_client.generate_db_auth_token(conn.host, port, conn.login)
return login, token, port
def get_table_primary_key(self, table: str, schema: str | None = "public") -> list[str] | None:
"""Get the table's primary key.
:param table: Name of the target table
:param schema: Name of the target schema, public by default
:return: Primary key columns list
"""
sql = """
select kcu.column_name
from information_schema.table_constraints tco
join information_schema.key_column_usage kcu
on kcu.constraint_name = tco.constraint_name
and kcu.constraint_schema = tco.constraint_schema
where tco.constraint_type = 'PRIMARY KEY'
and kcu.table_schema = %s
and kcu.table_name = %s
"""
pk_columns = [row[0] for row in self.get_records(sql, (schema, table))]
return pk_columns or None
@classmethod
def _generate_insert_sql(
cls, table: str, values: tuple[str, ...], target_fields: Iterable[str], replace: bool, **kwargs
) -> str:
"""Generate the INSERT SQL statement.
The REPLACE variant is specific to the PostgreSQL syntax.
:param table: Name of the target table
:param values: The row to insert into the table
:param target_fields: The names of the columns to fill in the table
:param replace: Whether to replace instead of insert
:param replace_index: the column or list of column names to act as
index for the ON CONFLICT clause
:return: The generated INSERT or REPLACE SQL statement
"""
placeholders = [
cls.placeholder,
] * len(values)
replace_index = kwargs.get("replace_index")
if target_fields:
target_fields_fragment = ", ".join(target_fields)
target_fields_fragment = f"({target_fields_fragment})"
else:
target_fields_fragment = ""
sql = f"INSERT INTO {table} {target_fields_fragment} VALUES ({','.join(placeholders)})"
if replace:
if not target_fields:
raise ValueError("PostgreSQL ON CONFLICT upsert syntax requires column names")
if not replace_index:
                raise ValueError("PostgreSQL ON CONFLICT upsert syntax requires a unique index")
if isinstance(replace_index, str):
replace_index = [replace_index]
on_conflict_str = f" ON CONFLICT ({', '.join(replace_index)})"
replace_target = [f for f in target_fields if f not in replace_index]
if replace_target:
replace_target_str = ", ".join(f"{col} = excluded.{col}" for col in replace_target)
sql += f"{on_conflict_str} DO UPDATE SET {replace_target_str}"
else:
sql += f"{on_conflict_str} DO NOTHING"
return sql
| 13,342 | 40.696875 | 142 | py |
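A minimal usage sketch of the hook above. The connection id, table, and column names are placeholders, and it assumes a unique index on "id" so the ON CONFLICT upsert produced by _generate_insert_sql() applies.

# Hypothetical "postgres_default" connection and "users" table; illustrative only.
from airflow.providers.postgres.hooks.postgres import PostgresHook

hook = PostgresHook(postgres_conn_id="postgres_default")

# insert_rows() builds its statement via _generate_insert_sql(); with replace=True
# and replace_index set it emits the "ON CONFLICT ... DO UPDATE SET" form above.
hook.insert_rows(
    table="users",
    rows=[(1, "alice"), (2, "bob")],
    target_fields=["id", "name"],
    replace=True,
    replace_index="id",
)

# bulk_load()/bulk_dump() wrap copy_expert() around COPY ... FROM STDIN / TO STDOUT.
hook.bulk_dump("users", "/tmp/users.tsv")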
airflow | airflow-main/airflow/providers/airbyte/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-airbyte:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,532 | 35.5 | 116 | py |
airflow | airflow-main/airflow/providers/airbyte/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/airbyte/operators/airbyte.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.airbyte.hooks.airbyte import AirbyteHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class AirbyteTriggerSyncOperator(BaseOperator):
"""
    Submits a job to an Airbyte server to run an integration process between your source and destination.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:AirbyteTriggerSyncOperator`
:param airbyte_conn_id: Optional. The name of the Airflow connection to get connection
information for Airbyte. Defaults to "airbyte_default".
:param connection_id: Required. The Airbyte ConnectionId UUID between a source and destination.
:param asynchronous: Optional. Flag to get job_id after submitting the job to the Airbyte API.
This is useful for submitting long running jobs and
waiting on them asynchronously using the AirbyteJobSensor. Defaults to False.
:param api_version: Optional. Airbyte API version. Defaults to "v1".
:param wait_seconds: Optional. Number of seconds between checks. Only used when ``asynchronous`` is False.
Defaults to 3 seconds.
:param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
Only used when ``asynchronous`` is False. Defaults to 3600 seconds (or 1 hour).
"""
template_fields: Sequence[str] = ("connection_id",)
def __init__(
self,
connection_id: str,
airbyte_conn_id: str = "airbyte_default",
asynchronous: bool = False,
api_version: str = "v1",
wait_seconds: float = 3,
timeout: float = 3600,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.airbyte_conn_id = airbyte_conn_id
self.connection_id = connection_id
self.timeout = timeout
self.api_version = api_version
self.wait_seconds = wait_seconds
self.asynchronous = asynchronous
    def execute(self, context: Context) -> int:
        """Create an Airbyte job, wait for it to finish unless asynchronous, and return the job id."""
self.hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)
job_object = self.hook.submit_sync_connection(connection_id=self.connection_id)
self.job_id = job_object.json()["job"]["id"]
self.log.info("Job %s was submitted to Airbyte Server", self.job_id)
if not self.asynchronous:
self.log.info("Waiting for job %s to complete", self.job_id)
self.hook.wait_for_job(job_id=self.job_id, wait_seconds=self.wait_seconds, timeout=self.timeout)
self.log.info("Job %s completed successfully", self.job_id)
return self.job_id
def on_kill(self):
"""Cancel the job if task is cancelled."""
if self.job_id:
self.log.info("on_kill: cancel the airbyte Job %s", self.job_id)
self.hook.cancel_job(self.job_id)
| 3,840 | 42.157303 | 110 | py |
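A short, illustrative DAG using the operator above in its blocking form; the Airflow connection id and the Airbyte connection UUID are made-up placeholders.

import pendulum
from airflow import DAG
from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator

with DAG(
    dag_id="airbyte_sync_example",
    start_date=pendulum.datetime(2023, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # asynchronous=False: the task blocks until the Airbyte job reaches a terminal state.
    sync_source_destination = AirbyteTriggerSyncOperator(
        task_id="airbyte_sync_source_dest",
        airbyte_conn_id="airbyte_default",
        connection_id="15bc3800-82e4-48c3-a32d-620661273f28",
        asynchronous=False,
        wait_seconds=3,
        timeout=3600,
    )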
airflow | airflow-main/airflow/providers/airbyte/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/airbyte/hooks/airbyte.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import time
from typing import Any
from airflow.exceptions import AirflowException
from airflow.providers.http.hooks.http import HttpHook
class AirbyteHook(HttpHook):
"""
Hook for Airbyte API.
:param airbyte_conn_id: Optional. The name of the Airflow connection to get
connection information for Airbyte. Defaults to "airbyte_default".
:param api_version: Optional. Airbyte API version. Defaults to "v1".
"""
conn_name_attr = "airbyte_conn_id"
default_conn_name = "airbyte_default"
conn_type = "airbyte"
hook_name = "Airbyte"
RUNNING = "running"
SUCCEEDED = "succeeded"
CANCELLED = "cancelled"
PENDING = "pending"
FAILED = "failed"
ERROR = "error"
INCOMPLETE = "incomplete"
def __init__(self, airbyte_conn_id: str = "airbyte_default", api_version: str = "v1") -> None:
super().__init__(http_conn_id=airbyte_conn_id)
self.api_version: str = api_version
def wait_for_job(self, job_id: str | int, wait_seconds: float = 3, timeout: float | None = 3600) -> None:
"""
        Poll a job until it reaches a terminal state or the timeout elapses.
:param job_id: Required. Id of the Airbyte job
:param wait_seconds: Optional. Number of seconds between checks.
        :param timeout: Optional. How many seconds to wait for the job to be ready.
Used only if ``asynchronous`` is False.
"""
state = None
start = time.monotonic()
while True:
if timeout and start + timeout < time.monotonic():
raise AirflowException(f"Timeout: Airbyte job {job_id} is not ready after {timeout}s")
time.sleep(wait_seconds)
try:
job = self.get_job(job_id=(int(job_id)))
state = job.json()["job"]["status"]
except AirflowException as err:
self.log.info("Retrying. Airbyte API returned server error when waiting for job: %s", err)
continue
if state in (self.RUNNING, self.PENDING, self.INCOMPLETE):
continue
if state == self.SUCCEEDED:
break
if state == self.ERROR:
raise AirflowException(f"Job failed:\n{job}")
elif state == self.CANCELLED:
raise AirflowException(f"Job was cancelled:\n{job}")
else:
raise Exception(f"Encountered unexpected state `{state}` for job_id `{job_id}`")
def submit_sync_connection(self, connection_id: str) -> Any:
"""
        Submits a job to an Airbyte server.
:param connection_id: Required. The ConnectionId of the Airbyte Connection.
"""
return self.run(
endpoint=f"api/{self.api_version}/connections/sync",
json={"connectionId": connection_id},
headers={"accept": "application/json"},
)
def get_job(self, job_id: int) -> Any:
"""
Gets the resource representation for a job in Airbyte.
:param job_id: Required. Id of the Airbyte job
"""
return self.run(
endpoint=f"api/{self.api_version}/jobs/get",
json={"id": job_id},
headers={"accept": "application/json"},
)
def cancel_job(self, job_id: int) -> Any:
"""
Cancel the job when task is cancelled.
:param job_id: Required. Id of the Airbyte job
"""
return self.run(
endpoint=f"api/{self.api_version}/jobs/cancel",
json={"id": job_id},
headers={"accept": "application/json"},
)
def test_connection(self):
"""Tests the Airbyte connection by hitting the health API."""
self.method = "GET"
try:
res = self.run(
endpoint=f"api/{self.api_version}/health",
headers={"accept": "application/json"},
extra_options={"check_response": False},
)
if res.status_code == 200:
return True, "Connection successfully tested"
else:
return False, res.text
except Exception as e:
return False, str(e)
finally:
self.method = "POST"
| 5,062 | 35.164286 | 109 | py |
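A sketch of driving the hook directly, outside any operator; the connection id and the Airbyte connection UUID are placeholders.

from airflow.providers.airbyte.hooks.airbyte import AirbyteHook

hook = AirbyteHook(airbyte_conn_id="airbyte_default")

# submit_sync_connection() POSTs to api/v1/connections/sync; the job id is under
# ["job"]["id"] in the response body.
job = hook.submit_sync_connection(connection_id="15bc3800-82e4-48c3-a32d-620661273f28")
job_id = job.json()["job"]["id"]

# wait_for_job() polls api/v1/jobs/get until SUCCEEDED, and raises on ERROR,
# CANCELLED, or when the timeout elapses.
hook.wait_for_job(job_id=job_id, wait_seconds=5, timeout=1800)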
airflow | airflow-main/airflow/providers/airbyte/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/airbyte/sensors/airbyte.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains an Airbyte Job sensor."""
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.exceptions import AirflowException
from airflow.providers.airbyte.hooks.airbyte import AirbyteHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class AirbyteJobSensor(BaseSensorOperator):
"""
Check for the state of a previously submitted Airbyte job.
:param airbyte_job_id: Required. Id of the Airbyte job
:param airbyte_conn_id: Optional. The name of the Airflow connection to get
connection information for Airbyte. Defaults to "airbyte_default".
:param api_version: Optional. Airbyte API version. Defaults to "v1".
"""
template_fields: Sequence[str] = ("airbyte_job_id",)
ui_color = "#6C51FD"
def __init__(
self,
*,
airbyte_job_id: int,
airbyte_conn_id: str = "airbyte_default",
api_version: str = "v1",
**kwargs,
) -> None:
super().__init__(**kwargs)
self.airbyte_conn_id = airbyte_conn_id
self.airbyte_job_id = airbyte_job_id
self.api_version = api_version
def poke(self, context: Context) -> bool:
hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)
job = hook.get_job(job_id=self.airbyte_job_id)
status = job.json()["job"]["status"]
if status == hook.FAILED:
raise AirflowException(f"Job failed: \n{job}")
elif status == hook.CANCELLED:
raise AirflowException(f"Job was cancelled: \n{job}")
elif status == hook.SUCCEEDED:
self.log.info("Job %s completed successfully.", self.airbyte_job_id)
return True
elif status == hook.ERROR:
self.log.info("Job %s attempt has failed.", self.airbyte_job_id)
self.log.info("Waiting for job %s to complete.", self.airbyte_job_id)
return False
| 2,793 | 36.756757 | 94 | py |
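A sketch of the asynchronous pattern this sensor is designed for: the operator submits the job and returns immediately, and the sensor polls it. Connection ids and the UUID are placeholders.

import pendulum
from airflow import DAG
from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator
from airflow.providers.airbyte.sensors.airbyte import AirbyteJobSensor

with DAG(
    dag_id="airbyte_async_example",
    start_date=pendulum.datetime(2023, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    trigger = AirbyteTriggerSyncOperator(
        task_id="trigger_airbyte_sync",
        connection_id="15bc3800-82e4-48c3-a32d-620661273f28",
        asynchronous=True,  # push the job id to XCom and return immediately
    )
    wait = AirbyteJobSensor(
        task_id="wait_for_airbyte_sync",
        airbyte_job_id=trigger.output,  # job id pushed by the trigger task
        poke_interval=30,
        timeout=3600,
    )
    trigger >> wait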
airflow | airflow-main/airflow/providers/dingding/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-dingding:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/dingding/operators/dingding.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.dingding.hooks.dingding import DingdingHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class DingdingOperator(BaseOperator):
"""
This operator allows you to send Dingding message using Dingding custom bot.
    The Dingding token is taken from the connection's ``password``. If the
    connection's ``host`` is set it is used as the endpoint domain; otherwise
    the default ``https://oapi.dingtalk.com`` is used.
    For more details, see
`Dingding custom bot <https://open-doc.dingtalk.com/microapp/serverapi2/qf2nxq>`_
:param dingding_conn_id: The name of the Dingding connection to use
    :param message_type: Message type you want to send to Dingding; five types are
        supported so far: text, link, markdown, actionCard, feedCard
:param message: The message send to Dingding chat group
:param at_mobiles: Remind specific users with this message
:param at_all: Remind all people in group or not. If True, will overwrite ``at_mobiles``
"""
template_fields: Sequence[str] = ("message",)
ui_color = "#4ea4d4" # Dingding icon color
def __init__(
self,
*,
dingding_conn_id: str = "dingding_default",
message_type: str = "text",
message: str | dict | None = None,
at_mobiles: list[str] | None = None,
at_all: bool = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.dingding_conn_id = dingding_conn_id
self.message_type = message_type
self.message = message
self.at_mobiles = at_mobiles
self.at_all = at_all
def execute(self, context: Context) -> None:
self.log.info("Sending Dingding message.")
hook = DingdingHook(
self.dingding_conn_id, self.message_type, self.message, self.at_mobiles, self.at_all
)
hook.send()
| 2,743 | 36.589041 | 96 | py |
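Illustrative usage of the operator above; "dingding_default" stands for an Airflow connection whose password holds the custom-bot access token, and the phone numbers are placeholders.

import pendulum
from airflow import DAG
from airflow.providers.dingding.operators.dingding import DingdingOperator

with DAG(
    dag_id="dingding_example",
    start_date=pendulum.datetime(2023, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Plain text message that @-mentions two (placeholder) phone numbers.
    text_msg = DingdingOperator(
        task_id="dingding_text",
        dingding_conn_id="dingding_default",
        message_type="text",
        message="Airflow DAG run finished.",
        at_mobiles=["1234567891", "1234567892"],
        at_all=False,
    )
    # For non-text types such as markdown, the message is a dict payload.
    markdown_msg = DingdingOperator(
        task_id="dingding_markdown",
        message_type="markdown",
        message={"title": "Airflow report", "text": "# Daily report\n- all tasks **succeeded**"},
    )
    text_msg >> markdown_msg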
airflow | airflow-main/airflow/providers/dingding/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/dingding/hooks/dingding.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import requests
from requests import Session
from airflow.exceptions import AirflowException
from airflow.providers.http.hooks.http import HttpHook
class DingdingHook(HttpHook):
"""Send message using a custom Dingding bot.
Get Dingding token from the connection's ``password``. If ``host`` is not
set in the connection, the default ``https://oapi.dingtalk.com`` is used.
For more detail message in
`Dingding custom bot <https://open-doc.dingtalk.com/microapp/serverapi2/qf2nxq>`_
:param dingding_conn_id: The name of the Dingding connection to use
    :param message_type: Message type you want to send to Dingding; five types are
        supported so far: text, link, markdown, actionCard, feedCard
:param message: The message send to Dingding chat group
:param at_mobiles: Remind specific users with this message
:param at_all: Remind all people in group or not. If True, will overwrite ``at_mobiles``
"""
conn_name_attr = "dingding_conn_id"
default_conn_name = "dingding_default"
conn_type = "dingding"
hook_name = "Dingding"
def __init__(
self,
dingding_conn_id="dingding_default",
message_type: str = "text",
message: str | dict | None = None,
at_mobiles: list[str] | None = None,
at_all: bool = False,
*args,
**kwargs,
) -> None:
super().__init__(http_conn_id=dingding_conn_id, *args, **kwargs) # type: ignore[misc]
self.message_type = message_type
self.message = message
self.at_mobiles = at_mobiles
self.at_all = at_all
def _get_endpoint(self) -> str:
"""Get Dingding endpoint for sending message."""
conn = self.get_connection(self.http_conn_id)
token = conn.password
if not token:
            raise AirflowException(
                "Dingding token is required but none was found; check your conn_id configuration."
)
return f"robot/send?access_token={token}"
    def _build_message(self) -> str:
        """Build different types of Dingding messages."""
if self.message_type in ["text", "markdown"]:
data = {
"msgtype": self.message_type,
self.message_type: {"content": self.message} if self.message_type == "text" else self.message,
"at": {"atMobiles": self.at_mobiles, "isAtAll": self.at_all},
}
else:
data = {"msgtype": self.message_type, self.message_type: self.message}
return json.dumps(data)
def get_conn(self, headers: dict | None = None) -> Session:
"""Overwrite HttpHook get_conn.
        We only need the base_url and headers; the generic params are not needed.
:param headers: additional headers to be passed through as a dictionary
"""
conn = self.get_connection(self.http_conn_id)
self.base_url = conn.host if conn.host else "https://oapi.dingtalk.com"
session = requests.Session()
if headers:
session.headers.update(headers)
return session
def send(self) -> None:
"""Send Dingding message."""
support_type = ["text", "link", "markdown", "actionCard", "feedCard"]
if self.message_type not in support_type:
            raise ValueError(
                f"DingdingHook only supports {support_type} so far, but received {self.message_type}"
)
data = self._build_message()
self.log.info("Sending Dingding type %s message %s", self.message_type, data)
resp = self.run(
endpoint=self._get_endpoint(), data=data, headers={"Content-Type": "application/json"}
)
        # A successful Dingding send returns errcode equal to 0
if int(resp.json().get("errcode")) != 0:
raise AirflowException(f"Send Dingding message failed, receive error message {resp.text}")
        self.log.info("Successfully sent Dingding message")
| 4,801 | 38.68595 | 110 | py |
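A small sketch of using the hook directly as a task failure callback; the connection id and message wording are placeholders rather than anything defined by the hook itself.

from airflow.providers.dingding.hooks.dingding import DingdingHook

def dingding_failure_callback(context):
    """Send a Dingding alert when a task instance fails."""
    ti = context["task_instance"]
    message = (
        "Airflow task failed:\n"
        f"dag: {ti.dag_id}\n"
        f"task: {ti.task_id}\n"
        f"logical date: {context['ts']}"
    )
    DingdingHook(
        dingding_conn_id="dingding_default",
        message_type="text",
        message=message,
        at_all=True,
    ).send()

# Attach it to a task, e.g. SomeOperator(..., on_failure_callback=dingding_failure_callback)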
airflow | airflow-main/airflow/providers/dingding/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/sendgrid/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-sendgrid:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/sendgrid/utils/emailer.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Airflow module for email backend using sendgrid."""
from __future__ import annotations
import base64
import logging
import mimetypes
import os
import warnings
from typing import Iterable, Union
import sendgrid
from sendgrid.helpers.mail import (
Attachment,
Category,
Content,
CustomArg,
Email,
Mail,
MailSettings,
Personalization,
SandBoxMode,
)
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
from airflow.utils.email import get_email_address_list
log = logging.getLogger(__name__)
AddressesType = Union[str, Iterable[str]]
def send_email(
to: AddressesType,
subject: str,
html_content: str,
files: AddressesType | None = None,
cc: AddressesType | None = None,
bcc: AddressesType | None = None,
sandbox_mode: bool = False,
conn_id: str = "sendgrid_default",
**kwargs,
) -> None:
"""
Send an email with html content using `Sendgrid <https://sendgrid.com/>`__.
.. note::
For more information, see :ref:`email-configuration-sendgrid`
"""
if files is None:
files = []
mail = Mail()
from_email = kwargs.get("from_email") or os.environ.get("SENDGRID_MAIL_FROM")
from_name = kwargs.get("from_name") or os.environ.get("SENDGRID_MAIL_SENDER")
mail.from_email = Email(from_email, from_name)
mail.subject = subject
mail.mail_settings = MailSettings()
if sandbox_mode:
mail.mail_settings.sandbox_mode = SandBoxMode(enable=True)
    # Add the list of "to" recipients.
personalization = Personalization()
to = get_email_address_list(to)
for to_address in to:
personalization.add_to(Email(to_address))
if cc:
cc = get_email_address_list(cc)
for cc_address in cc:
personalization.add_cc(Email(cc_address))
if bcc:
bcc = get_email_address_list(bcc)
for bcc_address in bcc:
personalization.add_bcc(Email(bcc_address))
# Add custom_args to personalization if present
pers_custom_args = kwargs.get("personalization_custom_args")
if isinstance(pers_custom_args, dict):
for key in pers_custom_args.keys():
personalization.add_custom_arg(CustomArg(key, pers_custom_args[key]))
mail.add_personalization(personalization)
mail.add_content(Content("text/html", html_content))
categories = kwargs.get("categories", [])
for cat in categories:
mail.add_category(Category(cat))
# Add email attachment.
for fname in files:
basename = os.path.basename(fname)
with open(fname, "rb") as file:
content = base64.b64encode(file.read()).decode("utf-8")
attachment = Attachment(
file_content=content,
file_type=mimetypes.guess_type(basename)[0],
file_name=basename,
disposition="attachment",
content_id=f"<{basename}>",
)
mail.add_attachment(attachment)
_post_sendgrid_mail(mail.get(), conn_id)
def _post_sendgrid_mail(mail_data: dict, conn_id: str = "sendgrid_default") -> None:
api_key = None
try:
conn = BaseHook.get_connection(conn_id)
api_key = conn.password
except AirflowException:
pass
if api_key is None:
warnings.warn(
"Fetching Sendgrid credentials from environment variables will be deprecated in a future "
"release. Please set credentials using a connection instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
api_key = os.environ.get("SENDGRID_API_KEY")
sendgrid_client = sendgrid.SendGridAPIClient(api_key=api_key)
response = sendgrid_client.client.mail.send.post(request_body=mail_data)
# 2xx status code.
if 200 <= response.status_code < 300:
log.info(
"Email with subject %s is successfully sent to recipients: %s",
mail_data["subject"],
mail_data["personalizations"],
)
else:
log.error(
"Failed to send out email with subject %s, status code: %s",
mail_data["subject"],
response.status_code,
)
| 5,019 | 31.179487 | 102 | py |
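A sketch of wiring this backend into Airflow and calling it directly; the addresses, connection id, and sender are placeholders.

# airflow.cfg (or the matching AIRFLOW__EMAIL__* environment variables):
#   [email]
#   email_backend = airflow.providers.sendgrid.utils.emailer.send_email
#   email_conn_id = sendgrid_default
# The "sendgrid_default" connection carries the SendGrid API key in its password;
# the sender comes from SENDGRID_MAIL_FROM / SENDGRID_MAIL_SENDER or from the
# from_email / from_name kwargs.
import os

from airflow.providers.sendgrid.utils.emailer import send_email

os.environ.setdefault("SENDGRID_MAIL_FROM", "noreply@example.com")

send_email(
    to=["alerts@example.com"],
    subject="Airflow notification",
    html_content="<b>All DAGs healthy.</b>",
    cc="oncall@example.com",
    sandbox_mode=True,  # validate the payload without actually delivering it
    conn_id="sendgrid_default",
)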
airflow | airflow-main/airflow/providers/sendgrid/utils/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |