response | instruction
---|---|
Check if an Exception indicates a transient error and warrants retrying. | def should_retry_start_spark_job(exception: BaseException) -> bool:
"""Check if an Exception indicates a transient error and warrants retrying."""
if isinstance(exception, ApiException):
return str(exception.status) == "409"
return False |
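A minimal sketch (not from the source) of how a predicate like this is typically wired into tenacity-based retries; the job-starting function and the retry settings below are illustrative:

import tenacity

@tenacity.retry(
    stop=tenacity.stop_after_attempt(3),
    wait=tenacity.wait_fixed(2),
    retry=tenacity.retry_if_exception(should_retry_start_spark_job),
    reraise=True,
)
def start_spark_job():
    # hypothetical call that may raise kubernetes.client.ApiException (e.g. a 409 conflict)
    ...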
Convert a str into a k8s object.
:param configmap_name: config map name
:return: a ``V1EnvFromSource`` referencing the config map | def convert_configmap(configmap_name) -> k8s.V1EnvFromSource:
"""
Convert a str into a k8s object.
:param configmap_name: config map name
:return: a ``V1EnvFromSource`` referencing the config map
"""
return k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmap_name)) |
Convert a dictionary of config_map_name and mount_path into k8s volume mounts and volumes.
:param configmap_info: a dictionary of {config_map_name: mount_path}
:return: a tuple of k8s volumes and volume mounts | def convert_configmap_to_volume(
configmap_info: dict[str, str],
) -> tuple[list[k8s.V1Volume], list[k8s.V1VolumeMount]]:
"""
Convert a dictionary of config_map_name and mount_path into k8s volume mounts and volumes.
:param configmap_info: a dictionary of {config_map_name: mount_path}
:return: a tuple of k8s volumes and volume mounts
"""
volume_mounts = []
volumes = []
for config_name, mount_path in configmap_info.items():
volume_mounts.append(k8s.V1VolumeMount(mount_path=mount_path, name=config_name))
volumes.append(
k8s.V1Volume(
name=config_name,
config_map=k8s.V1ConfigMapVolumeSource(name=config_name),
)
)
return volumes, volume_mounts |
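A small usage sketch (names are illustrative) showing the shape of the returned tuple:

volumes, volume_mounts = convert_configmap_to_volume({"app-config": "/etc/app"})
# volumes       == [k8s.V1Volume(name="app-config", config_map=k8s.V1ConfigMapVolumeSource(name="app-config"))]
# volume_mounts == [k8s.V1VolumeMount(mount_path="/etc/app", name="app-config")]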
Convert a dictionary of key:value into a list of env_vars.
:param env_vars: key/value pairs as a dict, or a list of ``V1EnvVar`` objects
:return: a list of ``V1EnvVar`` objects | def convert_env_vars(env_vars) -> list[k8s.V1EnvVar]:
"""
Convert a dictionary of key:value into a list of env_vars.
:param env_vars: key/value pairs as a dict, or a list of ``V1EnvVar`` objects
:return: a list of ``V1EnvVar`` objects
"""
if isinstance(env_vars, dict):
res = []
for k, v in env_vars.items():
res.append(k8s.V1EnvVar(name=k, value=v))
return res
elif isinstance(env_vars, list):
if all([isinstance(e, k8s.V1EnvVar) for e in env_vars]):
return env_vars
raise AirflowException(f"Expected dict or list of V1EnvVar, got {type(env_vars)}") |
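For illustration, the dict form is expanded into ``V1EnvVar`` objects, while a list of ``V1EnvVar`` is passed through unchanged:

convert_env_vars({"LOG_LEVEL": "INFO", "REGION": "us-east-1"})
# -> [k8s.V1EnvVar(name="LOG_LEVEL", value="INFO"), k8s.V1EnvVar(name="REGION", value="us-east-1")]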
Convert a str into a k8s object.
:param secret_name: name of the Kubernetes secret
:return: a ``V1EnvFromSource`` referencing the secret | def convert_secret(secret_name: str) -> k8s.V1EnvFromSource:
"""
Convert a str into a k8s object.
:param secret_name: name of the Kubernetes secret
:return: a ``V1EnvFromSource`` referencing the secret
"""
return k8s.V1EnvFromSource(secret_ref=k8s.V1SecretEnvSource(name=secret_name)) |
Convert a comma-separated string of image pull secret names into k8s local object references.
:param image_pull_secrets: comma-separated string that contains secrets
:return: a list of ``V1LocalObjectReference`` objects | def convert_image_pull_secrets(image_pull_secrets: str) -> list[k8s.V1LocalObjectReference]:
"""
Convert a comma-separated string of image pull secret names into k8s local object references.
:param image_pull_secrets: comma-separated string that contains secrets
:return: a list of ``V1LocalObjectReference`` objects
"""
secrets = image_pull_secrets.split(",")
return [k8s.V1LocalObjectReference(name=secret) for secret in secrets] |
Check if an Exception indicates a transient error and warrants retrying. | def should_retry_start_pod(exception: BaseException) -> bool:
"""Check if an Exception indicates a transient error and warrants retrying."""
if isinstance(exception, ApiException):
return str(exception.status) == "409"
return False |
Retrieve container status. | def get_container_status(pod: V1Pod, container_name: str) -> V1ContainerStatus | None:
"""Retrieve container status."""
container_statuses = pod.status.container_statuses if pod and pod.status else None
if container_statuses:
# In general the variable container_statuses can store multiple items matching different containers.
# The following generator expression yields all items that have name equal to the container_name.
# The function next() here calls the generator to get only the first value. If there's nothing found
# then None is returned.
return next((x for x in container_statuses if x.name == container_name), None)
return None |
Examine V1Pod ``pod`` to determine whether ``container_name`` is running.
If that container is present and running, returns True. Returns False otherwise. | def container_is_running(pod: V1Pod, container_name: str) -> bool:
"""
Examine V1Pod ``pod`` to determine whether ``container_name`` is running.
If that container is present and running, returns True. Returns False otherwise.
"""
container_status = get_container_status(pod, container_name)
if not container_status:
return False
return container_status.state.running is not None |
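A minimal sketch, assuming the kubernetes client models are available, that builds a pod status by hand and checks it with the helpers above (field values are illustrative):

from kubernetes.client import models as k8s

pod = k8s.V1Pod(
    status=k8s.V1PodStatus(
        container_statuses=[
            k8s.V1ContainerStatus(
                name="base",
                image="busybox",
                image_id="docker://busybox",
                ready=True,
                restart_count=0,
                state=k8s.V1ContainerState(running=k8s.V1ContainerStateRunning()),
            )
        ]
    )
)
container_is_running(pod, "base")     # True: the "base" container reports a running state
container_is_running(pod, "sidecar")  # False: no status entry for that container name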
Examine V1Pod ``pod`` to determine whether ``container_name`` is completed.
If that container is present and completed, returns True. Returns False otherwise. | def container_is_completed(pod: V1Pod, container_name: str) -> bool:
"""
Examine V1Pod ``pod`` to determine whether ``container_name`` is completed.
If that container is present and completed, returns True. Returns False otherwise.
"""
container_status = get_container_status(pod, container_name)
if not container_status:
return False
return container_status.state.terminated is not None |
Examine V1Pod ``pod`` to determine whether ``container_name`` is completed and succeeded.
If that container is present and completed and succeeded, returns True. Returns False otherwise. | def container_is_succeeded(pod: V1Pod, container_name: str) -> bool:
"""
Examine V1Pod ``pod`` to determine whether ``container_name`` is completed and succeeded.
If that container is present and completed and succeeded, returns True. Returns False otherwise.
"""
if not container_is_completed(pod, container_name):
return False
container_status = get_container_status(pod, container_name)
if not container_status:
return False
return container_status.state.terminated.exit_code == 0 |
Examine V1Pod ``pod`` to determine whether ``container_name`` is terminated.
If that container is present and terminated, returns True. Returns False otherwise. | def container_is_terminated(pod: V1Pod, container_name: str) -> bool:
"""
Examine V1Pod ``pod`` to determine whether ``container_name`` is terminated.
If that container is present and terminated, returns True. Returns False otherwise.
"""
container_statuses = pod.status.container_statuses if pod and pod.status else None
if not container_statuses:
return False
container_status = next((x for x in container_statuses if x.name == container_name), None)
if not container_status:
return False
return container_status.state.terminated is not None |
Add an xcom sidecar container to the pod. | def add_xcom_sidecar(
pod: k8s.V1Pod,
*,
sidecar_container_image: str | None = None,
sidecar_container_resources: k8s.V1ResourceRequirements | dict | None = None,
) -> k8s.V1Pod:
"""Add sidecar."""
pod_cp = copy.deepcopy(pod)
pod_cp.spec.volumes = pod.spec.volumes or []
pod_cp.spec.volumes.insert(0, PodDefaults.VOLUME)
pod_cp.spec.containers[0].volume_mounts = pod_cp.spec.containers[0].volume_mounts or []
pod_cp.spec.containers[0].volume_mounts.insert(0, PodDefaults.VOLUME_MOUNT)
sidecar = copy.deepcopy(PodDefaults.SIDECAR_CONTAINER)
sidecar.image = sidecar_container_image or PodDefaults.SIDECAR_CONTAINER.image
if sidecar_container_resources:
sidecar.resources = sidecar_container_resources
pod_cp.spec.containers.append(sidecar)
return pod_cp |
Return whether or not this path is relative to the other path.
This is a port of the pathlib.Path.is_relative_to method. It is not available in python 3.8. | def _is_relative_to(o: ObjectStoragePath, other: ObjectStoragePath) -> bool:
"""Return whether or not this path is relative to the other path.
This is a port of the pathlib.Path.is_relative_to method. It is not available in python 3.8.
"""
if hasattr(o, "is_relative_to"):
return o.is_relative_to(other)
try:
o.relative_to(other)
return True
except ValueError:
return False |
Return the compression suffix for the given compression.
:raises ValueError: if the compression is not supported | def _get_compression_suffix(compression: str) -> str:
"""Return the compression suffix for the given compression.
:raises ValueError: if the compression is not supported
"""
for suffix, c in fsspec.utils.compressions.items():
if c == compression:
return suffix
raise ValueError(f"Compression {compression} is not supported. Make sure it is installed.") |
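For example, assuming gzip support is registered in the installed fsspec, the lookup reverses fsspec's suffix-to-compression map:

_get_compression_suffix("gzip")    # -> "gz"
_get_compression_suffix("snappy")  # -> ValueError if snappy support is not installed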
Determine when only the results of a single query should be returned.
For compatibility reasons, the behaviour of the DBAPIHook is somewhat confusing.
In some cases, when multiple queries are run, the return value will be an iterable (list) of results
-- one for each query. However, in other cases, when a single query is run, the return value will be just
the result of that single query without wrapping the results in a list.
The cases when single query results are returned without wrapping them in a list are as follows:
a) sql is a string and ``return_last`` is True (regardless of the ``split_statements`` value)
b) sql is a string and ``split_statements`` is False
In all other cases, the results are wrapped in a list, even if there is only one statement to process.
In particular, the return value will be a list of query results in the following circumstances:
a) when ``sql`` is an iterable of string statements (regardless of the ``return_last`` value)
b) when ``sql`` is a string, ``split_statements`` is True and ``return_last`` is False
:param sql: sql to run (either string or list of strings)
:param return_last: whether only the last statement's output should be returned
:param split_statements: whether to split string statements.
:return: True if the hook should return single query results | def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool) -> bool:
"""
Determine when only the results of a single query should be returned.
For compatibility reasons, the behaviour of the DBAPIHook is somewhat confusing.
In some cases, when multiple queries are run, the return value will be an iterable (list) of results
-- one for each query. However, in other cases, when a single query is run, the return value will be just
the result of that single query without wrapping the results in a list.
The cases when single query results are returned without wrapping them in a list are as follows:
a) sql is a string and ``return_last`` is True (regardless of the ``split_statements`` value)
b) sql is a string and ``split_statements`` is False
In all other cases, the results are wrapped in a list, even if there is only one statement to process.
In particular, the return value will be a list of query results in the following circumstances:
a) when ``sql`` is an iterable of string statements (regardless of the ``return_last`` value)
b) when ``sql`` is a string, ``split_statements`` is True and ``return_last`` is False
:param sql: sql to run (either string or list of strings)
:param return_last: whether only the last statement's output should be returned
:param split_statements: whether to split string statements.
:return: True if the hook should return single query results
"""
return isinstance(sql, str) and (return_last or not split_statements) |
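A few illustrative calls showing the rule ``isinstance(sql, str) and (return_last or not split_statements)``:

return_single_query_results("SELECT 1", return_last=True, split_statements=True)    # True (case a)
return_single_query_results("SELECT 1", return_last=False, split_statements=False)  # True (case b)
return_single_query_results(["SELECT 1", "SELECT 2"], return_last=True, split_statements=False)  # False (iterable of statements)
return_single_query_results("SELECT 1; SELECT 2", return_last=False, split_statements=True)      # False (split, all results requested)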
Return results for DbApiHook.run(). | def fetch_all_handler(cursor) -> list[tuple] | None:
"""Return results for DbApiHook.run()."""
if not hasattr(cursor, "description"):
raise RuntimeError(
"The database we interact with does not support DBAPI 2.0. Use operator and "
"handlers that are specifically designed for your database."
)
if cursor.description is not None:
return cursor.fetchall()
else:
return None |
Return first result for DbApiHook.run(). | def fetch_one_handler(cursor) -> list[tuple] | None:
"""Return first result for DbApiHook.run()."""
if not hasattr(cursor, "description"):
raise RuntimeError(
"The database we interact with does not support DBAPI 2.0. Use operator and "
"handlers that are specifically designed for your database."
)
if cursor.description is not None:
return cursor.fetchone()
else:
return None |
Try to parse a string into boolean.
Raises ValueError if the input is not a valid true- or false-like string value. | def _parse_boolean(val: str) -> str | bool:
"""Try to parse a string into boolean.
Raises ValueError if the input is not a valid true- or false-like string value.
"""
val = val.lower()
if val in ("y", "yes", "t", "true", "on", "1"):
return True
if val in ("n", "no", "f", "false", "off", "0"):
return False
raise ValueError(f"{val!r} is not a boolean-like string value") |
Get failed checks.
IMPORTANT!!! Keep it for compatibility with released 8.4.0 version of google provider.
Unfortunately the provider used _get_failed_checks and parse_boolean as imports, and we should
keep those methods to keep the 8.4.0 version from failing. | def _get_failed_checks(checks, col=None):
"""
Get failed checks.
IMPORTANT!!! Keep it for compatibility with released 8.4.0 version of google provider.
Unfortunately the provider used _get_failed_checks and parse_boolean as imports, and we should
keep those methods to keep the 8.4.0 version from failing.
"""
if col:
return [
f"Column: {col}\nCheck: {check},\nCheck Values: {check_values}\n"
for check, check_values in checks.items()
if not check_values["success"]
]
return [
f"\tCheck: {check},\n\tCheck Values: {check_values}\n"
for check, check_values in checks.items()
if not check_values["success"]
] |
Handle the Airflow + Databricks lifecycle logic for a Databricks operator.
:param operator: Databricks operator being handled
:param context: Airflow context | def _handle_databricks_operator_execution(operator, hook, log, context) -> None:
"""
Handle the Airflow + Databricks lifecycle logic for a Databricks operator.
:param operator: Databricks operator being handled
:param context: Airflow context
"""
if operator.do_xcom_push and context is not None:
context["ti"].xcom_push(key=XCOM_RUN_ID_KEY, value=operator.run_id)
log.info("Run submitted with run_id: %s", operator.run_id)
run_page_url = hook.get_run_page_url(operator.run_id)
if operator.do_xcom_push and context is not None:
context["ti"].xcom_push(key=XCOM_RUN_PAGE_URL_KEY, value=run_page_url)
if operator.wait_for_termination:
while True:
run_info = hook.get_run(operator.run_id)
run_state = RunState(**run_info["state"])
if run_state.is_terminal:
if run_state.is_successful:
log.info("%s completed successfully.", operator.task_id)
log.info("View run status, Spark UI, and logs at %s", run_page_url)
return
if run_state.result_state == "FAILED":
task_run_id = None
if "tasks" in run_info:
for task in run_info["tasks"]:
if task.get("state", {}).get("result_state", "") == "FAILED":
task_run_id = task["run_id"]
if task_run_id is not None:
run_output = hook.get_run_output(task_run_id)
if "error" in run_output:
notebook_error = run_output["error"]
else:
notebook_error = run_state.state_message
else:
notebook_error = run_state.state_message
error_message = (
f"{operator.task_id} failed with terminal state: {run_state} "
f"and with the error {notebook_error}"
)
else:
error_message = (
f"{operator.task_id} failed with terminal state: {run_state} "
f"and with the error {run_state.state_message}"
)
if isinstance(operator, DatabricksRunNowOperator) and operator.repair_run:
operator.repair_run = False
log.warning(
"%s but since repair run is set, repairing the run with all failed tasks",
error_message,
)
latest_repair_id = hook.get_latest_repair_id(operator.run_id)
repair_json = {"run_id": operator.run_id, "rerun_all_failed_tasks": True}
if latest_repair_id is not None:
repair_json["latest_repair_id"] = latest_repair_id
operator.json["latest_repair_id"] = hook.repair_run(operator, repair_json)
_handle_databricks_operator_execution(operator, hook, log, context)
raise AirflowException(error_message)
log.info("%s in run state: %s", operator.task_id, run_state)
log.info("View run status, Spark UI, and logs at %s", run_page_url)
log.info("Sleeping for %s seconds.", operator.polling_period_seconds)
time.sleep(operator.polling_period_seconds)
log.info("View run status, Spark UI, and logs at %s", run_page_url) |
Handle the Airflow + Databricks lifecycle logic for deferrable Databricks operators.
:param operator: Databricks async operator being handled
:param context: Airflow context | def _handle_deferrable_databricks_operator_execution(operator, hook, log, context) -> None:
"""
Handle the Airflow + Databricks lifecycle logic for deferrable Databricks operators.
:param operator: Databricks async operator being handled
:param context: Airflow context
"""
job_id = hook.get_job_id(operator.run_id)
if operator.do_xcom_push and context is not None:
context["ti"].xcom_push(key=XCOM_JOB_ID_KEY, value=job_id)
if operator.do_xcom_push and context is not None:
context["ti"].xcom_push(key=XCOM_RUN_ID_KEY, value=operator.run_id)
log.info("Run submitted with run_id: %s", operator.run_id)
run_page_url = hook.get_run_page_url(operator.run_id)
if operator.do_xcom_push and context is not None:
context["ti"].xcom_push(key=XCOM_RUN_PAGE_URL_KEY, value=run_page_url)
log.info("View run status, Spark UI, and logs at %s", run_page_url)
if operator.wait_for_termination:
run_info = hook.get_run(operator.run_id)
run_state = RunState(**run_info["state"])
if not run_state.is_terminal:
operator.defer(
trigger=DatabricksExecutionTrigger(
run_id=operator.run_id,
databricks_conn_id=operator.databricks_conn_id,
polling_period_seconds=operator.polling_period_seconds,
retry_limit=operator.databricks_retry_limit,
retry_delay=operator.databricks_retry_delay,
retry_args=operator.databricks_retry_args,
run_page_url=run_page_url,
repair_run=getattr(operator, "repair_run", False),
),
method_name=DEFER_METHOD_NAME,
)
else:
if run_state.is_successful:
log.info("%s completed successfully.", operator.task_id) |
Normalize content, or all values of content if it is a dict, to a string.
The function will throw if content contains types that are not strings, numbers, or booleans.
The reason we have this function is that the ``self.json`` field must be a dict with only
string values, because ``render_template`` will fail for numerical values.
The only exception is boolean values: they cannot be converted to strings because
Databricks does not understand 'True' or 'False' values. | def normalise_json_content(content, json_path: str = "json") -> str | bool | list | dict:
"""
Normalize content, or all values of content if it is a dict, to a string.
The function will throw if content contains types that are not strings, numbers, or booleans.
The reason we have this function is that the ``self.json`` field must be a dict with only
string values, because ``render_template`` will fail for numerical values.
The only exception is boolean values: they cannot be converted to strings because
Databricks does not understand 'True' or 'False' values.
"""
normalise = normalise_json_content
if isinstance(content, (str, bool)):
return content
elif isinstance(content, (int, float)):
# Databricks can tolerate either numeric or string types in the API backend.
return str(content)
elif isinstance(content, (list, tuple)):
return [normalise(e, f"{json_path}[{i}]") for i, e in enumerate(content)]
elif isinstance(content, dict):
return {k: normalise(v, f"{json_path}[{k}]") for k, v in content.items()}
else:
param_type = type(content)
msg = f"Type {param_type} used for parameter {json_path} is not a number or a string"
raise AirflowException(msg) |
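A short example of the normalization behaviour described above; note how numbers become strings while booleans are preserved:

normalise_json_content({"notebook_params": {"retries": 3, "dry_run": True, "rate": 0.5}})
# -> {"notebook_params": {"retries": "3", "dry_run": True, "rate": "0.5"}}
normalise_json_content({"bad": object()})  # raises AirflowException for unsupported types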
Validate correctness of the event received from DatabricksExecutionTrigger.
See: :class:`~airflow.providers.databricks.triggers.databricks.DatabricksExecutionTrigger`. | def validate_trigger_event(event: dict):
"""
Validate correctness of the event received from DatabricksExecutionTrigger.
See: :class:`~airflow.providers.databricks.triggers.databricks.DatabricksExecutionTrigger`.
"""
keys_to_check = ["run_id", "run_page_url", "run_state"]
for key in keys_to_check:
if key not in event:
raise AirflowException(f"Could not find `{key}` in the event: {event}")
try:
RunState.from_json(event["run_state"])
except Exception:
raise AirflowException(f'Run state returned by the Trigger is incorrect: {event["run_state"]}') |
Provide a fallback value for ``account_id``.
If the ``account_id`` is None or not passed to the decorated function,
the value will be taken from the configured dbt Cloud Airflow Connection. | def fallback_to_default_account(func: Callable) -> Callable:
"""
Provide a fallback value for ``account_id``.
If the ``account_id`` is None or not passed to the decorated function,
the value will be taken from the configured dbt Cloud Airflow Connection.
"""
sig = signature(func)
@wraps(func)
def wrapper(*args, **kwargs) -> Callable:
bound_args = sig.bind(*args, **kwargs)
# Check if ``account_id`` was not included in the function signature or, if it is, the value is not
# provided.
if bound_args.arguments.get("account_id") is None:
self = args[0]
default_account_id = self.connection.login
if not default_account_id:
raise AirflowException("Could not determine the dbt Cloud account.")
bound_args.arguments["account_id"] = int(default_account_id)
return func(*bound_args.args, **bound_args.kwargs)
return wrapper |
Provide a fallback value for ``account_id``.
If the ``account_id`` is None or not passed to the decorated function,
the value will be taken from the configured dbt Cloud Airflow Connection. | def provide_account_id(func: T) -> T:
"""
Provide a fallback value for ``account_id``.
If the ``account_id`` is None or not passed to the decorated function,
the value will be taken from the configured dbt Cloud Airflow Connection.
"""
function_signature = signature(func)
@wraps(func)
async def wrapper(*args: Any, **kwargs: Any) -> Any:
bound_args = function_signature.bind(*args, **kwargs)
if bound_args.arguments.get("account_id") is None:
self = args[0]
if self.dbt_cloud_conn_id:
connection = await sync_to_async(self.get_connection)(self.dbt_cloud_conn_id)
default_account_id = connection.login
if not default_account_id:
raise AirflowException("Could not determine the dbt Cloud account.")
bound_args.arguments["account_id"] = int(default_account_id)
return await func(*bound_args.args, **bound_args.kwargs)
return cast(T, wrapper) |
Generate OpenLineage events from the DBT Cloud run.
This function retrieves information about a DBT Cloud run, including the associated job,
project, and execution details. It processes the run's artifacts, such as the manifest and run results,
in parallel for many steps.
Then it generates and emits OpenLineage events based on the executed DBT tasks.
:param operator: Instance of DBT Cloud operator that executed DBT tasks.
It should already have a run_id and a dbt Cloud hook.
:param task_instance: Currently executed task instance
:return: An empty OperatorLineage object indicating the completion of events generation. | def generate_openlineage_events_from_dbt_cloud_run(
operator: DbtCloudRunJobOperator | DbtCloudJobRunSensor, task_instance: TaskInstance
) -> OperatorLineage:
"""
Generate OpenLineage events from the DBT Cloud run.
This function retrieves information about a DBT Cloud run, including the associated job,
project, and execution details. It processes the run's artifacts, such as the manifest and run results,
in parallel for many steps.
Then it generates and emits OpenLineage events based on the executed DBT tasks.
:param operator: Instance of DBT Cloud operator that executed DBT tasks.
It should already have a run_id and a dbt Cloud hook.
:param task_instance: Currently executed task instance
:return: An empty OperatorLineage object indicating the completion of events generation.
"""
from openlineage.common.provider.dbt import DbtCloudArtifactProcessor, ParentRunMetadata
from airflow.providers.openlineage.conf import namespace
from airflow.providers.openlineage.extractors import OperatorLineage
from airflow.providers.openlineage.plugins.adapter import (
_PRODUCER,
OpenLineageAdapter,
)
from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
# if no account_id set this will fallback
job_run = operator.hook.get_job_run(
run_id=operator.run_id, account_id=operator.account_id, include_related=["run_steps,job"]
).json()["data"]
job = job_run["job"]
# retrieve account_id from job and use that starting from this line
account_id = job["account_id"]
project = operator.hook.get_project(project_id=job["project_id"], account_id=account_id).json()["data"]
connection = project["connection"]
execute_steps = job["execute_steps"]
run_steps = job_run["run_steps"]
# filter only dbt invocation steps
steps = []
for run_step in run_steps:
name = run_step["name"]
if name.startswith("Invoke dbt with `"):
regex_pattern = "Invoke dbt with `([^`.]*)`"
m = re.search(regex_pattern, name)
if m and m.group(1) in execute_steps:
steps.append(run_step["index"])
# catalog is available only if docs are generated
catalog = None
with suppress(Exception):
catalog = operator.hook.get_job_run_artifact(operator.run_id, path="catalog.json").json()["data"]
async def get_artifacts_for_steps(steps, artifacts):
"""Get artifacts for a list of steps concurrently."""
tasks = [
operator.hook.get_job_run_artifacts_concurrently(
run_id=operator.run_id,
account_id=account_id,
step=step,
artifacts=artifacts,
)
for step in steps
]
return await asyncio.gather(*tasks)
# get artifacts for steps concurrently
step_artifacts = asyncio.run(
get_artifacts_for_steps(steps=steps, artifacts=["manifest.json", "run_results.json"])
)
# process each step in loop, sending generated events in the same order as steps
for artifacts in step_artifacts:
# process manifest
manifest = artifacts["manifest.json"]
if not artifacts.get("run_results.json", None):
continue
processor = DbtCloudArtifactProcessor(
producer=_PRODUCER,
job_namespace=namespace(),
skip_errors=False,
logger=operator.log,
manifest=manifest,
run_result=artifacts["run_results.json"],
profile=connection,
catalog=catalog,
)
# generate same run id of current task instance
parent_run_id = OpenLineageAdapter.build_task_instance_run_id(
dag_id=task_instance.dag_id,
task_id=operator.task_id,
execution_date=task_instance.execution_date,
try_number=task_instance.try_number - 1,
)
parent_job = ParentRunMetadata(
run_id=parent_run_id,
job_name=f"{task_instance.dag_id}.{task_instance.task_id}",
job_namespace=namespace(),
)
processor.dbt_run_metadata = parent_job
events = processor.parse().events()
client = get_openlineage_listener().adapter.get_or_create_openlineage_client()
for event in events:
client.emit(event=event)
return OperatorLineage() |
Python operator decorator; wraps a function into an Airflow operator.
Also accepts any argument that DockerOperator accepts via ``kwargs``. Can be reused in a single DAG.
:param python_callable: Function to decorate
:param multiple_outputs: If set, function return value will be unrolled to multiple XCom values.
Dict will unroll to XCom values with keys as XCom keys. Defaults to False. | def docker_task(
python_callable: Callable | None = None,
multiple_outputs: bool | None = None,
**kwargs,
) -> TaskDecorator:
"""
Python operator decorator; wraps a function into an Airflow operator.
Also accepts any argument that DockerOperator accepts via ``kwargs``. Can be reused in a single DAG.
:param python_callable: Function to decorate
:param multiple_outputs: If set, function return value will be unrolled to multiple XCom values.
Dict will unroll to XCom values with keys as XCom keys. Defaults to False.
"""
return task_decorator_factory(
python_callable=python_callable,
multiple_outputs=multiple_outputs,
decorated_operator_class=_DockerDecoratedOperator,
**kwargs,
) |
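A usage sketch, assuming the decorator is exposed as ``task.docker`` through Airflow's task decorator registry (the image and function below are illustrative):

from airflow.decorators import task

@task.docker(image="python:3.11-slim", multiple_outputs=False)
def double(x: int) -> int:
    # runs inside the specified Docker image when the task executes
    return x * 2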
Make sure string is returned even if bytes are passed. Docker stream can return bytes. | def stringify(line: str | bytes):
"""Make sure string is returned even if bytes are passed. Docker stream can return bytes."""
decode_method = getattr(line, "decode", None)
if decode_method:
return decode_method(encoding="utf-8", errors="surrogateescape")
else:
return line |
Given TI | TIKey, return a TI object.
Will raise exception if no TI is found in the database. | def _ensure_ti(ti: TaskInstanceKey | TaskInstance, session) -> TaskInstance:
"""Given TI | TIKey, return a TI object.
Will raise exception if no TI is found in the database.
"""
from airflow.models.taskinstance import TaskInstance, TaskInstanceKey
if not isinstance(ti, TaskInstanceKey):
return ti
val = (
session.query(TaskInstance)
.filter(
TaskInstance.task_id == ti.task_id,
TaskInstance.dag_id == ti.dag_id,
TaskInstance.run_id == ti.run_id,
TaskInstance.map_index == ti.map_index,
)
.one_or_none()
)
if isinstance(val, TaskInstance):
val._try_number = ti.try_number
return val
else:
raise AirflowException(f"Could not find TaskInstance for {ti}") |
Get item from obj but return default if not found.
E.g. calling ``getattr_nested(a, 'b.c', "NA")`` will return
``a.b.c`` if such a value exists, and "NA" otherwise.
:meta private: | def getattr_nested(obj, item, default):
"""
Get item from obj but return default if not found.
E.g. calling ``getattr_nested(a, 'b.c', "NA")`` will return
``a.b.c`` if such a value exists, and "NA" otherwise.
:meta private:
"""
try:
return attrgetter(item)(obj)
except AttributeError:
return default |
Generate documentation; used by Sphinx argparse. | def get_parser() -> argparse.ArgumentParser:
"""Generate documentation; used by Sphinx argparse."""
from airflow.cli.cli_parser import AirflowHelpFormatter, _add_command
parser = DefaultHelpParser(prog="airflow", formatter_class=AirflowHelpFormatter)
subparsers = parser.add_subparsers(dest="subcommand", metavar="GROUP_OR_COMMAND")
for group_command in FabAuthManager.get_cli_commands():
_add_command(subparsers, group_command)
return parser |
Initialize authentication backend. | def init_app(_):
"""Initialize authentication backend.""" |
Authenticate and set current user if Authorization header exists. | def auth_current_user() -> User | None:
"""Authenticate and set current user if Authorization header exists."""
auth = request.authorization
if auth is None or not auth.username or not auth.password:
return None
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
user = None
if security_manager.auth_type == AUTH_LDAP:
user = security_manager.auth_user_ldap(auth.username, auth.password)
if user is None:
user = security_manager.auth_user_db(auth.username, auth.password)
if user is not None:
login_user(user, remember=False)
return user |
Decorate functions that require authentication. | def requires_authentication(function: T):
"""Decorate functions that require authentication."""
@wraps(function)
def decorated(*args, **kwargs):
if auth_current_user() is not None:
return function(*args, **kwargs)
else:
return Response("Unauthorized", 401, {"WWW-Authenticate": "Basic"})
return cast(T, decorated) |
Check if the action or resource exists and otherwise raise 400.
This function is intended for use in the REST API because it raises an HTTP error 400. | def _check_action_and_resource(sm: FabAirflowSecurityManagerOverride, perms: list[tuple[str, str]]) -> None:
"""
Check if the action or resource exists and otherwise raise 400.
This function is intended for use in the REST API because it raises an HTTP error 400.
"""
for action, resource in perms:
if not sm.get_action(action):
raise BadRequest(detail=f"The specified action: {action!r} was not found")
if not sm.get_resource(resource):
raise BadRequest(detail=f"The specified resource: {resource!r} was not found") |
Get role. | def get_role(*, role_name: str) -> APIResponse:
"""Get role."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
role = security_manager.find_role(name=role_name)
if not role:
raise NotFound(title="Role not found", detail=f"Role with name {role_name!r} was not found")
return role_schema.dump(role) |
Get roles. | def get_roles(*, order_by: str = "name", limit: int, offset: int | None = None) -> APIResponse:
"""Get roles."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
session = security_manager.get_session
total_entries = session.scalars(select(func.count(Role.id))).one()
direction = desc if order_by.startswith("-") else asc
to_replace = {"role_id": "id"}
order_param = order_by.strip("-")
order_param = to_replace.get(order_param, order_param)
allowed_sort_attrs = ["role_id", "name"]
if order_by not in allowed_sort_attrs:
raise BadRequest(
detail=f"Ordering with '{order_by}' is disallowed or "
f"the attribute does not exist on the model"
)
query = select(Role)
roles = (
session.scalars(query.order_by(direction(getattr(Role, order_param))).offset(offset).limit(limit))
.unique()
.all()
)
return role_collection_schema.dump(RoleCollection(roles=roles, total_entries=total_entries)) |
Get permissions. | def get_permissions(*, limit: int, offset: int | None = None) -> APIResponse:
"""Get permissions."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
session = security_manager.get_session
total_entries = session.scalars(select(func.count(Action.id))).one()
query = select(Action)
actions = session.scalars(query.offset(offset).limit(limit)).all()
return action_collection_schema.dump(ActionCollection(actions=actions, total_entries=total_entries)) |
Delete a role. | def delete_role(*, role_name: str) -> APIResponse:
"""Delete a role."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
role = security_manager.find_role(name=role_name)
if not role:
raise NotFound(title="Role not found", detail=f"Role with name {role_name!r} was not found")
security_manager.delete_role(role_name=role_name)
return NoContent, HTTPStatus.NO_CONTENT |
Update a role. | def patch_role(*, role_name: str, update_mask: UpdateMask = None) -> APIResponse:
"""Update a role."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
body = request.json
try:
data = role_schema.load(body)
except ValidationError as err:
raise BadRequest(detail=str(err.messages))
role = security_manager.find_role(name=role_name)
if not role:
raise NotFound(title="Role not found", detail=f"Role with name {role_name!r} was not found")
if update_mask:
update_mask = [i.strip() for i in update_mask]
data_ = {}
for field in update_mask:
if field in data and field != "permissions":
data_[field] = data[field]
elif field == "actions":
data_["permissions"] = data["permissions"]
else:
raise BadRequest(detail=f"'{field}' in update_mask is unknown")
data = data_
if "permissions" in data:
perms = [(item["action"]["name"], item["resource"]["name"]) for item in data["permissions"] if item]
_check_action_and_resource(security_manager, perms)
security_manager.bulk_sync_roles([{"role": role_name, "perms": perms}])
new_name = data.get("name")
if new_name is not None and new_name != role.name:
security_manager.update_role(role_id=role.id, name=new_name)
return role_schema.dump(role) |
Create a new role. | def post_role() -> APIResponse:
"""Create a new role."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
body = request.json
try:
data = role_schema.load(body)
except ValidationError as err:
raise BadRequest(detail=str(err.messages))
role = security_manager.find_role(name=data["name"])
if not role:
perms = [(item["action"]["name"], item["resource"]["name"]) for item in data["permissions"] if item]
_check_action_and_resource(security_manager, perms)
security_manager.bulk_sync_roles([{"role": data["name"], "perms": perms}])
return role_schema.dump(role)
detail = f"Role with name {role.name!r} already exists; please update with the PATCH endpoint"
raise AlreadyExists(detail=detail) |
Get a user. | def get_user(*, username: str) -> APIResponse:
"""Get a user."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
user = security_manager.find_user(username=username)
if not user:
raise NotFound(title="User not found", detail=f"The User with username `{username}` was not found")
return user_collection_item_schema.dump(user) |
Get users. | def get_users(*, limit: int, order_by: str = "id", offset: str | None = None) -> APIResponse:
"""Get users."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
session = security_manager.get_session
total_entries = session.execute(select(func.count(User.id))).scalar()
direction = desc if order_by.startswith("-") else asc
to_replace = {"user_id": "id"}
order_param = order_by.strip("-")
order_param = to_replace.get(order_param, order_param)
allowed_sort_attrs = [
"id",
"first_name",
"last_name",
"user_name",
"email",
"is_active",
"role",
]
if order_by not in allowed_sort_attrs:
raise BadRequest(
detail=f"Ordering with '{order_by}' is disallowed or "
f"the attribute does not exist on the model"
)
query = select(User).order_by(direction(getattr(User, order_param))).offset(offset).limit(limit)
users = session.scalars(query).all()
return user_collection_schema.dump(UserCollection(users=users, total_entries=total_entries)) |
Create a new user. | def post_user() -> APIResponse:
"""Create a new user."""
try:
data = user_schema.load(request.json)
except ValidationError as e:
raise BadRequest(detail=str(e.messages))
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
username = data["username"]
email = data["email"]
if security_manager.find_user(username=username):
detail = f"Username `{username}` already exists. Use PATCH to update."
raise AlreadyExists(detail=detail)
if security_manager.find_user(email=email):
detail = f"The email `{email}` is already taken."
raise AlreadyExists(detail=detail)
roles_to_add = []
missing_role_names = []
for role_data in data.pop("roles", ()):
role_name = role_data["name"]
role = security_manager.find_role(role_name)
if role is None:
missing_role_names.append(role_name)
else:
roles_to_add.append(role)
if missing_role_names:
detail = f"Unknown roles: {', '.join(repr(n) for n in missing_role_names)}"
raise BadRequest(detail=detail)
if not roles_to_add: # No roles provided, use the F.A.B's default registered user role.
roles_to_add.append(security_manager.find_role(security_manager.auth_user_registration_role))
user = security_manager.add_user(role=roles_to_add, **data)
if not user:
detail = f"Failed to add user `{username}`."
raise Unknown(detail=detail)
return user_schema.dump(user) |
Update a user. | def patch_user(*, username: str, update_mask: UpdateMask = None) -> APIResponse:
"""Update a user."""
try:
data = user_schema.load(request.json)
except ValidationError as e:
raise BadRequest(detail=str(e.messages))
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
user = security_manager.find_user(username=username)
if user is None:
detail = f"The User with username `{username}` was not found"
raise NotFound(title="User not found", detail=detail)
# Check unique username
new_username = data.get("username")
if new_username and new_username != username:
if security_manager.find_user(username=new_username):
raise AlreadyExists(detail=f"The username `{new_username}` already exists")
# Check unique email
email = data.get("email")
if email and email != user.email:
if security_manager.find_user(email=email):
raise AlreadyExists(detail=f"The email `{email}` already exists")
# Get fields to update.
if update_mask is not None:
masked_data = {}
missing_mask_names = []
for field in update_mask:
field = field.strip()
try:
masked_data[field] = data[field]
except KeyError:
missing_mask_names.append(field)
if missing_mask_names:
detail = f"Unknown update masks: {', '.join(repr(n) for n in missing_mask_names)}"
raise BadRequest(detail=detail)
data = masked_data
roles_to_update: list[Role] | None
if "roles" in data:
roles_to_update = []
missing_role_names = []
for role_data in data.pop("roles", ()):
role_name = role_data["name"]
role = security_manager.find_role(role_name)
if role is None:
missing_role_names.append(role_name)
else:
roles_to_update.append(role)
if missing_role_names:
detail = f"Unknown roles: {', '.join(repr(n) for n in missing_role_names)}"
raise BadRequest(detail=detail)
else:
roles_to_update = None # Don't change existing value.
if "password" in data:
user.password = generate_password_hash(data.pop("password"))
if roles_to_update is not None:
user.roles = roles_to_update
for key, value in data.items():
setattr(user, key, value)
security_manager.update_user(user)
return user_schema.dump(user) |
Delete a user. | def delete_user(*, username: str) -> APIResponse:
"""Delete a user."""
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
user = security_manager.find_user(username=username)
if user is None:
detail = f"The User with username `{username}` was not found"
raise NotFound(title="User not found", detail=detail)
user.roles = [] # Clear foreign keys on this user first.
security_manager.get_session.delete(user)
security_manager.get_session.commit()
return NoContent, HTTPStatus.NO_CONTENT |
List all existing roles. | def roles_list(args):
"""List all existing roles."""
with get_application_builder() as appbuilder:
roles = appbuilder.sm.get_all_roles()
if not args.permission:
AirflowConsole().print_as(
data=sorted(r.name for r in roles), output=args.output, mapper=lambda x: {"name": x}
)
return
permission_map: dict[tuple[str, str], list[str]] = defaultdict(list)
for role in roles:
for permission in role.permissions:
permission_map[(role.name, permission.resource.name)].append(permission.action.name)
AirflowConsole().print_as(
data=sorted(permission_map),
output=args.output,
mapper=lambda x: {"name": x[0], "resource": x[1], "action": ",".join(sorted(permission_map[x]))},
) |
Create new empty role in DB. | def roles_create(args):
"""Create new empty role in DB."""
with get_application_builder() as appbuilder:
for role_name in args.role:
appbuilder.sm.add_role(role_name)
print(f"Added {len(args.role)} role(s)") |
Delete role in DB. | def roles_delete(args):
"""Delete role in DB."""
with get_application_builder() as appbuilder:
for role_name in args.role:
role = appbuilder.sm.find_role(role_name)
if not role:
print(f"Role named '{role_name}' does not exist")
exit(1)
for role_name in args.role:
appbuilder.sm.delete_role(role_name)
print(f"Deleted {len(args.role)} role(s)") |
Add permissions to role in DB. | def roles_add_perms(args):
"""Add permissions to role in DB."""
__roles_add_or_remove_permissions(args) |
Delete permissions from role in DB. | def roles_del_perms(args):
"""Delete permissions from role in DB."""
__roles_add_or_remove_permissions(args) |
Export all the roles from the database to a file including permissions. | def roles_export(args):
"""Export all the roles from the database to a file including permissions."""
with get_application_builder() as appbuilder:
roles = appbuilder.sm.get_all_roles()
exporting_roles = [role for role in roles if role.name not in EXISTING_ROLES]
filename = os.path.expanduser(args.file)
permission_map: dict[tuple[str, str], list[str]] = defaultdict(list)
for role in exporting_roles:
if role.permissions:
for permission in role.permissions:
permission_map[(role.name, permission.resource.name)].append(permission.action.name)
else:
permission_map[(role.name, "")].append("")
export_data = [
{"name": role, "resource": resource, "action": ",".join(sorted(permissions))}
for (role, resource), permissions in permission_map.items()
]
kwargs = {} if not args.pretty else {"sort_keys": False, "indent": 4}
with open(filename, "w", encoding="utf-8") as f:
json.dump(export_data, f, **kwargs)
print(
f"{len(exporting_roles)} roles with {len(export_data)} linked permissions successfully exported to {filename}"
) |
Import all the roles into the db from the given json file including their permissions.
Note, if a role already exists in the db, it is not overwritten, even when the permissions change. | def roles_import(args):
"""
Import all the roles into the db from the given json file including their permissions.
Note, if a role already exists in the db, it is not overwritten, even when the permissions change.
"""
json_file = args.file
try:
with open(json_file) as f:
role_list = json.load(f)
except FileNotFoundError:
print(f"File '{json_file}' does not exist")
exit(1)
except ValueError as e:
print(f"File '{json_file}' is not a valid JSON file. Error: {e}")
exit(1)
with get_application_builder() as appbuilder:
existing_roles = [role.name for role in appbuilder.sm.get_all_roles()]
roles_to_import = [role_dict for role_dict in role_list if role_dict["name"] not in existing_roles]
for role_dict in roles_to_import:
if role_dict["name"] not in appbuilder.sm.get_all_roles():
if role_dict["action"] == "" or role_dict["resource"] == "":
appbuilder.sm.add_role(role_dict["name"])
else:
appbuilder.sm.add_role(role_dict["name"])
role_args = Namespace(
subcommand="add-perms",
role=[role_dict["name"]],
resource=[role_dict["resource"]],
action=role_dict["action"].split(","),
)
__roles_add_or_remove_permissions(role_args)
if role_dict["name"] in appbuilder.sm.get_all_roles():
if role_dict["action"] == "" or role_dict["resource"] == "":
pass
else:
role_args = Namespace(
subcommand="add-perms",
role=[role_dict["name"]],
resource=[role_dict["resource"]],
action=role_dict["action"].split(","),
)
__roles_add_or_remove_permissions(role_args)
print("roles and permissions successfully imported") |
Update permissions for existing roles and DAGs. | def sync_perm(args):
"""Update permissions for existing roles and DAGs."""
from airflow.providers.fab.auth_manager.cli_commands.utils import get_application_builder
with get_application_builder() as appbuilder:
print("Updating actions and resources for all existing roles")
# Add missing permissions for all the Base Views _before_ syncing/creating roles
appbuilder.add_permissions(update_perms=True)
appbuilder.sm.sync_roles()
if args.include_dags:
print("Updating permission on all DAG views")
appbuilder.sm.create_dag_specific_permissions() |
List users at the command line. | def users_list(args):
"""List users at the command line."""
with get_application_builder() as appbuilder:
users = appbuilder.sm.get_all_users()
fields = ["id", "username", "email", "first_name", "last_name", "roles"]
AirflowConsole().print_as(
data=users, output=args.output, mapper=lambda x: {f: x.__getattribute__(f) for f in fields}
) |
Create new user in the DB. | def users_create(args):
"""Create new user in the DB."""
with get_application_builder() as appbuilder:
role = appbuilder.sm.find_role(args.role)
if not role:
valid_roles = appbuilder.sm.get_all_roles()
raise SystemExit(f"{args.role} is not a valid role. Valid roles are: {valid_roles}")
password = _create_password(args)
if appbuilder.sm.find_user(args.username):
print(f"{args.username} already exists in the db")
return
user = appbuilder.sm.add_user(
args.username, args.firstname, args.lastname, args.email, role, password
)
if user:
print(f'User "{args.username}" created with role "{args.role}"')
else:
raise SystemExit("Failed to create user") |
Reset a user's password in the DB. | def user_reset_password(args):
"""Reset user password user from DB."""
user = _find_user(args)
password = _create_password(args)
with get_application_builder() as appbuilder:
if appbuilder.sm.reset_password(user.id, password):
print(f'User "{user.username}" password reset successfully')
else:
raise SystemExit("Failed to reset user password") |
Delete user from DB. | def users_delete(args):
"""Delete user from DB."""
user = _find_user(args)
# Clear the associated user roles first.
user.roles.clear()
with get_application_builder() as appbuilder:
if appbuilder.sm.del_register_user(user):
print(f'User "{user.username}" deleted')
else:
raise SystemExit("Failed to delete user") |
Delete or append user roles. | def users_manage_role(args, remove=False):
"""Delete or appends user roles."""
user = _find_user(args)
with get_application_builder() as appbuilder:
role = appbuilder.sm.find_role(args.role)
if not role:
valid_roles = appbuilder.sm.get_all_roles()
raise SystemExit(f'"{args.role}" is not a valid role. Valid roles are: {valid_roles}')
if remove:
if role not in user.roles:
raise SystemExit(f'User "{user.username}" is not a member of role "{args.role}"')
user.roles = [r for r in user.roles if r != role]
appbuilder.sm.update_user(user)
print(f'User "{user.username}" removed from role "{args.role}"')
else:
if role in user.roles:
raise SystemExit(f'User "{user.username}" is already a member of role "{args.role}"')
user.roles.append(role)
appbuilder.sm.update_user(user)
print(f'User "{user.username}" added to role "{args.role}"') |
Export all users to the json file. | def users_export(args):
"""Export all users to the json file."""
with get_application_builder() as appbuilder:
users = appbuilder.sm.get_all_users()
fields = ["id", "username", "email", "first_name", "last_name", "roles"]
# In the User model the first and last name fields have underscores,
# but the corresponding parameters in the CLI don't
def remove_underscores(s):
return re2.sub("_", "", s)
users = [
{
remove_underscores(field): user.__getattribute__(field)
if field != "roles"
else [r.name for r in user.roles]
for field in fields
}
for user in users
]
with open(args.export, "w") as file:
file.write(json.dumps(users, sort_keys=True, indent=4))
print_export_output("users", users, file) |
Import users from the json file. | def users_import(args):
"""Import users from the json file."""
json_file = getattr(args, "import")
if not os.path.exists(json_file):
raise SystemExit(f"File '{json_file}' does not exist")
users_list = None
try:
with open(json_file) as file:
users_list = json.loads(file.read())
except ValueError as e:
raise SystemExit(f"File '{json_file}' is not valid JSON. Error: {e}")
users_created, users_updated = _import_users(users_list)
if users_created:
print("Created the following users:\n\t{}".format("\n\t".join(users_created)))
if users_updated:
print("Updated the following users:\n\t{}".format("\n\t".join(users_updated))) |
Return an appbuilder instance for the given app. | def _return_appbuilder(app: Flask) -> AirflowAppBuilder:
"""Return an appbuilder instance for the given app."""
init_appbuilder(app)
init_plugins(app)
return app.appbuilder |
Check current user's permissions against required permissions.
This decorator is only kept for backward-compatibility reasons. The decorator
``airflow.api_connexion.security.requires_access``, which redirects to this decorator, might be used in
user plugins. Thus, we need to keep it.
:meta private: | def _requires_access_fab(permissions: Sequence[tuple[str, str]] | None = None) -> Callable[[T], T]:
"""
Check current user's permissions against required permissions.
This decorator is only kept for backward-compatibility reasons. The decorator
``airflow.api_connexion.security.requires_access``, which redirects to this decorator, might be used in
user plugins. Thus, we need to keep it.
:meta private:
"""
appbuilder = cast(AirflowApp, current_app).appbuilder
security_manager = cast(FabAirflowSecurityManagerOverride, get_auth_manager().security_manager)
if appbuilder.update_perms:
security_manager.sync_resource_permissions(permissions)
def requires_access_decorator(func: T):
@wraps(func)
def decorated(*args, **kwargs):
check_authentication()
if security_manager.check_authorization(permissions, kwargs.get("dag_id")):
return func(*args, **kwargs)
raise PermissionDenied()
return cast(T, decorated)
return requires_access_decorator |
Check current user's permissions against required permissions.
This decorator is only kept for backward-compatibility reasons. The decorator
``airflow.www.auth.has_access``, which redirects to this decorator, is widely used in user plugins.
Thus, we need to keep it.
See https://github.com/apache/airflow/pull/33213#discussion_r1346287224
:meta private: | def _has_access_fab(permissions: Sequence[tuple[str, str]] | None = None) -> Callable[[T], T]:
"""
Check current user's permissions against required permissions.
This decorator is only kept for backward-compatibility reasons. The decorator
``airflow.www.auth.has_access``, which redirects to this decorator, is widely used in user plugins.
Thus, we need to keep it.
See https://github.com/apache/airflow/pull/33213#discussion_r1346287224
:meta private:
"""
def requires_access_decorator(func: T):
@wraps(func)
def decorated(*args, **kwargs):
__tracebackhide__ = True # Hide from pytest traceback.
appbuilder = current_app.appbuilder
dag_id_kwargs = kwargs.get("dag_id")
dag_id_args = request.args.get("dag_id")
dag_id_form = request.form.get("dag_id")
dag_id_json = request.json.get("dag_id") if request.is_json else None
all_dag_ids = [dag_id_kwargs, dag_id_args, dag_id_form, dag_id_json]
unique_dag_ids = set(dag_id for dag_id in all_dag_ids if dag_id is not None)
if len(unique_dag_ids) > 1:
log.warning(
"There are different dag_ids passed in the request: %s. Returning 403.", unique_dag_ids
)
log.warning(
"kwargs: %s, args: %s, form: %s, json: %s",
dag_id_kwargs,
dag_id_args,
dag_id_form,
dag_id_json,
)
return (
render_template(
"airflow/no_roles_permissions.html",
hostname=get_hostname()
if conf.getboolean("webserver", "EXPOSE_HOSTNAME")
else "redact",
logout_url=get_auth_manager().get_url_logout(),
),
403,
)
dag_id = unique_dag_ids.pop() if unique_dag_ids else None
return _has_access(
is_authorized=appbuilder.sm.check_authorization(permissions, dag_id),
func=func,
args=args,
kwargs=kwargs,
)
return cast(T, decorated)
return requires_access_decorator |
Initialize a Go module.
If a ``go.mod`` file already exists, this function will do nothing.
:param go_module_name: The name of the Go module to initialize.
:param go_module_path: The path to the directory containing the Go module. | def init_module(go_module_name: str, go_module_path: str) -> None:
"""Initialize a Go module.
If a ``go.mod`` file already exists, this function will do nothing.
:param go_module_name: The name of the Go module to initialize.
:param go_module_path: The path to the directory containing the Go module.
"""
if os.path.isfile(os.path.join(go_module_path, "go.mod")):
return
go_mod_init_cmd = ["go", "mod", "init", go_module_name]
execute_in_subprocess(go_mod_init_cmd, cwd=go_module_path) |
Install dependencies for a Go module.
:param go_module_path: The path to the directory containing the Go module. | def install_dependencies(go_module_path: str) -> None:
"""Install dependencies for a Go module.
:param go_module_path: The path to the directory containing the Go module.
"""
go_mod_tidy = ["go", "mod", "tidy"]
execute_in_subprocess(go_mod_tidy, cwd=go_module_path) |
Remove invalid characters for a filename. | def safe_name(s: str) -> str:
"""
Remove invalid characters for a filename.
"""
return re.sub("[^0-9a-zA-Z_]+", "_", s) |
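Doctest-style examples of the substitution (runs of disallowed characters collapse to a single underscore):

safe_name("my job #1")   # -> "my_job_1"
safe_name("2024-01-01")  # -> "2024_01_01"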
Bind parameters to a SQL query. | def _bind_parameters(operation: str, parameters: dict) -> str:
"""Bind parameters to a SQL query."""
# inspired by MySQL Python Connector (conversion.py)
string_parameters = {} # type dict[str, str]
for name, value in parameters.items():
if value is None:
string_parameters[name] = "NULL"
elif isinstance(value, str):
string_parameters[name] = "'" + _escape(value) + "'"
else:
string_parameters[name] = str(value)
return operation % string_parameters |
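An illustrative call, showing how string values are quoted and escaped while other values are stringified before pyformat substitution:

_bind_parameters(
    "SELECT * FROM users WHERE name = %(name)s AND age > %(age)s",
    {"name": "O'Hara", "age": 30},
)
# -> "SELECT * FROM users WHERE name = 'O\'Hara' AND age > 30"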
Escape special characters in a SQL query string. | def _escape(s: str) -> str:
"""Escape special characters in a SQL query string."""
e = s
e = e.replace("\\", "\\\\")
e = e.replace("\n", "\\n")
e = e.replace("\r", "\\r")
e = e.replace("'", "\\'")
e = e.replace('"', '\\"')
return e |
Check expected type and raise error if type is not correct. | def _validate_value(key: Any, value: Any, expected_type: type | tuple[type]) -> None:
"""Check expected type and raise error if type is not correct."""
if not isinstance(value, expected_type):
raise TypeError(f"{key} argument must have a type {expected_type} not {type(value)}") |
Validate ``src_fmt_configs`` against a valid config for the source format.
Adds the backward compatibility config to ``src_fmt_configs``.
:param source_format: File format to export.
:param src_fmt_configs: Configure optional fields specific to the source format.
:param valid_configs: Valid configuration specific to the source format
:param backward_compatibility_configs: The top-level params for backward-compatibility | def _validate_src_fmt_configs(
source_format: str,
src_fmt_configs: dict,
valid_configs: list[str],
backward_compatibility_configs: dict | None = None,
) -> dict:
"""Validate ``src_fmt_configs`` against a valid config for the source format.
Adds the backward compatibility config to ``src_fmt_configs``.
:param source_format: File format to export.
:param src_fmt_configs: Configure optional fields specific to the source format.
:param valid_configs: Valid configuration specific to the source format
:param backward_compatibility_configs: The top-level params for backward-compatibility
"""
if backward_compatibility_configs is None:
backward_compatibility_configs = {}
for k, v in backward_compatibility_configs.items():
if k not in src_fmt_configs and k in valid_configs:
src_fmt_configs[k] = v
for k in src_fmt_configs:
if k not in valid_configs:
raise ValueError(f"{k} is not a valid src_fmt_configs for type {source_format}.")
return src_fmt_configs |
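A doctest-style sketch with made-up CSV options, showing the back-compat key being merged in before validation:
>>> _validate_src_fmt_configs(
...     source_format="CSV",
...     src_fmt_configs={"fieldDelimiter": ";"},
...     valid_configs=["fieldDelimiter", "skipLeadingRows"],
...     backward_compatibility_configs={"skipLeadingRows": 1},
... )
{'fieldDelimiter': ';', 'skipLeadingRows': 1}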
Reformat the schema to match cursor description standard.
The description should be a tuple of 7 elements: name, type, display_size,
internal_size, precision, scale, null_ok. | def _format_schema_for_description(schema: dict) -> list:
"""Reformat the schema to match cursor description standard.
The description should be a tuple of 7 elements: name, type, display_size,
internal_size, precision, scale, null_ok.
"""
description = []
for field in schema["fields"]:
mode = field.get("mode", "NULLABLE")
field_description = (
field["name"],
field["type"],
None,
None,
None,
None,
mode == "NULLABLE",
)
description.append(field_description)
return description |
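A doctest-style sketch with a minimal two-field schema; only REQUIRED fields get null_ok=False:
>>> schema = {"fields": [
...     {"name": "id", "type": "INTEGER", "mode": "REQUIRED"},
...     {"name": "name", "type": "STRING"},
... ]}
>>> _format_schema_for_description(schema)
[('id', 'INTEGER', None, None, None, None, False), ('name', 'STRING', None, None, None, None, True)]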
Return unique id of the object. | def get_object_id(obj: dict) -> str:
"""Return unique id of the object."""
return obj["name"].rpartition("/")[-1] |
Add a unique suffix to the job name.
:param job_name:
:return: job_name with suffix | def gen_job_name(job_name: str) -> str:
"""Add a unique suffix to the job name.
:param job_name:
:return: job_name with suffix
"""
uniq = int(time.time())
return f"{job_name}_{uniq}" |
Build callback that triggers the specified function.
The returned callback is intended to be used as ``process_line_callback`` in
:py:class:`~airflow.providers.apache.beam.hooks.beam.BeamCommandRunner`.
:param on_new_job_id_callback: Callback called when the job ID is known | def process_line_and_extract_dataflow_job_id_callback(
on_new_job_id_callback: Callable[[str], None] | None,
) -> Callable[[str], None]:
"""Build callback that triggers the specified function.
The returned callback is intended to be used as ``process_line_callback`` in
:py:class:`~airflow.providers.apache.beam.hooks.beam.BeamCommandRunner`.
:param on_new_job_id_callback: Callback called when the job ID is known
"""
def _process_line_and_extract_job_id(line: str) -> None:
# Job id info: https://goo.gl/SE29y9.
if on_new_job_id_callback is None:
return
matched_job = JOB_ID_PATTERN.search(line)
if matched_job is None:
return
job_id = matched_job.group("job_id_java") or matched_job.group("job_id_python")
on_new_job_id_callback(job_id)
return _process_line_and_extract_job_id |
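A usage sketch; the log line is hypothetical, and an id is only captured if the line matches the JOB_ID_PATTERN regex defined elsewhere in this module:
captured_ids = []
callback = process_line_and_extract_dataflow_job_id_callback(captured_ids.append)
# Hypothetical Apache Beam log line; the exact wording JOB_ID_PATTERN expects may differ.
callback("Created job with id: [2024-01-01_00_00_00-1234567890]")
# If the pattern matched, captured_ids now holds the extracted Dataflow job id.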
Get field from extras, first checking the short name, then, for backward compatibility, the prefixed name. | def _get_field(extras: dict, field_name: str):
"""Get field from extra, first checking short name, then for backcompat we check for prefixed name."""
backcompat_prefix = "extra__dataprep__"
if field_name.startswith("extra__"):
raise ValueError(
f"Got prefixed name {field_name}; please remove the '{backcompat_prefix}' prefix "
"when using this method."
)
if field_name in extras:
return extras[field_name] or None
prefixed_name = f"{backcompat_prefix}{field_name}"
return extras.get(prefixed_name) or None |
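A doctest-style illustration of the lookup order (short name first, then the prefixed back-compat name); the values are made up:
>>> _get_field({"token": "abc"}, "token")
'abc'
>>> _get_field({"extra__dataprep__token": "abc"}, "token")
'abc'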
Convert object URL parameter to object name and bucket name parameter.
This method is a Decorator factory.
:param object_url_keyword_arg_name: Name of the object URL parameter
:param bucket_name_keyword_arg_name: Name of the bucket name parameter
:param object_name_keyword_arg_name: Name of the object name parameter
:return: Decorator | def _fallback_object_url_to_object_name_and_bucket_name(
object_url_keyword_arg_name="object_url",
bucket_name_keyword_arg_name="bucket_name",
object_name_keyword_arg_name="object_name",
) -> Callable[[T], T]:
"""
Convert object URL parameter to object name and bucket name parameter.
This method is a Decorator factory.
:param object_url_keyword_arg_name: Name of the object URL parameter
:param bucket_name_keyword_arg_name: Name of the bucket name parameter
:param object_name_keyword_arg_name: Name of the object name parameter
:return: Decorator
"""
def _wrapper(func: Callable[FParams, RT]) -> Callable[FParams, RT]:
@functools.wraps(func)
def _inner_wrapper(self, *args, **kwargs) -> RT:
if args:
raise AirflowException(
"You must use keyword arguments in this methods rather than positional"
)
object_url = kwargs.get(object_url_keyword_arg_name)
bucket_name = kwargs.get(bucket_name_keyword_arg_name)
object_name = kwargs.get(object_name_keyword_arg_name)
if object_url and bucket_name and object_name:
raise AirflowException(
"The mutually exclusive parameters. `object_url`, `bucket_name` together "
"with `object_name` parameters are present. "
"Please provide `object_url` or `bucket_name` and `object_name`."
)
if object_url:
bucket_name, object_name = _parse_gcs_url(object_url)
kwargs[bucket_name_keyword_arg_name] = bucket_name
kwargs[object_name_keyword_arg_name] = object_name
del kwargs[object_url_keyword_arg_name]
if not object_name or not bucket_name:
raise TypeError(
f"{func.__name__}() missing 2 required positional arguments: "
f"'{bucket_name_keyword_arg_name}' and '{object_name_keyword_arg_name}' "
f"or {object_url_keyword_arg_name}"
)
if not object_name:
raise TypeError(
f"{func.__name__}() missing 1 required positional argument: "
f"'{object_name_keyword_arg_name}'"
)
if not bucket_name:
raise TypeError(
f"{func.__name__}() missing 1 required positional argument: "
f"'{bucket_name_keyword_arg_name}'"
)
return func(self, *args, **kwargs)
return cast(Callable[FParams, RT], _inner_wrapper)
return cast(Callable[[T], T], _wrapper) |
Return True if given Google Cloud Storage URL (gs://<bucket>/<blob>) is a directory or empty bucket. | def gcs_object_is_directory(bucket: str) -> bool:
"""Return True if given Google Cloud Storage URL (gs://<bucket>/<blob>) is a directory or empty bucket."""
_, blob = _parse_gcs_url(bucket)
return len(blob) == 0 or blob.endswith("/") |
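A doctest-style sketch with made-up bucket names:
>>> gcs_object_is_directory("gs://some-bucket")
True
>>> gcs_object_is_directory("gs://some-bucket/some-dir/")
True
>>> gcs_object_is_directory("gs://some-bucket/some-object")
False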
Download and parse a JSON file from Google Cloud Storage.
:param gcp_conn_id: Airflow Google Cloud connection ID.
:param file_uri: full path to json file
example: ``gs://test-bucket/dir1/dir2/file`` | def parse_json_from_gcs(
gcp_conn_id: str,
file_uri: str,
impersonation_chain: str | Sequence[str] | None = None,
) -> Any:
"""
Download and parse a JSON file from Google Cloud Storage.
:param gcp_conn_id: Airflow Google Cloud connection ID.
:param file_uri: full path to json file
example: ``gs://test-bucket/dir1/dir2/file``
"""
gcs_hook = GCSHook(
gcp_conn_id=gcp_conn_id,
impersonation_chain=impersonation_chain,
)
bucket, blob = _parse_gcs_url(file_uri)
with NamedTemporaryFile(mode="w+b") as file:
try:
gcs_hook.download(bucket_name=bucket, object_name=blob, filename=file.name)
except GoogleAPICallError as ex:
raise AirflowException(f"Failed to download file with query result: {ex}")
file.seek(0)
try:
json_data = file.read()
except (ValueError, OSError, RuntimeError) as ex:
raise AirflowException(f"Failed to read file: {ex}")
try:
result = json.loads(json_data)
except json.JSONDecodeError as ex:
raise AirflowException(f"Failed to decode query result from bytes to json: {ex}")
return result |
Given a Google Cloud Storage URL, return a tuple containing the corresponding bucket and blob.
Expected url format: gs://<bucket>/<blob> | def _parse_gcs_url(gsurl: str) -> tuple[str, str]:
"""
Given a Google Cloud Storage URL, return a tuple containing the corresponding bucket and blob.
Expected url format: gs://<bucket>/<blob>
"""
parsed_url = urlsplit(gsurl)
if not parsed_url.netloc:
raise AirflowException("Please provide a bucket name")
if parsed_url.scheme.lower() != "gs":
raise AirflowException(f"Schema must be to 'gs://': Current schema: '{parsed_url.scheme}://'")
bucket = parsed_url.netloc
# Remove leading '/' but NOT trailing one
blob = parsed_url.path.lstrip("/")
return bucket, blob |
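A doctest-style sketch with an illustrative URL; note the trailing slash is preserved:
>>> _parse_gcs_url("gs://test-bucket/dir1/dir2/file")
('test-bucket', 'dir1/dir2/file')
>>> _parse_gcs_url("gs://test-bucket/dir1/")
('test-bucket', 'dir1/')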
Encode a bytes object to a Base64 string. | def _b64encode(s: bytes) -> str:
"""Encode a Base64 bytes object to a string."""
return base64.b64encode(s).decode("ascii") |
Decode a Base64 string to bytes. | def _b64decode(s: str) -> bytes:
"""Decode a Base64 string to bytes."""
return base64.b64decode(s.encode("utf-8")) |
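A doctest-style round trip with an arbitrary payload:
>>> _b64encode(b"instance dump")
'aW5zdGFuY2UgZHVtcA=='
>>> _b64decode("aW5zdGFuY2UgZHVtcA==")
b'instance dump'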
Execute request with exponential delay.
This method is intended to handle and retry in case of api-specific errors,
such as 429 "Too Many Requests", unlike the `request.execute` which handles
lower level errors like `ConnectionError`/`socket.timeout`/`ssl.SSLError`.
:param request: request to be executed.
:param execute_num_retries: num_retries for `request.execute` method.
:param max_n: number of times to retry request in this method.
:param is_done_func: callable to determine if operation is done.
:param is_error_func: callable to determine if operation is failed.
:return: response | def _poll_with_exponential_delay(
request, execute_num_retries, max_n, is_done_func, is_error_func
) -> Response:
"""
Execute request with exponential delay.
This method is intended to handle and retry in case of api-specific errors,
such as 429 "Too Many Requests", unlike the `request.execute` which handles
lower level errors like `ConnectionError`/`socket.timeout`/`ssl.SSLError`.
:param request: request to be executed.
:param execute_num_retries: num_retries for `request.execute` method.
:param max_n: number of times to retry request in this method.
:param is_done_func: callable to determine if operation is done.
:param is_error_func: callable to determine if operation is failed.
:return: response
"""
for i in range(0, max_n):
try:
response = request.execute(num_retries=execute_num_retries)
if is_error_func(response):
raise ValueError(f"The response contained an error: {response}")
if is_done_func(response):
log.info("Operation is done: %s", response)
return response
time.sleep((2**i) + random.random())
except HttpError as e:
if e.resp.status != 429:
log.info("Something went wrong. Not retrying: %s", format(e))
raise
else:
time.sleep((2**i) + random.random())
raise ValueError(f"Connection could not be established after {max_n} retries.") |
Replace invalid MLEngine job_id characters with '_'.
This also adds a leading 'z' in case job_id starts with an invalid
character.
:param job_id: A job_id str that may have invalid characters.
:return: A valid job_id representation. | def _normalize_mlengine_job_id(job_id: str) -> str:
"""
Replace invalid MLEngine job_id characters with '_'.
This also adds a leading 'z' in case job_id starts with an invalid
character.
:param job_id: A job_id str that may have invalid characters.
:return: A valid job_id representation.
"""
# Add a prefix when a job_id starts with a digit or a template
match = re.search(r"\d|\{{2}", job_id)
if match and match.start() == 0:
job = f"z_{job_id}"
else:
job = job_id
# Clean up 'bad' characters except templates
tracker = 0
cleansed_job_id = ""
for match in re.finditer(r"\{{2}.+?\}{2}", job):
cleansed_job_id += re.sub(r"[^0-9a-zA-Z]+", "_", job[tracker : match.start()])
cleansed_job_id += job[match.start() : match.end()]
tracker = match.end()
# Clean up last substring or the full string if no templates
cleansed_job_id += re.sub(r"[^0-9a-zA-Z]+", "_", job[tracker:])
return cleansed_job_id |
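Two doctest-style cases, one starting with a digit and one containing a Jinja template (templates are preserved untouched):
>>> _normalize_mlengine_job_id("2024-job.name")
'z_2024_job_name'
>>> _normalize_mlengine_job_id("train_{{ ds_nodash }}")
'train_{{ ds_nodash }}'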
Create a value for the ``additional_properties`` parameter.
The new value is based on ``language_hints``, ``web_detection_params``, and
``additional_properties`` parameters specified by the user. | def prepare_additional_parameters(
additional_properties: dict | None, language_hints: Any, web_detection_params: Any
) -> dict | None:
"""Create a value for the ``additional_properties`` parameter.
The new value is based on ``language_hints``, ``web_detection_params``, and
``additional_properties`` parameters specified by the user.
"""
if language_hints is None and web_detection_params is None:
return additional_properties
if additional_properties is None:
return {}
merged_additional_parameters = deepcopy(additional_properties)
if "image_context" not in merged_additional_parameters:
merged_additional_parameters["image_context"] = {}
merged_additional_parameters["image_context"]["language_hints"] = merged_additional_parameters[
"image_context"
].get("language_hints", language_hints)
merged_additional_parameters["image_context"]["web_detection_params"] = merged_additional_parameters[
"image_context"
].get("web_detection_params", web_detection_params)
return merged_additional_parameters |
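A doctest-style sketch with illustrative inputs; the original dict is deep-copied, not mutated:
>>> prepare_additional_parameters({"image_context": {}}, ["en"], None)
{'image_context': {'language_hints': ['en'], 'web_detection_params': None}}
>>> prepare_additional_parameters({"foo": "bar"}, None, None)
{'foo': 'bar'}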
Act as a default callback for the GoogleCloudStorageObjectUpdatedSensor.
The default behaviour is to check whether the object was updated after the data interval's end,
or execution_date + interval on Airflow versions prior to 2.2 (before AIP-39 implementation). | def ts_function(context):
"""
Act as a default callback for the GoogleCloudStorageObjectUpdatedSensor.
The default behaviour is to check whether the object was updated after the data interval's end,
or execution_date + interval on Airflow versions prior to 2.2 (before AIP-39 implementation).
"""
try:
return context["data_interval_end"]
except KeyError:
return context["dag"].following_schedule(context["execution_date"]) |
Act as a wrapper of datetime.datetime.now to simplify mocking in the unittests. | def get_time():
"""Act as a wrapper of datetime.datetime.now to simplify mocking in the unittests."""
return datetime.now() |
Cast a BigQuery field value to the appropriate data type.
This is useful because BigQuery returns all fields as strings. | def bq_cast(string_field: str, bq_type: str) -> None | int | float | bool | str:
"""
Cast a BigQuery field value to the appropriate data type.
This is useful because BigQuery returns all fields as strings.
"""
if string_field is None:
return None
elif bq_type == "INTEGER":
return int(string_field)
elif bq_type in ("FLOAT", "TIMESTAMP"):
return float(string_field)
elif bq_type == "BOOLEAN":
if string_field not in ["true", "false"]:
raise ValueError(f"{string_field} must have value 'true' or 'false'")
return string_field == "true"
else:
return string_field |
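A few doctest-style conversions:
>>> bq_cast("42", "INTEGER")
42
>>> bq_cast("3.14", "FLOAT")
3.14
>>> bq_cast("true", "BOOLEAN")
True
>>> bq_cast(None, "STRING") is None
True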
Convert job_id to path: project_id:location:job_id.
:param project_id: Required. The ID of the Google Cloud project where the workspace is located.
:param location: Optional. The ID of the Google Cloud region where the workspace is located.
:param job_id: Required. The ID of the job.
:return: str or list[str] of project_id:location:job_id. | def convert_job_id(job_id: str | list[str], project_id: str, location: str | None) -> Any:
"""
Convert job_id to path: project_id:location:job_id.
:param project_id: Required. The ID of the Google Cloud project where the workspace is located.
:param location: Optional. The ID of the Google Cloud region where the workspace is located.
:param job_id: Required. The ID of the job.
:return: str or list[str] of project_id:location:job_id.
"""
location = location or "US"
if isinstance(job_id, list):
return [f"{project_id}:{location}:{i}" for i in job_id]
else:
return f"{project_id}:{location}:{job_id}" |
Build a uri that can be used as :envvar:`AIRFLOW_CONN_{CONN_ID}` with provided values.
:param key_file_path: Path to service key.
:param scopes: Required OAuth scopes.
:param project_id: The Google Cloud project id to be used for the connection.
:return: String representing Airflow connection. | def build_gcp_conn(
key_file_path: str | None = None,
scopes: Sequence[str] | None = None,
project_id: str | None = None,
) -> str:
"""
Build a uri that can be used as :envvar:`AIRFLOW_CONN_{CONN_ID}` with provided values.
:param key_file_path: Path to service key.
:param scopes: Required OAuth scopes.
:param project_id: The Google Cloud project id to be used for the connection.
:return: String representing Airflow connection.
"""
conn = "google-cloud-platform://?{}"
query_params = {}
if key_file_path:
query_params["key_path"] = key_file_path
if scopes:
scopes_string = ",".join(scopes)
query_params["scope"] = scopes_string
if project_id:
query_params["projects"] = project_id
query = urlencode(query_params)
return conn.format(query) |
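Two doctest-style examples with made-up values, showing how the parameters are URL-encoded into the connection URI:
>>> build_gcp_conn(project_id="my-project")
'google-cloud-platform://?projects=my-project'
>>> build_gcp_conn(key_file_path="/files/sa.json", scopes=["scope-a", "scope-b"])
'google-cloud-platform://?key_path=%2Ffiles%2Fsa.json&scope=scope-a%2Cscope-b'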
Context manager that provides Google Cloud credentials for Application Default Credentials (ADC).
.. seealso::
`Application Default Credentials (ADC) strategy`__.
It can be used to provide credentials for external programs (e.g. gcloud) that expect authorization
file in ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable.
:param key_file_path: Path to file with Google Cloud Service Account .json file.
:param key_file_dict: Dictionary with credentials.
__ https://cloud.google.com/docs/authentication/production | def provide_gcp_credentials(
key_file_path: str | None = None,
key_file_dict: dict | None = None,
) -> Generator[None, None, None]:
"""
Context manager that provides Google Cloud credentials for Application Default Credentials (ADC).
.. seealso::
`Application Default Credentials (ADC) strategy`__.
It can be used to provide credentials for external programs (e.g. gcloud) that expect authorization
file in ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable.
:param key_file_path: Path to file with Google Cloud Service Account .json file.
:param key_file_dict: Dictionary with credentials.
__ https://cloud.google.com/docs/authentication/production
"""
if not key_file_path and not key_file_dict:
raise ValueError("Please provide `key_file_path` or `key_file_dict`.")
if key_file_path and key_file_path.endswith(".p12"):
raise AirflowException("Legacy P12 key file are not supported, use a JSON key file.")
with tempfile.NamedTemporaryFile(mode="w+t") as conf_file:
if not key_file_path and key_file_dict:
conf_file.write(json.dumps(key_file_dict))
conf_file.flush()
key_file_path = conf_file.name
if key_file_path:
with patch_environ({CREDENTIALS: key_file_path}):
yield
else:
# We will use the default service account credentials.
yield |
Context manager that provides a temporary value of :envvar:`AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT` connection.
It builds a new connection that includes path to provided service json, required scopes and project id.
:param key_file_path: Path to file with Google Cloud Service Account .json file.
:param scopes: OAuth scopes for the connection
:param project_id: The id of Google Cloud project for the connection. | def provide_gcp_connection(
key_file_path: str | None = None,
scopes: Sequence | None = None,
project_id: str | None = None,
) -> Generator[None, None, None]:
"""
Context manager that provides a temporary value of :envvar:`AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT` connection.
It builds a new connection that includes path to provided service json, required scopes and project id.
:param key_file_path: Path to file with Google Cloud Service Account .json file.
:param scopes: OAuth scopes for the connection
:param project_id: The id of Google Cloud project for the connection.
"""
if key_file_path and key_file_path.endswith(".p12"):
raise AirflowException("Legacy P12 key file are not supported, use a JSON key file.")
conn = build_gcp_conn(scopes=scopes, key_file_path=key_file_path, project_id=project_id)
with patch_environ({AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT: conn}):
yield |
Context manager that provides GCP connection and credentials.
It provides both:
- Google Cloud credentials for application supporting `Application Default Credentials (ADC)
strategy`__.
- temporary value of :envvar:`AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT` connection
:param key_file_path: Path to file with Google Cloud Service Account .json file.
:param scopes: OAuth scopes for the connection
:param project_id: The id of Google Cloud project for the connection.
__ https://cloud.google.com/docs/authentication/production | def provide_gcp_conn_and_credentials(
key_file_path: str | None = None,
scopes: Sequence | None = None,
project_id: str | None = None,
) -> Generator[None, None, None]:
"""
Context manager that provides GCP connection and credentials.
It provides both:
- Google Cloud credentials for application supporting `Application Default Credentials (ADC)
strategy`__.
- temporary value of :envvar:`AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT` connection
:param key_file_path: Path to file with Google Cloud Service Account .json file.
:param scopes: OAuth scopes for the connection
:param project_id: The id of Google Cloud project for the connection.
__ https://cloud.google.com/docs/authentication/production
"""
with ExitStack() as stack:
if key_file_path:
stack.enter_context(provide_gcp_credentials(key_file_path)) # type: ignore
if project_id:
stack.enter_context( # type: ignore
patch_environ({PROJECT: project_id, LEGACY_PROJECT: project_id})
)
stack.enter_context(provide_gcp_connection(key_file_path, scopes, project_id)) # type: ignore
yield |
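A usage sketch, assuming these yield-based helpers are wrapped with contextlib.contextmanager as in the Airflow source; the key path and project id are made up:
import subprocess
with provide_gcp_conn_and_credentials(
    key_file_path="/files/sa.json",  # hypothetical service account key
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
    project_id="my-project",  # hypothetical project
):
    # Within the block, GOOGLE_APPLICATION_CREDENTIALS and
    # AIRFLOW_CONN_GOOGLE_CLOUD_DEFAULT point at the temporary values.
    subprocess.run(["gcloud", "config", "list"], check=True)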
Return the Credentials object for Google API and the associated project_id. | def get_credentials_and_project_id(*args, **kwargs) -> tuple[google.auth.credentials.Credentials, str]:
"""Return the Credentials object for Google API and the associated project_id."""
return _CredentialProvider(*args, **kwargs).get_credentials_and_project() |
Parse a comma-separated string containing OAuth2 scopes if `scopes` is provided; otherwise return default.
:param scopes: A comma-separated string containing OAuth2 scopes
:return: Returns the scope defined in the connection configuration, or the default scope | def _get_scopes(scopes: str | None = None) -> Sequence[str]:
"""
Parse a comma-separated string containing OAuth2 scopes if `scopes` is provided; otherwise return default.
:param scopes: A comma-separated string containing OAuth2 scopes
:return: Returns the scope defined in the connection configuration, or the default scope
"""
return [s.strip() for s in scopes.split(",")] if scopes else _DEFAULT_SCOPES |
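A doctest-style example of splitting and stripping a scope string; an empty or None value falls back to the module-level _DEFAULT_SCOPES defined elsewhere:
>>> _get_scopes("https://www.googleapis.com/auth/cloud-platform, https://www.googleapis.com/auth/bigquery")
['https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/bigquery']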
Get the target_principal and optional list of delegates from impersonation_chain.
Analyze contents of impersonation_chain and return target_principal (the service account
to directly impersonate using short-term credentials, if any) and optional list of delegates
required to get the access_token of target_principal.
:param impersonation_chain: the service account to impersonate or a chained list leading to this
account
:return: Returns the tuple of target_principal and delegates | def _get_target_principal_and_delegates(
impersonation_chain: str | Sequence[str] | None = None,
) -> tuple[str | None, Sequence[str] | None]:
"""
Get the target_principal and optional list of delegates from impersonation_chain.
Analyze contents of impersonation_chain and return target_principal (the service account
to directly impersonate using short-term credentials, if any) and optional list of delegates
required to get the access_token of target_principal.
:param impersonation_chain: the service account to impersonate or a chained list leading to this
account
:return: Returns the tuple of target_principal and delegates
"""
if not impersonation_chain:
return None, None
if isinstance(impersonation_chain, str):
return impersonation_chain, None
return impersonation_chain[-1], impersonation_chain[:-1] |
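Two doctest-style cases with made-up service accounts, one plain string and one chained list:
>>> _get_target_principal_and_delegates("sa-final@my-project.iam.gserviceaccount.com")
('sa-final@my-project.iam.gserviceaccount.com', None)
>>> _get_target_principal_and_delegates(
...     ["sa-first@my-project.iam.gserviceaccount.com", "sa-final@my-project.iam.gserviceaccount.com"]
... )
('sa-final@my-project.iam.gserviceaccount.com', ['sa-first@my-project.iam.gserviceaccount.com'])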
Extract project_id from service account's email address.
:param service_account_email: email of the service account.
:return: Returns the project_id of the provided service account. | def _get_project_id_from_service_account_email(service_account_email: str) -> str:
"""
Extract project_id from service account's email address.
:param service_account_email: email of the service account.
:return: Returns the project_id of the provided service account.
"""
try:
return service_account_email.split("@")[1].split(".")[0]
except IndexError:
raise AirflowException(
f"Could not extract project_id from service account's email: {service_account_email}."
) |
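A doctest-style example with a made-up service account email:
>>> _get_project_id_from_service_account_email("worker@my-project.iam.gserviceaccount.com")
'my-project'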
Create flow which simulates the initialization of the default project.
:param project_id: Required. The ID of the Google Cloud project where the workspace is located.
:param region: Required. The ID of the Google Cloud region where the workspace is located.
:param repository_id: Required. The ID of the Dataform repository where the workspace is located.
:param workspace_id: Required. The ID of the Dataform workspace which requires initialization.
:param dataform_schema_name: Name of the schema.
:param package_name: Name of the package. If value is not provided then workspace_id will be used.
:param without_installation: Defines whether installation of npm packages should be added to the flow. | def make_initialization_workspace_flow(
project_id: str,
region: str,
repository_id: str,
workspace_id: str,
dataform_schema_name: str = "dataform",
package_name: str | None = None,
without_installation: bool = False,
) -> tuple:
"""
Create flow which simulates the initialization of the default project.
:param project_id: Required. The ID of the Google Cloud project where the workspace is located.
:param region: Required. The ID of the Google Cloud region where the workspace is located.
:param repository_id: Required. The ID of the Dataform repository where the workspace is located.
:param workspace_id: Required. The ID of the Dataform workspace which requires initialization.
:param dataform_schema_name: Name of the schema.
:param package_name: Name of the package. If value is not provided then workspace_id will be used.
:param without_installation: Defines whether installation of npm packages should be added to the flow.
"""
make_definitions_directory = DataformMakeDirectoryOperator(
task_id="make-definitions-directory",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
directory_path="definitions",
)
first_view_content = b"""
-- This is an example SQLX file to help you learn the basics of Dataform.
-- Visit https://cloud.google.com/dataform/docs/how-to for more information on how to configure
-- your SQL workflow.
-- You can delete this file, then commit and push your changes to your repository when you are ready.
-- Config blocks allow you to configure, document, and test your data assets.
config {
type: "view", // Creates a view in BigQuery. Try changing to "table" instead.
columns: {
test: "A description for the test column", // Column descriptions are pushed to BigQuery.
}
}
-- The rest of a SQLX file contains your SELECT statement used to create the table.
SELECT 1 as test
"""
make_first_view_file = DataformWriteFileOperator(
task_id="write-first-view",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
filepath="definitions/first_view.sqlx",
contents=first_view_content,
)
second_view_content = b"""
config { type: "view" }
-- Use the ref() function to manage dependencies.
-- Learn more about ref() and other built in functions
-- here: https://cloud.google.com/dataform/docs/dataform-core
SELECT test from ${ref("first_view")}
"""
make_second_view_file = DataformWriteFileOperator(
task_id="write-second-view",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
filepath="definitions/second_view.sqlx",
contents=second_view_content,
)
make_includes_directory = DataformMakeDirectoryOperator(
task_id="make-includes-directory",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
directory_path="includes",
)
gitignore_contents = b"""
node_modules/
"""
make_gitignore_file = DataformWriteFileOperator(
task_id="write-gitignore-file",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
filepath=".gitignore",
contents=gitignore_contents,
)
default_location: str = define_default_location(region).value
dataform_config_content = json.dumps(
{
"defaultSchema": dataform_schema_name,
"assertionSchema": "dataform_assertions",
"warehouse": "bigquery",
"defaultDatabase": project_id,
"defaultLocation": default_location,
},
indent=4,
).encode()
make_dataform_config_file = DataformWriteFileOperator(
task_id="write-dataform-config-file",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
filepath="dataform.json",
contents=dataform_config_content,
)
package_name = package_name or workspace_id
package_json_content = json.dumps(
{
"name": package_name,
"dependencies": {
"@dataform/core": "2.0.1",
},
},
indent=4,
).encode()
make_package_json_file = DataformWriteFileOperator(
task_id="write-package-json",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
filepath="package.json",
contents=package_json_content,
)
(
make_definitions_directory
>> make_first_view_file
>> make_second_view_file
>> make_gitignore_file
>> make_dataform_config_file
>> make_package_json_file
)
if without_installation:
make_package_json_file >> make_includes_directory
else:
install_npm_packages = DataformInstallNpmPackagesOperator(
task_id="install-npm-packages",
project_id=project_id,
region=region,
repository_id=repository_id,
workspace_id=workspace_id,
)
make_package_json_file >> install_npm_packages >> make_includes_directory
return make_definitions_directory, make_includes_directory |
Make sure dir path ends with a slash. | def normalize_directory_path(source_object: str | None) -> str | None:
"""Make sure dir path ends with a slash."""
return source_object + "/" if source_object and not source_object.endswith("/") else source_object |