repo
stringlengths 2
99
| file
stringlengths 13
225
| code
stringlengths 0
18.3M
| file_length
int64 0
18.3M
| avg_line_length
float64 0
1.36M
| max_line_length
int64 0
4.26M
| extension_type
stringclasses 1
value |
---|---|---|---|---|---|---|
airflow | airflow-main/airflow/providers/microsoft/azure/hooks/adx.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains Azure Data Explorer hook.
.. spelling:word-list::
KustoResponseDataSetV
kusto
"""
from __future__ import annotations
import warnings
from typing import Any
from azure.kusto.data.exceptions import KustoServiceError
from azure.kusto.data.request import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
from azure.kusto.data.response import KustoResponseDataSetV2
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.providers.microsoft.azure.utils import _ensure_prefixes
class AzureDataExplorerHook(BaseHook):
"""
Interacts with Azure Data Explorer (Kusto).
**Cluster**:
Azure Data Explorer cluster is specified by a URL, for example: "https://help.kusto.windows.net".
The parameter must be provided through the Data Explorer Cluster URL connection detail.
**Tenant ID**:
To learn about tenants refer to: https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id
**Authentication methods**:
Available authentication methods are:
- AAD_APP: Authentication with AAD application certificate. A Tenant ID is required when using this
method. Provide application ID and application key through Username and Password parameters.
- AAD_APP_CERT: Authentication with AAD application certificate. Tenant ID, Application PEM Certificate,
and Application Certificate Thumbprint are required when using this method.
- AAD_CREDS: Authentication with AAD username and password. A Tenant ID is required when using this
method. Username and Password parameters are used for authentication with AAD.
- AAD_DEVICE: Authenticate with AAD device code. Please note that if you choose this option, you'll need
to authenticate for every new instance that is initialized. It is highly recommended to create one
instance and use it for all queries.
:param azure_data_explorer_conn_id: Reference to the
:ref:`Azure Data Explorer connection<howto/connection:adx>`.
"""
conn_name_attr = "azure_data_explorer_conn_id"
default_conn_name = "azure_data_explorer_default"
conn_type = "azure_data_explorer"
hook_name = "Azure Data Explorer"
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import PasswordField, StringField
return {
"tenant": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()),
"auth_method": StringField(lazy_gettext("Authentication Method"), widget=BS3TextFieldWidget()),
"certificate": PasswordField(
lazy_gettext("Application PEM Certificate"), widget=BS3PasswordFieldWidget()
),
"thumbprint": PasswordField(
lazy_gettext("Application Certificate Thumbprint"), widget=BS3PasswordFieldWidget()
),
}
@staticmethod
@_ensure_prefixes(conn_type="azure_data_explorer")
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "port", "extra"],
"relabeling": {
"login": "Username",
"host": "Data Explorer Cluster URL",
},
"placeholders": {
"login": "Varies with authentication method",
"password": "Varies with authentication method",
"auth_method": "AAD_APP/AAD_APP_CERT/AAD_CREDS/AAD_DEVICE",
"tenant": "Used with AAD_APP/AAD_APP_CERT/AAD_CREDS",
"certificate": "Used with AAD_APP_CERT",
"thumbprint": "Used with AAD_APP_CERT",
},
}
def __init__(self, azure_data_explorer_conn_id: str = default_conn_name) -> None:
super().__init__()
self.conn_id = azure_data_explorer_conn_id
self.connection = self.get_conn() # todo: make this a property, or just delete
def get_conn(self) -> KustoClient:
"""Return a KustoClient object."""
conn = self.get_connection(self.conn_id)
extras = conn.extra_dejson
cluster = conn.host
if not cluster:
raise AirflowException("Host connection option is required")
def warn_if_collison(key, backcompat_key):
if backcompat_key in extras:
warnings.warn(
f"Conflicting params `{key}` and `{backcompat_key}` found in extras for conn "
f"{self.conn_id}. Using value for `{key}`. Please ensure this is the correct value "
f"and remove the backcompat key `{backcompat_key}`."
)
def get_required_param(name: str) -> str:
"""
Extract required parameter value from connection, raise exception if not found.
Warns if both ``foo`` and ``extra__azure_data_explorer__foo`` found in conn extra.
Prefers unprefixed field.
"""
backcompat_prefix = "extra__azure_data_explorer__"
backcompat_key = f"{backcompat_prefix}{name}"
value = extras.get(name)
if value:
warn_if_collison(name, backcompat_key)
if not value:
value = extras.get(backcompat_key)
if not value:
raise AirflowException(f"Required connection parameter is missing: `{name}`")
return value
auth_method = get_required_param("auth_method")
if auth_method == "AAD_APP":
tenant = get_required_param("tenant")
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
cluster, conn.login, conn.password, tenant
)
elif auth_method == "AAD_APP_CERT":
certificate = get_required_param("certificate")
thumbprint = get_required_param("thumbprint")
tenant = get_required_param("tenant")
kcsb = KustoConnectionStringBuilder.with_aad_application_certificate_authentication(
cluster,
conn.login,
certificate,
thumbprint,
tenant,
)
elif auth_method == "AAD_CREDS":
tenant = get_required_param("tenant")
kcsb = KustoConnectionStringBuilder.with_aad_user_password_authentication(
cluster, conn.login, conn.password, tenant
)
elif auth_method == "AAD_DEVICE":
kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(cluster)
else:
raise AirflowException(f"Unknown authentication method: {auth_method}")
return KustoClient(kcsb)
def run_query(self, query: str, database: str, options: dict | None = None) -> KustoResponseDataSetV2:
"""
Run KQL query using provided configuration, and return KustoResponseDataSet instance.
See: `azure.kusto.data.response.KustoResponseDataSet`
If query is unsuccessful AirflowException is raised.
:param query: KQL query to run
:param database: Database to run the query on.
:param options: Optional query options. See:
https://docs.microsoft.com/en-us/azure/kusto/api/netfx/request-properties#list-of-clientrequestproperties
:return: dict
"""
properties = ClientRequestProperties()
if options:
for k, v in options.items():
properties.set_option(k, v)
try:
return self.connection.execute(database, query, properties=properties)
except KustoServiceError as error:
raise AirflowException(f"Error running Kusto query: {error}")
| 8,759 | 41.115385 | 116 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/hooks/container_registry.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Azure Container Registry."""
from __future__ import annotations
from typing import Any
from azure.mgmt.containerinstance.models import ImageRegistryCredential
from airflow.hooks.base import BaseHook
class AzureContainerRegistryHook(BaseHook):
"""
A hook to communicate with a Azure Container Registry.
:param conn_id: :ref:`Azure Container Registry connection id<howto/connection:acr>`
of a service principal which will be used to start the container instance
"""
conn_name_attr = "azure_container_registry_conn_id"
default_conn_name = "azure_container_registry_default"
conn_type = "azure_container_registry"
hook_name = "Azure Container Registry"
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "port", "extra"],
"relabeling": {
"login": "Registry Username",
"password": "Registry Password",
"host": "Registry Server",
},
"placeholders": {
"login": "private registry username",
"password": "private registry password",
"host": "docker image registry server",
},
}
def __init__(self, conn_id: str = "azure_registry") -> None:
super().__init__()
self.conn_id = conn_id
self.connection = self.get_conn()
def get_conn(self) -> ImageRegistryCredential:
conn = self.get_connection(self.conn_id)
return ImageRegistryCredential(server=conn.host, username=conn.login, password=conn.password)
| 2,460 | 35.731343 | 101 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/hooks/fileshare.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from functools import wraps
from typing import IO, Any
from azure.storage.file import File, FileService
from airflow.hooks.base import BaseHook
def _ensure_prefixes(conn_type):
"""
Deprecated.
Remove when provider min airflow version >= 2.5.0 since this is handled by
provider manager from that version.
"""
def dec(func):
@wraps(func)
def inner():
field_behaviors = func()
conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
def _ensure_prefix(field):
if field not in conn_attrs and not field.startswith("extra__"):
return f"extra__{conn_type}__{field}"
else:
return field
if "placeholders" in field_behaviors:
placeholders = field_behaviors["placeholders"]
field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
return field_behaviors
return inner
return dec
class AzureFileShareHook(BaseHook):
"""
Interacts with Azure FileShare Storage.
:param azure_fileshare_conn_id: Reference to the
:ref:`Azure Container Volume connection id<howto/connection:azure_fileshare>`
of an Azure account of which container volumes should be used.
"""
conn_name_attr = "azure_fileshare_conn_id"
default_conn_name = "azure_fileshare_default"
conn_type = "azure_fileshare"
hook_name = "Azure FileShare"
def __init__(self, azure_fileshare_conn_id: str = "azure_fileshare_default") -> None:
super().__init__()
self.conn_id = azure_fileshare_conn_id
self._conn = None
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import PasswordField, StringField
return {
"sas_token": PasswordField(lazy_gettext("SAS Token (optional)"), widget=BS3PasswordFieldWidget()),
"connection_string": StringField(
lazy_gettext("Connection String (optional)"), widget=BS3TextFieldWidget()
),
"protocol": StringField(
lazy_gettext("Account URL or token (optional)"), widget=BS3TextFieldWidget()
),
}
@staticmethod
@_ensure_prefixes(conn_type="azure_fileshare")
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "port", "host", "extra"],
"relabeling": {
"login": "Blob Storage Login (optional)",
"password": "Blob Storage Key (optional)",
},
"placeholders": {
"login": "account name",
"password": "secret",
"sas_token": "account url or token (optional)",
"connection_string": "account url or token (optional)",
"protocol": "account url or token (optional)",
},
}
def get_conn(self) -> FileService:
"""Return the FileService object."""
def check_for_conflict(key):
backcompat_key = f"{backcompat_prefix}{key}"
if backcompat_key in extras:
warnings.warn(
f"Conflicting params `{key}` and `{backcompat_key}` found in extras for conn "
f"{self.conn_id}. Using value for `{key}`. Please ensure this is the correct value "
f"and remove the backcompat key `{backcompat_key}`."
)
backcompat_prefix = "extra__azure_fileshare__"
if self._conn:
return self._conn
conn = self.get_connection(self.conn_id)
extras = conn.extra_dejson
service_options = {}
for key, value in extras.items():
if value == "":
continue
if not key.startswith("extra__"):
service_options[key] = value
check_for_conflict(key)
elif key.startswith(backcompat_prefix):
short_name = key[len(backcompat_prefix) :]
if short_name not in service_options: # prefer values provided with short name
service_options[short_name] = value
else:
warnings.warn(f"Extra param `{key}` not recognized; ignoring.")
self._conn = FileService(account_name=conn.login, account_key=conn.password, **service_options)
return self._conn
def check_for_directory(self, share_name: str, directory_name: str, **kwargs) -> bool:
"""
Check if a directory exists on Azure File Share.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param kwargs: Optional keyword arguments that
`FileService.exists()` takes.
:return: True if the file exists, False otherwise.
"""
return self.get_conn().exists(share_name, directory_name, **kwargs)
def check_for_file(self, share_name: str, directory_name: str, file_name: str, **kwargs) -> bool:
"""
Check if a file exists on Azure File Share.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param file_name: Name of the file.
:param kwargs: Optional keyword arguments that
`FileService.exists()` takes.
:return: True if the file exists, False otherwise.
"""
return self.get_conn().exists(share_name, directory_name, file_name, **kwargs)
def list_directories_and_files(
self, share_name: str, directory_name: str | None = None, **kwargs
) -> list:
"""
Return the list of directories and files stored on a Azure File Share.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param kwargs: Optional keyword arguments that
`FileService.list_directories_and_files()` takes.
:return: A list of files and directories
"""
return self.get_conn().list_directories_and_files(share_name, directory_name, **kwargs)
def list_files(self, share_name: str, directory_name: str | None = None, **kwargs) -> list[str]:
"""
Return the list of files stored on a Azure File Share.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param kwargs: Optional keyword arguments that
`FileService.list_directories_and_files()` takes.
:return: A list of files
"""
return [
obj.name
for obj in self.list_directories_and_files(share_name, directory_name, **kwargs)
if isinstance(obj, File)
]
def create_share(self, share_name: str, **kwargs) -> bool:
"""
Create new Azure File Share.
:param share_name: Name of the share.
:param kwargs: Optional keyword arguments that
`FileService.create_share()` takes.
:return: True if share is created, False if share already exists.
"""
return self.get_conn().create_share(share_name, **kwargs)
def delete_share(self, share_name: str, **kwargs) -> bool:
"""
Delete existing Azure File Share.
:param share_name: Name of the share.
:param kwargs: Optional keyword arguments that
`FileService.delete_share()` takes.
:return: True if share is deleted, False if share does not exist.
"""
return self.get_conn().delete_share(share_name, **kwargs)
def create_directory(self, share_name: str, directory_name: str, **kwargs) -> list:
"""
Create a new directory on a Azure File Share.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param kwargs: Optional keyword arguments that
`FileService.create_directory()` takes.
:return: A list of files and directories
"""
return self.get_conn().create_directory(share_name, directory_name, **kwargs)
def get_file(
self, file_path: str, share_name: str, directory_name: str, file_name: str, **kwargs
) -> None:
"""
Download a file from Azure File Share.
:param file_path: Where to store the file.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param file_name: Name of the file.
:param kwargs: Optional keyword arguments that
`FileService.get_file_to_path()` takes.
"""
self.get_conn().get_file_to_path(share_name, directory_name, file_name, file_path, **kwargs)
def get_file_to_stream(
self, stream: IO, share_name: str, directory_name: str, file_name: str, **kwargs
) -> None:
"""
Download a file from Azure File Share.
:param stream: A filehandle to store the file to.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param file_name: Name of the file.
:param kwargs: Optional keyword arguments that
`FileService.get_file_to_stream()` takes.
"""
self.get_conn().get_file_to_stream(share_name, directory_name, file_name, stream, **kwargs)
def load_file(
self, file_path: str, share_name: str, directory_name: str, file_name: str, **kwargs
) -> None:
"""
Upload a file to Azure File Share.
:param file_path: Path to the file to load.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param file_name: Name of the file.
:param kwargs: Optional keyword arguments that
`FileService.create_file_from_path()` takes.
"""
self.get_conn().create_file_from_path(share_name, directory_name, file_name, file_path, **kwargs)
def load_string(
self, string_data: str, share_name: str, directory_name: str, file_name: str, **kwargs
) -> None:
"""
Upload a string to Azure File Share.
:param string_data: String to load.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param file_name: Name of the file.
:param kwargs: Optional keyword arguments that
`FileService.create_file_from_text()` takes.
"""
self.get_conn().create_file_from_text(share_name, directory_name, file_name, string_data, **kwargs)
def load_stream(
self, stream: str, share_name: str, directory_name: str, file_name: str, count: str, **kwargs
) -> None:
"""
Upload a stream to Azure File Share.
:param stream: Opened file/stream to upload as the file content.
:param share_name: Name of the share.
:param directory_name: Name of the directory.
:param file_name: Name of the file.
:param count: Size of the stream in bytes
:param kwargs: Optional keyword arguments that
`FileService.create_file_from_stream()` takes.
"""
self.get_conn().create_file_from_stream(
share_name, directory_name, file_name, stream, count, **kwargs
)
def test_connection(self):
"""Test Azure FileShare connection."""
success = (True, "Successfully connected to Azure File Share.")
try:
# Attempt to retrieve file share information
next(iter(self.get_conn().list_shares()))
return success
except StopIteration:
# If the iterator returned is empty it should still be considered a successful connection since
# it's possible to create a storage account without any file share and none could
# legitimately exist yet.
return success
except Exception as e:
return False, str(e)
| 13,004 | 38.409091 | 110 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/hooks/container_volume.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from azure.mgmt.containerinstance.models import AzureFileVolume, Volume
from airflow.hooks.base import BaseHook
from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
class AzureContainerVolumeHook(BaseHook):
"""
A hook which wraps an Azure Volume.
:param azure_container_volume_conn_id: Reference to the
:ref:`Azure Container Volume connection id <howto/connection:azure_container_volume>`
of an Azure account of which container volumes should be used.
"""
conn_name_attr = "azure_container_volume_conn_id"
default_conn_name = "azure_container_volume_default"
conn_type = "azure_container_volume"
hook_name = "Azure Container Volume"
def __init__(self, azure_container_volume_conn_id: str = "azure_container_volume_default") -> None:
super().__init__()
self.conn_id = azure_container_volume_conn_id
def _get_field(self, extras, name):
return get_field(
conn_id=self.conn_id,
conn_type=self.conn_type,
extras=extras,
field_name=name,
)
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget
from flask_babel import lazy_gettext
from wtforms import PasswordField
return {
"connection_string": PasswordField(
lazy_gettext("Blob Storage Connection String (optional)"), widget=BS3PasswordFieldWidget()
),
}
@staticmethod
@_ensure_prefixes(conn_type="azure_container_volume")
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "port", "host", "extra"],
"relabeling": {
"login": "Azure Client ID",
"password": "Azure Secret",
},
"placeholders": {
"login": "client_id (token credentials auth)",
"password": "secret (token credentials auth)",
"connection_string": "connection string auth",
},
}
def get_storagekey(self) -> str:
"""Get Azure File Volume storage key."""
conn = self.get_connection(self.conn_id)
extras = conn.extra_dejson
connection_string = self._get_field(extras, "connection_string")
if connection_string:
for keyvalue in connection_string.split(";"):
key, value = keyvalue.split("=", 1)
if key == "AccountKey":
return value
return conn.password
def get_file_volume(
self, mount_name: str, share_name: str, storage_account_name: str, read_only: bool = False
) -> Volume:
"""Get Azure File Volume."""
return Volume(
name=mount_name,
azure_file=AzureFileVolume(
share_name=share_name,
storage_account_name=storage_account_name,
read_only=read_only,
storage_account_key=self.get_storagekey(),
),
)
| 4,062 | 36.275229 | 106 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/hooks/cosmos.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains integration with Azure CosmosDB.
AzureCosmosDBHook communicates via the Azure Cosmos library. Make sure that a
Airflow connection of type `azure_cosmos` exists. Authorization can be done by supplying a
login (=Endpoint uri), password (=secret key) and extra fields database_name and collection_name to specify
the default database and collection to use (see connection `azure_cosmos_default` for an example).
"""
from __future__ import annotations
import json
import uuid
from typing import Any
from azure.cosmos.cosmos_client import CosmosClient
from azure.cosmos.exceptions import CosmosHttpResponseError
from airflow.exceptions import AirflowBadRequest
from airflow.hooks.base import BaseHook
from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
class AzureCosmosDBHook(BaseHook):
"""
Interacts with Azure CosmosDB.
login should be the endpoint uri, password should be the master key
optionally, you can use the following extras to default these values
{"database_name": "<DATABASE_NAME>", "collection_name": "COLLECTION_NAME"}.
:param azure_cosmos_conn_id: Reference to the
:ref:`Azure CosmosDB connection<howto/connection:azure_cosmos>`.
"""
conn_name_attr = "azure_cosmos_conn_id"
default_conn_name = "azure_cosmos_default"
conn_type = "azure_cosmos"
hook_name = "Azure CosmosDB"
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import StringField
return {
"database_name": StringField(
lazy_gettext("Cosmos Database Name (optional)"), widget=BS3TextFieldWidget()
),
"collection_name": StringField(
lazy_gettext("Cosmos Collection Name (optional)"), widget=BS3TextFieldWidget()
),
}
@staticmethod
@_ensure_prefixes(conn_type="azure_cosmos") # todo: remove when min airflow version >= 2.5
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "port", "host", "extra"],
"relabeling": {
"login": "Cosmos Endpoint URI",
"password": "Cosmos Master Key Token",
},
"placeholders": {
"login": "endpoint uri",
"password": "master key",
"database_name": "database name",
"collection_name": "collection name",
},
}
def __init__(self, azure_cosmos_conn_id: str = default_conn_name) -> None:
super().__init__()
self.conn_id = azure_cosmos_conn_id
self._conn: CosmosClient | None = None
self.default_database_name = None
self.default_collection_name = None
def _get_field(self, extras, name):
return get_field(
conn_id=self.conn_id,
conn_type=self.conn_type,
extras=extras,
field_name=name,
)
def get_conn(self) -> CosmosClient:
"""Return a cosmos db client."""
if not self._conn:
conn = self.get_connection(self.conn_id)
extras = conn.extra_dejson
endpoint_uri = conn.login
master_key = conn.password
self.default_database_name = self._get_field(extras, "database_name")
self.default_collection_name = self._get_field(extras, "collection_name")
# Initialize the Python Azure Cosmos DB client
self._conn = CosmosClient(endpoint_uri, {"masterKey": master_key})
return self._conn
def __get_database_name(self, database_name: str | None = None) -> str:
self.get_conn()
db_name = database_name
if db_name is None:
db_name = self.default_database_name
if db_name is None:
raise AirflowBadRequest("Database name must be specified")
return db_name
def __get_collection_name(self, collection_name: str | None = None) -> str:
self.get_conn()
coll_name = collection_name
if coll_name is None:
coll_name = self.default_collection_name
if coll_name is None:
raise AirflowBadRequest("Collection name must be specified")
return coll_name
def does_collection_exist(self, collection_name: str, database_name: str) -> bool:
"""Checks if a collection exists in CosmosDB."""
if collection_name is None:
raise AirflowBadRequest("Collection name cannot be None.")
existing_container = list(
self.get_conn()
.get_database_client(self.__get_database_name(database_name))
.query_containers(
"SELECT * FROM r WHERE r.id=@id",
parameters=[json.dumps({"name": "@id", "value": collection_name})],
)
)
if len(existing_container) == 0:
return False
return True
def create_collection(
self,
collection_name: str,
database_name: str | None = None,
partition_key: str | None = None,
) -> None:
"""Creates a new collection in the CosmosDB database."""
if collection_name is None:
raise AirflowBadRequest("Collection name cannot be None.")
# We need to check to see if this container already exists so we don't try
# to create it twice
existing_container = list(
self.get_conn()
.get_database_client(self.__get_database_name(database_name))
.query_containers(
"SELECT * FROM r WHERE r.id=@id",
parameters=[json.dumps({"name": "@id", "value": collection_name})],
)
)
# Only create if we did not find it already existing
if len(existing_container) == 0:
self.get_conn().get_database_client(self.__get_database_name(database_name)).create_container(
collection_name, partition_key=partition_key
)
def does_database_exist(self, database_name: str) -> bool:
"""Checks if a database exists in CosmosDB."""
if database_name is None:
raise AirflowBadRequest("Database name cannot be None.")
existing_database = list(
self.get_conn().query_databases(
"SELECT * FROM r WHERE r.id=@id",
parameters=[json.dumps({"name": "@id", "value": database_name})],
)
)
if len(existing_database) == 0:
return False
return True
def create_database(self, database_name: str) -> None:
"""Creates a new database in CosmosDB."""
if database_name is None:
raise AirflowBadRequest("Database name cannot be None.")
# We need to check to see if this database already exists so we don't try
# to create it twice
existing_database = list(
self.get_conn().query_databases(
"SELECT * FROM r WHERE r.id=@id",
parameters=[json.dumps({"name": "@id", "value": database_name})],
)
)
# Only create if we did not find it already existing
if len(existing_database) == 0:
self.get_conn().create_database(database_name)
def delete_database(self, database_name: str) -> None:
"""Deletes an existing database in CosmosDB."""
if database_name is None:
raise AirflowBadRequest("Database name cannot be None.")
self.get_conn().delete_database(database_name)
def delete_collection(self, collection_name: str, database_name: str | None = None) -> None:
"""Deletes an existing collection in the CosmosDB database."""
if collection_name is None:
raise AirflowBadRequest("Collection name cannot be None.")
self.get_conn().get_database_client(self.__get_database_name(database_name)).delete_container(
collection_name
)
def upsert_document(self, document, database_name=None, collection_name=None, document_id=None):
"""Insert or update a document into an existing collection in the CosmosDB database."""
# Assign unique ID if one isn't provided
if document_id is None:
document_id = str(uuid.uuid4())
if document is None:
raise AirflowBadRequest("You cannot insert a None document")
# Add document id if isn't found
if "id" in document:
if document["id"] is None:
document["id"] = document_id
else:
document["id"] = document_id
created_document = (
self.get_conn()
.get_database_client(self.__get_database_name(database_name))
.get_container_client(self.__get_collection_name(collection_name))
.upsert_item(document)
)
return created_document
def insert_documents(
self, documents, database_name: str | None = None, collection_name: str | None = None
) -> list:
"""Insert a list of new documents into an existing collection in the CosmosDB database."""
if documents is None:
raise AirflowBadRequest("You cannot insert empty documents")
created_documents = []
for single_document in documents:
created_documents.append(
self.get_conn()
.get_database_client(self.__get_database_name(database_name))
.get_container_client(self.__get_collection_name(collection_name))
.create_item(single_document)
)
return created_documents
def delete_document(
self,
document_id: str,
database_name: str | None = None,
collection_name: str | None = None,
partition_key: str | None = None,
) -> None:
"""Delete an existing document out of a collection in the CosmosDB database."""
if document_id is None:
raise AirflowBadRequest("Cannot delete a document without an id")
(
self.get_conn()
.get_database_client(self.__get_database_name(database_name))
.get_container_client(self.__get_collection_name(collection_name))
.delete_item(document_id, partition_key=partition_key)
)
def get_document(
self,
document_id: str,
database_name: str | None = None,
collection_name: str | None = None,
partition_key: str | None = None,
):
"""Get a document from an existing collection in the CosmosDB database."""
if document_id is None:
raise AirflowBadRequest("Cannot get a document without an id")
try:
return (
self.get_conn()
.get_database_client(self.__get_database_name(database_name))
.get_container_client(self.__get_collection_name(collection_name))
.read_item(document_id, partition_key=partition_key)
)
except CosmosHttpResponseError:
return None
def get_documents(
self,
sql_string: str,
database_name: str | None = None,
collection_name: str | None = None,
partition_key: str | None = None,
) -> list | None:
"""Get a list of documents from an existing collection in the CosmosDB database via SQL query."""
if sql_string is None:
raise AirflowBadRequest("SQL query string cannot be None")
try:
result_iterable = (
self.get_conn()
.get_database_client(self.__get_database_name(database_name))
.get_container_client(self.__get_collection_name(collection_name))
.query_items(sql_string, partition_key=partition_key)
)
return list(result_iterable)
except CosmosHttpResponseError:
return None
def test_connection(self):
"""Test a configured Azure Cosmos connection."""
try:
# Attempt to list existing databases under the configured subscription and retrieve the first in
# the returned iterator. The Azure Cosmos API does allow for creation of a
# CosmosClient with incorrect values but then will fail properly once items are
# retrieved using the client. We need to _actually_ try to retrieve an object to properly test the
# connection.
next(iter(self.get_conn().list_databases()), None)
except Exception as e:
return False, str(e)
return True, "Successfully connected to Azure Cosmos."
def get_database_link(database_id: str) -> str:
"""Get Azure CosmosDB database link."""
return "dbs/" + database_id
def get_collection_link(database_id: str, collection_id: str) -> str:
"""Get Azure CosmosDB collection link."""
return get_database_link(database_id) + "/colls/" + collection_id
def get_document_link(database_id: str, collection_id: str, document_id: str) -> str:
"""Get Azure CosmosDB document link."""
return get_collection_link(database_id, collection_id) + "/docs/" + document_id
| 14,177 | 37.422764 | 110 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/hooks/asb.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from azure.servicebus import ServiceBusClient, ServiceBusMessage, ServiceBusSender
from azure.servicebus.management import QueueProperties, ServiceBusAdministrationClient
from airflow.hooks.base import BaseHook
class BaseAzureServiceBusHook(BaseHook):
"""
BaseAzureServiceBusHook class to create session and create connection using connection string.
:param azure_service_bus_conn_id: Reference to the
:ref:`Azure Service Bus connection<howto/connection:azure_service_bus>`.
"""
conn_name_attr = "azure_service_bus_conn_id"
default_conn_name = "azure_service_bus_default"
conn_type = "azure_service_bus"
hook_name = "Azure Service Bus"
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["port", "host", "extra", "login", "password"],
"relabeling": {"schema": "Connection String"},
"placeholders": {
"schema": "Endpoint=sb://<Resource group>.servicebus.windows.net/;SharedAccessKeyName=<AccessKeyName>;SharedAccessKey=<SharedAccessKey>", # noqa
},
}
def __init__(self, azure_service_bus_conn_id: str = default_conn_name) -> None:
super().__init__()
self.conn_id = azure_service_bus_conn_id
def get_conn(self):
raise NotImplementedError
class AdminClientHook(BaseAzureServiceBusHook):
"""Interact with the ServiceBusAdministrationClient.
This can create, update, list, and delete resources of a Service Bus
namespace. This hook uses the same Azure Service Bus client connection
inherited from the base class.
"""
def get_conn(self) -> ServiceBusAdministrationClient:
"""Create a ServiceBusAdministrationClient instance.
This uses the connection string in connection details.
"""
conn = self.get_connection(self.conn_id)
connection_string: str = str(conn.schema)
return ServiceBusAdministrationClient.from_connection_string(connection_string)
def create_queue(
self,
queue_name: str,
max_delivery_count: int = 10,
dead_lettering_on_message_expiration: bool = True,
enable_batched_operations: bool = True,
) -> QueueProperties:
"""
Create Queue by connecting to service Bus Admin client return the QueueProperties.
:param queue_name: The name of the queue or a QueueProperties with name.
:param max_delivery_count: The maximum delivery count. A message is automatically
dead lettered after this number of deliveries. Default value is 10..
:param dead_lettering_on_message_expiration: A value that indicates whether this subscription has
dead letter support when a message expires.
:param enable_batched_operations: Value that indicates whether server-side batched
operations are enabled.
"""
if queue_name is None:
raise TypeError("Queue name cannot be None.")
with self.get_conn() as service_mgmt_conn:
queue = service_mgmt_conn.create_queue(
queue_name,
max_delivery_count=max_delivery_count,
dead_lettering_on_message_expiration=dead_lettering_on_message_expiration,
enable_batched_operations=enable_batched_operations,
)
return queue
def delete_queue(self, queue_name: str) -> None:
"""
Delete the queue by queue_name in service bus namespace.
:param queue_name: The name of the queue or a QueueProperties with name.
"""
if queue_name is None:
raise TypeError("Queue name cannot be None.")
with self.get_conn() as service_mgmt_conn:
service_mgmt_conn.delete_queue(queue_name)
def delete_subscription(self, subscription_name: str, topic_name: str) -> None:
"""
Delete a topic subscription entities under a ServiceBus Namespace.
:param subscription_name: The subscription name that will own the rule in topic
:param topic_name: The topic that will own the subscription rule.
"""
if subscription_name is None:
raise TypeError("Subscription name cannot be None.")
if topic_name is None:
raise TypeError("Topic name cannot be None.")
with self.get_conn() as service_mgmt_conn:
self.log.info("Deleting Subscription %s", subscription_name)
service_mgmt_conn.delete_subscription(topic_name, subscription_name)
class MessageHook(BaseAzureServiceBusHook):
"""Interact with ServiceBusClient.
This acts as a high level interface for getting ServiceBusSender and ServiceBusReceiver.
"""
def get_conn(self) -> ServiceBusClient:
"""Create and returns ServiceBusClient by using the connection string in connection details."""
conn = self.get_connection(self.conn_id)
connection_string: str = str(conn.schema)
self.log.info("Create and returns ServiceBusClient")
return ServiceBusClient.from_connection_string(conn_str=connection_string, logging_enable=True)
def send_message(self, queue_name: str, messages: str | list[str], batch_message_flag: bool = False):
"""Use ServiceBusClient Send to send message(s) to a Service Bus Queue.
By using ``batch_message_flag``, it enables and send message as batch message.
:param queue_name: The name of the queue or a QueueProperties with name.
:param messages: Message which needs to be sent to the queue. It can be string or list of string.
:param batch_message_flag: bool flag, can be set to True if message needs to be
sent as batch message.
"""
if queue_name is None:
raise TypeError("Queue name cannot be None.")
if not messages:
raise ValueError("Messages list cannot be empty.")
with self.get_conn() as service_bus_client, service_bus_client.get_queue_sender(
queue_name=queue_name
) as sender:
with sender:
if isinstance(messages, str):
if not batch_message_flag:
msg = ServiceBusMessage(messages)
sender.send_messages(msg)
else:
self.send_batch_message(sender, [messages])
else:
if not batch_message_flag:
self.send_list_messages(sender, messages)
else:
self.send_batch_message(sender, messages)
@staticmethod
def send_list_messages(sender: ServiceBusSender, messages: list[str]):
list_messages = [ServiceBusMessage(message) for message in messages]
sender.send_messages(list_messages) # type: ignore[arg-type]
@staticmethod
def send_batch_message(sender: ServiceBusSender, messages: list[str]):
batch_message = sender.create_message_batch()
for message in messages:
batch_message.add_message(ServiceBusMessage(message))
sender.send_messages(batch_message)
def receive_message(
self, queue_name, max_message_count: int | None = 1, max_wait_time: float | None = None
):
"""
Receive a batch of messages at once in a specified Queue name.
:param queue_name: The name of the queue name or a QueueProperties with name.
:param max_message_count: Maximum number of messages in the batch.
:param max_wait_time: Maximum time to wait in seconds for the first message to arrive.
"""
if queue_name is None:
raise TypeError("Queue name cannot be None.")
with self.get_conn() as service_bus_client, service_bus_client.get_queue_receiver(
queue_name=queue_name
) as receiver:
with receiver:
received_msgs = receiver.receive_messages(
max_message_count=max_message_count, max_wait_time=max_wait_time
)
for msg in received_msgs:
self.log.info(msg)
receiver.complete_message(msg)
def receive_subscription_message(
self,
topic_name: str,
subscription_name: str,
max_message_count: int | None,
max_wait_time: float | None,
):
"""Receive a batch of subscription message at once.
This approach is optimal if you wish to process multiple messages
simultaneously, or perform an ad-hoc receive as a single call.
:param subscription_name: The subscription name that will own the rule in topic
:param topic_name: The topic that will own the subscription rule.
:param max_message_count: Maximum number of messages in the batch.
Actual number returned will depend on prefetch_count and incoming stream rate.
Setting to None will fully depend on the prefetch config. The default value is 1.
:param max_wait_time: Maximum time to wait in seconds for the first message to arrive. If no
messages arrive, and no timeout is specified, this call will not return until the
connection is closed. If specified, an no messages arrive within the timeout period,
an empty list will be returned.
"""
if subscription_name is None:
raise TypeError("Subscription name cannot be None.")
if topic_name is None:
raise TypeError("Topic name cannot be None.")
with self.get_conn() as service_bus_client, service_bus_client.get_subscription_receiver(
topic_name, subscription_name
) as subscription_receiver:
with subscription_receiver:
received_msgs = subscription_receiver.receive_messages(
max_message_count=max_message_count, max_wait_time=max_wait_time
)
for msg in received_msgs:
self.log.info(msg)
subscription_receiver.complete_message(msg)
| 10,984 | 42.59127 | 161 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/log/wasb_task_handler.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import shutil
from functools import cached_property
from pathlib import Path
from typing import TYPE_CHECKING, Any
from azure.core.exceptions import HttpResponseError
from packaging.version import Version
from airflow.configuration import conf
from airflow.utils.log.file_task_handler import FileTaskHandler
from airflow.utils.log.logging_mixin import LoggingMixin
def get_default_delete_local_copy():
"""Load delete_local_logs conf if Airflow version > 2.6 and return False if not.
TODO: delete this function when min airflow version >= 2.6
"""
from airflow.version import version
if Version(version) < Version("2.6"):
return False
return conf.getboolean("logging", "delete_local_logs")
class WasbTaskHandler(FileTaskHandler, LoggingMixin):
"""
WasbTaskHandler is a python log handler that handles and reads task instance logs.
It extends airflow FileTaskHandler and uploads to and reads from Wasb remote storage.
"""
trigger_should_wrap = True
def __init__(
self,
base_log_folder: str,
wasb_log_folder: str,
wasb_container: str,
*,
filename_template: str | None = None,
**kwargs,
) -> None:
super().__init__(base_log_folder, filename_template)
self.wasb_container = wasb_container
self.remote_base = wasb_log_folder
self.log_relative_path = ""
self._hook = None
self.closed = False
self.upload_on_close = True
self.delete_local_copy = (
kwargs["delete_local_copy"] if "delete_local_copy" in kwargs else get_default_delete_local_copy()
)
@cached_property
def hook(self):
"""Returns WasbHook."""
remote_conn_id = conf.get("logging", "REMOTE_LOG_CONN_ID")
try:
from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
return WasbHook(remote_conn_id)
except Exception:
self.log.exception(
"Could not create a WasbHook with connection id '%s'. "
"Do you have apache-airflow[azure] installed? "
"Does connection the connection exist, and is it "
"configured properly?",
remote_conn_id,
)
return None
def set_context(self, ti) -> None:
super().set_context(ti)
# Local location and remote location is needed to open and
# upload local log file to Wasb remote storage.
if TYPE_CHECKING:
assert self.handler is not None
full_path = self.handler.baseFilename
self.log_relative_path = Path(full_path).relative_to(self.local_base).as_posix()
is_trigger_log_context = getattr(ti, "is_trigger_log_context", False)
self.upload_on_close = is_trigger_log_context or not ti.raw
def close(self) -> None:
"""Close and upload local log file to remote storage Wasb."""
# When application exit, system shuts down all handlers by
# calling close method. Here we check if logger is already
# closed to prevent uploading the log to remote storage multiple
# times when `logging.shutdown` is called.
if self.closed:
return
super().close()
if not self.upload_on_close:
return
local_loc = os.path.join(self.local_base, self.log_relative_path)
remote_loc = os.path.join(self.remote_base, self.log_relative_path)
if os.path.exists(local_loc):
# read log and remove old logs to get just the latest additions
with open(local_loc) as logfile:
log = logfile.read()
wasb_write = self.wasb_write(log, remote_loc, append=True)
if wasb_write and self.delete_local_copy:
shutil.rmtree(os.path.dirname(local_loc))
# Mark closed so we don't double write if close is called twice
self.closed = True
def _read_remote_logs(self, ti, try_number, metadata=None) -> tuple[list[str], list[str]]:
messages = []
logs = []
worker_log_relative_path = self._render_filename(ti, try_number)
# todo: fix this
# for some reason this handler was designed such that (1) container name is not configurable
# (i.e. it's hardcoded in airflow_local_settings.py) and (2) the "relative path" is actually...
# whatever you put in REMOTE_BASE_LOG_FOLDER i.e. it includes the "wasb://" in the blob
# name. it's very screwed up but to change it we have to be careful not to break backcompat.
prefix = os.path.join(self.remote_base, worker_log_relative_path)
blob_names = []
try:
blob_names = self.hook.get_blobs_list(container_name=self.wasb_container, prefix=prefix)
except HttpResponseError as e:
messages.append(f"tried listing blobs with prefix={prefix} and container={self.wasb_container}")
messages.append("could not list blobs " + str(e))
self.log.exception("can't list blobs")
if blob_names:
uris = [f"wasb://{self.wasb_container}/{b}" for b in blob_names]
messages.extend(["Found remote logs:", *[f" * {x}" for x in sorted(uris)]])
else:
messages.append(f"No logs found in WASB; ti=%s {ti}")
for name in sorted(blob_names):
remote_log = ""
try:
remote_log = self.hook.read_file(self.wasb_container, name)
if remote_log:
logs.append(remote_log)
except Exception as e:
messages.append(
f"Unable to read remote blob '{name}' in container '{self.wasb_container}'\n{e}"
)
self.log.exception("Could not read blob")
return messages, logs
def _read(
self, ti, try_number: int, metadata: dict[str, Any] | None = None
) -> tuple[str, dict[str, bool]]:
"""
Read logs of given task instance and try_number from Wasb remote storage.
If failed, read the log from task instance host machine.
todo: when min airflow version >= 2.6, remove this method
:param ti: task instance object
:param try_number: task instance try_number to read logs from
:param metadata: log metadata,
can be used for steaming log reading and auto-tailing.
"""
if hasattr(super(), "_read_remote_logs"):
# from Airflow 2.6, we don't implement the `_read` method.
# if parent has _read_remote_logs, we're >= 2.6
return super()._read(ti, try_number, metadata)
# below is backcompat, for airflow < 2.6
messages, logs = self._read_remote_logs(ti, try_number, metadata)
if not logs:
return super()._read(ti, try_number, metadata)
return "".join([f"*** {x}\n" for x in messages]) + "\n".join(logs), {"end_of_log": True}
def wasb_log_exists(self, remote_log_location: str) -> bool:
"""
Check if remote_log_location exists in remote storage.
:param remote_log_location: log's location in remote storage
:return: True if location exists else False
"""
try:
return self.hook.check_for_blob(self.wasb_container, remote_log_location)
except Exception as e:
self.log.debug('Exception when trying to check remote location: "%s"', e)
return False
def wasb_read(self, remote_log_location: str, return_error: bool = False):
"""
Return the log found at the remote_log_location. Returns '' if no logs are found or there is an error.
:param remote_log_location: the log's location in remote storage
:param return_error: if True, returns a string error message if an
error occurs. Otherwise returns '' when an error occurs.
"""
try:
return self.hook.read_file(self.wasb_container, remote_log_location)
except Exception:
msg = f"Could not read logs from {remote_log_location}"
self.log.exception(msg)
# return error if needed
if return_error:
return msg
return ""
def wasb_write(self, log: str, remote_log_location: str, append: bool = True) -> bool:
"""
Writes the log to the remote_log_location. Fails silently if no hook was created.
:param log: the log to write to the remote_log_location
:param remote_log_location: the log's location in remote storage
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
"""
if append and self.wasb_log_exists(remote_log_location):
old_log = self.wasb_read(remote_log_location)
log = "\n".join([old_log, log]) if old_log else log
try:
self.hook.load_string(log, self.wasb_container, remote_log_location, overwrite=True)
except Exception:
self.log.exception("Could not write logs to %s", remote_log_location)
return False
return True
| 10,038 | 39.808943 | 110 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/log/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/example_dag/example_wasb_sensors.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that senses blob(s) in Azure Blob Storage.
This DAG relies on the following OS environment variables
* CONTAINER_NAME - The container under which to look for the blob.
* BLOB_NAME - The name of the blob to match.
* PREFIX - The blob with the specified prefix to match.
"""
from __future__ import annotations
import os
from datetime import datetime
from airflow.models import DAG
from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor
CONTAINER_NAME = os.environ.get("CONTAINER_NAME", "example-container-name")
BLOB_NAME = os.environ.get("BLOB_NAME", "example-blob-name")
PREFIX = os.environ.get("PREFIX", "example-prefix")
with DAG(
"example_wasb_sensors",
start_date=datetime(2022, 8, 8),
catchup=False,
tags=["example"],
) as dag:
# [START wasb_blob_sensor]
azure_wasb_sensor = WasbBlobSensor(
container_name=CONTAINER_NAME,
blob_name=BLOB_NAME,
task_id="wasb_sense_blob",
)
# [END wasb_blob_sensor]
# [START wasb_prefix_sensor]
azure_wasb_prefix_sensor = WasbPrefixSensor(
container_name=CONTAINER_NAME,
prefix=PREFIX,
task_id="wasb_sense_prefix",
)
# [END wasb_prefix_sensor]
| 2,032 | 32.327869 | 91 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/example_dag/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/example_dag/example_cosmos_document_sensor.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that senses document in Azure Cosmos DB.
This DAG relies on the following OS environment variables
* DATABASE_NAME - Target CosmosDB database_name.
* COLLECTION_NAME - Target CosmosDB collection_name.
* DOCUMENT_ID - The ID of the target document.
"""
from __future__ import annotations
import os
from datetime import datetime
from airflow.models import DAG
from airflow.providers.microsoft.azure.sensors.cosmos import AzureCosmosDocumentSensor
DATABASE_NAME = os.environ.get("DATABASE_NAME", "example-database-name")
COLLECTION_NAME = os.environ.get("COLLECTION_NAME", "example-collection-name")
DOCUMENT_ID = os.environ.get("DOCUMENT_ID", "example-document-id")
with DAG(
"example_cosmos_document_sensor",
start_date=datetime(2022, 8, 8),
catchup=False,
tags=["example"],
) as dag:
# [START cosmos_document_sensor]
azure_wasb_sensor = AzureCosmosDocumentSensor(
database_name=DATABASE_NAME,
collection_name=COLLECTION_NAME,
document_id=DOCUMENT_ID,
task_id="cosmos_document_sensor",
)
# [END cosmos_document_sensor]
| 1,900 | 34.203704 | 86 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/sensors/data_factory.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from datetime import timedelta
from typing import TYPE_CHECKING, Any, Sequence
from airflow.configuration import conf
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.microsoft.azure.hooks.data_factory import (
AzureDataFactoryHook,
AzureDataFactoryPipelineRunException,
AzureDataFactoryPipelineRunStatus,
)
from airflow.providers.microsoft.azure.triggers.data_factory import ADFPipelineRunStatusSensorTrigger
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class AzureDataFactoryPipelineRunStatusSensor(BaseSensorOperator):
"""
Checks the status of a pipeline run.
:param azure_data_factory_conn_id: The connection identifier for connecting to Azure Data Factory.
:param run_id: The pipeline run identifier.
:param resource_group_name: The resource group name.
:param factory_name: The data factory name.
:param deferrable: Run sensor in the deferrable mode.
"""
template_fields: Sequence[str] = (
"azure_data_factory_conn_id",
"resource_group_name",
"factory_name",
"run_id",
)
ui_color = "#50e6ff"
def __init__(
self,
*,
run_id: str,
azure_data_factory_conn_id: str = AzureDataFactoryHook.default_conn_name,
resource_group_name: str | None = None,
factory_name: str | None = None,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
**kwargs,
) -> None:
super().__init__(**kwargs)
self.azure_data_factory_conn_id = azure_data_factory_conn_id
self.run_id = run_id
self.resource_group_name = resource_group_name
self.factory_name = factory_name
self.deferrable = deferrable
def poke(self, context: Context) -> bool:
self.hook = AzureDataFactoryHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id)
pipeline_run_status = self.hook.get_pipeline_run_status(
run_id=self.run_id,
resource_group_name=self.resource_group_name,
factory_name=self.factory_name,
)
if pipeline_run_status == AzureDataFactoryPipelineRunStatus.FAILED:
raise AzureDataFactoryPipelineRunException(f"Pipeline run {self.run_id} has failed.")
if pipeline_run_status == AzureDataFactoryPipelineRunStatus.CANCELLED:
raise AzureDataFactoryPipelineRunException(f"Pipeline run {self.run_id} has been cancelled.")
return pipeline_run_status == AzureDataFactoryPipelineRunStatus.SUCCEEDED
def execute(self, context: Context) -> None:
"""Poll for state of the job run.
In deferrable mode, the polling is deferred to the triggerer. Otherwise
the sensor waits synchronously.
"""
if not self.deferrable:
super().execute(context=context)
else:
if not self.poke(context=context):
self.defer(
timeout=timedelta(seconds=self.timeout),
trigger=ADFPipelineRunStatusSensorTrigger(
run_id=self.run_id,
azure_data_factory_conn_id=self.azure_data_factory_conn_id,
resource_group_name=self.resource_group_name,
factory_name=self.factory_name,
poke_interval=self.poke_interval,
),
method_name="execute_complete",
)
def execute_complete(self, context: Context, event: dict[str, str]) -> None:
"""
Callback for when the trigger fires - returns immediately.
Relies on trigger to throw an exception, otherwise it assumes execution was successful.
"""
if event:
if event["status"] == "error":
raise AirflowException(event["message"])
self.log.info(event["message"])
return None
class AzureDataFactoryPipelineRunStatusAsyncSensor(AzureDataFactoryPipelineRunStatusSensor):
"""
Checks the status of a pipeline run asynchronously.
This class is deprecated and will be removed in a future release.
Please use
:class:`airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor`
and set *deferrable* attribute to *True* instead.
:param azure_data_factory_conn_id: The connection identifier for connecting to Azure Data Factory.
:param run_id: The pipeline run identifier.
:param resource_group_name: The resource group name.
:param factory_name: The data factory name.
:param poke_interval: polling period in seconds to check for the status
:param deferrable: Run sensor in the deferrable mode.
"""
def __init__(self, **kwargs: Any) -> None:
warnings.warn(
"Class `AzureDataFactoryPipelineRunStatusAsyncSensor` is deprecated and "
"will be removed in a future release. "
"Please use `AzureDataFactoryPipelineRunStatusSensor` and "
"set `deferrable` attribute to `True` instead",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
super().__init__(**kwargs, deferrable=True)
| 6,145 | 38.909091 | 107 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/sensors/wasb.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from datetime import timedelta
from typing import TYPE_CHECKING, Any, Sequence
from airflow.configuration import conf
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
from airflow.providers.microsoft.azure.triggers.wasb import WasbBlobSensorTrigger, WasbPrefixSensorTrigger
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class WasbBlobSensor(BaseSensorOperator):
"""
Waits for a blob to arrive on Azure Blob Storage.
:param container_name: Name of the container.
:param blob_name: Name of the blob.
:param wasb_conn_id: Reference to the :ref:`wasb connection <howto/connection:wasb>`.
:param check_options: Optional keyword arguments that
`WasbHook.check_for_blob()` takes.
:param deferrable: Run sensor in the deferrable mode.
:param public_read: whether an anonymous public read access should be used. Default is False
"""
template_fields: Sequence[str] = ("container_name", "blob_name")
def __init__(
self,
*,
container_name: str,
blob_name: str,
wasb_conn_id: str = "wasb_default",
check_options: dict | None = None,
public_read: bool = False,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
**kwargs,
) -> None:
super().__init__(**kwargs)
if check_options is None:
check_options = {}
self.wasb_conn_id = wasb_conn_id
self.container_name = container_name
self.blob_name = blob_name
self.check_options = check_options
self.public_read = public_read
self.deferrable = deferrable
def poke(self, context: Context):
self.log.info("Poking for blob: %s\n in wasb://%s", self.blob_name, self.container_name)
hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
return hook.check_for_blob(self.container_name, self.blob_name, **self.check_options)
def execute(self, context: Context) -> None:
"""Poll for state of the job run.
In deferrable mode, the polling is deferred to the triggerer. Otherwise
the sensor waits synchronously.
"""
if not self.deferrable:
super().execute(context=context)
else:
if not self.poke(context=context):
self.defer(
timeout=timedelta(seconds=self.timeout),
trigger=WasbBlobSensorTrigger(
container_name=self.container_name,
blob_name=self.blob_name,
wasb_conn_id=self.wasb_conn_id,
public_read=self.public_read,
poke_interval=self.poke_interval,
),
method_name="execute_complete",
)
def execute_complete(self, context: Context, event: dict[str, str]) -> None:
"""
Callback for when the trigger fires - returns immediately.
Relies on trigger to throw an exception, otherwise it assumes execution was successful.
"""
if event:
if event["status"] == "error":
raise AirflowException(event["message"])
self.log.info(event["message"])
else:
raise AirflowException("Did not receive valid event from the triggerer")
class WasbBlobAsyncSensor(WasbBlobSensor):
"""
Polls asynchronously for the existence of a blob in a WASB container.
This class is deprecated and will be removed in a future release.
Please use :class:`airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor`
and set *deferrable* attribute to *True* instead.
:param container_name: name of the container in which the blob should be searched for
:param blob_name: name of the blob to check existence for
:param wasb_conn_id: the connection identifier for connecting to Azure WASB
:param poke_interval: polling period in seconds to check for the status
:param public_read: whether an anonymous public read access should be used. Default is False
:param timeout: Time, in seconds before the task times out and fails.
"""
def __init__(self, **kwargs: Any) -> None:
warnings.warn(
"Class `WasbBlobAsyncSensor` is deprecated and "
"will be removed in a future release. "
"Please use `WasbBlobSensor` and "
"set `deferrable` attribute to `True` instead",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
super().__init__(**kwargs, deferrable=True)
class WasbPrefixSensor(BaseSensorOperator):
"""
Waits for blobs matching a prefix to arrive on Azure Blob Storage.
:param container_name: Name of the container.
:param prefix: Prefix of the blob.
:param wasb_conn_id: Reference to the wasb connection.
:param check_options: Optional keyword arguments that
`WasbHook.check_for_prefix()` takes.
"""
template_fields: Sequence[str] = ("container_name", "prefix")
def __init__(
self,
*,
container_name: str,
prefix: str,
wasb_conn_id: str = "wasb_default",
check_options: dict | None = None,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
**kwargs,
) -> None:
super().__init__(**kwargs)
if check_options is None:
check_options = {}
self.wasb_conn_id = wasb_conn_id
self.container_name = container_name
self.prefix = prefix
self.check_options = check_options
self.deferrable = deferrable
def poke(self, context: Context) -> bool:
self.log.info("Poking for prefix: %s in wasb://%s", self.prefix, self.container_name)
hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
return hook.check_for_prefix(self.container_name, self.prefix, **self.check_options)
def execute(self, context: Context) -> None:
"""Poll for state of the job run.
In deferrable mode, the polling is deferred to the triggerer. Otherwise
the sensor waits synchronously.
"""
if not self.deferrable:
super().execute(context=context)
else:
if not self.poke(context=context):
self.defer(
timeout=timedelta(seconds=self.timeout),
trigger=WasbPrefixSensorTrigger(
container_name=self.container_name,
prefix=self.prefix,
wasb_conn_id=self.wasb_conn_id,
poke_interval=self.poke_interval,
),
method_name="execute_complete",
)
def execute_complete(self, context: Context, event: dict[str, str]) -> None:
"""
Callback for when the trigger fires - returns immediately.
Relies on trigger to throw an exception, otherwise it assumes execution was successful.
"""
if event:
if event["status"] == "error":
raise AirflowException(event["message"])
self.log.info(event["message"])
else:
raise AirflowException("Did not receive valid event from the triggerer")
| 8,277 | 38.419048 | 106 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/sensors/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/azure/sensors/cosmos.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class AzureCosmosDocumentSensor(BaseSensorOperator):
"""
Checks for the existence of a document which matches the given query in CosmosDB.
.. code-block:: python
azure_cosmos_sensor = AzureCosmosDocumentSensor(
database_name="somedatabase_name",
collection_name="somecollection_name",
document_id="unique-doc-id",
azure_cosmos_conn_id="azure_cosmos_default",
task_id="azure_cosmos_sensor",
)
:param database_name: Target CosmosDB database_name.
:param collection_name: Target CosmosDB collection_name.
:param document_id: The ID of the target document.
:param azure_cosmos_conn_id: Reference to the
:ref:`Azure CosmosDB connection<howto/connection:azure_cosmos>`.
"""
template_fields: Sequence[str] = ("database_name", "collection_name", "document_id")
def __init__(
self,
*,
database_name: str,
collection_name: str,
document_id: str,
azure_cosmos_conn_id: str = "azure_cosmos_default",
**kwargs,
) -> None:
super().__init__(**kwargs)
self.azure_cosmos_conn_id = azure_cosmos_conn_id
self.database_name = database_name
self.collection_name = collection_name
self.document_id = document_id
def poke(self, context: Context) -> bool:
self.log.info("*** Entering poke")
hook = AzureCosmosDBHook(self.azure_cosmos_conn_id)
return hook.get_document(self.document_id, self.database_name, self.collection_name) is not None
| 2,637 | 36.15493 | 104 | py |
airflow | airflow-main/airflow/providers/microsoft/psrp/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "2.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-microsoft-psrp:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,539 | 35.666667 | 123 | py |
airflow | airflow-main/airflow/providers/microsoft/psrp/operators/psrp.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from logging import DEBUG
from typing import TYPE_CHECKING, Any, Sequence
from jinja2.nativetypes import NativeEnvironment
from pypsrp.powershell import Command
from pypsrp.serializer import TaggedValue
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.microsoft.psrp.hooks.psrp import PsrpHook
from airflow.settings import json
from airflow.utils.helpers import exactly_one
if TYPE_CHECKING:
from airflow.utils.context import Context
class PsrpOperator(BaseOperator):
"""PowerShell Remoting Protocol operator.
Use one of the 'command', 'cmdlet', or 'powershell' arguments.
The 'securestring' template filter can be used to tag a value for
serialization into a `System.Security.SecureString` (applicable only
for DAGs which have `render_template_as_native_obj=True`).
When using the `cmdlet` or `powershell` arguments and when `do_xcom_push`
is enabled, the command output is converted to JSON by PowerShell using
the `ConvertTo-Json
<https://docs.microsoft.com/en-us/powershell/
module/microsoft.powershell.utility/convertto-json>`__ cmdlet such
that the operator return value is serializable to an XCom value.
:param psrp_conn_id: connection id
:param command: command to execute on remote host. (templated)
:param powershell: powershell to execute on remote host. (templated)
:param cmdlet:
cmdlet to execute on remote host (templated). Also used as the default
value for `task_id`.
:param arguments:
When using the `cmdlet` or `powershell` option, use `arguments` to
provide arguments (templated).
:param parameters:
When using the `cmdlet` or `powershell` option, use `parameters` to
provide parameters (templated). Note that a parameter with a value of `None`
becomes an *argument* (i.e., switch).
:param logging_level:
Logging level for message streams which are received during remote execution.
The default is to include all messages in the task log.
:param runspace_options:
optional dictionary which is passed when creating the runspace pool. See
:py:class:`~pypsrp.powershell.RunspacePool` for a description of the
available options.
:param wsman_options:
optional dictionary which is passed when creating the `WSMan` client. See
:py:class:`~pypsrp.wsman.WSMan` for a description of the available options.
:param psrp_session_init:
Optional command which will be added to the pipeline when a new PowerShell
session has been established, prior to invoking the action specified using
the `cmdlet`, `command`, or `powershell` parameters.
"""
template_fields: Sequence[str] = (
"cmdlet",
"command",
"arguments",
"parameters",
"powershell",
)
template_fields_renderers = {"command": "powershell", "powershell": "powershell"}
ui_color = "#c2e2ff"
def __init__(
self,
*,
psrp_conn_id: str,
command: str | None = None,
powershell: str | None = None,
cmdlet: str | None = None,
arguments: list[str] | None = None,
parameters: dict[str, str] | None = None,
logging_level: int = DEBUG,
runspace_options: dict[str, Any] | None = None,
wsman_options: dict[str, Any] | None = None,
psrp_session_init: Command | None = None,
**kwargs,
) -> None:
args = {command, powershell, cmdlet}
if not exactly_one(*args):
raise ValueError("Must provide exactly one of 'command', 'powershell', or 'cmdlet'")
if arguments and not cmdlet:
raise ValueError("Arguments only allowed with 'cmdlet'")
if parameters and not cmdlet:
raise ValueError("Parameters only allowed with 'cmdlet'")
if cmdlet:
kwargs.setdefault("task_id", cmdlet)
super().__init__(**kwargs)
self.conn_id = psrp_conn_id
self.command = command
self.powershell = powershell
self.cmdlet = cmdlet
self.arguments = arguments
self.parameters = parameters
self.logging_level = logging_level
self.runspace_options = runspace_options
self.wsman_options = wsman_options
self.psrp_session_init = psrp_session_init
def execute(self, context: Context) -> list[Any] | None:
with PsrpHook(
self.conn_id,
logging_level=self.logging_level,
runspace_options=self.runspace_options,
wsman_options=self.wsman_options,
on_output_callback=self.log.info if not self.do_xcom_push else None,
) as hook, hook.invoke() as ps:
if self.psrp_session_init is not None:
ps.add_command(self.psrp_session_init)
if self.command:
ps.add_script(f"cmd.exe /c @'\n{self.command}\n'@")
else:
if self.cmdlet:
ps.add_cmdlet(self.cmdlet)
else:
ps.add_script(self.powershell)
for argument in self.arguments or ():
ps.add_argument(argument)
if self.parameters:
ps.add_parameters(self.parameters)
if self.do_xcom_push:
ps.add_cmdlet("ConvertTo-Json")
if ps.had_errors:
raise AirflowException("Process failed")
rc = ps.runspace_pool.host.rc
if rc:
raise AirflowException(f"Process exited with non-zero status code: {rc}")
if not self.do_xcom_push:
return None
return [json.loads(output) for output in ps.output]
def get_template_env(self):
# Create a template environment overlay in order to leave the underlying
# environment unchanged.
env = super().get_template_env().overlay()
native = isinstance(env, NativeEnvironment)
def securestring(value: str):
if not native:
raise AirflowException(
"Filter 'securestring' not applicable to non-native templating environment"
)
return TaggedValue("SS", value)
env.filters["securestring"] = securestring
return env
| 7,175 | 38.866667 | 96 | py |
airflow | airflow-main/airflow/providers/microsoft/psrp/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/psrp/hooks/psrp.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from contextlib import contextmanager
from copy import copy
from logging import DEBUG, ERROR, INFO, WARNING
from typing import Any, Callable, Generator
from warnings import warn
from weakref import WeakKeyDictionary
from pypsrp.host import PSHost
from pypsrp.messages import MessageType
from pypsrp.powershell import PowerShell, PSInvocationState, RunspacePool
from pypsrp.wsman import WSMan
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
INFORMATIONAL_RECORD_LEVEL_MAP = {
MessageType.DEBUG_RECORD: DEBUG,
MessageType.ERROR_RECORD: ERROR,
MessageType.VERBOSE_RECORD: INFO,
MessageType.WARNING_RECORD: WARNING,
}
OutputCallback = Callable[[str], None]
class PsrpHook(BaseHook):
"""
Hook for PowerShell Remoting Protocol execution.
When used as a context manager, the runspace pool is reused between shell
sessions.
:param psrp_conn_id: Required. The name of the PSRP connection.
:param logging_level:
Logging level for message streams which are received during remote execution.
The default is to include all messages in the task log.
:param operation_timeout: Override the default WSMan timeout when polling the pipeline.
:param runspace_options:
Optional dictionary which is passed when creating the runspace pool. See
:py:class:`~pypsrp.powershell.RunspacePool` for a description of the
available options.
:param wsman_options:
Optional dictionary which is passed when creating the `WSMan` client. See
:py:class:`~pypsrp.wsman.WSMan` for a description of the available options.
:param on_output_callback:
Optional callback function to be called whenever an output response item is
received during job status polling.
:param exchange_keys:
If true (default), automatically initiate a session key exchange when the
hook is used as a context manager.
:param host:
Optional PowerShell host instance. If this is not set, the default
implementation will be used.
You can provide an alternative `configuration_name` using either `runspace_options`
or by setting this key as the extra fields of your connection.
"""
_conn: RunspacePool | None = None
_wsman_ref: WeakKeyDictionary[RunspacePool, WSMan] = WeakKeyDictionary()
def __init__(
self,
psrp_conn_id: str,
logging_level: int = DEBUG,
operation_timeout: int | None = None,
runspace_options: dict[str, Any] | None = None,
wsman_options: dict[str, Any] | None = None,
on_output_callback: OutputCallback | None = None,
exchange_keys: bool = True,
host: PSHost | None = None,
):
self.conn_id = psrp_conn_id
self._logging_level = logging_level
self._operation_timeout = operation_timeout
self._runspace_options = runspace_options or {}
self._wsman_options = wsman_options or {}
self._on_output_callback = on_output_callback
self._exchange_keys = exchange_keys
self._host = host or PSHost(None, None, False, type(self).__name__, None, None, "1.0")
def __enter__(self):
conn = self.get_conn()
self._wsman_ref[conn].__enter__()
conn.__enter__()
if self._exchange_keys:
conn.exchange_keys()
self._conn = conn
return self
def __exit__(self, exc_type, exc_value, traceback):
try:
self._conn.__exit__(exc_type, exc_value, traceback)
self._wsman_ref[self._conn].__exit__(exc_type, exc_value, traceback)
finally:
del self._conn
def get_conn(self) -> RunspacePool:
"""
Returns a runspace pool.
The returned object must be used as a context manager.
"""
conn = self.get_connection(self.conn_id)
self.log.info("Establishing WinRM connection %s to host: %s", self.conn_id, conn.host)
extra = conn.extra_dejson.copy()
def apply_extra(d, keys):
d = d.copy()
for key in keys:
value = extra.pop(key, None)
if value is not None:
d[key] = value
return d
wsman_options = apply_extra(
self._wsman_options,
(
"auth",
"cert_validation",
"connection_timeout",
"locale",
"read_timeout",
"reconnection_retries",
"reconnection_backoff",
"ssl",
),
)
wsman = WSMan(conn.host, username=conn.login, password=conn.password, **wsman_options)
runspace_options = apply_extra(self._runspace_options, ("configuration_name",))
if extra:
raise AirflowException(f"Unexpected extra configuration keys: {', '.join(sorted(extra))}")
pool = RunspacePool(wsman, host=self._host, **runspace_options)
self._wsman_ref[pool] = wsman
return pool
@contextmanager
def invoke(self) -> Generator[PowerShell, None, None]:
"""
Yields a PowerShell object to which commands can be added.
Upon exit, the commands will be invoked.
"""
logger = copy(self.log)
logger.setLevel(self._logging_level)
local_context = self._conn is None
if local_context:
self.__enter__()
try:
assert self._conn is not None
ps = PowerShell(self._conn)
yield ps
ps.begin_invoke()
streams = [
ps.output,
ps.streams.debug,
ps.streams.error,
ps.streams.information,
ps.streams.progress,
ps.streams.verbose,
ps.streams.warning,
]
offsets = [0 for _ in streams]
# We're using polling to make sure output and streams are
# handled while the process is running.
while ps.state == PSInvocationState.RUNNING:
ps.poll_invoke(timeout=self._operation_timeout)
for i, stream in enumerate(streams):
offset = offsets[i]
while len(stream) > offset:
record = stream[offset]
# Records received on the output stream during job
# status polling are handled via an optional callback,
# while the other streams are simply logged.
if stream is ps.output:
if self._on_output_callback is not None:
self._on_output_callback(record)
else:
self._log_record(logger.log, record)
offset += 1
offsets[i] = offset
# For good measure, we'll make sure the process has
# stopped running in any case.
ps.end_invoke()
self.log.info("Invocation state: %s", str(PSInvocationState(ps.state)))
if ps.streams.error:
raise AirflowException("Process had one or more errors")
finally:
if local_context:
self.__exit__(None, None, None)
def invoke_cmdlet(
self,
name: str,
use_local_scope: bool | None = None,
arguments: list[str] | None = None,
parameters: dict[str, str] | None = None,
**kwargs: str,
) -> PowerShell:
"""Invoke a PowerShell cmdlet and return session."""
if kwargs:
if parameters:
raise ValueError("**kwargs not allowed when 'parameters' is used at the same time.")
warn(
"Passing **kwargs to 'invoke_cmdlet' is deprecated "
"and will be removed in a future release. Please use 'parameters' "
"instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
parameters = kwargs
with self.invoke() as ps:
ps.add_cmdlet(name, use_local_scope=use_local_scope)
for argument in arguments or ():
ps.add_argument(argument)
if parameters:
ps.add_parameters(parameters)
return ps
def invoke_powershell(self, script: str) -> PowerShell:
"""Invoke a PowerShell script and return session."""
with self.invoke() as ps:
ps.add_script(script)
return ps
def _log_record(self, log, record):
message_type = record.MESSAGE_TYPE
if message_type == MessageType.ERROR_RECORD:
log(INFO, "%s: %s", record.reason, record)
if record.script_stacktrace:
for trace in record.script_stacktrace.split("\r\n"):
log(INFO, trace)
level = INFORMATIONAL_RECORD_LEVEL_MAP.get(message_type)
if level is not None:
try:
message = str(record.message)
except BaseException as exc:
# See https://github.com/jborean93/pypsrp/pull/130
message = str(exc)
# Sometimes a message will have a trailing \r\n sequence such as
# the tracing output of the Set-PSDebug cmdlet.
message = message.rstrip()
if record.command_name is None:
log(level, "%s", message)
else:
log(level, "%s: %s", record.command_name, message)
elif message_type == MessageType.INFORMATION_RECORD:
log(INFO, "%s (%s): %s", record.computer, record.user, record.message_data)
elif message_type == MessageType.PROGRESS_RECORD:
log(INFO, "Progress: %s (%s)", record.activity, record.description)
else:
log(WARNING, "Unsupported message type: %s", message_type)
| 10,830 | 37.137324 | 102 | py |
airflow | airflow-main/airflow/providers/microsoft/psrp/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/winrm/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-microsoft-winrm:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,540 | 35.690476 | 124 | py |
airflow | airflow-main/airflow/providers/microsoft/winrm/operators/winrm.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from base64 import b64encode
from typing import TYPE_CHECKING, Sequence
from winrm.exceptions import WinRMOperationTimeoutError
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.microsoft.winrm.hooks.winrm import WinRMHook
if TYPE_CHECKING:
from airflow.utils.context import Context
# Hide the following error message in urllib3 when making WinRM connections:
# requests.packages.urllib3.exceptions.HeaderParsingError: [StartBoundaryNotFoundDefect(),
# MultipartInvariantViolationDefect()], unparsed data: ''
logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)
class WinRMOperator(BaseOperator):
"""
WinRMOperator to execute commands on given remote host using the winrm_hook.
:param winrm_hook: predefined ssh_hook to use for remote execution
:param ssh_conn_id: connection id from airflow Connections
:param remote_host: remote host to connect
:param command: command to execute on remote host. (templated)
:param ps_path: path to powershell, `powershell` for v5.1- and `pwsh` for v6+.
If specified, it will execute the command as powershell script.
:param output_encoding: the encoding used to decode stout and stderr
:param timeout: timeout for executing the command.
"""
template_fields: Sequence[str] = ("command",)
template_fields_renderers = {"command": "powershell"}
def __init__(
self,
*,
winrm_hook: WinRMHook | None = None,
ssh_conn_id: str | None = None,
remote_host: str | None = None,
command: str | None = None,
ps_path: str | None = None,
output_encoding: str = "utf-8",
timeout: int = 10,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.winrm_hook = winrm_hook
self.ssh_conn_id = ssh_conn_id
self.remote_host = remote_host
self.command = command
self.ps_path = ps_path
self.output_encoding = output_encoding
self.timeout = timeout
def execute(self, context: Context) -> list | str:
if self.ssh_conn_id and not self.winrm_hook:
self.log.info("Hook not found, creating...")
self.winrm_hook = WinRMHook(ssh_conn_id=self.ssh_conn_id)
if not self.winrm_hook:
raise AirflowException("Cannot operate without winrm_hook or ssh_conn_id.")
if self.remote_host is not None:
self.winrm_hook.remote_host = self.remote_host
if not self.command:
raise AirflowException("No command specified so nothing to execute here.")
winrm_client = self.winrm_hook.get_conn()
try:
if self.ps_path is not None:
self.log.info("Running command as powershell script: '%s'...", self.command)
encoded_ps = b64encode(self.command.encode("utf_16_le")).decode("ascii")
command_id = self.winrm_hook.winrm_protocol.run_command( # type: ignore[attr-defined]
winrm_client, f"{self.ps_path} -encodedcommand {encoded_ps}"
)
else:
self.log.info("Running command: '%s'...", self.command)
command_id = self.winrm_hook.winrm_protocol.run_command( # type: ignore[attr-defined]
winrm_client, self.command
)
# See: https://github.com/diyan/pywinrm/blob/master/winrm/protocol.py
stdout_buffer = []
stderr_buffer = []
command_done = False
while not command_done:
try:
(
stdout,
stderr,
return_code,
command_done,
) = self.winrm_hook.winrm_protocol._raw_get_command_output( # type: ignore[attr-defined]
winrm_client, command_id
)
# Only buffer stdout if we need to so that we minimize memory usage.
if self.do_xcom_push:
stdout_buffer.append(stdout)
stderr_buffer.append(stderr)
for line in stdout.decode(self.output_encoding).splitlines():
self.log.info(line)
for line in stderr.decode(self.output_encoding).splitlines():
self.log.warning(line)
except WinRMOperationTimeoutError:
# this is an expected error when waiting for a
# long-running process, just silently retry
pass
self.winrm_hook.winrm_protocol.cleanup_command( # type: ignore[attr-defined]
winrm_client, command_id
)
self.winrm_hook.winrm_protocol.close_shell(winrm_client) # type: ignore[attr-defined]
except Exception as e:
raise AirflowException(f"WinRM operator error: {str(e)}")
if return_code == 0:
# returning output if do_xcom_push is set
enable_pickling = conf.getboolean("core", "enable_xcom_pickling")
if enable_pickling:
return stdout_buffer
else:
return b64encode(b"".join(stdout_buffer)).decode(self.output_encoding)
else:
stderr_output = b"".join(stderr_buffer).decode(self.output_encoding)
error_msg = (
f"Error running cmd: {self.command}, return code: {return_code}, error: {stderr_output}"
)
raise AirflowException(error_msg)
| 6,499 | 39.880503 | 109 | py |
airflow | airflow-main/airflow/providers/microsoft/winrm/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/winrm/hooks/winrm.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for winrm remote execution."""
from __future__ import annotations
from winrm.protocol import Protocol
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.utils.platform import getuser
# TODO: Fixme please - I have too complex implementation
class WinRMHook(BaseHook):
"""
Hook for winrm remote execution using pywinrm.
:seealso: https://github.com/diyan/pywinrm/blob/master/winrm/protocol.py
:param ssh_conn_id: connection id from airflow Connections from where
all the required parameters can be fetched like username and password,
though priority is given to the params passed during init.
:param endpoint: When not set, endpoint will be constructed like this:
'http://{remote_host}:{remote_port}/wsman'
:param remote_host: Remote host to connect to. Ignored if `endpoint` is set.
:param remote_port: Remote port to connect to. Ignored if `endpoint` is set.
:param transport: transport type, one of 'plaintext' (default), 'kerberos', 'ssl', 'ntlm', 'credssp'
:param username: username to connect to the remote_host
:param password: password of the username to connect to the remote_host
:param service: the service name, default is HTTP
:param keytab: the path to a keytab file if you are using one
:param ca_trust_path: Certification Authority trust path
:param cert_pem: client authentication certificate file path in PEM format
:param cert_key_pem: client authentication certificate key file path in PEM format
:param server_cert_validation: whether server certificate should be validated on
Python versions that support it; one of 'validate' (default), 'ignore'
:param kerberos_delegation: if True, TGT is sent to target server to
allow multiple hops
:param read_timeout_sec: maximum seconds to wait before an HTTP connect/read times out (default 30).
This value should be slightly higher than operation_timeout_sec,
as the server can block *at least* that long.
:param operation_timeout_sec: maximum allowed time in seconds for any single wsman
HTTP operation (default 20). Note that operation timeouts while receiving output
(the only wsman operation that should take any significant time,
and where these timeouts are expected) will be silently retried indefinitely.
:param kerberos_hostname_override: the hostname to use for the kerberos exchange
(defaults to the hostname in the endpoint URL)
:param message_encryption: Will encrypt the WinRM messages if set
and the transport auth supports message encryption. (Default 'auto')
:param credssp_disable_tlsv1_2: Whether to disable TLSv1.2 support and work with older
protocols like TLSv1.0, default is False
:param send_cbt: Will send the channel bindings over a HTTPS channel (Default: True)
"""
def __init__(
self,
ssh_conn_id: str | None = None,
endpoint: str | None = None,
remote_host: str | None = None,
remote_port: int = 5985,
transport: str = "plaintext",
username: str | None = None,
password: str | None = None,
service: str = "HTTP",
keytab: str | None = None,
ca_trust_path: str | None = None,
cert_pem: str | None = None,
cert_key_pem: str | None = None,
server_cert_validation: str = "validate",
kerberos_delegation: bool = False,
read_timeout_sec: int = 30,
operation_timeout_sec: int = 20,
kerberos_hostname_override: str | None = None,
message_encryption: str | None = "auto",
credssp_disable_tlsv1_2: bool = False,
send_cbt: bool = True,
) -> None:
super().__init__()
self.ssh_conn_id = ssh_conn_id
self.endpoint = endpoint
self.remote_host = remote_host
self.remote_port = remote_port
self.transport = transport
self.username = username
self.password = password
self.service = service
self.keytab = keytab
self.ca_trust_path = ca_trust_path
self.cert_pem = cert_pem
self.cert_key_pem = cert_key_pem
self.server_cert_validation = server_cert_validation
self.kerberos_delegation = kerberos_delegation
self.read_timeout_sec = read_timeout_sec
self.operation_timeout_sec = operation_timeout_sec
self.kerberos_hostname_override = kerberos_hostname_override
self.message_encryption = message_encryption
self.credssp_disable_tlsv1_2 = credssp_disable_tlsv1_2
self.send_cbt = send_cbt
self.client = None
self.winrm_protocol = None
def get_conn(self):
if self.client:
return self.client
self.log.debug("Creating WinRM client for conn_id: %s", self.ssh_conn_id)
if self.ssh_conn_id is not None:
conn = self.get_connection(self.ssh_conn_id)
if self.username is None:
self.username = conn.login
if self.password is None:
self.password = conn.password
if self.remote_host is None:
self.remote_host = conn.host
if conn.extra is not None:
extra_options = conn.extra_dejson
if "endpoint" in extra_options:
self.endpoint = str(extra_options["endpoint"])
if "remote_port" in extra_options:
self.remote_port = int(extra_options["remote_port"])
if "transport" in extra_options:
self.transport = str(extra_options["transport"])
if "service" in extra_options:
self.service = str(extra_options["service"])
if "keytab" in extra_options:
self.keytab = str(extra_options["keytab"])
if "ca_trust_path" in extra_options:
self.ca_trust_path = str(extra_options["ca_trust_path"])
if "cert_pem" in extra_options:
self.cert_pem = str(extra_options["cert_pem"])
if "cert_key_pem" in extra_options:
self.cert_key_pem = str(extra_options["cert_key_pem"])
if "server_cert_validation" in extra_options:
self.server_cert_validation = str(extra_options["server_cert_validation"])
if "kerberos_delegation" in extra_options:
self.kerberos_delegation = str(extra_options["kerberos_delegation"]).lower() == "true"
if "read_timeout_sec" in extra_options:
self.read_timeout_sec = int(extra_options["read_timeout_sec"])
if "operation_timeout_sec" in extra_options:
self.operation_timeout_sec = int(extra_options["operation_timeout_sec"])
if "kerberos_hostname_override" in extra_options:
self.kerberos_hostname_override = str(extra_options["kerberos_hostname_override"])
if "message_encryption" in extra_options:
self.message_encryption = str(extra_options["message_encryption"])
if "credssp_disable_tlsv1_2" in extra_options:
self.credssp_disable_tlsv1_2 = (
str(extra_options["credssp_disable_tlsv1_2"]).lower() == "true"
)
if "send_cbt" in extra_options:
self.send_cbt = str(extra_options["send_cbt"]).lower() == "true"
if not self.remote_host:
raise AirflowException("Missing required param: remote_host")
# Auto detecting username values from system
if not self.username:
self.log.debug(
"username to WinRM to host: %s is not specified for connection id"
" %s. Using system's default provided by getpass.getuser()",
self.remote_host,
self.ssh_conn_id,
)
self.username = getuser()
# If endpoint is not set, then build a standard wsman endpoint from host and port.
if not self.endpoint:
self.endpoint = f"http://{self.remote_host}:{self.remote_port}/wsman"
try:
if self.password and self.password.strip():
self.winrm_protocol = Protocol(
endpoint=self.endpoint,
transport=self.transport,
username=self.username,
password=self.password,
service=self.service,
keytab=self.keytab,
ca_trust_path=self.ca_trust_path,
cert_pem=self.cert_pem,
cert_key_pem=self.cert_key_pem,
server_cert_validation=self.server_cert_validation,
kerberos_delegation=self.kerberos_delegation,
read_timeout_sec=self.read_timeout_sec,
operation_timeout_sec=self.operation_timeout_sec,
kerberos_hostname_override=self.kerberos_hostname_override,
message_encryption=self.message_encryption,
credssp_disable_tlsv1_2=self.credssp_disable_tlsv1_2,
send_cbt=self.send_cbt,
)
self.log.info("Establishing WinRM connection to host: %s", self.remote_host)
self.client = self.winrm_protocol.open_shell()
except Exception as error:
error_msg = f"Error connecting to host: {self.remote_host}, error: {error}"
self.log.error(error_msg)
raise AirflowException(error_msg)
return self.client
| 10,523 | 46.836364 | 106 | py |
airflow | airflow-main/airflow/providers/microsoft/winrm/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/mssql/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.4.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-microsoft-mssql:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,540 | 35.690476 | 124 | py |
airflow | airflow-main/airflow/providers/microsoft/mssql/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/mssql/operators/mssql.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from typing import Sequence
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
class MsSqlOperator(SQLExecuteQueryOperator):
"""
Executes sql code in a specific Microsoft SQL database.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:MsSqlOperator`
This operator may use one of two hooks, depending on the ``conn_type`` of the connection.
If conn_type is ``'odbc'``, then :py:class:`~airflow.providers.odbc.hooks.odbc.OdbcHook`
is used. Otherwise, :py:class:`~airflow.providers.microsoft.mssql.hooks.mssql.MsSqlHook` is used.
This class is deprecated.
Please use :class:`airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
:param sql: the sql code to be executed (templated)
:param mssql_conn_id: reference to a specific mssql database
:param parameters: (optional) the parameters to render the SQL query with.
:param autocommit: if True, each command is automatically committed.
(default value: False)
:param database: name of database which overwrite defined one in connection
"""
template_fields: Sequence[str] = ("sql",)
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {"sql": "tsql"}
ui_color = "#ededed"
def __init__(
self, *, mssql_conn_id: str = "mssql_default", database: str | None = None, **kwargs
) -> None:
if database is not None:
hook_params = kwargs.pop("hook_params", {})
kwargs["hook_params"] = {"schema": database, **hook_params}
super().__init__(conn_id=mssql_conn_id, **kwargs)
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
Also, you can provide `hook_params={'schema': <database>}`.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
| 2,910 | 39.430556 | 102 | py |
airflow | airflow-main/airflow/providers/microsoft/mssql/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/microsoft/mssql/hooks/mssql.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Microsoft SQLServer hook module."""
from __future__ import annotations
from typing import Any
import pymssql
from airflow.providers.common.sql.hooks.sql import DbApiHook
class MsSqlHook(DbApiHook):
"""
Interact with Microsoft SQL Server.
:param args: passed to DBApiHook
:param sqlalchemy_scheme: Scheme sqlalchemy connection. Default is ``mssql+pymssql`` Only used for
``get_sqlalchemy_engine`` and ``get_sqlalchemy_connection`` methods.
:param kwargs: passed to DbApiHook
"""
conn_name_attr = "mssql_conn_id"
default_conn_name = "mssql_default"
conn_type = "mssql"
hook_name = "Microsoft SQL Server"
supports_autocommit = True
DEFAULT_SQLALCHEMY_SCHEME = "mssql+pymssql"
def __init__(
self,
*args,
sqlalchemy_scheme: str | None = None,
**kwargs,
) -> None:
super().__init__(*args, **kwargs)
self.schema = kwargs.pop("schema", None)
self._sqlalchemy_scheme = sqlalchemy_scheme
@property
def connection_extra_lower(self) -> dict:
"""
``connection.extra_dejson`` but where keys are converted to lower case.
This is used internally for case-insensitive access of mssql params.
"""
conn = self.get_connection(self.mssql_conn_id) # type: ignore[attr-defined]
return {k.lower(): v for k, v in conn.extra_dejson.items()}
@property
def sqlalchemy_scheme(self) -> str:
"""Sqlalchemy scheme either from constructor, connection extras or default."""
extra_scheme = self.connection_extra_lower.get("sqlalchemy_scheme")
if not self._sqlalchemy_scheme and extra_scheme and (":" in extra_scheme or "/" in extra_scheme):
raise RuntimeError("sqlalchemy_scheme in connection extra should not contain : or / characters")
return self._sqlalchemy_scheme or extra_scheme or self.DEFAULT_SQLALCHEMY_SCHEME
def get_uri(self) -> str:
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit
r = list(urlsplit(super().get_uri()))
# change pymssql driver:
r[0] = self.sqlalchemy_scheme
# remove query string 'sqlalchemy_scheme' like parameters:
qs = parse_qs(r[3], keep_blank_values=True)
for k in list(qs.keys()):
if k.lower() == "sqlalchemy_scheme":
qs.pop(k, None)
r[3] = urlencode(qs, doseq=True)
return urlunsplit(r)
def get_sqlalchemy_connection(
self, connect_kwargs: dict | None = None, engine_kwargs: dict | None = None
) -> Any:
"""Sqlalchemy connection object."""
engine = self.get_sqlalchemy_engine(engine_kwargs=engine_kwargs)
return engine.connect(**(connect_kwargs or {}))
def get_conn(self) -> pymssql.connect:
"""Returns a mssql connection object."""
conn = self.get_connection(self.mssql_conn_id) # type: ignore[attr-defined]
conn = pymssql.connect(
server=conn.host,
user=conn.login,
password=conn.password,
database=self.schema or conn.schema,
port=conn.port,
)
return conn
def set_autocommit(
self,
conn: pymssql.connect,
autocommit: bool,
) -> None:
conn.autocommit(autocommit)
def get_autocommit(self, conn: pymssql.connect):
return conn.autocommit_state
| 4,199 | 35.206897 | 108 | py |
airflow | airflow-main/airflow/providers/sqlite/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.4.2"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-sqlite:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,531 | 35.47619 | 115 | py |
airflow | airflow-main/airflow/providers/sqlite/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/sqlite/operators/sqlite.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from typing import Sequence
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
class SqliteOperator(SQLExecuteQueryOperator):
"""
Executes sql code in a specific Sqlite database.
This class is deprecated.
Please use :class:`airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:SqliteOperator`
:param sql: the sql code to be executed. Can receive a str representing a
sql statement, a list of str (sql statements), or reference to a template file.
Template reference are recognized by str ending in '.sql'
(templated)
:param sqlite_conn_id: reference to a specific sqlite database
:param parameters: (optional) the parameters to render the SQL query with.
"""
template_fields: Sequence[str] = ("sql",)
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {"sql": "sql"}
ui_color = "#cdaaed"
def __init__(self, *, sqlite_conn_id: str = "sqlite_default", **kwargs) -> None:
super().__init__(conn_id=sqlite_conn_id, **kwargs)
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
| 2,352 | 38.216667 | 96 | py |
airflow | airflow-main/airflow/providers/sqlite/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/sqlite/hooks/sqlite.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sqlite3
from urllib.parse import unquote
from airflow.providers.common.sql.hooks.sql import DbApiHook
class SqliteHook(DbApiHook):
"""Interact with SQLite."""
conn_name_attr = "sqlite_conn_id"
default_conn_name = "sqlite_default"
conn_type = "sqlite"
hook_name = "Sqlite"
placeholder = "?"
def get_conn(self) -> sqlite3.dbapi2.Connection:
"""Returns a sqlite connection object."""
sqlalchemy_uri = self.get_uri()
# The sqlite3 connection does not use the sqlite scheme.
# See https://docs.sqlalchemy.org/en/14/dialects/sqlite.html#uri-connections for details.
sqlite_uri = sqlalchemy_uri.replace("sqlite:///", "file:")
conn = sqlite3.connect(sqlite_uri, uri=True)
return conn
def get_uri(self) -> str:
"""Override DbApiHook get_uri method for get_sqlalchemy_engine()."""
conn_id = getattr(self, self.conn_name_attr)
airflow_conn = self.get_connection(conn_id)
if airflow_conn.conn_type is None:
airflow_conn.conn_type = self.conn_type
airflow_uri = unquote(airflow_conn.get_uri())
# For sqlite, there is no schema in the connection URI. So we need to drop the trailing slash.
airflow_sqlite_uri = airflow_uri.replace("/?", "?")
# The sqlite connection has one more slash for path specification.
# See https://docs.sqlalchemy.org/en/14/dialects/sqlite.html#connect-strings for details.
sqlalchemy_uri = airflow_sqlite_uri.replace("sqlite://", "sqlite:///")
return sqlalchemy_uri
| 2,402 | 41.157895 | 102 | py |
airflow | airflow-main/airflow/providers/openlineage/sqlparser.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Callable
import sqlparse
from attrs import define
from airflow.providers.openlineage.extractors.base import OperatorLineage
from airflow.providers.openlineage.utils.sql import (
TablesHierarchy,
create_information_schema_query,
get_table_schemas,
)
from airflow.typing_compat import TypedDict
from openlineage.client.facet import BaseFacet, ExtractionError, ExtractionErrorRunFacet, SqlJobFacet
from openlineage.client.run import Dataset
from openlineage.common.sql import DbTableMeta, SqlMeta, parse
if TYPE_CHECKING:
from sqlalchemy.engine import Engine
from airflow.hooks.base import BaseHook
DEFAULT_NAMESPACE = "default"
DEFAULT_INFORMATION_SCHEMA_COLUMNS = [
"table_schema",
"table_name",
"column_name",
"ordinal_position",
"udt_name",
]
DEFAULT_INFORMATION_SCHEMA_TABLE_NAME = "information_schema.columns"
def default_normalize_name_method(name: str) -> str:
return name.lower()
class GetTableSchemasParams(TypedDict):
"""get_table_schemas params."""
normalize_name: Callable[[str], str]
is_cross_db: bool
information_schema_columns: list[str]
information_schema_table: str
is_uppercase_names: bool
database: str | None
@define
class DatabaseInfo:
"""
Contains database specific information needed to process SQL statement parse result.
:param scheme: Scheme part of URI in OpenLineage namespace.
:param authority: Authority part of URI in OpenLineage namespace.
For most cases it should return `{host}:{port}` part of Airflow connection.
See: https://github.com/OpenLineage/OpenLineage/blob/main/spec/Naming.md
:param database: Takes precedence over parsed database name.
:param information_schema_columns: List of columns names from information schema table.
:param information_schema_table_name: Information schema table name.
:param is_information_schema_cross_db: Specifies if information schema contains
cross-database data.
:param is_uppercase_names: Specifies if database accepts only uppercase names (e.g. Snowflake).
:param normalize_name_method: Method to normalize database, schema and table names.
Defaults to `name.lower()`.
"""
scheme: str
authority: str | None = None
database: str | None = None
information_schema_columns: list[str] = DEFAULT_INFORMATION_SCHEMA_COLUMNS
information_schema_table_name: str = DEFAULT_INFORMATION_SCHEMA_TABLE_NAME
is_information_schema_cross_db: bool = False
is_uppercase_names: bool = False
normalize_name_method: Callable[[str], str] = default_normalize_name_method
class SQLParser:
"""Interface for openlineage-sql.
:param dialect: dialect specific to the database
:param default_schema: schema applied to each table with no schema parsed
"""
def __init__(self, dialect: str | None = None, default_schema: str | None = None) -> None:
self.dialect = dialect
self.default_schema = default_schema
def parse(self, sql: list[str] | str) -> SqlMeta | None:
"""Parse a single or a list of SQL statements."""
return parse(sql=sql, dialect=self.dialect)
def parse_table_schemas(
self,
hook: BaseHook,
inputs: list[DbTableMeta],
outputs: list[DbTableMeta],
database_info: DatabaseInfo,
namespace: str = DEFAULT_NAMESPACE,
database: str | None = None,
sqlalchemy_engine: Engine | None = None,
) -> tuple[list[Dataset], ...]:
"""Parse schemas for input and output tables."""
database_kwargs: GetTableSchemasParams = {
"normalize_name": database_info.normalize_name_method,
"is_cross_db": database_info.is_information_schema_cross_db,
"information_schema_columns": database_info.information_schema_columns,
"information_schema_table": database_info.information_schema_table_name,
"is_uppercase_names": database_info.is_uppercase_names,
"database": database or database_info.database,
}
return get_table_schemas(
hook,
namespace,
self.default_schema,
database or database_info.database,
self.create_information_schema_query(
tables=inputs, sqlalchemy_engine=sqlalchemy_engine, **database_kwargs
)
if inputs
else None,
self.create_information_schema_query(
tables=outputs, sqlalchemy_engine=sqlalchemy_engine, **database_kwargs
)
if outputs
else None,
)
def generate_openlineage_metadata_from_sql(
self,
sql: list[str] | str,
hook: BaseHook,
database_info: DatabaseInfo,
database: str | None = None,
sqlalchemy_engine: Engine | None = None,
) -> OperatorLineage:
"""Parses SQL statement(s) and generates OpenLineage metadata.
Generated OpenLineage metadata contains:
* input tables with schemas parsed
* output tables with schemas parsed
* run facets
* job facets.
:param sql: a SQL statement or list of SQL statement to be parsed
:param hook: Airflow Hook used to connect to the database
:param database_info: database specific information
:param database: when passed it takes precedence over parsed database name
:param sqlalchemy_engine: when passed, engine's dialect is used to compile SQL queries
"""
job_facets: dict[str, BaseFacet] = {"sql": SqlJobFacet(query=self.normalize_sql(sql))}
parse_result = self.parse(self.split_sql_string(sql))
if not parse_result:
return OperatorLineage(job_facets=job_facets)
run_facets: dict[str, BaseFacet] = {}
if parse_result.errors:
run_facets["extractionError"] = ExtractionErrorRunFacet(
totalTasks=len(sql) if isinstance(sql, list) else 1,
failedTasks=len(parse_result.errors),
errors=[
ExtractionError(
errorMessage=error.message,
stackTrace=None,
task=error.origin_statement,
taskNumber=error.index,
)
for error in parse_result.errors
],
)
namespace = self.create_namespace(database_info=database_info)
inputs, outputs = self.parse_table_schemas(
hook=hook,
inputs=parse_result.in_tables,
outputs=parse_result.out_tables,
namespace=namespace,
database=database,
database_info=database_info,
sqlalchemy_engine=sqlalchemy_engine,
)
return OperatorLineage(
inputs=inputs,
outputs=outputs,
run_facets=run_facets,
job_facets=job_facets,
)
@staticmethod
def create_namespace(database_info: DatabaseInfo) -> str:
return (
f"{database_info.scheme}://{database_info.authority}"
if database_info.authority
else database_info.scheme
)
@classmethod
def normalize_sql(cls, sql: list[str] | str) -> str:
"""Makes sure to return a semicolon-separated SQL statements."""
return ";\n".join(stmt.rstrip(" ;\r\n") for stmt in cls.split_sql_string(sql))
@classmethod
def split_sql_string(cls, sql: list[str] | str) -> list[str]:
"""
Split SQL string into list of statements.
Tries to use `DbApiHook.split_sql_string` if available.
Otherwise, uses the same logic.
"""
try:
from airflow.providers.common.sql.hooks.sql import DbApiHook
split_statement = DbApiHook.split_sql_string
except (ImportError, AttributeError):
# No common.sql Airflow provider available or version is too old.
def split_statement(sql: str) -> list[str]:
splits = sqlparse.split(sqlparse.format(sql, strip_comments=True))
return [s for s in splits if s]
if isinstance(sql, str):
return split_statement(sql)
return [obj for stmt in sql for obj in cls.split_sql_string(stmt) if obj != ""]
@classmethod
def create_information_schema_query(
cls,
tables: list[DbTableMeta],
normalize_name: Callable[[str], str],
is_cross_db: bool,
information_schema_columns,
information_schema_table,
is_uppercase_names,
database: str | None = None,
sqlalchemy_engine: Engine | None = None,
) -> str:
"""Creates SELECT statement to query information schema table."""
tables_hierarchy = cls._get_tables_hierarchy(
tables,
normalize_name=normalize_name,
database=database,
is_cross_db=is_cross_db,
)
return create_information_schema_query(
columns=information_schema_columns,
information_schema_table_name=information_schema_table,
tables_hierarchy=tables_hierarchy,
uppercase_names=is_uppercase_names,
sqlalchemy_engine=sqlalchemy_engine,
)
@staticmethod
def _get_tables_hierarchy(
tables: list[DbTableMeta],
normalize_name: Callable[[str], str],
database: str | None = None,
is_cross_db: bool = False,
) -> TablesHierarchy:
"""
Creates a hierarchy of database -> schema -> table name.
This helps to create simpler information schema query grouped by
database and schema.
:param tables: List of tables.
:param normalize_name: A method to normalize all names.
:param is_cross_db: If false, set top (database) level to None
when creating hierarchy.
"""
hierarchy: TablesHierarchy = {}
for table in tables:
if is_cross_db:
db = table.database or database
else:
db = None
schemas = hierarchy.setdefault(normalize_name(db) if db else db, {})
tables = schemas.setdefault(normalize_name(table.schema) if table.schema else db, [])
tables.append(table.name)
return hierarchy
| 11,218 | 36.774411 | 101 | py |
airflow | airflow-main/airflow/providers/openlineage/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "1.0.0"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.6.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-openlineage:{__version__}` requires Apache Airflow 2.6.0+"
)
| 1,522 | 35.261905 | 106 | py |
airflow | airflow-main/airflow/providers/openlineage/extractors/base.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from abc import ABC, abstractmethod
from attrs import Factory, define
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.state import TaskInstanceState
from openlineage.client.facet import BaseFacet
from openlineage.client.run import Dataset
@define
class OperatorLineage:
"""Structure returned from lineage extraction."""
inputs: list[Dataset] = Factory(list)
outputs: list[Dataset] = Factory(list)
run_facets: dict[str, BaseFacet] = Factory(dict)
job_facets: dict[str, BaseFacet] = Factory(dict)
class BaseExtractor(ABC, LoggingMixin):
"""Abstract base extractor class.
This is used mostly to maintain support for custom extractors.
"""
_allowed_query_params: list[str] = []
def __init__(self, operator): # type: ignore
super().__init__()
self.operator = operator
@classmethod
@abstractmethod
def get_operator_classnames(cls) -> list[str]:
"""Get a list of operators that extractor works for.
This is an abstract method that subclasses should implement. There are
operators that work very similarly and one extractor can cover.
"""
raise NotImplementedError()
def validate(self):
assert self.operator.task_type in self.get_operator_classnames()
@abstractmethod
def extract(self) -> OperatorLineage | None:
pass
def extract_on_complete(self, task_instance) -> OperatorLineage | None:
return self.extract()
class DefaultExtractor(BaseExtractor):
"""Extractor that uses `get_openlineage_facets_on_start/complete/failure` methods."""
@classmethod
def get_operator_classnames(cls) -> list[str]:
"""Assign this extractor to *no* operators.
Default extractor is chosen not on the classname basis, but
by existence of get_openlineage_facets method on operator.
"""
return []
def extract(self) -> OperatorLineage | None:
try:
return self._get_openlineage_facets(self.operator.get_openlineage_facets_on_start) # type: ignore
except AttributeError:
return None
def extract_on_complete(self, task_instance) -> OperatorLineage | None:
if task_instance.state == TaskInstanceState.FAILED:
on_failed = getattr(self.operator, "get_openlineage_facets_on_failure", None)
if on_failed and callable(on_failed):
return self._get_openlineage_facets(on_failed, task_instance)
on_complete = getattr(self.operator, "get_openlineage_facets_on_complete", None)
if on_complete and callable(on_complete):
return self._get_openlineage_facets(on_complete, task_instance)
return self.extract()
def _get_openlineage_facets(self, get_facets_method, *args) -> OperatorLineage | None:
try:
facets = get_facets_method(*args)
except ImportError:
self.log.exception(
"OpenLineage provider method failed to import OpenLineage integration. "
"This should not happen."
)
except Exception:
self.log.exception("OpenLineage provider method failed to extract data from provider. ")
else:
return OperatorLineage(
inputs=facets.inputs,
outputs=facets.outputs,
run_facets=facets.run_facets,
job_facets=facets.job_facets,
)
return None
| 4,302 | 35.159664 | 110 | py |
airflow | airflow-main/airflow/providers/openlineage/extractors/python.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import inspect
from typing import Callable
from airflow.providers.openlineage.extractors.base import BaseExtractor, OperatorLineage
from airflow.providers.openlineage.plugins.facets import (
UnknownOperatorAttributeRunFacet,
UnknownOperatorInstance,
)
from airflow.providers.openlineage.utils.utils import get_filtered_unknown_operator_keys, is_source_enabled
from openlineage.client.facet import SourceCodeJobFacet
"""
:meta private:
"""
class PythonExtractor(BaseExtractor):
"""
Extract executed source code and put it into SourceCodeJobFacet.
This extractor provides visibility on what particular task does by extracting
executed source code and putting it into SourceCodeJobFacet. It does not extract
datasets yet.
:meta private:
"""
@classmethod
def get_operator_classnames(cls) -> list[str]:
return ["PythonOperator"]
def extract(self) -> OperatorLineage | None:
source_code = self.get_source_code(self.operator.python_callable)
job_facet: dict = {}
if is_source_enabled() and source_code:
job_facet = {
"sourceCode": SourceCodeJobFacet(
language="python",
# We're on worker and should have access to DAG files
source=source_code,
)
}
return OperatorLineage(
job_facets=job_facet,
run_facets={
# The PythonOperator is recorded as an "unknownSource" even though we have an
# extractor, as the data lineage cannot be determined from the operator
# directly.
"unknownSourceAttribute": UnknownOperatorAttributeRunFacet(
unknownItems=[
UnknownOperatorInstance(
name="PythonOperator",
properties=get_filtered_unknown_operator_keys(self.operator),
)
]
)
},
)
def get_source_code(self, callable: Callable) -> str | None:
try:
return inspect.getsource(callable)
except TypeError:
# Trying to extract source code of builtin_function_or_method
return str(callable)
except OSError:
self.log.exception("Can't get source code facet of PythonOperator %s", self.operator.task_id)
return None
| 3,269 | 36.159091 | 107 | py |
airflow | airflow-main/airflow/providers/openlineage/extractors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.openlineage.extractors.base import BaseExtractor, OperatorLineage
from airflow.providers.openlineage.extractors.manager import ExtractorManager
"""
:meta private:
"""
__all__ = ["BaseExtractor", "OperatorLineage", "ExtractorManager"]
| 1,081 | 37.642857 | 88 | py |
airflow | airflow-main/airflow/providers/openlineage/extractors/manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from contextlib import suppress
from typing import TYPE_CHECKING
from airflow.configuration import conf
from airflow.providers.openlineage.extractors import BaseExtractor, OperatorLineage
from airflow.providers.openlineage.extractors.base import DefaultExtractor
from airflow.providers.openlineage.extractors.bash import BashExtractor
from airflow.providers.openlineage.extractors.python import PythonExtractor
from airflow.providers.openlineage.plugins.facets import (
UnknownOperatorAttributeRunFacet,
UnknownOperatorInstance,
)
from airflow.providers.openlineage.utils.utils import get_filtered_unknown_operator_keys
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.module_loading import import_string
if TYPE_CHECKING:
from airflow.models import Operator
def try_import_from_string(string):
with suppress(ImportError):
return import_string(string)
_extractors: list[type[BaseExtractor]] = list(
filter(
lambda t: t is not None,
[
PythonExtractor,
BashExtractor,
],
)
)
class ExtractorManager(LoggingMixin):
"""Class abstracting management of custom extractors."""
def __init__(self):
super().__init__()
self.extractors: dict[str, type[BaseExtractor]] = {}
self.default_extractor = DefaultExtractor
# Comma-separated extractors in OPENLINEAGE_EXTRACTORS variable.
# Extractors should implement BaseExtractor
for extractor in _extractors:
for operator_class in extractor.get_operator_classnames():
self.extractors[operator_class] = extractor
env_extractors = conf.get("openlinege", "extractors", fallback=os.getenv("OPENLINEAGE_EXTRACTORS"))
if env_extractors is not None:
for extractor in env_extractors.split(";"):
extractor: type[BaseExtractor] = try_import_from_string(extractor.strip())
for operator_class in extractor.get_operator_classnames():
self.extractors[operator_class] = extractor
def add_extractor(self, operator_class: str, extractor: type[BaseExtractor]):
self.extractors[operator_class] = extractor
def extract_metadata(self, dagrun, task, complete: bool = False, task_instance=None) -> OperatorLineage:
extractor = self._get_extractor(task)
task_info = (
f"task_type={task.task_type} "
f"airflow_dag_id={task.dag_id} "
f"task_id={task.task_id} "
f"airflow_run_id={dagrun.run_id} "
)
if extractor:
# Extracting advanced metadata is only possible when extractor for particular operator
# is defined. Without it, we can't extract any input or output data.
try:
self.log.debug("Using extractor %s %s", extractor.__class__.__name__, str(task_info))
if complete:
task_metadata = extractor.extract_on_complete(task_instance)
else:
task_metadata = extractor.extract()
self.log.debug("Found task metadata for operation %s: %s", task.task_id, str(task_metadata))
task_metadata = self.validate_task_metadata(task_metadata)
if task_metadata:
if (not task_metadata.inputs) and (not task_metadata.outputs):
self.extract_inlets_and_outlets(task_metadata, task.inlets, task.outlets)
return task_metadata
except Exception as e:
self.log.exception(
"Failed to extract metadata using found extractor %s - %s %s", extractor, e, task_info
)
else:
self.log.debug("Unable to find an extractor %s", task_info)
# Only include the unkonwnSourceAttribute facet if there is no extractor
task_metadata = OperatorLineage(
run_facets={
"unknownSourceAttribute": UnknownOperatorAttributeRunFacet(
unknownItems=[
UnknownOperatorInstance(
name=task.task_type,
properties=get_filtered_unknown_operator_keys(task),
)
]
)
},
)
inlets = task.get_inlet_defs()
outlets = task.get_outlet_defs()
self.extract_inlets_and_outlets(task_metadata, inlets, outlets)
return task_metadata
return OperatorLineage()
def get_extractor_class(self, task: Operator) -> type[BaseExtractor] | None:
if task.task_type in self.extractors:
return self.extractors[task.task_type]
def method_exists(method_name):
method = getattr(task, method_name, None)
if method:
return callable(method)
if method_exists("get_openlineage_facets_on_start") or method_exists(
"get_openlineage_facets_on_complete"
):
return self.default_extractor
return None
def _get_extractor(self, task: Operator) -> BaseExtractor | None:
# TODO: Re-enable in Extractor PR
# self.instantiate_abstract_extractors(task)
extractor = self.get_extractor_class(task)
self.log.debug("extractor for %s is %s", task.task_type, extractor)
if extractor:
return extractor(task)
return None
def extract_inlets_and_outlets(
self,
task_metadata: OperatorLineage,
inlets: list,
outlets: list,
):
self.log.debug("Manually extracting lineage metadata from inlets and outlets")
for i in inlets:
d = self.convert_to_ol_dataset(i)
if d:
task_metadata.inputs.append(d)
for o in outlets:
d = self.convert_to_ol_dataset(o)
if d:
task_metadata.outputs.append(d)
@staticmethod
def convert_to_ol_dataset(obj):
from airflow.lineage.entities import Table
from openlineage.client.run import Dataset
if isinstance(obj, Dataset):
return obj
elif isinstance(obj, Table):
return Dataset(
namespace=f"{obj.cluster}",
name=f"{obj.database}.{obj.name}",
facets={},
)
else:
return None
def validate_task_metadata(self, task_metadata) -> OperatorLineage | None:
try:
return OperatorLineage(
inputs=task_metadata.inputs,
outputs=task_metadata.outputs,
run_facets=task_metadata.run_facets,
job_facets=task_metadata.job_facets,
)
except AttributeError:
self.log.error("Extractor returns non-valid metadata: %s", task_metadata)
return None
| 7,805 | 37.835821 | 108 | py |
airflow | airflow-main/airflow/providers/openlineage/extractors/bash.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.openlineage.extractors.base import BaseExtractor, OperatorLineage
from airflow.providers.openlineage.plugins.facets import (
UnknownOperatorAttributeRunFacet,
UnknownOperatorInstance,
)
from airflow.providers.openlineage.utils.utils import get_filtered_unknown_operator_keys, is_source_enabled
from openlineage.client.facet import SourceCodeJobFacet
"""
:meta private:
"""
class BashExtractor(BaseExtractor):
"""
Extract executed bash command and put it into SourceCodeJobFacet.
This extractor provides visibility on what bash task does by extracting
executed bash command and putting it into SourceCodeJobFacet. It does
not extract datasets.
:meta private:
"""
@classmethod
def get_operator_classnames(cls) -> list[str]:
return ["BashOperator"]
def extract(self) -> OperatorLineage | None:
job_facets: dict = {}
if is_source_enabled():
job_facets = {
"sourceCode": SourceCodeJobFacet(
language="bash",
# We're on worker and should have access to DAG files
source=self.operator.bash_command,
)
}
return OperatorLineage(
job_facets=job_facets,
run_facets={
# The BashOperator is recorded as an "unknownSource" even though we have an
# extractor, as the <i>data lineage</i> cannot be determined from the operator
# directly.
"unknownSourceAttribute": UnknownOperatorAttributeRunFacet(
unknownItems=[
UnknownOperatorInstance(
name="BashOperator",
properties=get_filtered_unknown_operator_keys(self.operator),
)
]
)
},
)
| 2,734 | 35.466667 | 107 | py |
airflow | airflow-main/airflow/providers/openlineage/plugins/macros.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import typing
from airflow.configuration import conf
from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
if typing.TYPE_CHECKING:
from airflow.models import TaskInstance
_JOB_NAMESPACE = conf.get("openlineage", "namespace", fallback=os.getenv("OPENLINEAGE_NAMESPACE", "default"))
def lineage_run_id(task_instance: TaskInstance):
"""
Macro function which returns the generated run id for a given task.
This can be used to forward the run id from a task to a child run so the job hierarchy is preserved.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/macros:openlineage`
"""
return OpenLineageAdapter.build_task_instance_run_id(
task_instance.task.task_id, task_instance.execution_date, task_instance.try_number
)
def lineage_parent_id(run_id: str, task_instance: TaskInstance):
"""
Macro function which returns the generated job and run id for a given task.
This can be used to forward the ids from a task to a child run so the job
hierarchy is preserved. Child run can create ParentRunFacet from those ids.
.. seealso::
For more information on how to use this macro, take a look at the guide:
:ref:`howto/macros:openlineage`
"""
job_name = OpenLineageAdapter.build_task_instance_run_id(
task_instance.task.task_id, task_instance.execution_date, task_instance.try_number
)
return f"{_JOB_NAMESPACE}/{job_name}/{run_id}"
| 2,356 | 37.639344 | 109 | py |
airflow | airflow-main/airflow/providers/openlineage/plugins/listener.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from concurrent.futures import Executor, ThreadPoolExecutor
from typing import TYPE_CHECKING
from airflow.listeners import hookimpl
from airflow.providers.openlineage.extractors import ExtractorManager
from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
from airflow.providers.openlineage.utils.utils import (
get_airflow_run_facet,
get_custom_facets,
get_job_name,
print_exception,
)
from airflow.utils.timeout import timeout
if TYPE_CHECKING:
from sqlalchemy.orm import Session
from airflow.models import DagRun, TaskInstance
class OpenLineageListener:
"""OpenLineage listener sends events on task instance and dag run starts, completes and failures."""
def __init__(self):
self.log = logging.getLogger(__name__)
self.executor: Executor = None # type: ignore
self.extractor_manager = ExtractorManager()
self.adapter = OpenLineageAdapter()
@hookimpl
def on_task_instance_running(
self, previous_state, task_instance: TaskInstance, session: Session # This will always be QUEUED
):
if not hasattr(task_instance, "task"):
self.log.warning(
f"No task set for TI object task_id: {task_instance.task_id} - "
f"dag_id: {task_instance.dag_id} - run_id {task_instance.run_id}"
)
return
self.log.debug("OpenLineage listener got notification about task instance start")
dagrun = task_instance.dag_run
task = task_instance.task
dag = task.dag
@print_exception
def on_running():
# that's a workaround to detect task running from deferred state
# we return here because Airflow 2.3 needs task from deferred state
if task_instance.next_method is not None:
return
parent_run_id = self.adapter.build_dag_run_id(dag.dag_id, dagrun.run_id)
task_uuid = self.adapter.build_task_instance_run_id(
task.task_id, task_instance.execution_date, task_instance.try_number
)
task_metadata = self.extractor_manager.extract_metadata(dagrun, task)
self.adapter.start_task(
run_id=task_uuid,
job_name=get_job_name(task),
job_description=dag.description,
event_time=task_instance.start_date.isoformat(),
parent_job_name=dag.dag_id,
parent_run_id=parent_run_id,
code_location=None,
nominal_start_time=dagrun.data_interval_start.isoformat(),
nominal_end_time=dagrun.data_interval_end.isoformat(),
owners=dag.owner.split(", "),
task=task_metadata,
run_facets={
**task_metadata.run_facets,
**get_custom_facets(task_instance),
**get_airflow_run_facet(dagrun, dag, task_instance, task, task_uuid),
},
)
self.executor.submit(on_running)
@hookimpl
def on_task_instance_success(self, previous_state, task_instance: TaskInstance, session):
self.log.debug("OpenLineage listener got notification about task instance success")
dagrun = task_instance.dag_run
task = task_instance.task
task_uuid = OpenLineageAdapter.build_task_instance_run_id(
task.task_id, task_instance.execution_date, task_instance.try_number - 1
)
@print_exception
def on_success():
task_metadata = self.extractor_manager.extract_metadata(
dagrun, task, complete=True, task_instance=task_instance
)
self.adapter.complete_task(
run_id=task_uuid,
job_name=get_job_name(task),
end_time=task_instance.end_date.isoformat(),
task=task_metadata,
)
self.executor.submit(on_success)
@hookimpl
def on_task_instance_failed(self, previous_state, task_instance: TaskInstance, session):
self.log.debug("OpenLineage listener got notification about task instance failure")
dagrun = task_instance.dag_run
task = task_instance.task
task_uuid = OpenLineageAdapter.build_task_instance_run_id(
task.task_id, task_instance.execution_date, task_instance.try_number - 1
)
@print_exception
def on_failure():
task_metadata = self.extractor_manager.extract_metadata(
dagrun, task, complete=True, task_instance=task_instance
)
self.adapter.fail_task(
run_id=task_uuid,
job_name=get_job_name(task),
end_time=task_instance.end_date.isoformat(),
task=task_metadata,
)
self.executor.submit(on_failure)
@hookimpl
def on_starting(self, component):
self.log.debug("on_starting: %s", component.__class__.__name__)
self.executor = ThreadPoolExecutor(max_workers=8, thread_name_prefix="openlineage_")
@hookimpl
def before_stopping(self, component):
self.log.debug("before_stopping: %s", component.__class__.__name__)
# TODO: configure this with Airflow config
with timeout(30):
self.executor.shutdown(wait=True)
@hookimpl
def on_dag_run_running(self, dag_run: DagRun, msg: str):
if not self.executor:
self.log.error("Executor have not started before `on_dag_run_running`")
return
self.executor.submit(
self.adapter.dag_started,
dag_run=dag_run,
msg=msg,
nominal_start_time=dag_run.data_interval_start.isoformat(),
nominal_end_time=dag_run.data_interval_end.isoformat(),
)
@hookimpl
def on_dag_run_success(self, dag_run: DagRun, msg: str):
if not self.executor:
self.log.error("Executor have not started before `on_dag_run_success`")
return
self.executor.submit(self.adapter.dag_success, dag_run=dag_run, msg=msg)
@hookimpl
def on_dag_run_failed(self, dag_run: DagRun, msg: str):
if not self.executor:
self.log.error("Executor have not started before `on_dag_run_failed`")
return
self.executor.submit(self.adapter.dag_failed, dag_run=dag_run, msg=msg)
| 7,256 | 37.396825 | 105 | py |
airflow | airflow-main/airflow/providers/openlineage/plugins/adapter.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import uuid
from typing import TYPE_CHECKING
import requests.exceptions
import yaml
from airflow.configuration import conf
from airflow.providers.openlineage import __version__ as OPENLINEAGE_PROVIDER_VERSION
from airflow.providers.openlineage.extractors import OperatorLineage
from airflow.providers.openlineage.utils.utils import OpenLineageRedactor
from airflow.utils.log.logging_mixin import LoggingMixin
from openlineage.client import OpenLineageClient, set_producer
from openlineage.client.facet import (
BaseFacet,
DocumentationJobFacet,
ErrorMessageRunFacet,
NominalTimeRunFacet,
OwnershipJobFacet,
OwnershipJobFacetOwners,
ParentRunFacet,
ProcessingEngineRunFacet,
SourceCodeLocationJobFacet,
)
from openlineage.client.run import Job, Run, RunEvent, RunState
if TYPE_CHECKING:
from airflow.models.dagrun import DagRun
from airflow.utils.log.secrets_masker import SecretsMasker
_DAG_DEFAULT_NAMESPACE = "default"
_DAG_NAMESPACE = conf.get(
"openlineage", "namespace", fallback=os.getenv("OPENLINEAGE_NAMESPACE", _DAG_DEFAULT_NAMESPACE)
)
_PRODUCER = f"https://github.com/apache/airflow/tree/providers-openlineage/{OPENLINEAGE_PROVIDER_VERSION}"
set_producer(_PRODUCER)
class OpenLineageAdapter(LoggingMixin):
"""Translate Airflow metadata to OpenLineage events instead of creating them from Airflow code."""
def __init__(self, client: OpenLineageClient | None = None, secrets_masker: SecretsMasker | None = None):
super().__init__()
self._client = client
if not secrets_masker:
from airflow.utils.log.secrets_masker import _secrets_masker
secrets_masker = _secrets_masker()
self._redacter = OpenLineageRedactor.from_masker(secrets_masker)
def get_or_create_openlineage_client(self) -> OpenLineageClient:
if not self._client:
config = self.get_openlineage_config()
if config:
self._client = OpenLineageClient.from_dict(config=config)
else:
self._client = OpenLineageClient.from_environment()
return self._client
def get_openlineage_config(self) -> dict | None:
# First, try to read from YAML file
openlineage_config_path = conf.get("openlineage", "config_path")
if openlineage_config_path:
config = self._read_yaml_config(openlineage_config_path)
if config:
return config.get("transport", None)
# Second, try to get transport config
transport = conf.getjson("openlineage", "transport")
if not transport:
return None
elif not isinstance(transport, dict):
raise ValueError(f"{transport} is not a dict")
return transport
def _read_yaml_config(self, path: str) -> dict | None:
with open(path) as config_file:
return yaml.safe_load(config_file)
def build_dag_run_id(self, dag_id, dag_run_id):
return str(uuid.uuid3(uuid.NAMESPACE_URL, f"{_DAG_NAMESPACE}.{dag_id}.{dag_run_id}"))
@staticmethod
def build_task_instance_run_id(task_id, execution_date, try_number):
return str(
uuid.uuid3(
uuid.NAMESPACE_URL,
f"{_DAG_NAMESPACE}.{task_id}.{execution_date}.{try_number}",
)
)
def emit(self, event: RunEvent):
if not self._client:
self._client = self.get_or_create_openlineage_client()
redacted_event: RunEvent = self._redacter.redact(event, max_depth=20) # type: ignore[assignment]
try:
return self._client.emit(redacted_event)
except requests.exceptions.RequestException:
self.log.exception(f"Failed to emit OpenLineage event of id {event.run.runId}")
def start_task(
self,
run_id: str,
job_name: str,
job_description: str,
event_time: str,
parent_job_name: str | None,
parent_run_id: str | None,
code_location: str | None,
nominal_start_time: str,
nominal_end_time: str,
owners: list[str],
task: OperatorLineage | None,
run_facets: dict[str, BaseFacet] | None = None, # Custom run facets
):
"""
Emits openlineage event of type START.
:param run_id: globally unique identifier of task in dag run
:param job_name: globally unique identifier of task in dag
:param job_description: user provided description of job
:param event_time:
:param parent_job_name: the name of the parent job (typically the DAG,
but possibly a task group)
:param parent_run_id: identifier of job spawning this task
:param code_location: file path or URL of DAG file
:param nominal_start_time: scheduled time of dag run
:param nominal_end_time: following schedule of dag run
:param owners: list of owners of DAG
:param task: metadata container with information extracted from operator
:param run_facets: custom run facets
"""
from airflow.version import version as AIRFLOW_VERSION
processing_engine_version_facet = ProcessingEngineRunFacet(
version=AIRFLOW_VERSION,
name="Airflow",
openlineageAdapterVersion=OPENLINEAGE_PROVIDER_VERSION,
)
if not run_facets:
run_facets = {}
run_facets["processing_engine"] = processing_engine_version_facet # type: ignore
event = RunEvent(
eventType=RunState.START,
eventTime=event_time,
run=self._build_run(
run_id,
job_name,
parent_job_name,
parent_run_id,
nominal_start_time,
nominal_end_time,
run_facets=run_facets,
),
job=self._build_job(
job_name=job_name,
job_description=job_description,
code_location=code_location,
owners=owners,
job_facets=task.job_facets if task else None,
),
inputs=task.inputs if task else [],
outputs=task.outputs if task else [],
producer=_PRODUCER,
)
self.emit(event)
def complete_task(self, run_id: str, job_name: str, end_time: str, task: OperatorLineage):
"""
Emits openlineage event of type COMPLETE.
:param run_id: globally unique identifier of task in dag run
:param job_name: globally unique identifier of task between dags
:param end_time: time of task completion
:param task: metadata container with information extracted from operator
"""
event = RunEvent(
eventType=RunState.COMPLETE,
eventTime=end_time,
run=self._build_run(run_id, job_name=job_name, run_facets=task.run_facets),
job=self._build_job(job_name, job_facets=task.job_facets),
inputs=task.inputs,
outputs=task.outputs,
producer=_PRODUCER,
)
self.emit(event)
def fail_task(self, run_id: str, job_name: str, end_time: str, task: OperatorLineage):
"""
Emits openlineage event of type FAIL.
:param run_id: globally unique identifier of task in dag run
:param job_name: globally unique identifier of task between dags
:param end_time: time of task completion
:param task: metadata container with information extracted from operator
"""
event = RunEvent(
eventType=RunState.FAIL,
eventTime=end_time,
run=self._build_run(run_id, job_name=job_name, run_facets=task.run_facets),
job=self._build_job(job_name),
inputs=task.inputs,
outputs=task.outputs,
producer=_PRODUCER,
)
self.emit(event)
def dag_started(
self,
dag_run: DagRun,
msg: str,
nominal_start_time: str,
nominal_end_time: str,
):
event = RunEvent(
eventType=RunState.START,
eventTime=dag_run.start_date.isoformat(),
job=Job(name=dag_run.dag_id, namespace=_DAG_NAMESPACE),
run=self._build_run(
run_id=self.build_dag_run_id(dag_run.dag_id, dag_run.run_id),
job_name=dag_run.dag_id,
nominal_start_time=nominal_start_time,
nominal_end_time=nominal_end_time,
),
inputs=[],
outputs=[],
producer=_PRODUCER,
)
self.emit(event)
def dag_success(self, dag_run: DagRun, msg: str):
event = RunEvent(
eventType=RunState.COMPLETE,
eventTime=dag_run.end_date.isoformat(),
job=Job(name=dag_run.dag_id, namespace=_DAG_NAMESPACE),
run=Run(runId=self.build_dag_run_id(dag_run.dag_id, dag_run.run_id)),
inputs=[],
outputs=[],
producer=_PRODUCER,
)
self.emit(event)
def dag_failed(self, dag_run: DagRun, msg: str):
event = RunEvent(
eventType=RunState.FAIL,
eventTime=dag_run.end_date.isoformat(),
job=Job(name=dag_run.dag_id, namespace=_DAG_NAMESPACE),
run=Run(
runId=self.build_dag_run_id(dag_run.dag_id, dag_run.run_id),
facets={"errorMessage": ErrorMessageRunFacet(message=msg, programmingLanguage="python")},
),
inputs=[],
outputs=[],
producer=_PRODUCER,
)
self.emit(event)
@staticmethod
def _build_run(
run_id: str,
job_name: str,
parent_job_name: str | None = None,
parent_run_id: str | None = None,
nominal_start_time: str | None = None,
nominal_end_time: str | None = None,
run_facets: dict[str, BaseFacet] | None = None,
) -> Run:
facets: dict[str, BaseFacet] = {}
if nominal_start_time:
facets.update({"nominalTime": NominalTimeRunFacet(nominal_start_time, nominal_end_time)})
if parent_run_id:
parent_run_facet = ParentRunFacet.create(
runId=parent_run_id,
namespace=_DAG_NAMESPACE,
name=parent_job_name or job_name,
)
facets.update(
{
"parent": parent_run_facet,
"parentRun": parent_run_facet, # Keep sending this for the backward compatibility
}
)
if run_facets:
facets.update(run_facets)
return Run(run_id, facets)
@staticmethod
def _build_job(
job_name: str,
job_description: str | None = None,
code_location: str | None = None,
owners: list[str] | None = None,
job_facets: dict[str, BaseFacet] | None = None,
):
facets: dict[str, BaseFacet] = {}
if job_description:
facets.update({"documentation": DocumentationJobFacet(description=job_description)})
if code_location:
facets.update({"sourceCodeLocation": SourceCodeLocationJobFacet("", url=code_location)})
if owners:
facets.update(
{
"ownership": OwnershipJobFacet(
owners=[OwnershipJobFacetOwners(name=owner) for owner in owners]
)
}
)
if job_facets:
facets = {**facets, **job_facets}
return Job(_DAG_NAMESPACE, job_name, facets)
| 12,484 | 36.157738 | 109 | py |
airflow | airflow-main/airflow/providers/openlineage/plugins/facets.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from attrs import define
from openlineage.client.facet import BaseFacet
from openlineage.client.utils import RedactMixin
@define(slots=False)
class AirflowMappedTaskRunFacet(BaseFacet):
"""Run facet containing information about mapped tasks."""
mapIndex: int
operatorClass: str
_additional_skip_redact: list[str] = ["operatorClass"]
@classmethod
def from_task_instance(cls, task_instance):
task = task_instance.task
from airflow.providers.openlineage.utils import get_operator_class
return cls(
mapIndex=task_instance.map_index,
operatorClass=f"{get_operator_class(task).__module__}.{get_operator_class(task).__name__}",
)
@define(slots=False)
class AirflowRunFacet(BaseFacet):
"""Composite Airflow run facet."""
dag: dict
dagRun: dict
task: dict
taskInstance: dict
taskUuid: str
@define(slots=False)
class UnknownOperatorInstance(RedactMixin):
"""Describes an unknown operator.
This specifies the (class) name of the operator and its properties.
"""
name: str
properties: dict[str, object]
type: str = "operator"
_skip_redact: list[str] = ["name", "type"]
@define(slots=False)
class UnknownOperatorAttributeRunFacet(BaseFacet):
"""RunFacet that describes unknown operators in an Airflow DAG."""
unknownItems: list[UnknownOperatorInstance]
| 2,219 | 28.6 | 103 | py |
airflow | airflow-main/airflow/providers/openlineage/plugins/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/openlineage/plugins/openlineage.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from airflow.configuration import conf
from airflow.plugins_manager import AirflowPlugin
from airflow.providers.openlineage.plugins.macros import lineage_parent_id, lineage_run_id
def _is_disabled() -> bool:
return (
conf.getboolean("openlineage", "disabled")
or os.getenv("OPENLINEAGE_DISABLED", "false").lower() == "true"
)
class OpenLineageProviderPlugin(AirflowPlugin):
"""
Listener that emits numerous Events.
OpenLineage Plugin provides listener that emits OL events on DAG start,
complete and failure and TaskInstances start, complete and failure.
"""
name = "OpenLineageProviderPlugin"
macros = [lineage_run_id, lineage_parent_id]
if not _is_disabled():
from airflow.providers.openlineage.plugins.listener import OpenLineageListener
listeners = [OpenLineageListener()]
| 1,688 | 34.93617 | 90 | py |
airflow | airflow-main/airflow/providers/openlineage/utils/utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
import json
import logging
import os
from contextlib import suppress
from functools import wraps
from typing import TYPE_CHECKING, Any
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
import attrs
from attrs import asdict
from airflow.compat.functools import cache
from airflow.configuration import conf
from airflow.providers.openlineage.plugins.facets import (
AirflowMappedTaskRunFacet,
AirflowRunFacet,
)
from airflow.utils.log.secrets_masker import Redactable, Redacted, SecretsMasker, should_hide_value_for_key
# TODO: move this maybe to Airflow's logic?
from openlineage.client.utils import RedactMixin
if TYPE_CHECKING:
from airflow.models import DAG, BaseOperator, Connection, DagRun, TaskInstance
log = logging.getLogger(__name__)
_NOMINAL_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
def openlineage_job_name(dag_id: str, task_id: str) -> str:
return f"{dag_id}.{task_id}"
def get_operator_class(task: BaseOperator) -> type:
if task.__class__.__name__ in ("DecoratedMappedOperator", "MappedOperator"):
return task.operator_class
return task.__class__
def to_json_encodable(task: BaseOperator) -> dict[str, object]:
def _task_encoder(obj):
from airflow.models import DAG
if isinstance(obj, datetime.datetime):
return obj.isoformat()
elif isinstance(obj, DAG):
return {
"dag_id": obj.dag_id,
"tags": obj.tags,
"schedule_interval": obj.schedule_interval,
"timetable": obj.timetable.serialize(),
}
else:
return str(obj)
return json.loads(json.dumps(task.__dict__, default=_task_encoder))
def url_to_https(url) -> str | None:
# Ensure URL exists
if not url:
return None
base_url = None
if url.startswith("git@"):
part = url.split("git@")[1:2]
if part:
base_url = f'https://{part[0].replace(":", "/", 1)}'
elif url.startswith("https://"):
base_url = url
if not base_url:
raise ValueError(f"Unable to extract location from: {url}")
if base_url.endswith(".git"):
base_url = base_url[:-4]
return base_url
def redacted_connection_uri(conn: Connection, filtered_params=None, filtered_prefixes=None):
"""
Return the connection URI for the given Connection.
This method additionally filters URI by removing query parameters that are known to carry sensitive data
like username, password, access key.
"""
if filtered_prefixes is None:
filtered_prefixes = []
if filtered_params is None:
filtered_params = []
def filter_key_params(k: str):
return k not in filtered_params and any(substr in k for substr in filtered_prefixes)
conn_uri = conn.get_uri()
parsed = urlparse(conn_uri)
# Remove username and password
netloc = f"{parsed.hostname}" + (f":{parsed.port}" if parsed.port else "")
parsed = parsed._replace(netloc=netloc)
if parsed.query:
query_dict = dict(parse_qsl(parsed.query))
if conn.EXTRA_KEY in query_dict:
query_dict = json.loads(query_dict[conn.EXTRA_KEY])
filtered_qs = {k: v for k, v in query_dict.items() if not filter_key_params(k)}
parsed = parsed._replace(query=urlencode(filtered_qs))
return urlunparse(parsed)
def get_connection(conn_id) -> Connection | None:
from airflow.hooks.base import BaseHook
with suppress(Exception):
return BaseHook.get_connection(conn_id=conn_id)
return None
def get_job_name(task):
return f"{task.dag_id}.{task.task_id}"
def get_custom_facets(task_instance: TaskInstance | None = None) -> dict[str, Any]:
custom_facets = {}
# check for -1 comes from SmartSensor compatibility with dynamic task mapping
# this comes from Airflow code
if hasattr(task_instance, "map_index") and getattr(task_instance, "map_index") != -1:
custom_facets["airflow_mappedTask"] = AirflowMappedTaskRunFacet.from_task_instance(task_instance)
return custom_facets
class InfoJsonEncodable(dict):
"""
Airflow objects might not be json-encodable overall.
The class provides additional attributes to control
what and how is encoded:
* renames: a dictionary of attribute name changes
* | casts: a dictionary consisting of attribute names
| and corresponding methods that should change
| object value
* includes: list of attributes to be included in encoding
* excludes: list of attributes to be excluded from encoding
Don't use both includes and excludes.
"""
renames: dict[str, str] = {}
casts: dict[str, Any] = {}
includes: list[str] = []
excludes: list[str] = []
def __init__(self, obj):
self.obj = obj
self._fields = []
self._cast_fields()
self._rename_fields()
self._include_fields()
dict.__init__(
self,
**{field: InfoJsonEncodable._cast_basic_types(getattr(self, field)) for field in self._fields},
)
@staticmethod
def _cast_basic_types(value):
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, (set, list, tuple)):
return str(list(value))
return value
def _rename_fields(self):
for field, renamed in self.renames.items():
if hasattr(self.obj, field):
setattr(self, renamed, getattr(self.obj, field))
self._fields.append(renamed)
def _cast_fields(self):
for field, func in self.casts.items():
setattr(self, field, func(self.obj))
self._fields.append(field)
def _include_fields(self):
if self.includes and self.excludes:
raise Exception("Don't use both includes and excludes.")
if self.includes:
for field in self.includes:
if field in self._fields or not hasattr(self.obj, field):
continue
setattr(self, field, getattr(self.obj, field))
self._fields.append(field)
else:
for field, val in self.obj.__dict__.items():
if field in self._fields or field in self.excludes or field in self.renames:
continue
setattr(self, field, val)
self._fields.append(field)
class DagInfo(InfoJsonEncodable):
"""Defines encoding DAG object to JSON."""
includes = ["dag_id", "schedule_interval", "tags", "start_date"]
casts = {"timetable": lambda dag: dag.timetable.serialize() if getattr(dag, "timetable", None) else None}
renames = {"_dag_id": "dag_id"}
class DagRunInfo(InfoJsonEncodable):
"""Defines encoding DagRun object to JSON."""
includes = [
"conf",
"dag_id",
"data_interval_start",
"data_interval_end",
"external_trigger",
"run_id",
"run_type",
"start_date",
]
class TaskInstanceInfo(InfoJsonEncodable):
"""Defines encoding TaskInstance object to JSON."""
includes = ["duration", "try_number", "pool"]
casts = {
"map_index": lambda ti: ti.map_index
if hasattr(ti, "map_index") and getattr(ti, "map_index") != -1
else None
}
class TaskInfo(InfoJsonEncodable):
"""Defines encoding BaseOperator/AbstractOperator object to JSON."""
renames = {
"_BaseOperator__init_kwargs": "args",
"_BaseOperator__from_mapped": "mapped",
"_downstream_task_ids": "downstream_task_ids",
"_upstream_task_ids": "upstream_task_ids",
}
excludes = [
"_BaseOperator__instantiated",
"_dag",
"_hook",
"_log",
"_outlets",
"_inlets",
"_lock_for_execution",
"handler",
"params",
"python_callable",
"retry_delay",
]
casts = {
"operator_class": lambda task: task.task_type,
"task_group": lambda task: TaskGroupInfo(task.task_group)
if hasattr(task, "task_group") and getattr(task.task_group, "_group_id", None)
else None,
}
class TaskGroupInfo(InfoJsonEncodable):
"""Defines encoding TaskGroup object to JSON."""
renames = {
"_group_id": "group_id",
}
includes = [
"downstream_group_ids",
"downstream_task_ids",
"prefix_group_id",
"tooltip",
"upstream_group_ids",
"upstream_task_ids",
]
def get_airflow_run_facet(
dag_run: DagRun,
dag: DAG,
task_instance: TaskInstance,
task: BaseOperator,
task_uuid: str,
):
return {
"airflow": json.loads(
json.dumps(
asdict(
AirflowRunFacet(
dag=DagInfo(dag),
dagRun=DagRunInfo(dag_run),
taskInstance=TaskInstanceInfo(task_instance),
task=TaskInfo(task),
taskUuid=task_uuid,
)
),
default=str,
)
)
}
class OpenLineageRedactor(SecretsMasker):
"""
This class redacts sensitive data similar to SecretsMasker in Airflow logs.
The difference is that our default max recursion depth is way higher - due to
the structure of OL events we need more depth.
Additionally, we allow data structures to specify data that needs not to be
redacted by specifying _skip_redact list by deriving RedactMixin.
"""
@classmethod
def from_masker(cls, other: SecretsMasker) -> OpenLineageRedactor:
instance = cls()
instance.patterns = other.patterns
instance.replacer = other.replacer
return instance
def _redact(self, item: Redactable, name: str | None, depth: int, max_depth: int) -> Redacted:
if depth > max_depth:
return item
try:
if name and should_hide_value_for_key(name):
return self._redact_all(item, depth, max_depth)
if attrs.has(type(item)):
# TODO: fixme when mypy gets compatible with new attrs
for dict_key, subval in attrs.asdict(item, recurse=False).items(): # type: ignore[arg-type]
if _is_name_redactable(dict_key, item):
setattr(
item,
dict_key,
self._redact(subval, name=dict_key, depth=(depth + 1), max_depth=max_depth),
)
return item
elif is_json_serializable(item) and hasattr(item, "__dict__"):
for dict_key, subval in item.__dict__.items():
if _is_name_redactable(dict_key, item):
setattr(
item,
dict_key,
self._redact(subval, name=dict_key, depth=(depth + 1), max_depth=max_depth),
)
return item
else:
return super()._redact(item, name, depth, max_depth)
except Exception as e:
log.warning(
"Unable to redact %s. Error was: %s: %s",
repr(item),
type(e).__name__,
str(e),
)
return item
def is_json_serializable(item):
try:
json.dumps(item)
return True
except (TypeError, ValueError):
return False
def _is_name_redactable(name, redacted):
if not issubclass(redacted.__class__, RedactMixin):
return not name.startswith("_")
return name not in redacted.skip_redact
def print_exception(f):
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as e:
log.exception(e)
return wrapper
@cache
def is_source_enabled() -> bool:
source_var = conf.get(
"openlineage", "disable_source_code", fallback=os.getenv("OPENLINEAGE_AIRFLOW_DISABLE_SOURCE_CODE")
)
return isinstance(source_var, str) and source_var.lower() not in ("true", "1", "t")
def get_filtered_unknown_operator_keys(operator: BaseOperator) -> dict:
not_required_keys = {"dag", "task_group"}
return {attr: value for attr, value in operator.__dict__.items() if attr not in not_required_keys}
| 13,269 | 30.822542 | 109 | py |
airflow | airflow-main/airflow/providers/openlineage/utils/sql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from collections import defaultdict
from contextlib import closing
from enum import IntEnum
from typing import TYPE_CHECKING, Dict, List, Optional
from attrs import define
from sqlalchemy import Column, MetaData, Table, and_, union_all
from openlineage.client.facet import SchemaDatasetFacet, SchemaField
from openlineage.client.run import Dataset
if TYPE_CHECKING:
from sqlalchemy.engine import Engine
from sqlalchemy.sql import ClauseElement
from airflow.hooks.base import BaseHook
logger = logging.getLogger(__name__)
class ColumnIndex(IntEnum):
"""Enumerates the indices of columns in information schema view."""
SCHEMA = 0
TABLE_NAME = 1
COLUMN_NAME = 2
ORDINAL_POSITION = 3
# Use 'udt_name' which is the underlying type of column
UDT_NAME = 4
# Database is optional as 5th column
DATABASE = 5
TablesHierarchy = Dict[Optional[str], Dict[Optional[str], List[str]]]
@define
class TableSchema:
"""Temporary object used to construct OpenLineage Dataset."""
table: str
schema: str | None
database: str | None
fields: list[SchemaField]
def to_dataset(self, namespace: str, database: str | None = None, schema: str | None = None) -> Dataset:
# Prefix the table name with database and schema name using
# the format: {database_name}.{table_schema}.{table_name}.
name = ".".join(
part
for part in [self.database or database, self.schema or schema, self.table]
if part is not None
)
return Dataset(
namespace=namespace,
name=name,
facets={"schema": SchemaDatasetFacet(fields=self.fields)} if len(self.fields) > 0 else {},
)
def get_table_schemas(
hook: BaseHook,
namespace: str,
schema: str | None,
database: str | None,
in_query: str | None,
out_query: str | None,
) -> tuple[list[Dataset], list[Dataset]]:
"""Query database for table schemas.
Uses provided hook. Responsibility to provide queries for this function is on particular extractors.
If query for input or output table isn't provided, the query is skipped.
"""
# Do not query if we did not get both queries
if not in_query and not out_query:
return [], []
with closing(hook.get_conn()) as conn, closing(conn.cursor()) as cursor:
if in_query:
cursor.execute(in_query)
in_datasets = [x.to_dataset(namespace, database, schema) for x in parse_query_result(cursor)]
else:
in_datasets = []
if out_query:
cursor.execute(out_query)
out_datasets = [x.to_dataset(namespace, database, schema) for x in parse_query_result(cursor)]
else:
out_datasets = []
return in_datasets, out_datasets
def parse_query_result(cursor) -> list[TableSchema]:
"""Fetch results from DB-API 2.0 cursor and creates list of table schemas.
For each row it creates :class:`TableSchema`.
"""
schemas: dict = {}
columns: dict = defaultdict(list)
for row in cursor.fetchall():
table_schema_name: str = row[ColumnIndex.SCHEMA]
table_name: str = row[ColumnIndex.TABLE_NAME]
table_column: SchemaField = SchemaField(
name=row[ColumnIndex.COLUMN_NAME],
type=row[ColumnIndex.UDT_NAME],
description=None,
)
ordinal_position = row[ColumnIndex.ORDINAL_POSITION]
try:
table_database = row[ColumnIndex.DATABASE]
except IndexError:
table_database = None
# Attempt to get table schema
table_key = ".".join(filter(None, [table_database, table_schema_name, table_name]))
schemas[table_key] = TableSchema(
table=table_name, schema=table_schema_name, database=table_database, fields=[]
)
columns[table_key].append((ordinal_position, table_column))
for schema in schemas.values():
table_key = ".".join(filter(None, [schema.database, schema.schema, schema.table]))
schema.fields = [x for _, x in sorted(columns[table_key])]
return list(schemas.values())
def create_information_schema_query(
columns: list[str],
information_schema_table_name: str,
tables_hierarchy: TablesHierarchy,
uppercase_names: bool = False,
sqlalchemy_engine: Engine | None = None,
) -> str:
"""Creates query for getting table schemas from information schema."""
metadata = MetaData(sqlalchemy_engine)
select_statements = []
for db, schema_mapping in tables_hierarchy.items():
schema, table_name = information_schema_table_name.split(".")
if db:
schema = f"{db}.{schema}"
information_schema_table = Table(
table_name, metadata, *[Column(column) for column in columns], schema=schema
)
filter_clauses = create_filter_clauses(schema_mapping, information_schema_table, uppercase_names)
select_statements.append(information_schema_table.select().filter(*filter_clauses))
return str(
union_all(*select_statements).compile(sqlalchemy_engine, compile_kwargs={"literal_binds": True})
)
def create_filter_clauses(
schema_mapping: dict, information_schema_table: Table, uppercase_names: bool = False
) -> ClauseElement:
"""
Creates comprehensive filter clauses for all tables in one database.
:param schema_mapping: a dictionary of schema names and list of tables in each
:param information_schema_table: `sqlalchemy.Table` instance used to construct clauses
For most SQL dbs it contains `table_name` and `table_schema` columns,
therefore it is expected the table has them defined.
:param uppercase_names: if True use schema and table names uppercase
"""
filter_clauses = []
for schema, tables in schema_mapping.items():
filter_clause = information_schema_table.c.table_name.in_(
name.upper() if uppercase_names else name for name in tables
)
if schema:
filter_clause = and_(information_schema_table.c.table_schema == schema, filter_clause)
filter_clauses.append(filter_clause)
return filter_clauses
| 7,039 | 35.476684 | 108 | py |
airflow | airflow-main/airflow/providers/openlineage/utils/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/dbt/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.2"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-dbt-cloud:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,534 | 35.547619 | 118 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/operators/dbt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import time
import warnings
from pathlib import Path
from typing import TYPE_CHECKING, Any
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator, BaseOperatorLink, XCom
from airflow.providers.dbt.cloud.hooks.dbt import (
DbtCloudHook,
DbtCloudJobRunException,
DbtCloudJobRunStatus,
JobRunInfo,
)
from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
if TYPE_CHECKING:
from airflow.utils.context import Context
class DbtCloudRunJobOperatorLink(BaseOperatorLink):
"""Allows users to monitor the triggered job run directly in dbt Cloud."""
name = "Monitor Job Run"
def get_link(self, operator: BaseOperator, *, ti_key=None):
return XCom.get_value(key="job_run_url", ti_key=ti_key)
class DbtCloudRunJobOperator(BaseOperator):
"""
Executes a dbt Cloud job.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:DbtCloudRunJobOperator`
:param dbt_cloud_conn_id: The connection ID for connecting to dbt Cloud.
:param job_id: The ID of a dbt Cloud job.
:param account_id: Optional. The ID of a dbt Cloud account.
:param trigger_reason: Optional. Description of the reason to trigger the job.
Defaults to "Triggered via Apache Airflow by task <task_id> in the <dag_id> DAG."
:param steps_override: Optional. List of dbt commands to execute when triggering the job instead of those
configured in dbt Cloud.
:param schema_override: Optional. Override the destination schema in the configured target for this job.
:param wait_for_termination: Flag to wait on a job run's termination. By default, this feature is
enabled but could be disabled to perform an asynchronous wait for a long-running job run execution
using the ``DbtCloudJobRunSensor``.
:param timeout: Time in seconds to wait for a job run to reach a terminal status for non-asynchronous
waits. Used only if ``wait_for_termination`` is True. Defaults to 7 days.
:param check_interval: Time in seconds to check on a job run's status for non-asynchronous waits.
Used only if ``wait_for_termination`` is True. Defaults to 60 seconds.
:param additional_run_config: Optional. Any additional parameters that should be included in the API
request when triggering the job.
:param deferrable: Run operator in the deferrable mode
:return: The ID of the triggered dbt Cloud job run.
"""
template_fields = (
"dbt_cloud_conn_id",
"job_id",
"account_id",
"trigger_reason",
"steps_override",
"schema_override",
"additional_run_config",
)
operator_extra_links = (DbtCloudRunJobOperatorLink(),)
def __init__(
self,
*,
dbt_cloud_conn_id: str = DbtCloudHook.default_conn_name,
job_id: int,
account_id: int | None = None,
trigger_reason: str | None = None,
steps_override: list[str] | None = None,
schema_override: str | None = None,
wait_for_termination: bool = True,
timeout: int = 60 * 60 * 24 * 7,
check_interval: int = 60,
additional_run_config: dict[str, Any] | None = None,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
**kwargs,
) -> None:
super().__init__(**kwargs)
self.dbt_cloud_conn_id = dbt_cloud_conn_id
self.account_id = account_id
self.job_id = job_id
self.trigger_reason = trigger_reason
self.steps_override = steps_override
self.schema_override = schema_override
self.wait_for_termination = wait_for_termination
self.timeout = timeout
self.check_interval = check_interval
self.additional_run_config = additional_run_config or {}
self.hook: DbtCloudHook
self.run_id: int
self.deferrable = deferrable
def execute(self, context: Context):
if self.trigger_reason is None:
self.trigger_reason = (
f"Triggered via Apache Airflow by task {self.task_id!r} in the {self.dag.dag_id} DAG."
)
self.hook = DbtCloudHook(self.dbt_cloud_conn_id)
trigger_job_response = self.hook.trigger_job_run(
account_id=self.account_id,
job_id=self.job_id,
cause=self.trigger_reason,
steps_override=self.steps_override,
schema_override=self.schema_override,
additional_run_config=self.additional_run_config,
)
self.run_id = trigger_job_response.json()["data"]["id"]
job_run_url = trigger_job_response.json()["data"]["href"]
# Push the ``job_run_url`` value to XCom regardless of what happens during execution so that the job
# run can be monitored via the operator link.
context["ti"].xcom_push(key="job_run_url", value=job_run_url)
if self.wait_for_termination:
if self.deferrable is False:
self.log.info("Waiting for job run %s to terminate.", str(self.run_id))
if self.hook.wait_for_job_run_status(
run_id=self.run_id,
account_id=self.account_id,
expected_statuses=DbtCloudJobRunStatus.SUCCESS.value,
check_interval=self.check_interval,
timeout=self.timeout,
):
self.log.info("Job run %s has completed successfully.", str(self.run_id))
else:
raise DbtCloudJobRunException(f"Job run {self.run_id} has failed or has been cancelled.")
return self.run_id
else:
end_time = time.time() + self.timeout
job_run_info = JobRunInfo(account_id=self.account_id, run_id=self.run_id)
job_run_status = self.hook.get_job_run_status(**job_run_info)
if not DbtCloudJobRunStatus.is_terminal(job_run_status):
self.defer(
timeout=self.execution_timeout,
trigger=DbtCloudRunJobTrigger(
conn_id=self.dbt_cloud_conn_id,
run_id=self.run_id,
end_time=end_time,
account_id=self.account_id,
poll_interval=self.check_interval,
),
method_name="execute_complete",
)
elif job_run_status == DbtCloudJobRunStatus.SUCCESS.value:
self.log.info("Job run %s has completed successfully.", str(self.run_id))
return self.run_id
elif job_run_status in (
DbtCloudJobRunStatus.CANCELLED.value,
DbtCloudJobRunStatus.ERROR.value,
):
raise DbtCloudJobRunException(f"Job run {self.run_id} has failed or has been cancelled.")
else:
if self.deferrable is True:
warnings.warn(
"Argument `wait_for_termination` is False and `deferrable` is True , hence "
"`deferrable` parameter doesn't have any effect",
)
return self.run_id
def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
"""
Callback for when the trigger fires - returns immediately.
Relies on trigger to throw an exception, otherwise it assumes execution was successful.
"""
if event["status"] == "error":
raise AirflowException(event["message"])
self.log.info(event["message"])
return int(event["run_id"])
def on_kill(self) -> None:
if self.run_id:
self.hook.cancel_job_run(account_id=self.account_id, run_id=self.run_id)
if self.hook.wait_for_job_run_status(
run_id=self.run_id,
account_id=self.account_id,
expected_statuses=DbtCloudJobRunStatus.CANCELLED.value,
check_interval=self.check_interval,
timeout=self.timeout,
):
self.log.info("Job run %s has been cancelled successfully.", str(self.run_id))
class DbtCloudGetJobRunArtifactOperator(BaseOperator):
"""
Download artifacts from a dbt Cloud job run.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:DbtCloudGetJobRunArtifactOperator`
:param dbt_cloud_conn_id: The connection ID for connecting to dbt Cloud.
:param run_id: The ID of a dbt Cloud job run.
:param path: The file path related to the artifact file. Paths are rooted at the target/ directory.
Use "manifest.json", "catalog.json", or "run_results.json" to download dbt-generated artifacts
for the run.
:param account_id: Optional. The ID of a dbt Cloud account.
:param step: Optional. The index of the Step in the Run to query for artifacts. The first step in the
run has the index 1. If the step parameter is omitted, artifacts for the last step in the run will
be returned.
:param output_file_name: Optional. The desired file name for the download artifact file.
Defaults to <run_id>_<path> (e.g. "728368_run_results.json").
"""
template_fields = ("dbt_cloud_conn_id", "run_id", "path", "account_id", "output_file_name")
def __init__(
self,
*,
dbt_cloud_conn_id: str = DbtCloudHook.default_conn_name,
run_id: int,
path: str,
account_id: int | None = None,
step: int | None = None,
output_file_name: str | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.dbt_cloud_conn_id = dbt_cloud_conn_id
self.run_id = run_id
self.path = path
self.account_id = account_id
self.step = step
self.output_file_name = output_file_name or f"{self.run_id}_{self.path}".replace("/", "-")
def execute(self, context: Context) -> str:
hook = DbtCloudHook(self.dbt_cloud_conn_id)
response = hook.get_job_run_artifact(
run_id=self.run_id, path=self.path, account_id=self.account_id, step=self.step
)
output_file_path = Path(self.output_file_name)
output_file_path.parent.mkdir(parents=True, exist_ok=True)
with output_file_path.open(mode="w") as file:
self.log.info(
"Writing %s artifact for job run %s to %s.", self.path, self.run_id, self.output_file_name
)
if self.path.endswith(".json"):
json.dump(response.json(), file)
else:
file.write(response.text)
return self.output_file_name
class DbtCloudListJobsOperator(BaseOperator):
"""
List jobs in a dbt Cloud project.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:DbtCloudListJobsOperator`
Retrieves metadata for all jobs tied to a specified dbt Cloud account. If a ``project_id`` is
supplied, only jobs pertaining to this project id will be retrieved.
:param account_id: Optional. If an account ID is not provided explicitly,
the account ID from the dbt Cloud connection will be used.
:param order_by: Optional. Field to order the result by. Use '-' to indicate reverse order.
For example, to use reverse order by the run ID use ``order_by=-id``.
:param project_id: Optional. The ID of a dbt Cloud project.
"""
template_fields = (
"account_id",
"project_id",
)
def __init__(
self,
*,
dbt_cloud_conn_id: str = DbtCloudHook.default_conn_name,
account_id: int | None = None,
project_id: int | None = None,
order_by: str | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.dbt_cloud_conn_id = dbt_cloud_conn_id
self.account_id = account_id
self.project_id = project_id
self.order_by = order_by
def execute(self, context: Context) -> list:
hook = DbtCloudHook(self.dbt_cloud_conn_id)
list_jobs_response = hook.list_jobs(
account_id=self.account_id, order_by=self.order_by, project_id=self.project_id
)
buffer = []
for job_metadata in list_jobs_response:
for job in job_metadata.json()["data"]:
buffer.append(job["id"])
self.log.info("Jobs in the specified dbt Cloud account are: %s", ", ".join(map(str, buffer)))
return buffer
| 13,665 | 40.920245 | 109 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/triggers/dbt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
import time
from typing import Any, AsyncIterator
from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunStatus
from airflow.triggers.base import BaseTrigger, TriggerEvent
class DbtCloudRunJobTrigger(BaseTrigger):
"""Trigger to make an HTTP call to dbt and get the status for the job.
This is done with run id in polling interval of time.
:param conn_id: The connection identifier for connecting to Dbt.
:param run_id: The ID of a dbt Cloud job.
:param end_time: Time in seconds to wait for a job run to reach a terminal status. Defaults to 7 days.
:param account_id: The ID of a dbt Cloud account.
:param poll_interval: polling period in seconds to check for the status.
"""
def __init__(
self,
conn_id: str,
run_id: int,
end_time: float,
poll_interval: float,
account_id: int | None,
):
super().__init__()
self.run_id = run_id
self.account_id = account_id
self.conn_id = conn_id
self.end_time = end_time
self.poll_interval = poll_interval
def serialize(self) -> tuple[str, dict[str, Any]]:
"""Serializes DbtCloudRunJobTrigger arguments and classpath."""
return (
"airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger",
{
"run_id": self.run_id,
"account_id": self.account_id,
"conn_id": self.conn_id,
"end_time": self.end_time,
"poll_interval": self.poll_interval,
},
)
async def run(self) -> AsyncIterator[TriggerEvent]:
"""Make async connection to Dbt, polls for the pipeline run status."""
hook = DbtCloudHook(self.conn_id)
try:
while await self.is_still_running(hook):
if self.end_time < time.time():
yield TriggerEvent(
{
"status": "error",
"message": f"Job run {self.run_id} has not reached a terminal status after "
f"{self.end_time} seconds.",
"run_id": self.run_id,
}
)
await asyncio.sleep(self.poll_interval)
job_run_status = await hook.get_job_status(self.run_id, self.account_id)
if job_run_status == DbtCloudJobRunStatus.SUCCESS.value:
yield TriggerEvent(
{
"status": "success",
"message": f"Job run {self.run_id} has completed successfully.",
"run_id": self.run_id,
}
)
elif job_run_status == DbtCloudJobRunStatus.CANCELLED.value:
yield TriggerEvent(
{
"status": "cancelled",
"message": f"Job run {self.run_id} has been cancelled.",
"run_id": self.run_id,
}
)
else:
yield TriggerEvent(
{
"status": "error",
"message": f"Job run {self.run_id} has failed.",
"run_id": self.run_id,
}
)
except Exception as e:
yield TriggerEvent({"status": "error", "message": str(e), "run_id": self.run_id})
async def is_still_running(self, hook: DbtCloudHook) -> bool:
"""Check whether the submitted job is running."""
job_run_status = await hook.get_job_status(self.run_id, self.account_id)
if not DbtCloudJobRunStatus.is_terminal(job_run_status):
return True
return False
| 4,649 | 39.086207 | 106 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/triggers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/hooks/dbt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import time
from enum import Enum
from functools import cached_property, wraps
from inspect import signature
from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, TypeVar, cast
import aiohttp
from aiohttp import ClientResponseError
from asgiref.sync import sync_to_async
from requests.auth import AuthBase
from requests.sessions import Session
from airflow.exceptions import AirflowException
from airflow.providers.http.hooks.http import HttpHook
from airflow.typing_compat import TypedDict
if TYPE_CHECKING:
from requests.models import PreparedRequest, Response
from airflow.models import Connection
def fallback_to_default_account(func: Callable) -> Callable:
"""
Decorator which provides a fallback value for ``account_id``.
If the ``account_id`` is None or not passed to the decorated function,
the value will be taken from the configured dbt Cloud Airflow Connection.
"""
sig = signature(func)
@wraps(func)
def wrapper(*args, **kwargs) -> Callable:
bound_args = sig.bind(*args, **kwargs)
# Check if ``account_id`` was not included in the function signature or, if it is, the value is not
# provided.
if bound_args.arguments.get("account_id") is None:
self = args[0]
default_account_id = self.connection.login
if not default_account_id:
raise AirflowException("Could not determine the dbt Cloud account.")
bound_args.arguments["account_id"] = int(default_account_id)
return func(*bound_args.args, **bound_args.kwargs)
return wrapper
def _get_provider_info() -> tuple[str, str]:
from airflow.providers_manager import ProvidersManager
manager = ProvidersManager()
package_name = manager.hooks[DbtCloudHook.conn_type].package_name # type: ignore[union-attr]
provider = manager.providers[package_name]
return package_name, provider.version
class TokenAuth(AuthBase):
"""Helper class for Auth when executing requests."""
def __init__(self, token: str) -> None:
self.token = token
def __call__(self, request: PreparedRequest) -> PreparedRequest:
package_name, provider_version = _get_provider_info()
request.headers["User-Agent"] = f"{package_name}-v{provider_version}"
request.headers["Content-Type"] = "application/json"
request.headers["Authorization"] = f"Token {self.token}"
return request
class JobRunInfo(TypedDict):
"""Type class for the ``job_run_info`` dictionary."""
account_id: int | None
run_id: int
class DbtCloudJobRunStatus(Enum):
"""dbt Cloud Job statuses."""
QUEUED = 1
STARTING = 2
RUNNING = 3
SUCCESS = 10
ERROR = 20
CANCELLED = 30
TERMINAL_STATUSES = (SUCCESS, ERROR, CANCELLED)
@classmethod
def check_is_valid(cls, statuses: int | Sequence[int] | set[int]):
"""Validates input statuses are a known value."""
if isinstance(statuses, (Sequence, Set)):
for status in statuses:
cls(status)
else:
cls(statuses)
@classmethod
def is_terminal(cls, status: int) -> bool:
"""Checks if the input status is that of a terminal type."""
cls.check_is_valid(statuses=status)
return status in cls.TERMINAL_STATUSES.value
class DbtCloudJobRunException(AirflowException):
"""An exception that indicates a job run failed to complete."""
T = TypeVar("T", bound=Any)
def provide_account_id(func: T) -> T:
"""
Decorator which provides a fallback value for ``account_id``.
If the ``account_id`` is None or not passed to the decorated function,
the value will be taken from the configured dbt Cloud Airflow Connection.
"""
function_signature = signature(func)
@wraps(func)
async def wrapper(*args: Any, **kwargs: Any) -> Any:
bound_args = function_signature.bind(*args, **kwargs)
if bound_args.arguments.get("account_id") is None:
self = args[0]
if self.dbt_cloud_conn_id:
connection = await sync_to_async(self.get_connection)(self.dbt_cloud_conn_id)
default_account_id = connection.login
if not default_account_id:
raise AirflowException("Could not determine the dbt Cloud account.")
bound_args.arguments["account_id"] = int(default_account_id)
return await func(*bound_args.args, **bound_args.kwargs)
return cast(T, wrapper)
class DbtCloudHook(HttpHook):
"""
Interact with dbt Cloud using the V2 API.
:param dbt_cloud_conn_id: The ID of the :ref:`dbt Cloud connection <howto/connection:dbt-cloud>`.
"""
conn_name_attr = "dbt_cloud_conn_id"
default_conn_name = "dbt_cloud_default"
conn_type = "dbt_cloud"
hook_name = "dbt Cloud"
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Builds custom field behavior for the dbt Cloud connection form in the Airflow UI."""
return {
"hidden_fields": ["schema", "port", "extra"],
"relabeling": {"login": "Account ID", "password": "API Token", "host": "Tenant"},
"placeholders": {"host": "Defaults to 'cloud.getdbt.com'."},
}
def __init__(self, dbt_cloud_conn_id: str = default_conn_name, *args, **kwargs) -> None:
super().__init__(auth_type=TokenAuth)
self.dbt_cloud_conn_id = dbt_cloud_conn_id
@staticmethod
def _get_tenant_domain(conn: Connection) -> str:
return conn.host or "cloud.getdbt.com"
@staticmethod
def get_request_url_params(
tenant: str, endpoint: str, include_related: list[str] | None = None
) -> tuple[str, dict[str, Any]]:
"""
Form URL from base url and endpoint url.
:param tenant: The tenant domain name which is need to be replaced in base url.
:param endpoint: Endpoint url to be requested.
:param include_related: Optional. List of related fields to pull with the run.
Valid values are "trigger", "job", "repository", and "environment".
"""
data: dict[str, Any] = {}
if include_related:
data = {"include_related": include_related}
url = f"https://{tenant}/api/v2/accounts/{endpoint or ''}"
return url, data
async def get_headers_tenants_from_connection(self) -> tuple[dict[str, Any], str]:
"""Get Headers, tenants from the connection details."""
headers: dict[str, Any] = {}
connection: Connection = await sync_to_async(self.get_connection)(self.dbt_cloud_conn_id)
tenant = self._get_tenant_domain(connection)
package_name, provider_version = _get_provider_info()
headers["User-Agent"] = f"{package_name}-v{provider_version}"
headers["Content-Type"] = "application/json"
headers["Authorization"] = f"Token {connection.password}"
return headers, tenant
@provide_account_id
async def get_job_details(
self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
) -> Any:
"""
Uses Http async call to retrieve metadata for a specific run of a dbt Cloud job.
:param run_id: The ID of a dbt Cloud job run.
:param account_id: Optional. The ID of a dbt Cloud account.
:param include_related: Optional. List of related fields to pull with the run.
Valid values are "trigger", "job", "repository", and "environment".
"""
endpoint = f"{account_id}/runs/{run_id}/"
headers, tenant = await self.get_headers_tenants_from_connection()
url, params = self.get_request_url_params(tenant, endpoint, include_related)
async with aiohttp.ClientSession(headers=headers) as session:
async with session.get(url, params=params) as response:
try:
response.raise_for_status()
return await response.json()
except ClientResponseError as e:
raise AirflowException(str(e.status) + ":" + e.message)
async def get_job_status(
self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
) -> int:
"""
Retrieves the status for a specific run of a dbt Cloud job.
:param run_id: The ID of a dbt Cloud job run.
:param account_id: Optional. The ID of a dbt Cloud account.
:param include_related: Optional. List of related fields to pull with the run.
Valid values are "trigger", "job", "repository", and "environment".
"""
try:
self.log.info("Getting the status of job run %s.", str(run_id))
response = await self.get_job_details(
run_id, account_id=account_id, include_related=include_related
)
job_run_status: int = response["data"]["status"]
return job_run_status
except Exception as e:
raise e
@cached_property
def connection(self) -> Connection:
_connection = self.get_connection(self.dbt_cloud_conn_id)
if not _connection.password:
raise AirflowException("An API token is required to connect to dbt Cloud.")
return _connection
def get_conn(self, *args, **kwargs) -> Session:
tenant = self._get_tenant_domain(self.connection)
self.base_url = f"https://{tenant}/api/v2/accounts/"
session = Session()
session.auth = self.auth_type(self.connection.password)
return session
def _paginate(self, endpoint: str, payload: dict[str, Any] | None = None) -> list[Response]:
response = self.run(endpoint=endpoint, data=payload)
resp_json = response.json()
limit = resp_json["extra"]["filters"]["limit"]
num_total_results = resp_json["extra"]["pagination"]["total_count"]
num_current_results = resp_json["extra"]["pagination"]["count"]
results = [response]
if num_current_results != num_total_results:
_paginate_payload = payload.copy() if payload else {}
_paginate_payload["offset"] = limit
while not num_current_results >= num_total_results:
response = self.run(endpoint=endpoint, data=_paginate_payload)
resp_json = response.json()
results.append(response)
num_current_results += resp_json["extra"]["pagination"]["count"]
_paginate_payload["offset"] += limit
return results
def _run_and_get_response(
self,
method: str = "GET",
endpoint: str | None = None,
payload: str | dict[str, Any] | None = None,
paginate: bool = False,
) -> Any:
self.method = method
if paginate:
if isinstance(payload, str):
raise ValueError("Payload cannot be a string to paginate a response.")
if endpoint:
return self._paginate(endpoint=endpoint, payload=payload)
else:
raise ValueError("An endpoint is needed to paginate a response.")
return self.run(endpoint=endpoint, data=payload)
def list_accounts(self) -> list[Response]:
"""
Retrieves all of the dbt Cloud accounts the configured API token is authorized to access.
:return: List of request responses.
"""
return self._run_and_get_response()
@fallback_to_default_account
def get_account(self, account_id: int | None = None) -> Response:
"""
Retrieves metadata for a specific dbt Cloud account.
:param account_id: Optional. The ID of a dbt Cloud account.
:return: The request response.
"""
return self._run_and_get_response(endpoint=f"{account_id}/")
@fallback_to_default_account
def list_projects(self, account_id: int | None = None) -> list[Response]:
"""
Retrieves metadata for all projects tied to a specified dbt Cloud account.
:param account_id: Optional. The ID of a dbt Cloud account.
:return: List of request responses.
"""
return self._run_and_get_response(endpoint=f"{account_id}/projects/", paginate=True)
@fallback_to_default_account
def get_project(self, project_id: int, account_id: int | None = None) -> Response:
"""
Retrieves metadata for a specific project.
:param project_id: The ID of a dbt Cloud project.
:param account_id: Optional. The ID of a dbt Cloud account.
:return: The request response.
"""
return self._run_and_get_response(endpoint=f"{account_id}/projects/{project_id}/")
@fallback_to_default_account
def list_jobs(
self,
account_id: int | None = None,
order_by: str | None = None,
project_id: int | None = None,
) -> list[Response]:
"""
Retrieves metadata for all jobs tied to a specified dbt Cloud account.
If a ``project_id`` is supplied, only jobs pertaining to this project will be retrieved.
:param account_id: Optional. The ID of a dbt Cloud account.
:param order_by: Optional. Field to order the result by. Use '-' to indicate reverse order.
For example, to use reverse order by the run ID use ``order_by=-id``.
:param project_id: The ID of a dbt Cloud project.
:return: List of request responses.
"""
return self._run_and_get_response(
endpoint=f"{account_id}/jobs/",
payload={"order_by": order_by, "project_id": project_id},
paginate=True,
)
@fallback_to_default_account
def get_job(self, job_id: int, account_id: int | None = None) -> Response:
"""
Retrieves metadata for a specific job.
:param job_id: The ID of a dbt Cloud job.
:param account_id: Optional. The ID of a dbt Cloud account.
:return: The request response.
"""
return self._run_and_get_response(endpoint=f"{account_id}/jobs/{job_id}")
@fallback_to_default_account
def trigger_job_run(
self,
job_id: int,
cause: str,
account_id: int | None = None,
steps_override: list[str] | None = None,
schema_override: str | None = None,
additional_run_config: dict[str, Any] | None = None,
) -> Response:
"""
Triggers a run of a dbt Cloud job.
:param job_id: The ID of a dbt Cloud job.
:param cause: Description of the reason to trigger the job.
:param account_id: Optional. The ID of a dbt Cloud account.
:param steps_override: Optional. List of dbt commands to execute when triggering the job
instead of those configured in dbt Cloud.
:param schema_override: Optional. Override the destination schema in the configured target for this
job.
:param additional_run_config: Optional. Any additional parameters that should be included in the API
request when triggering the job.
:return: The request response.
"""
if additional_run_config is None:
additional_run_config = {}
payload = {
"cause": cause,
"steps_override": steps_override,
"schema_override": schema_override,
}
payload.update(additional_run_config)
return self._run_and_get_response(
method="POST",
endpoint=f"{account_id}/jobs/{job_id}/run/",
payload=json.dumps(payload),
)
@fallback_to_default_account
def list_job_runs(
self,
account_id: int | None = None,
include_related: list[str] | None = None,
job_definition_id: int | None = None,
order_by: str | None = None,
) -> list[Response]:
"""
Retrieves metadata for all dbt Cloud job runs for an account.
If a ``job_definition_id`` is supplied, only metadata for runs of that specific job are pulled.
:param account_id: Optional. The ID of a dbt Cloud account.
:param include_related: Optional. List of related fields to pull with the run.
Valid values are "trigger", "job", "repository", and "environment".
:param job_definition_id: Optional. The dbt Cloud job ID to retrieve run metadata.
:param order_by: Optional. Field to order the result by. Use '-' to indicate reverse order.
For example, to use reverse order by the run ID use ``order_by=-id``.
:return: List of request responses.
"""
return self._run_and_get_response(
endpoint=f"{account_id}/runs/",
payload={
"include_related": include_related,
"job_definition_id": job_definition_id,
"order_by": order_by,
},
paginate=True,
)
@fallback_to_default_account
def get_job_run(
self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
) -> Response:
"""
Retrieves metadata for a specific run of a dbt Cloud job.
:param run_id: The ID of a dbt Cloud job run.
:param account_id: Optional. The ID of a dbt Cloud account.
:param include_related: Optional. List of related fields to pull with the run.
Valid values are "trigger", "job", "repository", and "environment".
:return: The request response.
"""
return self._run_and_get_response(
endpoint=f"{account_id}/runs/{run_id}/",
payload={"include_related": include_related},
)
def get_job_run_status(self, run_id: int, account_id: int | None = None) -> int:
"""
Retrieves the status for a specific run of a dbt Cloud job.
:param run_id: The ID of a dbt Cloud job run.
:param account_id: Optional. The ID of a dbt Cloud account.
:return: The status of a dbt Cloud job run.
"""
self.log.info("Getting the status of job run %s.", str(run_id))
job_run = self.get_job_run(account_id=account_id, run_id=run_id)
job_run_status = job_run.json()["data"]["status"]
self.log.info(
"Current status of job run %s: %s", str(run_id), DbtCloudJobRunStatus(job_run_status).name
)
return job_run_status
def wait_for_job_run_status(
self,
run_id: int,
account_id: int | None = None,
expected_statuses: int | Sequence[int] | set[int] = DbtCloudJobRunStatus.SUCCESS.value,
check_interval: int = 60,
timeout: int = 60 * 60 * 24 * 7,
) -> bool:
"""
Waits for a dbt Cloud job run to match an expected status.
:param run_id: The ID of a dbt Cloud job run.
:param account_id: Optional. The ID of a dbt Cloud account.
:param expected_statuses: Optional. The desired status(es) to check against a job run's current
status. Defaults to the success status value.
:param check_interval: Time in seconds to check on a pipeline run's status.
:param timeout: Time in seconds to wait for a pipeline to reach a terminal status or the expected
status.
:return: Boolean indicating if the job run has reached the ``expected_status``.
"""
expected_statuses = (expected_statuses,) if isinstance(expected_statuses, int) else expected_statuses
DbtCloudJobRunStatus.check_is_valid(expected_statuses)
job_run_info = JobRunInfo(account_id=account_id, run_id=run_id)
job_run_status = self.get_job_run_status(**job_run_info)
start_time = time.monotonic()
while (
not DbtCloudJobRunStatus.is_terminal(job_run_status) and job_run_status not in expected_statuses
):
# Check if the job-run duration has exceeded the ``timeout`` configured.
if start_time + timeout < time.monotonic():
raise DbtCloudJobRunException(
f"Job run {run_id} has not reached a terminal status after {timeout} seconds."
)
# Wait to check the status of the job run based on the ``check_interval`` configured.
time.sleep(check_interval)
job_run_status = self.get_job_run_status(**job_run_info)
return job_run_status in expected_statuses
@fallback_to_default_account
def cancel_job_run(self, run_id: int, account_id: int | None = None) -> None:
"""
Cancel a specific dbt Cloud job run.
:param run_id: The ID of a dbt Cloud job run.
:param account_id: Optional. The ID of a dbt Cloud account.
"""
self._run_and_get_response(method="POST", endpoint=f"{account_id}/runs/{run_id}/cancel/")
@fallback_to_default_account
def list_job_run_artifacts(
self, run_id: int, account_id: int | None = None, step: int | None = None
) -> list[Response]:
"""
Retrieves a list of the available artifact files generated for a completed run of a dbt Cloud job.
By default, this returns artifacts from the last step in the run. To
list artifacts from other steps in the run, use the ``step`` parameter.
:param run_id: The ID of a dbt Cloud job run.
:param account_id: Optional. The ID of a dbt Cloud account.
:param step: Optional. The index of the Step in the Run to query for artifacts. The first step in the
run has the index 1. If the step parameter is omitted, artifacts for the last step in the run will
be returned.
:return: List of request responses.
"""
return self._run_and_get_response(
endpoint=f"{account_id}/runs/{run_id}/artifacts/", payload={"step": step}
)
@fallback_to_default_account
def get_job_run_artifact(
self, run_id: int, path: str, account_id: int | None = None, step: int | None = None
) -> Response:
"""
Retrieves a list of the available artifact files generated for a completed run of a dbt Cloud job.
By default, this returns artifacts from the last step in the run. To
list artifacts from other steps in the run, use the ``step`` parameter.
:param run_id: The ID of a dbt Cloud job run.
:param path: The file path related to the artifact file. Paths are rooted at the target/ directory.
Use "manifest.json", "catalog.json", or "run_results.json" to download dbt-generated artifacts
for the run.
:param account_id: Optional. The ID of a dbt Cloud account.
:param step: Optional. The index of the Step in the Run to query for artifacts. The first step in the
run has the index 1. If the step parameter is omitted, artifacts for the last step in the run will
be returned.
:return: The request response.
"""
return self._run_and_get_response(
endpoint=f"{account_id}/runs/{run_id}/artifacts/{path}", payload={"step": step}
)
def test_connection(self) -> tuple[bool, str]:
"""Test dbt Cloud connection."""
try:
self._run_and_get_response()
return True, "Successfully connected to dbt Cloud."
except Exception as e:
return False, str(e)
| 24,242 | 38.677578 | 110 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/sensors/dbt.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import time
import warnings
from typing import TYPE_CHECKING, Any
from airflow.configuration import conf
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class DbtCloudJobRunSensor(BaseSensorOperator):
"""Checks the status of a dbt Cloud job run.
.. seealso::
For more information on how to use this sensor, take a look at the guide:
:ref:`howto/operator:DbtCloudJobRunSensor`
:param dbt_cloud_conn_id: The connection identifier for connecting to dbt Cloud.
:param run_id: The job run identifier.
:param account_id: The dbt Cloud account identifier.
:param deferrable: Run sensor in the deferrable mode.
"""
template_fields = ("dbt_cloud_conn_id", "run_id", "account_id")
def __init__(
self,
*,
dbt_cloud_conn_id: str = DbtCloudHook.default_conn_name,
run_id: int,
account_id: int | None = None,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
**kwargs,
) -> None:
if deferrable:
if "poke_interval" not in kwargs:
# TODO: Remove once deprecated
if "polling_interval" in kwargs:
kwargs["poke_interval"] = kwargs["polling_interval"]
warnings.warn(
"Argument `poll_interval` is deprecated and will be removed "
"in a future release. Please use `poke_interval` instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
else:
kwargs["poke_interval"] = 5
if "timeout" not in kwargs:
kwargs["timeout"] = 60 * 60 * 24 * 7
super().__init__(**kwargs)
self.dbt_cloud_conn_id = dbt_cloud_conn_id
self.run_id = run_id
self.account_id = account_id
self.deferrable = deferrable
def poke(self, context: Context) -> bool:
hook = DbtCloudHook(self.dbt_cloud_conn_id)
job_run_status = hook.get_job_run_status(run_id=self.run_id, account_id=self.account_id)
if job_run_status == DbtCloudJobRunStatus.ERROR.value:
raise DbtCloudJobRunException(f"Job run {self.run_id} has failed.")
if job_run_status == DbtCloudJobRunStatus.CANCELLED.value:
raise DbtCloudJobRunException(f"Job run {self.run_id} has been cancelled.")
return job_run_status == DbtCloudJobRunStatus.SUCCESS.value
def execute(self, context: Context) -> None:
"""Run the sensor.
Depending on whether ``deferrable`` is set, this would either defer to
the triggerer or poll for states of the job run, until the job reaches a
failure state or success state.
"""
if not self.deferrable:
super().execute(context)
else:
end_time = time.time() + self.timeout
if not self.poke(context=context):
self.defer(
timeout=self.execution_timeout,
trigger=DbtCloudRunJobTrigger(
run_id=self.run_id,
conn_id=self.dbt_cloud_conn_id,
account_id=self.account_id,
poll_interval=self.poke_interval,
end_time=end_time,
),
method_name="execute_complete",
)
def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
"""Callback for when the trigger fires - returns immediately.
This relies on trigger to throw an exception, otherwise it assumes
execution was successful.
"""
if event["status"] in ["error", "cancelled"]:
raise AirflowException("Error in dbt: " + event["message"])
self.log.info(event["message"])
return int(event["run_id"])
class DbtCloudJobRunAsyncSensor(DbtCloudJobRunSensor):
"""This class is deprecated.
Please use :class:`airflow.providers.dbt.cloud.sensor.dbt.DbtCloudJobRunSensor`
with ``deferrable=True``.
"""
def __init__(self, **kwargs: Any) -> None:
warnings.warn(
"Class `DbtCloudJobRunAsyncSensor` is deprecated and will be removed in a future release. "
"Please use `DbtCloudJobRunSensor` and set `deferrable` attribute to `True` instead",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
super().__init__(deferrable=True, **kwargs)
| 5,714 | 38.6875 | 109 | py |
airflow | airflow-main/airflow/providers/dbt/cloud/sensors/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/celery/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-celery:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,531 | 35.47619 | 115 | py |
airflow | airflow-main/airflow/providers/celery/executors/celery_executor.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""CeleryExecutor.
.. seealso::
For more information on how the CeleryExecutor works, take a look at the guide:
:ref:`executor:CeleryExecutor`
"""
from __future__ import annotations
import logging
import math
import operator
import time
from collections import Counter
from concurrent.futures import ProcessPoolExecutor
from multiprocessing import cpu_count
from typing import TYPE_CHECKING, Any, Optional, Sequence, Tuple
from celery import states as celery_states
from airflow.configuration import conf
from airflow.exceptions import AirflowTaskTimeout
from airflow.executors.base_executor import BaseExecutor
from airflow.stats import Stats
from airflow.utils.state import State
log = logging.getLogger(__name__)
CELERY_SEND_ERR_MSG_HEADER = "Error sending Celery task"
if TYPE_CHECKING:
from celery import Task
from airflow.executors.base_executor import CommandType, TaskTuple
from airflow.models.taskinstance import TaskInstance
from airflow.models.taskinstancekey import TaskInstanceKey
# Task instance that is sent over Celery queues
# TaskInstanceKey, Command, queue_name, CallableTask
TaskInstanceInCelery = Tuple[TaskInstanceKey, CommandType, Optional[str], Task]
# PEP562
def __getattr__(name):
# This allows us to make the Celery app accessible through the
# celery_executor module without the time cost of its import and
# construction
if name == "app":
from airflow.providers.celery.executors.celery_executor_utils import app
return app
raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
"""
To start the celery worker, run the command:
airflow celery worker
"""
class CeleryExecutor(BaseExecutor):
"""
CeleryExecutor is recommended for production use of Airflow.
It allows distributing the execution of task instances to multiple worker nodes.
Celery is a simple, flexible and reliable distributed system to process
vast amounts of messages, while providing operations with the tools
required to maintain such a system.
"""
supports_ad_hoc_ti_run: bool = True
supports_sentry: bool = True
def __init__(self):
super().__init__()
# Celery doesn't support bulk sending the tasks (which can become a bottleneck on bigger clusters)
# so we use a multiprocessing pool to speed this up.
# How many worker processes are created for checking celery task state.
self._sync_parallelism = conf.getint("celery", "SYNC_PARALLELISM")
if self._sync_parallelism == 0:
self._sync_parallelism = max(1, cpu_count() - 1)
from airflow.providers.celery.executors.celery_executor_utils import BulkStateFetcher
self.bulk_state_fetcher = BulkStateFetcher(self._sync_parallelism)
self.tasks = {}
self.task_publish_retries: Counter[TaskInstanceKey] = Counter()
self.task_publish_max_retries = conf.getint("celery", "task_publish_max_retries", fallback=3)
def start(self) -> None:
self.log.debug("Starting Celery Executor using %s processes for syncing", self._sync_parallelism)
def _num_tasks_per_send_process(self, to_send_count: int) -> int:
"""
How many Celery tasks should each worker process send.
:return: Number of tasks that should be sent per process
"""
return max(1, int(math.ceil(1.0 * to_send_count / self._sync_parallelism)))
def _process_tasks(self, task_tuples: list[TaskTuple]) -> None:
from airflow.providers.celery.executors.celery_executor_utils import execute_command
task_tuples_to_send = [task_tuple[:3] + (execute_command,) for task_tuple in task_tuples]
first_task = next(t[3] for t in task_tuples_to_send)
# Celery state queries will stuck if we do not use one same backend
# for all tasks.
cached_celery_backend = first_task.backend
key_and_async_results = self._send_tasks_to_celery(task_tuples_to_send)
self.log.debug("Sent all tasks.")
from airflow.providers.celery.executors.celery_executor_utils import ExceptionWithTraceback
for key, _, result in key_and_async_results:
if isinstance(result, ExceptionWithTraceback) and isinstance(
result.exception, AirflowTaskTimeout
):
retries = self.task_publish_retries[key]
if retries < self.task_publish_max_retries:
Stats.incr("celery.task_timeout_error")
self.log.info(
"[Try %s of %s] Task Timeout Error for Task: (%s).",
self.task_publish_retries[key] + 1,
self.task_publish_max_retries,
key,
)
self.task_publish_retries[key] = retries + 1
continue
self.queued_tasks.pop(key)
self.task_publish_retries.pop(key, None)
if isinstance(result, ExceptionWithTraceback):
self.log.error(CELERY_SEND_ERR_MSG_HEADER + ": %s\n%s\n", result.exception, result.traceback)
self.event_buffer[key] = (State.FAILED, None)
elif result is not None:
result.backend = cached_celery_backend
self.running.add(key)
self.tasks[key] = result
# Store the Celery task_id in the event buffer. This will get "overwritten" if the task
# has another event, but that is fine, because the only other events are success/failed at
# which point we don't need the ID anymore anyway
self.event_buffer[key] = (State.QUEUED, result.task_id)
# If the task runs _really quickly_ we may already have a result!
self.update_task_state(key, result.state, getattr(result, "info", None))
def _send_tasks_to_celery(self, task_tuples_to_send: list[TaskInstanceInCelery]):
from airflow.providers.celery.executors.celery_executor_utils import send_task_to_executor
if len(task_tuples_to_send) == 1 or self._sync_parallelism == 1:
# One tuple, or max one process -> send it in the main thread.
return list(map(send_task_to_executor, task_tuples_to_send))
# Use chunks instead of a work queue to reduce context switching
# since tasks are roughly uniform in size
chunksize = self._num_tasks_per_send_process(len(task_tuples_to_send))
num_processes = min(len(task_tuples_to_send), self._sync_parallelism)
with ProcessPoolExecutor(max_workers=num_processes) as send_pool:
key_and_async_results = list(
send_pool.map(send_task_to_executor, task_tuples_to_send, chunksize=chunksize)
)
return key_and_async_results
def sync(self) -> None:
if not self.tasks:
self.log.debug("No task to query celery, skipping sync")
return
self.update_all_task_states()
def debug_dump(self) -> None:
"""Called in response to SIGUSR2 by the scheduler."""
super().debug_dump()
self.log.info(
"executor.tasks (%d)\n\t%s", len(self.tasks), "\n\t".join(map(repr, self.tasks.items()))
)
def update_all_task_states(self) -> None:
"""Updates states of the tasks."""
self.log.debug("Inquiring about %s celery task(s)", len(self.tasks))
state_and_info_by_celery_task_id = self.bulk_state_fetcher.get_many(self.tasks.values())
self.log.debug("Inquiries completed.")
for key, async_result in list(self.tasks.items()):
state, info = state_and_info_by_celery_task_id.get(async_result.task_id)
if state:
self.update_task_state(key, state, info)
def change_state(self, key: TaskInstanceKey, state: str, info=None) -> None:
super().change_state(key, state, info)
self.tasks.pop(key, None)
def update_task_state(self, key: TaskInstanceKey, state: str, info: Any) -> None:
"""Updates state of a single task."""
try:
if state == celery_states.SUCCESS:
self.success(key, info)
elif state in (celery_states.FAILURE, celery_states.REVOKED):
self.fail(key, info)
elif state == celery_states.STARTED:
pass
elif state == celery_states.PENDING:
pass
else:
self.log.info("Unexpected state for %s: %s", key, state)
except Exception:
self.log.exception("Error syncing the Celery executor, ignoring it.")
def end(self, synchronous: bool = False) -> None:
if synchronous:
while any(task.state not in celery_states.READY_STATES for task in self.tasks.values()):
time.sleep(5)
self.sync()
def terminate(self):
pass
def try_adopt_task_instances(self, tis: Sequence[TaskInstance]) -> Sequence[TaskInstance]:
# See which of the TIs are still alive (or have finished even!)
#
# Since Celery doesn't store "SENT" state for queued commands (if we create an AsyncResult with a made
# up id it just returns PENDING state for it), we have to store Celery's task_id against the TI row to
# look at in future.
#
# This process is not perfect -- we could have sent the task to celery, and crashed before we were
# able to record the AsyncResult.task_id in the TaskInstance table, in which case we won't adopt the
# task (it'll either run and update the TI state, or the scheduler will clear and re-queue it. Either
# way it won't get executed more than once)
#
# (If we swapped it around, and generated a task_id for Celery, stored that in TI and enqueued that
# there is also still a race condition where we could generate and store the task_id, but die before
# we managed to enqueue the command. Since neither way is perfect we always have to deal with this
# process not being perfect.)
from celery.result import AsyncResult
celery_tasks = {}
not_adopted_tis = []
for ti in tis:
if ti.external_executor_id is not None:
celery_tasks[ti.external_executor_id] = (AsyncResult(ti.external_executor_id), ti)
else:
not_adopted_tis.append(ti)
if not celery_tasks:
# Nothing to adopt
return tis
states_by_celery_task_id = self.bulk_state_fetcher.get_many(
list(map(operator.itemgetter(0), celery_tasks.values()))
)
adopted = []
cached_celery_backend = next(iter(celery_tasks.values()))[0].backend
for celery_task_id, (state, info) in states_by_celery_task_id.items():
result, ti = celery_tasks[celery_task_id]
result.backend = cached_celery_backend
# Set the correct elements of the state dicts, then update this
# like we just queried it.
self.tasks[ti.key] = result
self.running.add(ti.key)
self.update_task_state(ti.key, state, info)
adopted.append(f"{ti} in state {state}")
if adopted:
task_instance_str = "\n\t".join(adopted)
self.log.info(
"Adopted the following %d tasks from a dead executor\n\t%s", len(adopted), task_instance_str
)
return not_adopted_tis
def cleanup_stuck_queued_tasks(self, tis: list[TaskInstance]) -> list[str]:
"""
Handle remnants of tasks that were failed because they were stuck in queued.
Tasks can get stuck in queued. If such a task is detected, it will be marked
as `UP_FOR_RETRY` if the task instance has remaining retries or marked as `FAILED`
if it doesn't.
:param tis: List of Task Instances to clean up
:return: List of readable task instances for a warning message
"""
readable_tis = []
from airflow.providers.celery.executors.celery_executor_utils import app
for ti in tis:
readable_tis.append(repr(ti))
task_instance_key = ti.key
self.fail(task_instance_key, None)
celery_async_result = self.tasks.pop(task_instance_key, None)
if celery_async_result:
try:
app.control.revoke(celery_async_result.task_id)
except Exception as ex:
self.log.error("Error revoking task instance %s from celery: %s", task_instance_key, ex)
return readable_tis
| 13,487 | 41.15 | 110 | py |
airflow | airflow-main/airflow/providers/celery/executors/celery_kubernetes_executor.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.callbacks.base_callback_sink import BaseCallbackSink
from airflow.callbacks.callback_requests import CallbackRequest
from airflow.configuration import conf
from airflow.executors.kubernetes_executor import KubernetesExecutor
from airflow.providers.celery.executors.celery_executor import CeleryExecutor
from airflow.utils.log.logging_mixin import LoggingMixin
if TYPE_CHECKING:
from airflow.executors.base_executor import CommandType, EventBufferValueType, QueuedTaskInstanceType
from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance
from airflow.models.taskinstancekey import TaskInstanceKey
class CeleryKubernetesExecutor(LoggingMixin):
"""
CeleryKubernetesExecutor consists of CeleryExecutor and KubernetesExecutor.
It chooses an executor to use based on the queue defined on the task.
When the queue is the value of ``kubernetes_queue`` in section ``[celery_kubernetes_executor]``
of the configuration (default value: `kubernetes`), KubernetesExecutor is selected to run the task,
otherwise, CeleryExecutor is used.
"""
supports_ad_hoc_ti_run: bool = True
supports_pickling: bool = True
supports_sentry: bool = False
is_local: bool = False
is_single_threaded: bool = False
is_production: bool = True
serve_logs: bool = False
change_sensor_mode_to_reschedule: bool = False
callback_sink: BaseCallbackSink | None = None
KUBERNETES_QUEUE = conf.get("celery_kubernetes_executor", "kubernetes_queue")
def __init__(self, celery_executor: CeleryExecutor, kubernetes_executor: KubernetesExecutor):
super().__init__()
self._job_id: int | None = None
self.celery_executor = celery_executor
self.kubernetes_executor = kubernetes_executor
self.kubernetes_executor.kubernetes_queue = self.KUBERNETES_QUEUE
@property
def queued_tasks(self) -> dict[TaskInstanceKey, QueuedTaskInstanceType]:
"""Return queued tasks from celery and kubernetes executor."""
queued_tasks = self.celery_executor.queued_tasks.copy()
queued_tasks.update(self.kubernetes_executor.queued_tasks)
return queued_tasks
@property
def running(self) -> set[TaskInstanceKey]:
"""Return running tasks from celery and kubernetes executor."""
return self.celery_executor.running.union(self.kubernetes_executor.running)
@property
def job_id(self) -> int | None:
"""
Inherited attribute from BaseExecutor.
Since this is not really an executor, but a wrapper of executors
we implemented it as property, so we can have custom setter.
"""
return self._job_id
@job_id.setter
def job_id(self, value: int | None) -> None:
"""Expose job ID for SchedulerJob."""
self._job_id = value
self.kubernetes_executor.job_id = value
self.celery_executor.job_id = value
def start(self) -> None:
"""Start celery and kubernetes executor."""
self.celery_executor.start()
self.kubernetes_executor.start()
@property
def slots_available(self) -> int:
"""Number of new tasks this executor instance can accept."""
return self.celery_executor.slots_available
def queue_command(
self,
task_instance: TaskInstance,
command: CommandType,
priority: int = 1,
queue: str | None = None,
) -> None:
"""Queues command via celery or kubernetes executor."""
executor = self._router(task_instance)
self.log.debug("Using executor: %s for %s", executor.__class__.__name__, task_instance.key)
executor.queue_command(task_instance, command, priority, queue)
def queue_task_instance(
self,
task_instance: TaskInstance,
mark_success: bool = False,
pickle_id: int | None = None,
ignore_all_deps: bool = False,
ignore_depends_on_past: bool = False,
wait_for_past_depends_before_skipping: bool = False,
ignore_task_deps: bool = False,
ignore_ti_state: bool = False,
pool: str | None = None,
cfg_path: str | None = None,
) -> None:
"""Queues task instance via celery or kubernetes executor."""
from airflow.models.taskinstance import SimpleTaskInstance
executor = self._router(SimpleTaskInstance.from_ti(task_instance))
self.log.debug(
"Using executor: %s to queue_task_instance for %s", executor.__class__.__name__, task_instance.key
)
executor.queue_task_instance(
task_instance=task_instance,
mark_success=mark_success,
pickle_id=pickle_id,
ignore_all_deps=ignore_all_deps,
ignore_depends_on_past=ignore_depends_on_past,
wait_for_past_depends_before_skipping=wait_for_past_depends_before_skipping,
ignore_task_deps=ignore_task_deps,
ignore_ti_state=ignore_ti_state,
pool=pool,
cfg_path=cfg_path,
)
def get_task_log(self, ti: TaskInstance, try_number: int) -> tuple[list[str], list[str]]:
"""Fetch task log from Kubernetes executor."""
if ti.queue == self.kubernetes_executor.kubernetes_queue:
return self.kubernetes_executor.get_task_log(ti=ti, try_number=try_number)
return [], []
def has_task(self, task_instance: TaskInstance) -> bool:
"""
Checks if a task is either queued or running in either celery or kubernetes executor.
:param task_instance: TaskInstance
:return: True if the task is known to this executor
"""
return self.celery_executor.has_task(task_instance) or self.kubernetes_executor.has_task(
task_instance
)
def heartbeat(self) -> None:
"""Heartbeat sent to trigger new jobs in celery and kubernetes executor."""
self.celery_executor.heartbeat()
self.kubernetes_executor.heartbeat()
def get_event_buffer(
self, dag_ids: list[str] | None = None
) -> dict[TaskInstanceKey, EventBufferValueType]:
"""
Return and flush the event buffer from celery and kubernetes executor.
:param dag_ids: dag_ids to return events for, if None returns all
:return: a dict of events
"""
cleared_events_from_celery = self.celery_executor.get_event_buffer(dag_ids)
cleared_events_from_kubernetes = self.kubernetes_executor.get_event_buffer(dag_ids)
return {**cleared_events_from_celery, **cleared_events_from_kubernetes}
def try_adopt_task_instances(self, tis: Sequence[TaskInstance]) -> Sequence[TaskInstance]:
"""
Try to adopt running task instances that have been abandoned by a SchedulerJob dying.
Anything that is not adopted will be cleared by the scheduler (and then become eligible for
re-scheduling)
:return: any TaskInstances that were unable to be adopted
"""
celery_tis = [ti for ti in tis if ti.queue != self.KUBERNETES_QUEUE]
kubernetes_tis = [ti for ti in tis if ti.queue == self.KUBERNETES_QUEUE]
return [
*self.celery_executor.try_adopt_task_instances(celery_tis),
*self.kubernetes_executor.try_adopt_task_instances(kubernetes_tis),
]
def cleanup_stuck_queued_tasks(self, tis: list[TaskInstance]) -> list[str]:
celery_tis = [ti for ti in tis if ti.queue != self.KUBERNETES_QUEUE]
kubernetes_tis = [ti for ti in tis if ti.queue == self.KUBERNETES_QUEUE]
return [
*self.celery_executor.cleanup_stuck_queued_tasks(celery_tis),
*self.kubernetes_executor.cleanup_stuck_queued_tasks(kubernetes_tis),
]
def end(self) -> None:
"""End celery and kubernetes executor."""
self.celery_executor.end()
self.kubernetes_executor.end()
def terminate(self) -> None:
"""Terminate celery and kubernetes executor."""
self.celery_executor.terminate()
self.kubernetes_executor.terminate()
def _router(self, simple_task_instance: SimpleTaskInstance) -> CeleryExecutor | KubernetesExecutor:
"""
Return either celery_executor or kubernetes_executor.
:param simple_task_instance: SimpleTaskInstance
:return: celery_executor or kubernetes_executor
"""
if simple_task_instance.queue == self.KUBERNETES_QUEUE:
return self.kubernetes_executor
return self.celery_executor
def debug_dump(self) -> None:
"""Called in response to SIGUSR2 by the scheduler."""
self.log.info("Dumping CeleryExecutor state")
self.celery_executor.debug_dump()
self.log.info("Dumping KubernetesExecutor state")
self.kubernetes_executor.debug_dump()
def send_callback(self, request: CallbackRequest) -> None:
"""Sends callback for execution.
:param request: Callback request to be executed.
"""
if not self.callback_sink:
raise ValueError("Callback sink is not ready.")
self.callback_sink.send(request)
| 10,013 | 39.379032 | 110 | py |
airflow | airflow-main/airflow/providers/celery/executors/default_celery.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Default celery configuration."""
from __future__ import annotations
import logging
import ssl
import re2
from airflow.configuration import conf
from airflow.exceptions import AirflowConfigException, AirflowException
def _broker_supports_visibility_timeout(url):
return url.startswith(("redis://", "rediss://", "sqs://", "sentinel://"))
log = logging.getLogger(__name__)
broker_url = conf.get("celery", "BROKER_URL")
broker_transport_options = conf.getsection("celery_broker_transport_options") or {}
if "visibility_timeout" not in broker_transport_options:
if _broker_supports_visibility_timeout(broker_url):
broker_transport_options["visibility_timeout"] = 21600
broker_transport_options_for_celery: dict = broker_transport_options.copy()
if "sentinel_kwargs" in broker_transport_options:
try:
sentinel_kwargs = broker_transport_options.get("sentinel_kwargs")
if not isinstance(sentinel_kwargs, dict):
raise ValueError
broker_transport_options_for_celery["sentinel_kwargs"] = sentinel_kwargs
except Exception:
raise AirflowException("sentinel_kwargs should be written in the correct dictionary format.")
if conf.has_option("celery", "RESULT_BACKEND"):
result_backend = conf.get_mandatory_value("celery", "RESULT_BACKEND")
else:
log.debug("Value for celery result_backend not found. Using sql_alchemy_conn with db+ prefix.")
result_backend = f'db+{conf.get("database", "SQL_ALCHEMY_CONN")}'
DEFAULT_CELERY_CONFIG = {
"accept_content": ["json"],
"event_serializer": "json",
"worker_prefetch_multiplier": conf.getint("celery", "worker_prefetch_multiplier"),
"task_acks_late": True,
"task_default_queue": conf.get("operators", "DEFAULT_QUEUE"),
"task_default_exchange": conf.get("operators", "DEFAULT_QUEUE"),
"task_track_started": conf.getboolean("celery", "task_track_started"),
"broker_url": broker_url,
"broker_transport_options": broker_transport_options_for_celery,
"result_backend": result_backend,
"database_engine_options": conf.getjson(
"celery", "result_backend_sqlalchemy_engine_options", fallback={}
),
"worker_concurrency": conf.getint("celery", "WORKER_CONCURRENCY"),
"worker_enable_remote_control": conf.getboolean("celery", "worker_enable_remote_control"),
}
def _get_celery_ssl_active() -> bool:
try:
return conf.getboolean("celery", "SSL_ACTIVE")
except AirflowConfigException:
log.warning("Celery Executor will run without SSL")
return False
celery_ssl_active = _get_celery_ssl_active()
try:
if celery_ssl_active:
if broker_url and "amqp://" in broker_url:
broker_use_ssl = {
"keyfile": conf.get("celery", "SSL_KEY"),
"certfile": conf.get("celery", "SSL_CERT"),
"ca_certs": conf.get("celery", "SSL_CACERT"),
"cert_reqs": ssl.CERT_REQUIRED,
}
elif broker_url and re2.search("rediss?://|sentinel://", broker_url):
broker_use_ssl = {
"ssl_keyfile": conf.get("celery", "SSL_KEY"),
"ssl_certfile": conf.get("celery", "SSL_CERT"),
"ssl_ca_certs": conf.get("celery", "SSL_CACERT"),
"ssl_cert_reqs": ssl.CERT_REQUIRED,
}
else:
raise AirflowException(
"The broker you configured does not support SSL_ACTIVE to be True. "
"Please use RabbitMQ or Redis if you would like to use SSL for broker."
)
DEFAULT_CELERY_CONFIG["broker_use_ssl"] = broker_use_ssl
except AirflowConfigException:
raise AirflowException(
"AirflowConfigException: SSL_ACTIVE is True, "
"please ensure SSL_KEY, "
"SSL_CERT and SSL_CACERT are set"
)
except Exception as e:
raise AirflowException(
f"Exception: There was an unknown Celery SSL Error. Please ensure you want to use SSL and/or have "
f"all necessary certs and key ({e})."
)
if re2.search("rediss?://|amqp://|rpc://", result_backend):
log.warning(
"You have configured a result_backend of %s, it is highly recommended "
"to use an alternative result_backend (i.e. a database).",
result_backend,
)
| 5,074 | 38.341085 | 107 | py |
airflow | airflow-main/airflow/providers/celery/executors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import packaging.version
from airflow.exceptions import AirflowOptionalProviderFeatureException
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
base_version = packaging.version.parse(airflow_version).base_version
if packaging.version.parse(base_version) < packaging.version.parse("2.7.0"):
raise AirflowOptionalProviderFeatureException(
"Celery Executor from Celery Provider should only be used with Airflow 2.7.0+.\n"
f"This is Airflow {airflow_version} and Celery and CeleryKubernetesExecutor are "
f"available in the 'airflow.executors' package. You should not use "
f"the provider's executors in this version of Airflow."
)
| 1,584 | 41.837838 | 89 | py |
airflow | airflow-main/airflow/providers/celery/executors/celery_executor_utils.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Utilities and classes used by the Celery Executor.
Much of this code is expensive to import/load, be careful where this module is imported.
"""
from __future__ import annotations
import logging
import math
import os
import subprocess
import traceback
import warnings
from concurrent.futures import ProcessPoolExecutor
from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, Tuple
from celery import Celery, Task, states as celery_states
from celery.backends.base import BaseKeyValueStoreBackend
from celery.backends.database import DatabaseBackend, Task as TaskDb, retry, session_cleanup
from celery.result import AsyncResult
from celery.signals import import_modules as celery_import_modules
from setproctitle import setproctitle
import airflow.settings as settings
from airflow.configuration import conf
from airflow.exceptions import AirflowException, RemovedInAirflow3Warning
from airflow.executors.base_executor import BaseExecutor
from airflow.models.taskinstance import TaskInstanceKey
from airflow.providers.celery.executors.default_celery import DEFAULT_CELERY_CONFIG
from airflow.stats import Stats
from airflow.utils.dag_parsing_context import _airflow_parsing_context_manager
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.net import get_hostname
from airflow.utils.timeout import timeout
log = logging.getLogger(__name__)
if TYPE_CHECKING:
from airflow.executors.base_executor import CommandType, EventBufferValueType
TaskInstanceInCelery = Tuple[TaskInstanceKey, CommandType, Optional[str], Task]
OPERATION_TIMEOUT = conf.getfloat("celery", "operation_timeout", fallback=1.0)
# Make it constant for unit test.
CELERY_FETCH_ERR_MSG_HEADER = "Error fetching Celery task state"
if conf.has_option("celery", "celery_config_options"):
celery_configuration = conf.getimport("celery", "celery_config_options")
else:
celery_configuration = DEFAULT_CELERY_CONFIG
celery_app_name = conf.get("celery", "CELERY_APP_NAME")
if celery_app_name == "airflow.executors.celery_executor":
warnings.warn(
"The celery.CELERY_APP_NAME configuration uses deprecated package name: "
"'airflow.executors.celery_executor'. "
"Change it to `airflow.providers.celery.executors.celery_executor`, and "
"update the `-app` flag in your Celery Health Checks "
"to use `airflow.providers.celery.executors.celery_executor.app`.",
RemovedInAirflow3Warning,
)
app = Celery(celery_app_name, config_source=celery_configuration)
@celery_import_modules.connect
def on_celery_import_modules(*args, **kwargs):
"""
Preload some "expensive" airflow modules once, so other task processes won't have to import it again.
Loading these for each task adds 0.3-0.5s *per task* before the task can run. For long running tasks this
doesn't matter, but for short tasks this starts to be a noticeable impact.
"""
import jinja2.ext # noqa: F401
import airflow.jobs.local_task_job_runner
import airflow.macros
import airflow.operators.bash
import airflow.operators.python
import airflow.operators.subdag # noqa: F401
try:
import numpy # noqa: F401
except ImportError:
pass
try:
import kubernetes.client # noqa: F401
except ImportError:
pass
@app.task
def execute_command(command_to_exec: CommandType) -> None:
"""Executes command."""
dag_id, task_id = BaseExecutor.validate_airflow_tasks_run_command(command_to_exec)
celery_task_id = app.current_task.request.id
log.info("[%s] Executing command in Celery: %s", celery_task_id, command_to_exec)
with _airflow_parsing_context_manager(dag_id=dag_id, task_id=task_id):
try:
if settings.EXECUTE_TASKS_NEW_PYTHON_INTERPRETER:
_execute_in_subprocess(command_to_exec, celery_task_id)
else:
_execute_in_fork(command_to_exec, celery_task_id)
except Exception:
Stats.incr("celery.execute_command.failure")
raise
def _execute_in_fork(command_to_exec: CommandType, celery_task_id: str | None = None) -> None:
pid = os.fork()
if pid:
# In parent, wait for the child
pid, ret = os.waitpid(pid, 0)
if ret == 0:
return
msg = f"Celery command failed on host: {get_hostname()} with celery_task_id {celery_task_id}"
raise AirflowException(msg)
from airflow.sentry import Sentry
ret = 1
try:
from airflow.cli.cli_parser import get_parser
settings.engine.pool.dispose()
settings.engine.dispose()
parser = get_parser()
# [1:] - remove "airflow" from the start of the command
args = parser.parse_args(command_to_exec[1:])
args.shut_down_logging = False
if celery_task_id:
args.external_executor_id = celery_task_id
setproctitle(f"airflow task supervisor: {command_to_exec}")
args.func(args)
ret = 0
except Exception as e:
log.exception("[%s] Failed to execute task %s.", celery_task_id, str(e))
ret = 1
finally:
Sentry.flush()
logging.shutdown()
os._exit(ret)
def _execute_in_subprocess(command_to_exec: CommandType, celery_task_id: str | None = None) -> None:
env = os.environ.copy()
if celery_task_id:
env["external_executor_id"] = celery_task_id
try:
subprocess.check_output(command_to_exec, stderr=subprocess.STDOUT, close_fds=True, env=env)
except subprocess.CalledProcessError as e:
log.exception("[%s] execute_command encountered a CalledProcessError", celery_task_id)
log.error(e.output)
msg = f"Celery command failed on host: {get_hostname()} with celery_task_id {celery_task_id}"
raise AirflowException(msg)
class ExceptionWithTraceback:
"""
Wrapper class used to propagate exceptions to parent processes from subprocesses.
:param exception: The exception to wrap
:param exception_traceback: The stacktrace to wrap
"""
def __init__(self, exception: Exception, exception_traceback: str):
self.exception = exception
self.traceback = exception_traceback
def send_task_to_executor(
task_tuple: TaskInstanceInCelery,
) -> tuple[TaskInstanceKey, CommandType, AsyncResult | ExceptionWithTraceback]:
"""Sends task to executor."""
key, command, queue, task_to_run = task_tuple
try:
with timeout(seconds=OPERATION_TIMEOUT):
result = task_to_run.apply_async(args=[command], queue=queue)
except Exception as e:
exception_traceback = f"Celery Task ID: {key}\n{traceback.format_exc()}"
result = ExceptionWithTraceback(e, exception_traceback)
return key, command, result
def fetch_celery_task_state(async_result: AsyncResult) -> tuple[str, str | ExceptionWithTraceback, Any]:
"""
Fetch and return the state of the given celery task.
The scope of this function is global so that it can be called by subprocesses in the pool.
:param async_result: a tuple of the Celery task key and the async Celery object used
to fetch the task's state
:return: a tuple of the Celery task key and the Celery state and the celery info
of the task
"""
try:
with timeout(seconds=OPERATION_TIMEOUT):
# Accessing state property of celery task will make actual network request
# to get the current state of the task
info = async_result.info if hasattr(async_result, "info") else None
return async_result.task_id, async_result.state, info
except Exception as e:
exception_traceback = f"Celery Task ID: {async_result}\n{traceback.format_exc()}"
return async_result.task_id, ExceptionWithTraceback(e, exception_traceback), None
class BulkStateFetcher(LoggingMixin):
"""
Gets status for many Celery tasks using the best method available.
If BaseKeyValueStoreBackend is used as result backend, the mget method is used.
If DatabaseBackend is used as result backend, the SELECT ...WHERE task_id IN (...) query is used
Otherwise, multiprocessing.Pool will be used. Each task status will be downloaded individually.
"""
def __init__(self, sync_parallelism=None):
super().__init__()
self._sync_parallelism = sync_parallelism
def _tasks_list_to_task_ids(self, async_tasks) -> set[str]:
return {a.task_id for a in async_tasks}
def get_many(self, async_results) -> Mapping[str, EventBufferValueType]:
"""Gets status for many Celery tasks using the best method available."""
if isinstance(app.backend, BaseKeyValueStoreBackend):
result = self._get_many_from_kv_backend(async_results)
elif isinstance(app.backend, DatabaseBackend):
result = self._get_many_from_db_backend(async_results)
else:
result = self._get_many_using_multiprocessing(async_results)
self.log.debug("Fetched %d state(s) for %d task(s)", len(result), len(async_results))
return result
def _get_many_from_kv_backend(self, async_tasks) -> Mapping[str, EventBufferValueType]:
task_ids = self._tasks_list_to_task_ids(async_tasks)
keys = [app.backend.get_key_for_task(k) for k in task_ids]
values = app.backend.mget(keys)
task_results = [app.backend.decode_result(v) for v in values if v]
task_results_by_task_id = {task_result["task_id"]: task_result for task_result in task_results}
return self._prepare_state_and_info_by_task_dict(task_ids, task_results_by_task_id)
@retry
def _query_task_cls_from_db_backend(self, task_ids, **kwargs):
session = app.backend.ResultSession()
task_cls = getattr(app.backend, "task_cls", TaskDb)
with session_cleanup(session):
return session.query(task_cls).filter(task_cls.task_id.in_(task_ids)).all()
def _get_many_from_db_backend(self, async_tasks) -> Mapping[str, EventBufferValueType]:
task_ids = self._tasks_list_to_task_ids(async_tasks)
tasks = self._query_task_cls_from_db_backend(task_ids)
task_results = [app.backend.meta_from_decoded(task.to_dict()) for task in tasks]
task_results_by_task_id = {task_result["task_id"]: task_result for task_result in task_results}
return self._prepare_state_and_info_by_task_dict(task_ids, task_results_by_task_id)
@staticmethod
def _prepare_state_and_info_by_task_dict(
task_ids, task_results_by_task_id
) -> Mapping[str, EventBufferValueType]:
state_info: MutableMapping[str, EventBufferValueType] = {}
for task_id in task_ids:
task_result = task_results_by_task_id.get(task_id)
if task_result:
state = task_result["status"]
info = None if not hasattr(task_result, "info") else task_result["info"]
else:
state = celery_states.PENDING
info = None
state_info[task_id] = state, info
return state_info
def _get_many_using_multiprocessing(self, async_results) -> Mapping[str, EventBufferValueType]:
num_process = min(len(async_results), self._sync_parallelism)
with ProcessPoolExecutor(max_workers=num_process) as sync_pool:
chunksize = max(1, math.floor(math.ceil(1.0 * len(async_results) / self._sync_parallelism)))
task_id_to_states_and_info = list(
sync_pool.map(fetch_celery_task_state, async_results, chunksize=chunksize)
)
states_and_info_by_task_id: MutableMapping[str, EventBufferValueType] = {}
for task_id, state_or_exception, info in task_id_to_states_and_info:
if isinstance(state_or_exception, ExceptionWithTraceback):
self.log.error(
CELERY_FETCH_ERR_MSG_HEADER + ":%s\n%s\n",
state_or_exception.exception,
state_or_exception.traceback,
)
else:
states_and_info_by_task_id[task_id] = state_or_exception, info
return states_and_info_by_task_id
| 13,025 | 39.962264 | 109 | py |
airflow | airflow-main/airflow/providers/celery/sensors/celery_queue.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
from celery.app import control
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class CeleryQueueSensor(BaseSensorOperator):
"""
Waits for a Celery queue to be empty.
By default, in order to be considered empty, the queue must not have
any tasks in the ``reserved``, ``scheduled`` or ``active`` states.
:param celery_queue: The name of the Celery queue to wait for.
:param target_task_id: Task id for checking
"""
def __init__(self, *, celery_queue: str, target_task_id: str | None = None, **kwargs) -> None:
super().__init__(**kwargs)
self.celery_queue = celery_queue
self.target_task_id = target_task_id
def _check_task_id(self, context: Context) -> bool:
"""
Get the Celery result from the Airflow task ID and return True if the result has finished execution.
:param context: Airflow's execution context
:return: True if task has been executed, otherwise False
"""
ti = context["ti"]
celery_result = ti.xcom_pull(task_ids=self.target_task_id)
return celery_result.ready()
def poke(self, context: Context) -> bool:
if self.target_task_id:
return self._check_task_id(context)
inspect_result = control.Inspect()
reserved = inspect_result.reserved()
scheduled = inspect_result.scheduled()
active = inspect_result.active()
try:
reserved = len(reserved[self.celery_queue])
scheduled = len(scheduled[self.celery_queue])
active = len(active[self.celery_queue])
self.log.info("Checking if celery queue %s is empty.", self.celery_queue)
return reserved == 0 and scheduled == 0 and active == 0
except KeyError:
raise KeyError(f"Could not locate Celery queue {self.celery_queue}")
| 2,780 | 34.653846 | 108 | py |
airflow | airflow-main/airflow/providers/celery/sensors/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/oracle/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.7.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-oracle:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,531 | 35.47619 | 115 | py |
airflow | airflow-main/airflow/providers/oracle/transfers/oracle_to_oracle.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.oracle.hooks.oracle import OracleHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class OracleToOracleOperator(BaseOperator):
"""
Moves data from Oracle to Oracle.
:param oracle_destination_conn_id: destination Oracle connection.
:param destination_table: destination table to insert rows.
:param oracle_source_conn_id: :ref:`Source Oracle connection <howto/connection:oracle>`.
:param source_sql: SQL query to execute against the source Oracle
database. (templated)
:param source_sql_params: Parameters to use in sql query. (templated)
:param rows_chunk: number of rows per chunk to commit.
"""
template_fields: Sequence[str] = ("source_sql", "source_sql_params")
template_fields_renderers = {"source_sql": "sql", "source_sql_params": "py"}
ui_color = "#e08c8c"
def __init__(
self,
*,
oracle_destination_conn_id: str,
destination_table: str,
oracle_source_conn_id: str,
source_sql: str,
source_sql_params: dict | None = None,
rows_chunk: int = 5000,
**kwargs,
) -> None:
super().__init__(**kwargs)
if source_sql_params is None:
source_sql_params = {}
self.oracle_destination_conn_id = oracle_destination_conn_id
self.destination_table = destination_table
self.oracle_source_conn_id = oracle_source_conn_id
self.source_sql = source_sql
self.source_sql_params = source_sql_params
self.rows_chunk = rows_chunk
def _execute(self, src_hook, dest_hook, context) -> None:
with src_hook.get_conn() as src_conn:
cursor = src_conn.cursor()
self.log.info("Querying data from source: %s", self.oracle_source_conn_id)
cursor.execute(self.source_sql, self.source_sql_params)
target_fields = list(map(lambda field: field[0], cursor.description))
rows_total = 0
rows = cursor.fetchmany(self.rows_chunk)
while len(rows) > 0:
rows_total += len(rows)
dest_hook.bulk_insert_rows(
self.destination_table, rows, target_fields=target_fields, commit_every=self.rows_chunk
)
rows = cursor.fetchmany(self.rows_chunk)
self.log.info("Total inserted: %s rows", rows_total)
self.log.info("Finished data transfer.")
cursor.close()
def execute(self, context: Context) -> None:
src_hook = OracleHook(oracle_conn_id=self.oracle_source_conn_id)
dest_hook = OracleHook(oracle_conn_id=self.oracle_destination_conn_id)
self._execute(src_hook, dest_hook, context)
| 3,646 | 39.076923 | 107 | py |
airflow | airflow-main/airflow/providers/oracle/transfers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/oracle/operators/oracle.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import re
import warnings
from typing import TYPE_CHECKING, Sequence
import oracledb
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.models import BaseOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.providers.oracle.hooks.oracle import OracleHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class OracleOperator(SQLExecuteQueryOperator):
"""
Executes sql code in a specific Oracle database.
This class is deprecated.
Please use :class:`airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
:param sql: the sql code to be executed. Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
Template reference are recognized by str ending in '.sql'
(templated)
:param oracle_conn_id: The :ref:`Oracle connection id <howto/connection:oracle>`
reference to a specific Oracle database.
:param parameters: (optional, templated) the parameters to render the SQL query with.
:param autocommit: if True, each command is automatically committed.
(default value: False)
"""
template_fields: Sequence[str] = (
"parameters",
"sql",
)
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {"sql": "sql"}
ui_color = "#ededed"
def __init__(self, *, oracle_conn_id: str = "oracle_default", **kwargs) -> None:
super().__init__(conn_id=oracle_conn_id, **kwargs)
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
class OracleStoredProcedureOperator(BaseOperator):
"""
Executes stored procedure in a specific Oracle database.
:param procedure: name of stored procedure to call (templated)
:param oracle_conn_id: The :ref:`Oracle connection id <howto/connection:oracle>`
reference to a specific Oracle database.
:param parameters: (optional, templated) the parameters provided in the call
If *do_xcom_push* is *True*, the numeric exit code emitted by
the database is pushed to XCom under key ``ORA`` in case of failure.
"""
template_fields: Sequence[str] = (
"parameters",
"procedure",
)
ui_color = "#ededed"
def __init__(
self,
*,
procedure: str,
oracle_conn_id: str = "oracle_default",
parameters: dict | list | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.oracle_conn_id = oracle_conn_id
self.procedure = procedure
self.parameters = parameters
def execute(self, context: Context):
self.log.info("Executing: %s", self.procedure)
hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
try:
return hook.callproc(self.procedure, autocommit=True, parameters=self.parameters)
except oracledb.DatabaseError as e:
if not self.do_xcom_push or not context:
raise
ti = context["ti"]
code_match = re.search("^ORA-(\\d+):.+", str(e))
if code_match:
ti.xcom_push(key="ORA", value=code_match.group(1))
raise
| 4,235 | 35.205128 | 96 | py |
airflow | airflow-main/airflow/providers/oracle/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/oracle/hooks/oracle.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import math
import warnings
from datetime import datetime
import oracledb
from airflow.exceptions import AirflowProviderDeprecationWarning
try:
import numpy
except ImportError:
numpy = None # type: ignore
from airflow.providers.common.sql.hooks.sql import DbApiHook
PARAM_TYPES = {bool, float, int, str}
def _map_param(value):
if value in PARAM_TYPES:
# In this branch, value is a Python type; calling it produces
# an instance of the type which is understood by the Oracle driver
# in the out parameter mapping mechanism.
value = value()
return value
def _get_bool(val):
if isinstance(val, bool):
return val
if isinstance(val, str):
val = val.lower().strip()
if val == "true":
return True
if val == "false":
return False
return None
def _get_first_bool(*vals):
for val in vals:
converted = _get_bool(val)
if isinstance(converted, bool):
return converted
return None
class OracleHook(DbApiHook):
"""
Interact with Oracle SQL.
:param oracle_conn_id: The :ref:`Oracle connection id <howto/connection:oracle>`
used for Oracle credentials.
:param thick_mode: Specify whether to use python-oracledb in thick mode. Defaults to False.
If set to True, you must have the Oracle Client libraries installed.
See `oracledb docs<https://python-oracledb.readthedocs.io/en/latest/user_guide/initialization.html>`
for more info.
:param thick_mode_lib_dir: Path to use to find the Oracle Client libraries when using thick mode.
If not specified, defaults to the standard way of locating the Oracle Client library on the OS.
See `oracledb docs
<https://python-oracledb.readthedocs.io/en/latest/user_guide/initialization.html#setting-the-oracle-client-library-directory>`
for more info.
:param thick_mode_config_dir: Path to use to find the Oracle Client library
configuration files when using thick mode.
If not specified, defaults to the standard way of locating the Oracle Client
library configuration files on the OS.
See `oracledb docs
<https://python-oracledb.readthedocs.io/en/latest/user_guide/initialization.html#optional-oracle-net-configuration-files>`
for more info.
:param fetch_decimals: Specify whether numbers should be fetched as ``decimal.Decimal`` values.
See `defaults.fetch_decimals
<https://python-oracledb.readthedocs.io/en/latest/api_manual/defaults.html#defaults.fetch_decimals>`
for more info.
:param fetch_lobs: Specify whether to fetch strings/bytes for CLOBs or BLOBs instead of locators.
See `defaults.fetch_lobs
<https://python-oracledb.readthedocs.io/en/latest/api_manual/defaults.html#defaults.fetch_decimals>`
for more info.
"""
conn_name_attr = "oracle_conn_id"
default_conn_name = "oracle_default"
conn_type = "oracle"
hook_name = "Oracle"
_test_connection_sql = "select 1 from dual"
supports_autocommit = True
def __init__(
self,
*args,
thick_mode: bool | None = None,
thick_mode_lib_dir: str | None = None,
thick_mode_config_dir: str | None = None,
fetch_decimals: bool | None = None,
fetch_lobs: bool | None = None,
**kwargs,
) -> None:
super().__init__(*args, **kwargs)
self.thick_mode = thick_mode
self.thick_mode_lib_dir = thick_mode_lib_dir
self.thick_mode_config_dir = thick_mode_config_dir
self.fetch_decimals = fetch_decimals
self.fetch_lobs = fetch_lobs
def get_conn(self) -> oracledb.Connection:
"""Get an Oracle connection object.
Optional parameters for using a custom DSN connection (instead of using
a server alias from tnsnames.ora) The dsn (data source name) is the TNS
entry (from the Oracle names server or tnsnames.ora file), or is a
string like the one returned from ``makedsn()``.
:param dsn: the data source name for the Oracle server
:param service_name: the db_unique_name of the database
that you are connecting to (CONNECT_DATA part of TNS)
:param sid: Oracle System ID that identifies a particular
database on a system
You can set these parameters in the extra fields of your connection
as in
.. code-block:: python
{"dsn": ("(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=host)(PORT=1521))(CONNECT_DATA=(SID=sid)))")}
see more param detail in `oracledb.connect
<https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.connect>`_
"""
conn = self.get_connection(self.oracle_conn_id) # type: ignore[attr-defined]
conn_config = {"user": conn.login, "password": conn.password}
sid = conn.extra_dejson.get("sid")
mod = conn.extra_dejson.get("module")
schema = conn.schema
# Enable oracledb thick mode if thick_mode is set to True
# Parameters take precedence over connection config extra
# Defaults to use thin mode if not provided in params or connection config extra
thick_mode = _get_first_bool(self.thick_mode, conn.extra_dejson.get("thick_mode"))
if thick_mode is True:
if self.thick_mode_lib_dir is None:
self.thick_mode_lib_dir = conn.extra_dejson.get("thick_mode_lib_dir")
if not isinstance(self.thick_mode_lib_dir, (str, type(None))):
raise TypeError(
f"thick_mode_lib_dir expected str or None, "
f"got {type(self.thick_mode_lib_dir).__name__}"
)
if self.thick_mode_config_dir is None:
self.thick_mode_config_dir = conn.extra_dejson.get("thick_mode_config_dir")
if not isinstance(self.thick_mode_config_dir, (str, type(None))):
raise TypeError(
f"thick_mode_config_dir expected str or None, "
f"got {type(self.thick_mode_config_dir).__name__}"
)
oracledb.init_oracle_client(
lib_dir=self.thick_mode_lib_dir, config_dir=self.thick_mode_config_dir
)
# Set oracledb Defaults Attributes if provided
# (https://python-oracledb.readthedocs.io/en/latest/api_manual/defaults.html)
fetch_decimals = _get_first_bool(self.fetch_decimals, conn.extra_dejson.get("fetch_decimals"))
if isinstance(fetch_decimals, bool):
oracledb.defaults.fetch_decimals = fetch_decimals
fetch_lobs = _get_first_bool(self.fetch_lobs, conn.extra_dejson.get("fetch_lobs"))
if isinstance(fetch_lobs, bool):
oracledb.defaults.fetch_lobs = fetch_lobs
# Set up DSN
service_name = conn.extra_dejson.get("service_name")
port = conn.port if conn.port else 1521
if conn.host and sid and not service_name:
conn_config["dsn"] = oracledb.makedsn(conn.host, port, sid)
elif conn.host and service_name and not sid:
conn_config["dsn"] = oracledb.makedsn(conn.host, port, service_name=service_name)
else:
dsn = conn.extra_dejson.get("dsn")
if dsn is None:
dsn = conn.host
if conn.port is not None:
dsn += ":" + str(conn.port)
if service_name:
dsn += "/" + service_name
elif conn.schema:
warnings.warn(
"""Using conn.schema to pass the Oracle Service Name is deprecated.
Please use conn.extra.service_name instead.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
dsn += "/" + conn.schema
conn_config["dsn"] = dsn
if "events" in conn.extra_dejson:
conn_config["events"] = conn.extra_dejson.get("events")
mode = conn.extra_dejson.get("mode", "").lower()
if mode == "sysdba":
conn_config["mode"] = oracledb.AUTH_MODE_SYSDBA
elif mode == "sysasm":
conn_config["mode"] = oracledb.AUTH_MODE_SYSASM
elif mode == "sysoper":
conn_config["mode"] = oracledb.AUTH_MODE_SYSOPER
elif mode == "sysbkp":
conn_config["mode"] = oracledb.AUTH_MODE_SYSBKP
elif mode == "sysdgd":
conn_config["mode"] = oracledb.AUTH_MODE_SYSDGD
elif mode == "syskmt":
conn_config["mode"] = oracledb.AUTH_MODE_SYSKMT
elif mode == "sysrac":
conn_config["mode"] = oracledb.AUTH_MODE_SYSRAC
purity = conn.extra_dejson.get("purity", "").lower()
if purity == "new":
conn_config["purity"] = oracledb.PURITY_NEW
elif purity == "self":
conn_config["purity"] = oracledb.PURITY_SELF
elif purity == "default":
conn_config["purity"] = oracledb.PURITY_DEFAULT
conn = oracledb.connect(**conn_config)
if mod is not None:
conn.module = mod
# if Connection.schema is defined, set schema after connecting successfully
# cannot be part of conn_config
# https://python-oracledb.readthedocs.io/en/latest/api_manual/connection.html?highlight=schema#Connection.current_schema
# Only set schema when not using conn.schema as Service Name
if schema and service_name:
conn.current_schema = schema
return conn
def insert_rows(
self,
table: str,
rows: list[tuple],
target_fields=None,
commit_every: int = 1000,
replace: bool | None = False,
**kwargs,
) -> None:
"""Insert a collection of tuples into a table.
All data to insert are treated as one transaction. Changes from standard
DbApiHook implementation:
- Oracle SQL queries can not be terminated with a semicolon (``;``).
- Replace NaN values with NULL using ``numpy.nan_to_num`` (not using
``is_nan()`` because of input types error for strings).
- Coerce datetime cells to Oracle DATETIME format during insert.
:param table: target Oracle table, use dot notation to target a
specific database
:param rows: the rows to insert into the table
:param target_fields: the names of the columns to fill in the table
:param commit_every: the maximum number of rows to insert in one transaction
Default 1000, Set greater than 0.
Set 1 to insert each row in each single transaction
:param replace: Whether to replace instead of insert
"""
if target_fields:
target_fields = ", ".join(target_fields)
target_fields = f"({target_fields})"
else:
target_fields = ""
conn = self.get_conn()
if self.supports_autocommit:
self.set_autocommit(conn, False)
cur = conn.cursor() # type: ignore[attr-defined]
i = 0
for row in rows:
i += 1
lst = []
for cell in row:
if isinstance(cell, str):
lst.append("'" + str(cell).replace("'", "''") + "'")
elif cell is None:
lst.append("NULL")
elif isinstance(cell, float) and math.isnan(cell): # coerce numpy NaN to NULL
lst.append("NULL")
elif numpy and isinstance(cell, numpy.datetime64):
lst.append("'" + str(cell) + "'")
elif isinstance(cell, datetime):
lst.append(
"to_date('" + cell.strftime("%Y-%m-%d %H:%M:%S") + "','YYYY-MM-DD HH24:MI:SS')"
)
else:
lst.append(str(cell))
values = tuple(lst)
sql = f"INSERT /*+ APPEND */ INTO {table} {target_fields} VALUES ({','.join(values)})"
cur.execute(sql)
if i % commit_every == 0:
conn.commit() # type: ignore[attr-defined]
self.log.info("Loaded %s into %s rows so far", i, table)
conn.commit() # type: ignore[attr-defined]
cur.close()
conn.close() # type: ignore[attr-defined]
self.log.info("Done loading. Loaded a total of %s rows", i)
def bulk_insert_rows(
self,
table: str,
rows: list[tuple],
target_fields: list[str] | None = None,
commit_every: int = 5000,
):
"""A performant bulk insert for Oracle DB.
This uses prepared statements via `executemany()`. For best performance,
pass in `rows` as an iterator.
:param table: target Oracle table, use dot notation to target a
specific database
:param rows: the rows to insert into the table
:param target_fields: the names of the columns to fill in the table, default None.
If None, each rows should have some order as table columns name
:param commit_every: the maximum number of rows to insert in one transaction
Default 5000. Set greater than 0. Set 1 to insert each row in each transaction
"""
if not rows:
raise ValueError("parameter rows could not be None or empty iterable")
conn = self.get_conn()
if self.supports_autocommit:
self.set_autocommit(conn, False)
cursor = conn.cursor() # type: ignore[attr-defined]
values_base = target_fields if target_fields else rows[0]
prepared_stm = "insert into {tablename} {columns} values ({values})".format(
tablename=table,
columns="({})".format(", ".join(target_fields)) if target_fields else "",
values=", ".join(":%s" % i for i in range(1, len(values_base) + 1)),
)
row_count = 0
# Chunk the rows
row_chunk = []
for row in rows:
row_chunk.append(row)
row_count += 1
if row_count % commit_every == 0:
cursor.prepare(prepared_stm)
cursor.executemany(None, row_chunk)
conn.commit() # type: ignore[attr-defined]
self.log.info("[%s] inserted %s rows", table, row_count)
# Empty chunk
row_chunk = []
# Commit the leftover chunk
cursor.prepare(prepared_stm)
cursor.executemany(None, row_chunk)
conn.commit() # type: ignore[attr-defined]
self.log.info("[%s] inserted %s rows", table, row_count)
cursor.close()
conn.close() # type: ignore[attr-defined]
def callproc(
self,
identifier: str,
autocommit: bool = False,
parameters: list | dict | None = None,
) -> list | dict | None:
"""
Call the stored procedure identified by the provided string.
Any OUT parameters must be provided with a value of either the
expected Python type (e.g., `int`) or an instance of that type.
The return value is a list or mapping that includes parameters in
both directions; the actual return type depends on the type of the
provided `parameters` argument.
See
https://python-oracledb.readthedocs.io/en/latest/api_manual/cursor.html#Cursor.var
for further reference.
"""
if parameters is None:
parameters = []
args = ",".join(
f":{name}"
for name in (parameters if isinstance(parameters, dict) else range(1, len(parameters) + 1))
)
sql = f"BEGIN {identifier}({args}); END;"
def handler(cursor):
if cursor.bindvars is None:
return
if isinstance(cursor.bindvars, list):
return [v.getvalue() for v in cursor.bindvars]
if isinstance(cursor.bindvars, dict):
return {n: v.getvalue() for (n, v) in cursor.bindvars.items()}
raise TypeError(f"Unexpected bindvars: {cursor.bindvars!r}")
result = self.run(
sql,
autocommit=autocommit,
parameters=(
{name: _map_param(value) for (name, value) in parameters.items()}
if isinstance(parameters, dict)
else [_map_param(value) for value in parameters]
),
handler=handler,
)
return result
| 17,488 | 39.7669 | 134 | py |
airflow | airflow-main/airflow/providers/oracle/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/oracle/example_dags/example_oracle.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from airflow import DAG
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
from airflow.providers.oracle.operators.oracle import OracleStoredProcedureOperator
with DAG(
max_active_runs=1,
max_active_tasks=3,
catchup=False,
start_date=datetime(2023, 1, 1),
dag_id="example_oracle",
) as dag:
# [START howto_oracle_operator]
opr_sql = SQLExecuteQueryOperator(
task_id="task_sql", conn_id="oracle", sql="SELECT 1 FROM DUAL", autocommit=True
)
# [END howto_oracle_operator]
# [START howto_oracle_stored_procedure_operator_with_list_inout]
opr_stored_procedure_with_list_input_output = OracleStoredProcedureOperator(
task_id="opr_stored_procedure_with_list_input_output",
oracle_conn_id="oracle",
procedure="TEST_PROCEDURE",
parameters=[3, int],
)
# [END howto_oracle_stored_procedure_operator_with_list_inout]
# [START howto_oracle_stored_procedure_operator_with_dict_inout]
opr_stored_procedure_with_dict_input_output = OracleStoredProcedureOperator(
task_id="opr_stored_procedure_with_dict_input_output",
oracle_conn_id="oracle",
procedure="TEST_PROCEDURE",
parameters={"val_in": 3, "val_out": int},
)
# [END howto_oracle_stored_procedure_operator_with_dict_inout]
| 2,193 | 34.387097 | 87 | py |
airflow | airflow-main/airflow/providers/oracle/example_dags/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/github/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "2.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-github:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,531 | 35.47619 | 115 | py |
airflow | airflow-main/airflow/providers/github/operators/github.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable
from github import GithubException
from airflow import AirflowException
from airflow.models import BaseOperator
from airflow.providers.github.hooks.github import GithubHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class GithubOperator(BaseOperator):
"""Interact and perform actions on GitHub API.
This operator is designed to use GitHub's Python SDK: https://github.com/PyGithub/PyGithub
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:GithubOperator`
:param github_conn_id: Reference to a pre-defined GitHub Connection
:param github_method: Method name from GitHub Python SDK to be called
:param github_method_args: Method parameters for the github_method. (templated)
:param result_processor: Function to further process the response from GitHub API
"""
template_fields = ("github_method_args",)
def __init__(
self,
*,
github_method: str,
github_conn_id: str = "github_default",
github_method_args: dict | None = None,
result_processor: Callable | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.github_conn_id = github_conn_id
self.method_name = github_method
self.github_method_args = github_method_args or {}
self.result_processor = result_processor
def execute(self, context: Context) -> Any:
try:
# Default method execution is on the top level GitHub client
hook = GithubHook(github_conn_id=self.github_conn_id)
resource = hook.client
github_result = getattr(resource, self.method_name)(**self.github_method_args)
if self.result_processor:
return self.result_processor(github_result)
return github_result
except GithubException as github_error:
raise AirflowException(f"Failed to execute GithubOperator, error: {str(github_error)}")
except Exception as e:
raise AirflowException(f"GitHub operator error: {str(e)}")
| 3,006 | 36.5875 | 99 | py |
airflow | airflow-main/airflow/providers/github/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/github/hooks/github.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module allows you to connect to GitHub."""
from __future__ import annotations
from typing import TYPE_CHECKING
from github import Github as GithubClient
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
class GithubHook(BaseHook):
"""
Interact with GitHub.
Performs a connection to GitHub and retrieves client.
:param github_conn_id: Reference to :ref:`GitHub connection id <howto/connection:github>`.
"""
conn_name_attr = "github_conn_id"
default_conn_name = "github_default"
conn_type = "github"
hook_name = "GitHub"
def __init__(self, github_conn_id: str = default_conn_name, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.github_conn_id = github_conn_id
self.client: GithubClient | None = None
self.get_conn()
def get_conn(self) -> GithubClient:
"""Function that initiates a new GitHub connection with token and hostname (for GitHub Enterprise)."""
if self.client is not None:
return self.client
conn = self.get_connection(self.github_conn_id)
access_token = conn.password
host = conn.host
# Currently the only method of authenticating to GitHub in Airflow is via a token. This is not the
# only means available, but raising an exception to enforce this method for now.
# TODO: When/If other auth methods are implemented this exception should be removed/modified.
if not access_token:
raise AirflowException("An access token is required to authenticate to GitHub.")
if not host:
self.client = GithubClient(login_or_token=access_token)
else:
self.client = GithubClient(login_or_token=access_token, base_url=host)
return self.client
@staticmethod
def get_ui_field_behaviour() -> dict:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "port", "login", "extra"],
"relabeling": {"host": "GitHub Enterprise URL (Optional)", "password": "GitHub Access Token"},
"placeholders": {"host": "https://{hostname}/api/v3 (for GitHub Enterprise)"},
}
def test_connection(self) -> tuple[bool, str]:
"""Test GitHub connection."""
try:
if TYPE_CHECKING:
assert self.client
self.client.get_user().id
return True, "Successfully connected to GitHub."
except Exception as e:
return False, str(e)
| 3,350 | 36.651685 | 110 | py |
airflow | airflow-main/airflow/providers/github/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/github/sensors/github.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable
from github import GithubException
from airflow import AirflowException
from airflow.providers.github.hooks.github import GithubHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class GithubSensor(BaseSensorOperator):
"""
Base GithubSensor which can monitor for any change.
:param github_conn_id: reference to a pre-defined GitHub Connection
:param method_name: method name from PyGithub to be executed
:param method_params: parameters for the method method_name
:param result_processor: function that return boolean and act as a sensor response
"""
def __init__(
self,
*,
method_name: str,
github_conn_id: str = "github_default",
method_params: dict | None = None,
result_processor: Callable | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.github_conn_id = github_conn_id
self.result_processor = None
if result_processor is not None:
self.result_processor = result_processor
self.method_name = method_name
self.method_params = method_params
def poke(self, context: Context) -> bool:
hook = GithubHook(github_conn_id=self.github_conn_id)
github_result = getattr(hook.client, self.method_name)(**self.method_params)
if self.result_processor:
return self.result_processor(github_result)
return github_result
class BaseGithubRepositorySensor(GithubSensor):
"""
Base GitHub sensor at Repository level.
:param github_conn_id: reference to a pre-defined GitHub Connection
:param repository_name: full qualified name of the repository to be monitored, ex. "apache/airflow"
"""
def __init__(
self,
*,
github_conn_id: str = "github_default",
repository_name: str | None = None,
result_processor: Callable | None = None,
**kwargs,
) -> None:
super().__init__(
github_conn_id=github_conn_id,
result_processor=result_processor,
method_name="get_repo",
method_params={"full_name_or_id": repository_name},
**kwargs,
)
def poke(self, context: Context) -> bool:
"""Function that the sensors defined while deriving this class should override."""
raise AirflowException("Override me.")
class GithubTagSensor(BaseGithubRepositorySensor):
"""
Monitors a github tag for its creation.
:param github_conn_id: reference to a pre-defined GitHub Connection
:param tag_name: name of the tag to be monitored
:param repository_name: fully qualified name of the repository to be monitored, ex. "apache/airflow"
"""
template_fields = ("tag_name",)
def __init__(
self,
*,
github_conn_id: str = "github_default",
tag_name: str | None = None,
repository_name: str | None = None,
**kwargs,
) -> None:
self.repository_name = repository_name
self.tag_name = tag_name
super().__init__(
github_conn_id=github_conn_id,
repository_name=repository_name,
result_processor=self.tag_checker,
**kwargs,
)
def poke(self, context: Context) -> bool:
self.log.info("Poking for tag: %s in repository: %s", self.tag_name, self.repository_name)
return GithubSensor.poke(self, context=context)
def tag_checker(self, repo: Any) -> bool | None:
"""Checking existence of Tag in a Repository."""
result = None
try:
if repo is not None and self.tag_name is not None:
all_tags = [x.name for x in repo.get_tags()]
result = self.tag_name in all_tags
except GithubException as github_error: # type: ignore[misc]
raise AirflowException(f"Failed to execute GithubSensor, error: {str(github_error)}")
except Exception as e:
raise AirflowException(f"GitHub operator error: {str(e)}")
if result is True:
self.log.info("Tag %s exists in %s repository, Success.", self.tag_name, self.repository_name)
else:
self.log.info("Tag %s doesn't exists in %s repository yet.", self.tag_name, self.repository_name)
return result
| 5,252 | 34.493243 | 109 | py |
airflow | airflow-main/airflow/providers/github/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/telegram/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "4.1.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-telegram:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/telegram/operators/telegram.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Operator for Telegram."""
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.telegram.hooks.telegram import TelegramHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class TelegramOperator(BaseOperator):
"""
This operator allows you to post messages to Telegram using Telegram Bot API.
Takes both Telegram Bot API token directly or connection that has Telegram token in password field.
If both supplied, token parameter will be given precedence.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:TelegramOperator`
:param telegram_conn_id: Telegram connection ID which its password is Telegram API token
:param token: Telegram API Token
:param chat_id: Telegram chat ID for a chat/channel/group
:param text: Message to be sent on telegram
:param telegram_kwargs: Extra args to be passed to telegram client
"""
template_fields: Sequence[str] = ("text", "chat_id")
ui_color = "#FFBA40"
def __init__(
self,
*,
telegram_conn_id: str = "telegram_default",
token: str | None = None,
chat_id: str | None = None,
text: str = "No message has been set.",
telegram_kwargs: dict | None = None,
**kwargs,
):
self.chat_id = chat_id
self.token = token
self.telegram_kwargs = telegram_kwargs or {}
self.text = text
if telegram_conn_id is None:
raise AirflowException("No valid Telegram connection id supplied.")
self.telegram_conn_id = telegram_conn_id
super().__init__(**kwargs)
def execute(self, context: Context) -> None:
"""Calls the TelegramHook to post the provided Telegram message."""
if self.text:
self.telegram_kwargs["text"] = self.text
telegram_hook = TelegramHook(
telegram_conn_id=self.telegram_conn_id,
token=self.token,
chat_id=self.chat_id,
)
telegram_hook.send_message(self.telegram_kwargs)
| 3,014 | 34.470588 | 103 | py |
airflow | airflow-main/airflow/providers/telegram/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/telegram/hooks/telegram.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Telegram."""
from __future__ import annotations
import asyncio
import telegram
import tenacity
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
class TelegramHook(BaseHook):
"""
This hook allows you to post messages to Telegram using the telegram python-telegram-bot library.
The library can be found here: https://github.com/python-telegram-bot/python-telegram-bot
It accepts both telegram bot API token directly or connection that has telegram bot API token.
If both supplied, token parameter will be given precedence, otherwise 'password' field in the connection
from telegram_conn_id will be used.
chat_id can also be provided in the connection using 'host' field in connection.
Following is the details of a telegram_connection:
name: 'telegram-connection-name'
conn_type: 'http'
password: 'TELEGRAM_TOKEN'
host: 'chat_id' (optional)
Examples:
.. code-block:: python
# Create hook
telegram_hook = TelegramHook(telegram_conn_id="telegram_default")
# or telegram_hook = TelegramHook(telegram_conn_id='telegram_default', chat_id='-1xxx')
# or telegram_hook = TelegramHook(token='xxx:xxx', chat_id='-1xxx')
# Call method from telegram bot client
telegram_hook.send_message(None, {"text": "message", "chat_id": "-1xxx"})
# or telegram_hook.send_message(None', {"text": "message"})
:param telegram_conn_id: connection that optionally has Telegram API token in the password field
:param token: optional telegram API token
:param chat_id: optional chat_id of the telegram chat/channel/group
"""
def __init__(
self,
telegram_conn_id: str | None = None,
token: str | None = None,
chat_id: str | None = None,
) -> None:
super().__init__()
self.token = self.__get_token(token, telegram_conn_id)
self.chat_id = self.__get_chat_id(chat_id, telegram_conn_id)
self.connection = self.get_conn()
def get_conn(self) -> telegram.Bot:
"""
Returns the telegram bot client.
:return: telegram bot client
"""
return telegram.Bot(self.token)
def __get_token(self, token: str | None, telegram_conn_id: str | None) -> str:
"""
Returns the telegram API token.
:param token: telegram API token
:param telegram_conn_id: telegram connection name
:return: telegram API token
"""
if token is not None:
return token
if telegram_conn_id is not None:
conn = self.get_connection(telegram_conn_id)
if not conn.password:
raise AirflowException("Missing token(password) in Telegram connection")
return conn.password
raise AirflowException("Cannot get token: No valid Telegram connection supplied.")
def __get_chat_id(self, chat_id: str | None, telegram_conn_id: str | None) -> str | None:
"""
Returns the telegram chat ID for a chat/channel/group.
:param chat_id: optional chat ID
:param telegram_conn_id: telegram connection name
:return: telegram chat ID
"""
if chat_id is not None:
return chat_id
if telegram_conn_id is not None:
conn = self.get_connection(telegram_conn_id)
return conn.host
return None
@tenacity.retry(
retry=tenacity.retry_if_exception_type(telegram.error.TelegramError),
stop=tenacity.stop_after_attempt(5),
wait=tenacity.wait_fixed(1),
)
def send_message(self, api_params: dict) -> None:
"""
Sends the message to a telegram channel or chat.
:param api_params: params for telegram_instance.send_message. It can also be used to override chat_id
"""
kwargs = {
"chat_id": self.chat_id,
"parse_mode": telegram.constants.ParseMode.HTML,
"disable_web_page_preview": True,
}
kwargs.update(api_params)
if "text" not in kwargs or kwargs["text"] is None:
raise AirflowException("'text' must be provided for telegram message")
if kwargs["chat_id"] is None:
raise AirflowException("'chat_id' must be provided for telegram message")
response = asyncio.run(self.connection.send_message(**kwargs))
self.log.debug(response)
| 5,241 | 35.402778 | 109 | py |
airflow | airflow-main/airflow/providers/telegram/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/snowflake/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "4.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-snowflake:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,534 | 35.547619 | 118 | py |
airflow | airflow-main/airflow/providers/snowflake/transfers/s3_to_snowflake.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains AWS S3 to Snowflake operator."""
from __future__ import annotations
import warnings
from typing import Any, Sequence
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.models import BaseOperator
from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
from airflow.providers.snowflake.utils.common import enclose_param
class S3ToSnowflakeOperator(BaseOperator):
"""
Executes an COPY command to load files from s3 to Snowflake.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:S3ToSnowflakeOperator`
:param s3_keys: reference to a list of S3 keys
:param table: reference to a specific table in snowflake database
:param schema: name of schema (will overwrite schema defined in
connection)
:param stage: reference to a specific snowflake stage. If the stage's schema is not the same as the
table one, it must be specified
:param prefix: cloud storage location specified to limit the set of files to load
:param file_format: reference to a specific file format
:param warehouse: name of warehouse (will overwrite any warehouse
defined in the connection's extra JSON)
:param database: reference to a specific database in Snowflake connection
:param columns_array: reference to a specific columns array in snowflake database
:param pattern: regular expression pattern string specifying the file names and/or paths to match.
Note: regular expression will be automatically enclose in single quotes
and all single quotes in expression will replace by two single quotes.
:param snowflake_conn_id: Reference to
:ref:`Snowflake connection id<howto/connection:snowflake>`
:param role: name of role (will overwrite any role defined in
connection's extra JSON)
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
'https://<your_okta_account_name>.okta.com' to authenticate
through native Okta.
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
"""
template_fields: Sequence[str] = ("s3_keys",)
template_fields_renderers = {"s3_keys": "json"}
def __init__(
self,
*,
s3_keys: list | None = None,
table: str,
stage: str,
prefix: str | None = None,
file_format: str,
schema: str | None = None,
columns_array: list | None = None,
pattern: str | None = None,
warehouse: str | None = None,
database: str | None = None,
autocommit: bool = True,
snowflake_conn_id: str = "snowflake_default",
role: str | None = None,
authenticator: str | None = None,
session_parameters: dict | None = None,
**kwargs,
) -> None:
warnings.warn(
"""
S3ToSnowflakeOperator is deprecated.
Please use
`airflow.providers.snowflake.transfers.copy_into_snowflake.CopyFromExternalStageToSnowflakeOperator`.
""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
super().__init__(**kwargs)
self.s3_keys = s3_keys
self.table = table
self.warehouse = warehouse
self.database = database
self.stage = stage
self.prefix = prefix
self.file_format = file_format
self.schema = schema
self.columns_array = columns_array
self.pattern = pattern
self.autocommit = autocommit
self.snowflake_conn_id = snowflake_conn_id
self.role = role
self.authenticator = authenticator
self.session_parameters = session_parameters
def execute(self, context: Any) -> None:
snowflake_hook = SnowflakeHook(
snowflake_conn_id=self.snowflake_conn_id,
warehouse=self.warehouse,
database=self.database,
role=self.role,
schema=self.schema,
authenticator=self.authenticator,
session_parameters=self.session_parameters,
)
if self.schema:
into = f"{self.schema}.{self.table}"
else:
into = self.table
if self.columns_array:
into = f"{into}({','.join(self.columns_array)})"
sql_parts = [
f"COPY INTO {into}",
f"FROM @{self.stage}/{self.prefix or ''}",
]
if self.s3_keys:
files = ", ".join(map(enclose_param, self.s3_keys))
sql_parts.append(f"files=({files})")
sql_parts.append(f"file_format={self.file_format}")
if self.pattern:
sql_parts.append(f"pattern={enclose_param(self.pattern)}")
copy_query = "\n".join(sql_parts)
self.log.info("Executing COPY command...")
snowflake_hook.run(copy_query, self.autocommit)
self.log.info("COPY command completed")
| 6,054 | 39.366667 | 113 | py |
airflow | airflow-main/airflow/providers/snowflake/transfers/snowflake_to_slack.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from typing import Iterable, Mapping, Sequence
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.slack.transfers.sql_to_slack import SqlToSlackOperator
class SnowflakeToSlackOperator(SqlToSlackOperator):
"""
Executes an SQL statement in Snowflake and sends the results to Slack.
The results of the query are rendered into the 'slack_message' parameter as a Pandas dataframe
using a Jinja variable called '{{ results_df }}'. The 'results_df' variable name can be changed
by specifying a different 'results_df_name' parameter. The Tabulate library is added to the
Jinja environment as a filter to allow the dataframe to be rendered nicely. For example, set
'slack_message' to {{ results_df | tabulate(tablefmt="pretty", headers="keys") }} to send the
results to Slack as an ASCII rendered table.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:SnowflakeToSlackOperator`
:param sql: The SQL statement to execute on Snowflake (templated)
:param slack_message: The templated Slack message to send with the data returned from Snowflake.
You can use the default JINJA variable {{ results_df }} to access the pandas dataframe containing the
SQL results
:param snowflake_conn_id: Reference to
:ref:`Snowflake connection id<howto/connection:snowflake>`
:param slack_conn_id: The connection id for Slack.
:param results_df_name: The name of the JINJA template's dataframe variable, default is 'results_df'
:param parameters: The parameters to pass to the SQL query
:param warehouse: The Snowflake virtual warehouse to use to run the SQL query
:param database: The Snowflake database to use for the SQL query
:param schema: The schema to run the SQL against in Snowflake
:param role: The role to use when connecting to Snowflake
:param slack_token: The token to use to authenticate to Slack. If this is not provided, the
'webhook_token' attribute needs to be specified in the 'Extra' JSON field against the slack_conn_id.
"""
template_fields: Sequence[str] = ("sql", "slack_message")
template_ext: Sequence[str] = (".sql", ".jinja", ".j2")
template_fields_renderers = {"sql": "sql", "slack_message": "jinja"}
times_rendered = 0
def __init__(
self,
*,
sql: str,
slack_message: str,
snowflake_conn_id: str = "snowflake_default",
slack_conn_id: str = "slack_default",
results_df_name: str = "results_df",
parameters: Iterable | Mapping | None = None,
warehouse: str | None = None,
database: str | None = None,
schema: str | None = None,
role: str | None = None,
slack_token: str | None = None,
**kwargs,
) -> None:
self.snowflake_conn_id = snowflake_conn_id
self.sql = sql
self.parameters = parameters
self.warehouse = warehouse
self.database = database
self.schema = schema
self.role = role
self.slack_conn_id = slack_conn_id
self.slack_token = slack_token
self.slack_message = slack_message
self.results_df_name = results_df_name
warnings.warn(
"""
SnowflakeToSlackOperator is deprecated.
Please use `airflow.providers.slack.transfers.sql_to_slack.SqlToSlackOperator`.
""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
hook_params = {
"schema": self.schema,
"role": self.role,
"database": self.database,
"warehouse": self.warehouse,
}
cleaned_hook_params = {k: v for k, v in hook_params.items() if v is not None}
super().__init__(
sql=self.sql,
sql_conn_id=self.snowflake_conn_id,
slack_conn_id=self.slack_conn_id,
slack_webhook_token=self.slack_token,
slack_message=self.slack_message,
results_df_name=self.results_df_name,
parameters=self.parameters,
sql_hook_params=cleaned_hook_params,
**kwargs,
)
| 5,083 | 41.722689 | 109 | py |
airflow | airflow-main/airflow/providers/snowflake/transfers/copy_into_snowflake.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Abstract operator that child classes implement ``COPY INTO <TABLE> SQL in Snowflake``."""
from __future__ import annotations
from typing import Any, Sequence
from airflow.models import BaseOperator
from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
from airflow.providers.snowflake.utils.common import enclose_param
class CopyFromExternalStageToSnowflakeOperator(BaseOperator):
"""
Executes a COPY INTO command to load files from an external stage from clouds to Snowflake.
This operator requires the snowflake_conn_id connection. The snowflake host, login,
and, password field must be setup in the connection. Other inputs can be defined
in the connection or hook instantiation.
:param namespace: snowflake namespace
:param table: snowflake table
:param file_format: file format name i.e. CSV, AVRO, etc
:param stage: reference to a specific snowflake stage. If the stage's schema is not the same as the
table one, it must be specified
:param prefix: cloud storage location specified to limit the set of files to load
:param files: files to load into table
:param pattern: pattern to load files from external location to table
:param copy_into_postifx: optional sql postfix for INSERT INTO query
such as `formatTypeOptions` and `copyOptions`
:param snowflake_conn_id: Reference to :ref:`Snowflake connection id<howto/connection:snowflake>`
:param account: snowflake account name
:param warehouse: name of snowflake warehouse
:param database: name of snowflake database
:param region: name of snowflake region
:param role: name of snowflake role
:param schema: name of snowflake schema
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
``https://<your_okta_account_name>.okta.com`` to authenticate
through native Okta.
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
:param copy_options: snowflake COPY INTO syntax copy options
:param validation_mode: snowflake COPY INTO syntax validation mode
"""
template_fields: Sequence[str] = ("files",)
template_fields_renderers = {"files": "json"}
def __init__(
self,
*,
files: list | None = None,
table: str,
stage: str,
prefix: str | None = None,
file_format: str,
schema: str | None = None,
columns_array: list | None = None,
pattern: str | None = None,
warehouse: str | None = None,
database: str | None = None,
autocommit: bool = True,
snowflake_conn_id: str = "snowflake_default",
role: str | None = None,
authenticator: str | None = None,
session_parameters: dict | None = None,
copy_options: str | None = None,
validation_mode: str | None = None,
**kwargs,
):
super().__init__(**kwargs)
self.files = files
self.table = table
self.stage = stage
self.prefix = prefix
self.file_format = file_format
self.schema = schema
self.columns_array = columns_array
self.pattern = pattern
self.warehouse = warehouse
self.database = database
self.autocommit = autocommit
self.snowflake_conn_id = snowflake_conn_id
self.role = role
self.authenticator = authenticator
self.session_parameters = session_parameters
self.copy_options = copy_options
self.validation_mode = validation_mode
def execute(self, context: Any) -> None:
snowflake_hook = SnowflakeHook(
snowflake_conn_id=self.snowflake_conn_id,
warehouse=self.warehouse,
database=self.database,
role=self.role,
schema=self.schema,
authenticator=self.authenticator,
session_parameters=self.session_parameters,
)
if self.schema:
into = f"{self.schema}.{self.table}"
else:
into = self.table
if self.columns_array:
into = f"{into}({', '.join(self.columns_array)})"
sql = f"""
COPY INTO {into}
FROM @{self.stage}/{self.prefix or ""}
{"FILES=(" + ",".join(map(enclose_param, self.files)) + ")" if self.files else ""}
{"PATTERN=" + enclose_param(self.pattern) if self.pattern else ""}
FILE_FORMAT={self.file_format}
{self.copy_options or ""}
{self.validation_mode or ""}
"""
self.log.info("Executing COPY command...")
snowflake_hook.run(sql=sql, autocommit=self.autocommit)
self.log.info("COPY command completed")
| 5,765 | 39.605634 | 103 | py |
airflow | airflow-main/airflow/providers/snowflake/transfers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/snowflake/operators/snowflake.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import time
import warnings
from datetime import timedelta
from typing import TYPE_CHECKING, Any, Iterable, List, Mapping, Sequence, SupportsAbs, cast
from airflow import AirflowException
from airflow.configuration import conf
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.sql.operators.sql import (
SQLCheckOperator,
SQLExecuteQueryOperator,
SQLIntervalCheckOperator,
SQLValueCheckOperator,
)
from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook
from airflow.providers.snowflake.triggers.snowflake_trigger import SnowflakeSqlApiTrigger
if TYPE_CHECKING:
from airflow.utils.context import Context
class SnowflakeOperator(SQLExecuteQueryOperator):
"""
Executes SQL code in a Snowflake database.
This class is deprecated.
Please use :class:`airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:SnowflakeOperator`
:param snowflake_conn_id: Reference to
:ref:`Snowflake connection id<howto/connection:snowflake>`
:param sql: the SQL code to be executed as a single string, or
a list of str (sql statements), or a reference to a template file.
Template references are recognized by str ending in '.sql'
:param parameters: (optional) the parameters to render the SQL query with.
:param warehouse: name of warehouse (will overwrite any warehouse
defined in the connection's extra JSON)
:param database: name of database (will overwrite database defined
in connection)
:param schema: name of schema (will overwrite schema defined in
connection)
:param role: name of role (will overwrite any role defined in
connection's extra JSON)
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
'https://<your_okta_account_name>.okta.com' to authenticate
through native Okta.
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
:return Returns list of dictionaries in { 'column': 'value', 'column2': 'value2' } form.
"""
template_fields: Sequence[str] = ("sql",)
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {"sql": "sql"}
ui_color = "#ededed"
def __init__(
self,
*,
snowflake_conn_id: str = "snowflake_default",
warehouse: str | None = None,
database: str | None = None,
role: str | None = None,
schema: str | None = None,
authenticator: str | None = None,
session_parameters: dict | None = None,
**kwargs,
) -> None:
if any([warehouse, database, role, schema, authenticator, session_parameters]):
hook_params = kwargs.pop("hook_params", {})
kwargs["hook_params"] = {
"warehouse": warehouse,
"database": database,
"role": role,
"schema": schema,
"authenticator": authenticator,
"session_parameters": session_parameters,
**hook_params,
}
super().__init__(conn_id=snowflake_conn_id, **kwargs)
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
Also, you can provide `hook_params={'warehouse': <warehouse>, 'database': <database>,
'role': <role>, 'schema': <schema>, 'authenticator': <authenticator>,
'session_parameters': <session_parameters>}`.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
def _process_output(self, results: list[Any], descriptions: list[Sequence[Sequence] | None]) -> list[Any]:
validated_descriptions: list[Sequence[Sequence]] = []
for idx, description in enumerate(descriptions):
if not description:
raise RuntimeError(
f"The query did not return descriptions of the cursor for query number {idx}. "
"Cannot return values in a form of dictionary for that query."
)
validated_descriptions.append(description)
returned_results = []
for result_id, result_list in enumerate(results):
current_processed_result = []
for row in result_list:
dict_result: dict[Any, Any] = {}
for idx, description in enumerate(validated_descriptions[result_id]):
dict_result[description[0]] = row[idx]
current_processed_result.append(dict_result)
returned_results.append(current_processed_result)
return returned_results
class SnowflakeCheckOperator(SQLCheckOperator):
"""
Performs a check against Snowflake.
The ``SnowflakeCheckOperator`` expects a sql query that will return a single row. Each
value on that first row is evaluated using python ``bool`` casting. If any of the values
return ``False`` the check is failed and errors out.
Note that Python bool casting evals the following as ``False``:
* ``False``
* ``0``
* Empty string (``""``)
* Empty list (``[]``)
* Empty dictionary or set (``{}``)
Given a query like ``SELECT COUNT(*) FROM foo``, it will fail only if
the count ``== 0``. You can craft much more complex query that could,
for instance, check that the table has the same number of rows as
the source table upstream, or that the count of today's partition is
greater than yesterday's partition, or that a set of metrics are less
than 3 standard deviation for the 7 day average.
This operator can be used as a data quality check in your pipeline, and
depending on where you put it in your DAG, you have the choice to
stop the critical path, preventing from
publishing dubious data, or on the side and receive email alerts
without stopping the progress of the DAG.
:param sql: the SQL code to be executed as a single string, or
a list of str (sql statements), or a reference to a template file.
Template references are recognized by str ending in '.sql'
:param snowflake_conn_id: Reference to
:ref:`Snowflake connection id<howto/connection:snowflake>`
:param autocommit: if True, each command is automatically committed.
(default value: True)
:param parameters: (optional) the parameters to render the SQL query with.
:param warehouse: name of warehouse (will overwrite any warehouse
defined in the connection's extra JSON)
:param database: name of database (will overwrite database defined
in connection)
:param schema: name of schema (will overwrite schema defined in
connection)
:param role: name of role (will overwrite any role defined in
connection's extra JSON)
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
'https://<your_okta_account_name>.okta.com' to authenticate
through native Okta.
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
"""
template_fields: Sequence[str] = ("sql",)
template_ext: Sequence[str] = (".sql",)
ui_color = "#ededed"
def __init__(
self,
*,
sql: str,
snowflake_conn_id: str = "snowflake_default",
parameters: Iterable | Mapping | None = None,
autocommit: bool = True,
do_xcom_push: bool = True,
warehouse: str | None = None,
database: str | None = None,
role: str | None = None,
schema: str | None = None,
authenticator: str | None = None,
session_parameters: dict | None = None,
**kwargs,
) -> None:
super().__init__(sql=sql, parameters=parameters, conn_id=snowflake_conn_id, **kwargs)
self.snowflake_conn_id = snowflake_conn_id
self.sql = sql
self.autocommit = autocommit
self.do_xcom_push = do_xcom_push
self.parameters = parameters
self.warehouse = warehouse
self.database = database
self.role = role
self.schema = schema
self.authenticator = authenticator
self.session_parameters = session_parameters
self.query_ids: list[str] = []
class SnowflakeValueCheckOperator(SQLValueCheckOperator):
"""
Performs a simple check using sql code against a specified value, within a certain level of tolerance.
:param sql: the sql to be executed
:param pass_value: the value to check against
:param tolerance: (optional) the tolerance allowed to accept the query as
passing
:param snowflake_conn_id: Reference to
:ref:`Snowflake connection id<howto/connection:snowflake>`
:param autocommit: if True, each command is automatically committed.
(default value: True)
:param parameters: (optional) the parameters to render the SQL query with.
:param warehouse: name of warehouse (will overwrite any warehouse
defined in the connection's extra JSON)
:param database: name of database (will overwrite database defined
in connection)
:param schema: name of schema (will overwrite schema defined in
connection)
:param role: name of role (will overwrite any role defined in
connection's extra JSON)
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
'https://<your_okta_account_name>.okta.com' to authenticate
through native Okta.
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
"""
def __init__(
self,
*,
sql: str,
pass_value: Any,
tolerance: Any = None,
snowflake_conn_id: str = "snowflake_default",
parameters: Iterable | Mapping | None = None,
autocommit: bool = True,
do_xcom_push: bool = True,
warehouse: str | None = None,
database: str | None = None,
role: str | None = None,
schema: str | None = None,
authenticator: str | None = None,
session_parameters: dict | None = None,
**kwargs,
) -> None:
super().__init__(
sql=sql, pass_value=pass_value, tolerance=tolerance, conn_id=snowflake_conn_id, **kwargs
)
self.snowflake_conn_id = snowflake_conn_id
self.sql = sql
self.autocommit = autocommit
self.do_xcom_push = do_xcom_push
self.parameters = parameters
self.warehouse = warehouse
self.database = database
self.role = role
self.schema = schema
self.authenticator = authenticator
self.session_parameters = session_parameters
self.query_ids: list[str] = []
class SnowflakeIntervalCheckOperator(SQLIntervalCheckOperator):
"""
Checks that the metrics given as SQL expressions are within tolerance of the ones from days_back before.
This method constructs a query like so ::
SELECT {metrics_threshold_dict_key} FROM {table}
WHERE {date_filter_column}=<date>
:param table: the table name
:param days_back: number of days between ds and the ds we want to check
against. Defaults to 7 days
:param metrics_thresholds: a dictionary of ratios indexed by metrics, for
example 'COUNT(*)': 1.5 would require a 50 percent or less difference
between the current day, and the prior days_back.
:param snowflake_conn_id: Reference to
:ref:`Snowflake connection id<howto/connection:snowflake>`
:param autocommit: if True, each command is automatically committed.
(default value: True)
:param parameters: (optional) the parameters to render the SQL query with.
:param warehouse: name of warehouse (will overwrite any warehouse
defined in the connection's extra JSON)
:param database: name of database (will overwrite database defined
in connection)
:param schema: name of schema (will overwrite schema defined in
connection)
:param role: name of role (will overwrite any role defined in
connection's extra JSON)
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
'https://<your_okta_account_name>.okta.com' to authenticate
through native Okta.
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
"""
def __init__(
self,
*,
table: str,
metrics_thresholds: dict,
date_filter_column: str = "ds",
days_back: SupportsAbs[int] = -7,
snowflake_conn_id: str = "snowflake_default",
parameters: Iterable | Mapping | None = None,
autocommit: bool = True,
do_xcom_push: bool = True,
warehouse: str | None = None,
database: str | None = None,
role: str | None = None,
schema: str | None = None,
authenticator: str | None = None,
session_parameters: dict | None = None,
**kwargs,
) -> None:
super().__init__(
table=table,
metrics_thresholds=metrics_thresholds,
date_filter_column=date_filter_column,
days_back=days_back,
conn_id=snowflake_conn_id,
**kwargs,
)
self.snowflake_conn_id = snowflake_conn_id
self.autocommit = autocommit
self.do_xcom_push = do_xcom_push
self.parameters = parameters
self.warehouse = warehouse
self.database = database
self.role = role
self.schema = schema
self.authenticator = authenticator
self.session_parameters = session_parameters
self.query_ids: list[str] = []
class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
"""
Implemented Snowflake SQL API Operator to support multiple SQL statements sequentially,
which is the behavior of the SnowflakeOperator, the Snowflake SQL API allows submitting
multiple SQL statements in a single request. It make post request to submit SQL
statements for execution, poll to check the status of the execution of a statement. Fetch query results
concurrently.
This Operator currently uses key pair authentication, so you need to provide private key raw content or
private key file path in the snowflake connection along with other details
.. seealso::
`Snowflake SQL API key pair Authentication <https://docs.snowflake.com/en/developer-guide/sql-api/authenticating.html#label-sql-api-authenticating-key-pair>`_
Where can this operator fit in?
- To execute multiple SQL statements in a single request
- To execute the SQL statement asynchronously and to execute standard queries and most DDL and DML statements
- To develop custom applications and integrations that perform queries
- To create provision users and roles, create table, etc.
The following commands are not supported:
- The PUT command (in Snowflake SQL)
- The GET command (in Snowflake SQL)
- The CALL command with stored procedures that return a table(stored procedures with the RETURNS TABLE clause).
.. seealso::
- `Snowflake SQL API <https://docs.snowflake.com/en/developer-guide/sql-api/intro.html#introduction-to-the-sql-api>`_
- `API Reference <https://docs.snowflake.com/en/developer-guide/sql-api/reference.html#snowflake-sql-api-reference>`_
- `Limitation on snowflake SQL API <https://docs.snowflake.com/en/developer-guide/sql-api/intro.html#limitations-of-the-sql-api>`_
:param snowflake_conn_id: Reference to Snowflake connection id
:param sql: the sql code to be executed. (templated)
:param autocommit: if True, each command is automatically committed.
(default value: True)
:param parameters: (optional) the parameters to render the SQL query with.
:param warehouse: name of warehouse (will overwrite any warehouse
defined in the connection's extra JSON)
:param database: name of database (will overwrite database defined
in connection)
:param schema: name of schema (will overwrite schema defined in
connection)
:param role: name of role (will overwrite any role defined in
connection's extra JSON)
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
'https://<your_okta_account_name>.okta.com' to authenticate
through native Okta.
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
:param poll_interval: the interval in seconds to poll the query
:param statement_count: Number of SQL statement to be executed
:param token_life_time: lifetime of the JWT Token
:param token_renewal_delta: Renewal time of the JWT Token
:param bindings: (Optional) Values of bind variables in the SQL statement.
When executing the statement, Snowflake replaces placeholders (? and :name) in
the statement with these specified values.
:param deferrable: Run operator in the deferrable mode.
""" # noqa
LIFETIME = timedelta(minutes=59) # The tokens will have a 59 minutes lifetime
RENEWAL_DELTA = timedelta(minutes=54) # Tokens will be renewed after 54 minutes
def __init__(
self,
*,
snowflake_conn_id: str = "snowflake_default",
warehouse: str | None = None,
database: str | None = None,
role: str | None = None,
schema: str | None = None,
authenticator: str | None = None,
session_parameters: dict[str, Any] | None = None,
poll_interval: int = 5,
statement_count: int = 0,
token_life_time: timedelta = LIFETIME,
token_renewal_delta: timedelta = RENEWAL_DELTA,
bindings: dict[str, Any] | None = None,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
**kwargs: Any,
) -> None:
self.snowflake_conn_id = snowflake_conn_id
self.poll_interval = poll_interval
self.statement_count = statement_count
self.token_life_time = token_life_time
self.token_renewal_delta = token_renewal_delta
self.bindings = bindings
self.execute_async = False
self.deferrable = deferrable
if any([warehouse, database, role, schema, authenticator, session_parameters]): # pragma: no cover
hook_params = kwargs.pop("hook_params", {}) # pragma: no cover
kwargs["hook_params"] = {
"warehouse": warehouse,
"database": database,
"role": role,
"schema": schema,
"authenticator": authenticator,
"session_parameters": session_parameters,
**hook_params,
}
super().__init__(conn_id=snowflake_conn_id, **kwargs) # pragma: no cover
def execute(self, context: Context) -> None:
"""
Make a POST API request to snowflake by using SnowflakeSQL and execute the query to get the ids.
By deferring the SnowflakeSqlApiTrigger class passed along with query ids.
"""
self.log.info("Executing: %s", self.sql)
self._hook = SnowflakeSqlApiHook(
snowflake_conn_id=self.snowflake_conn_id,
token_life_time=self.token_life_time,
token_renewal_delta=self.token_renewal_delta,
deferrable=self.deferrable,
)
self.query_ids = self._hook.execute_query(
self.sql, statement_count=self.statement_count, bindings=self.bindings # type: ignore[arg-type]
)
self.log.info("List of query ids %s", self.query_ids)
if self.do_xcom_push:
context["ti"].xcom_push(key="query_ids", value=self.query_ids)
if self.deferrable:
self.defer(
timeout=self.execution_timeout,
trigger=SnowflakeSqlApiTrigger(
poll_interval=self.poll_interval,
query_ids=self.query_ids,
snowflake_conn_id=self.snowflake_conn_id,
token_life_time=self.token_life_time,
token_renewal_delta=self.token_renewal_delta,
),
method_name="execute_complete",
)
else:
statement_status = self.poll_on_queries()
if statement_status["error"]:
raise AirflowException(statement_status["error"])
self._hook.check_query_output(self.query_ids)
def poll_on_queries(self):
"""Poll on requested queries."""
queries_in_progress = set(self.query_ids)
statement_success_status = {}
statement_error_status = {}
for query_id in self.query_ids:
if not len(queries_in_progress):
break
self.log.info("checking : %s", query_id)
try:
statement_status = self._hook.get_sql_api_query_status(query_id)
except Exception as e:
raise ValueError({"status": "error", "message": str(e)})
if statement_status.get("status") == "error":
queries_in_progress.remove(query_id)
statement_error_status[query_id] = statement_status
if statement_status.get("status") == "success":
statement_success_status[query_id] = statement_status
queries_in_progress.remove(query_id)
time.sleep(self.poll_interval)
return {"success": statement_success_status, "error": statement_error_status}
def execute_complete(self, context: Context, event: dict[str, str | list[str]] | None = None) -> None:
"""
Callback for when the trigger fires - returns immediately.
Relies on trigger to throw an exception, otherwise it assumes execution was successful.
"""
if event:
if "status" in event and event["status"] == "error":
msg = f"{event['status']}: {event['message']}"
raise AirflowException(msg)
elif "status" in event and event["status"] == "success":
hook = SnowflakeSqlApiHook(snowflake_conn_id=self.snowflake_conn_id)
query_ids = cast(List[str], event["statement_query_ids"])
hook.check_query_output(query_ids)
self.log.info("%s completed successfully.", self.task_id)
else:
self.log.info("%s completed successfully.", self.task_id)
| 24,848 | 43.452594 | 166 | py |
airflow | airflow-main/airflow/providers/snowflake/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/snowflake/triggers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/snowflake/triggers/snowflake_trigger.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
from datetime import timedelta
from typing import Any, AsyncIterator
from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook
from airflow.triggers.base import BaseTrigger, TriggerEvent
class SnowflakeSqlApiTrigger(BaseTrigger):
"""
Fetch the status for the query ids passed.
:param poll_interval: polling period in seconds to check for the status
:param query_ids: List of Query ids to run and poll for the status
:param snowflake_conn_id: Reference to Snowflake connection id
:param token_life_time: lifetime of the JWT Token in timedelta
:param token_renewal_delta: Renewal time of the JWT Token in timedelta
"""
def __init__(
self,
poll_interval: float,
query_ids: list[str],
snowflake_conn_id: str,
token_life_time: timedelta,
token_renewal_delta: timedelta,
):
super().__init__()
self.poll_interval = poll_interval
self.query_ids = query_ids
self.snowflake_conn_id = snowflake_conn_id
self.token_life_time = token_life_time
self.token_renewal_delta = token_renewal_delta
def serialize(self) -> tuple[str, dict[str, Any]]:
"""Serializes SnowflakeSqlApiTrigger arguments and classpath."""
return (
"airflow.providers.snowflake.triggers.snowflake_trigger.SnowflakeSqlApiTrigger",
{
"poll_interval": self.poll_interval,
"query_ids": self.query_ids,
"snowflake_conn_id": self.snowflake_conn_id,
"token_life_time": self.token_life_time,
"token_renewal_delta": self.token_renewal_delta,
},
)
async def run(self) -> AsyncIterator[TriggerEvent]:
"""Wait for the query the snowflake query to complete."""
SnowflakeSqlApiHook(
self.snowflake_conn_id,
self.token_life_time,
self.token_renewal_delta,
)
try:
statement_query_ids: list[str] = []
for query_id in self.query_ids:
while True:
statement_status = await self.get_query_status(query_id)
if statement_status["status"] not in ["running"]:
break
await asyncio.sleep(self.poll_interval)
if statement_status["status"] == "error":
yield TriggerEvent(statement_status)
return
if statement_status["status"] == "success":
statement_query_ids.extend(statement_status["statement_handles"])
yield TriggerEvent(
{
"status": "success",
"statement_query_ids": statement_query_ids,
}
)
except Exception as e:
yield TriggerEvent({"status": "error", "message": str(e)})
async def get_query_status(self, query_id: str) -> dict[str, Any]:
"""Return True if the SQL query is still running otherwise return False."""
hook = SnowflakeSqlApiHook(
self.snowflake_conn_id,
self.token_life_time,
self.token_renewal_delta,
)
return await hook.get_sql_api_query_status_async(query_id)
def _set_context(self, context):
pass
| 4,186 | 38.5 | 92 | py |
airflow | airflow-main/airflow/providers/snowflake/hooks/snowflake.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from contextlib import closing, contextmanager
from functools import wraps
from io import StringIO
from pathlib import Path
from typing import Any, Callable, Iterable, Mapping
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from snowflake import connector
from snowflake.connector import DictCursor, SnowflakeConnection, util_text
from snowflake.sqlalchemy import URL
from sqlalchemy import create_engine
from airflow import AirflowException
from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
from airflow.utils.strings import to_boolean
def _try_to_boolean(value: Any):
if isinstance(value, (str, type(None))):
return to_boolean(value)
return value
# TODO: Remove this when provider min airflow version >= 2.5.0 since this is
# handled by provider manager from that version.
def _ensure_prefixes(conn_type):
def dec(func):
@wraps(func)
def inner():
field_behaviors = func()
conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
def _ensure_prefix(field):
if field not in conn_attrs and not field.startswith("extra__"):
return f"extra__{conn_type}__{field}"
else:
return field
if "placeholders" in field_behaviors:
placeholders = field_behaviors["placeholders"]
field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
return field_behaviors
return inner
return dec
class SnowflakeHook(DbApiHook):
"""A client to interact with Snowflake.
This hook requires the snowflake_conn_id connection. The snowflake account, login,
and, password field must be setup in the connection. Other inputs can be defined
in the connection or hook instantiation.
:param snowflake_conn_id: Reference to
:ref:`Snowflake connection id<howto/connection:snowflake>`
:param account: snowflake account name
:param authenticator: authenticator for Snowflake.
'snowflake' (default) to use the internal Snowflake authenticator
'externalbrowser' to authenticate using your web browser and
Okta, ADFS or any other SAML 2.0-compliant identify provider
(IdP) that has been defined for your account
``https://<your_okta_account_name>.okta.com`` to authenticate
through native Okta.
:param warehouse: name of snowflake warehouse
:param database: name of snowflake database
:param region: name of snowflake region
:param role: name of snowflake role
:param schema: name of snowflake schema
:param session_parameters: You can set session-level parameters at
the time you connect to Snowflake
:param insecure_mode: Turns off OCSP certificate checks.
For details, see: `How To: Turn Off OCSP Checking in Snowflake Client Drivers - Snowflake Community
<https://community.snowflake.com/s/article/How-to-turn-off-OCSP-checking-in-Snowflake-client-drivers>`__
.. note::
``get_sqlalchemy_engine()`` depends on ``snowflake-sqlalchemy``
.. seealso::
For more information on how to use this Snowflake connection, take a look at the guide:
:ref:`howto/operator:SnowflakeOperator`
"""
conn_name_attr = "snowflake_conn_id"
default_conn_name = "snowflake_default"
conn_type = "snowflake"
hook_name = "Snowflake"
supports_autocommit = True
_test_connection_sql = "select 1"
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3TextAreaFieldWidget, BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import BooleanField, StringField
return {
"account": StringField(lazy_gettext("Account"), widget=BS3TextFieldWidget()),
"warehouse": StringField(lazy_gettext("Warehouse"), widget=BS3TextFieldWidget()),
"database": StringField(lazy_gettext("Database"), widget=BS3TextFieldWidget()),
"region": StringField(lazy_gettext("Region"), widget=BS3TextFieldWidget()),
"role": StringField(lazy_gettext("Role"), widget=BS3TextFieldWidget()),
"private_key_file": StringField(lazy_gettext("Private key (Path)"), widget=BS3TextFieldWidget()),
"private_key_content": StringField(
lazy_gettext("Private key (Text)"), widget=BS3TextAreaFieldWidget()
),
"insecure_mode": BooleanField(
label=lazy_gettext("Insecure mode"), description="Turns off OCSP certificate checks"
),
}
@staticmethod
@_ensure_prefixes(conn_type="snowflake")
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
import json
return {
"hidden_fields": ["port", "host"],
"relabeling": {},
"placeholders": {
"extra": json.dumps(
{
"authenticator": "snowflake oauth",
"private_key_file": "private key",
"session_parameters": "session parameters",
},
indent=1,
),
"schema": "snowflake schema",
"login": "snowflake username",
"password": "snowflake password",
"account": "snowflake account name",
"warehouse": "snowflake warehouse name",
"database": "snowflake db name",
"region": "snowflake hosted region",
"role": "snowflake role",
"private_key_file": "Path of snowflake private key (PEM Format)",
"private_key_content": "Content to snowflake private key (PEM format)",
"insecure_mode": "insecure mode",
},
}
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.account = kwargs.pop("account", None)
self.warehouse = kwargs.pop("warehouse", None)
self.database = kwargs.pop("database", None)
self.region = kwargs.pop("region", None)
self.role = kwargs.pop("role", None)
self.schema = kwargs.pop("schema", None)
self.authenticator = kwargs.pop("authenticator", None)
self.session_parameters = kwargs.pop("session_parameters", None)
self.query_ids: list[str] = []
def _get_field(self, extra_dict, field_name):
backcompat_prefix = "extra__snowflake__"
backcompat_key = f"{backcompat_prefix}{field_name}"
if field_name.startswith("extra__"):
raise ValueError(
f"Got prefixed name {field_name}; please remove the '{backcompat_prefix}' prefix "
f"when using this method."
)
if field_name in extra_dict:
import warnings
if backcompat_key in extra_dict:
warnings.warn(
f"Conflicting params `{field_name}` and `{backcompat_key}` found in extras. "
f"Using value for `{field_name}`. Please ensure this is the correct "
f"value and remove the backcompat key `{backcompat_key}`."
)
return extra_dict[field_name] or None
return extra_dict.get(backcompat_key) or None
def _get_conn_params(self) -> dict[str, str | None]:
"""Fetch connection params as a dict.
This is used in ``get_uri()`` and ``get_connection()``.
"""
conn = self.get_connection(self.snowflake_conn_id) # type: ignore[attr-defined]
extra_dict = conn.extra_dejson
account = self._get_field(extra_dict, "account") or ""
warehouse = self._get_field(extra_dict, "warehouse") or ""
database = self._get_field(extra_dict, "database") or ""
region = self._get_field(extra_dict, "region") or ""
role = self._get_field(extra_dict, "role") or ""
insecure_mode = _try_to_boolean(self._get_field(extra_dict, "insecure_mode"))
schema = conn.schema or ""
# authenticator and session_parameters never supported long name so we don't use _get_field
authenticator = extra_dict.get("authenticator", "snowflake")
session_parameters = extra_dict.get("session_parameters")
conn_config = {
"user": conn.login,
"password": conn.password or "",
"schema": self.schema or schema,
"database": self.database or database,
"account": self.account or account,
"warehouse": self.warehouse or warehouse,
"region": self.region or region,
"role": self.role or role,
"authenticator": self.authenticator or authenticator,
"session_parameters": self.session_parameters or session_parameters,
# application is used to track origin of the requests
"application": os.environ.get("AIRFLOW_SNOWFLAKE_PARTNER", "AIRFLOW"),
}
if insecure_mode:
conn_config["insecure_mode"] = insecure_mode
# If private_key_file is specified in the extra json, load the contents of the file as a private key.
# If private_key_content is specified in the extra json, use it as a private key.
# As a next step, specify this private key in the connection configuration.
# The connection password then becomes the passphrase for the private key.
# If your private key is not encrypted (not recommended), then leave the password empty.
private_key_file = self._get_field(extra_dict, "private_key_file")
private_key_content = self._get_field(extra_dict, "private_key_content")
private_key_pem = None
if private_key_content and private_key_file:
raise AirflowException(
"The private_key_file and private_key_content extra fields are mutually exclusive. "
"Please remove one."
)
elif private_key_file:
private_key_file_path = Path(private_key_file)
if not private_key_file_path.is_file() or private_key_file_path.stat().st_size == 0:
raise ValueError("The private_key_file path points to an empty or invalid file.")
if private_key_file_path.stat().st_size > 4096:
raise ValueError("The private_key_file size is too big. Please keep it less than 4 KB.")
private_key_pem = Path(private_key_file_path).read_bytes()
elif private_key_content:
private_key_pem = private_key_content.encode()
if private_key_pem:
passphrase = None
if conn.password:
passphrase = conn.password.strip().encode()
p_key = serialization.load_pem_private_key(
private_key_pem, password=passphrase, backend=default_backend()
)
pkb = p_key.private_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption(),
)
conn_config["private_key"] = pkb
conn_config.pop("password", None)
return conn_config
def get_uri(self) -> str:
"""Override DbApiHook get_uri method for get_sqlalchemy_engine()."""
conn_params = self._get_conn_params()
return self._conn_params_to_sqlalchemy_uri(conn_params)
def _conn_params_to_sqlalchemy_uri(self, conn_params: dict) -> str:
return URL(
**{
k: v
for k, v in conn_params.items()
if v and k not in ["session_parameters", "insecure_mode", "private_key"]
}
)
def get_conn(self) -> SnowflakeConnection:
"""Returns a snowflake.connection object."""
conn_config = self._get_conn_params()
conn = connector.connect(**conn_config)
return conn
def get_sqlalchemy_engine(self, engine_kwargs=None):
"""Get an sqlalchemy_engine object.
:param engine_kwargs: Kwargs used in :func:`~sqlalchemy.create_engine`.
:return: the created engine.
"""
engine_kwargs = engine_kwargs or {}
conn_params = self._get_conn_params()
if "insecure_mode" in conn_params:
engine_kwargs.setdefault("connect_args", dict())
engine_kwargs["connect_args"]["insecure_mode"] = True
for key in ["session_parameters", "private_key"]:
if conn_params.get(key):
engine_kwargs.setdefault("connect_args", dict())
engine_kwargs["connect_args"][key] = conn_params[key]
return create_engine(self._conn_params_to_sqlalchemy_uri(conn_params), **engine_kwargs)
def set_autocommit(self, conn, autocommit: Any) -> None:
conn.autocommit(autocommit)
conn.autocommit_mode = autocommit
def get_autocommit(self, conn):
return getattr(conn, "autocommit_mode", False)
def run(
self,
sql: str | Iterable[str],
autocommit: bool = False,
parameters: Iterable | Mapping | None = None,
handler: Callable | None = None,
split_statements: bool = True,
return_last: bool = True,
return_dictionaries: bool = False,
) -> Any | list[Any] | None:
"""Runs a command or a list of commands.
Pass a list of SQL statements to the SQL parameter to get them to
execute sequentially. The variable ``execution_info`` is returned so
that it can be used in the Operators to modify the behavior depending on
the result of the query (i.e fail the operator if the copy has processed
0 files).
:param sql: The SQL string to be executed with possibly multiple
statements, or a list of sql statements to execute
:param autocommit: What to set the connection's autocommit setting to
before executing the query.
:param parameters: The parameters to render the SQL query with.
:param handler: The result handler which is called with the result of
each statement.
:param split_statements: Whether to split a single SQL string into
statements and run separately
:param return_last: Whether to return result for only last statement or
for all after split.
:param return_dictionaries: Whether to return dictionaries rather than
regular DBAPI sequences as rows in the result. The dictionaries are
of form ``{ 'column1_name': value1, 'column2_name': value2 ... }``.
:return: Result of the last SQL statement if *handler* is set.
*None* otherwise.
"""
self.query_ids = []
if isinstance(sql, str):
if split_statements:
split_statements_tuple = util_text.split_statements(StringIO(sql))
sql_list: Iterable[str] = [
sql_string for sql_string, _ in split_statements_tuple if sql_string
]
else:
sql_list = [self.strip_sql_string(sql)]
else:
sql_list = sql
if sql_list:
self.log.debug("Executing following statements against Snowflake DB: %s", sql_list)
else:
raise ValueError("List of SQL statements is empty")
with closing(self.get_conn()) as conn:
self.set_autocommit(conn, autocommit)
with self._get_cursor(conn, return_dictionaries) as cur:
results = []
for sql_statement in sql_list:
self._run_command(cur, sql_statement, parameters)
if handler is not None:
result = handler(cur)
if return_single_query_results(sql, return_last, split_statements):
_last_result = result
_last_description = cur.description
else:
results.append(result)
self.descriptions.append(cur.description)
query_id = cur.sfqid
self.log.info("Rows affected: %s", cur.rowcount)
self.log.info("Snowflake query id: %s", query_id)
self.query_ids.append(query_id)
# If autocommit was set to False or db does not support autocommit, we do a manual commit.
if not self.get_autocommit(conn):
conn.commit()
if handler is None:
return None
if return_single_query_results(sql, return_last, split_statements):
self.descriptions = [_last_description]
return _last_result
else:
return results
@contextmanager
def _get_cursor(self, conn: Any, return_dictionaries: bool):
cursor = None
try:
if return_dictionaries:
cursor = conn.cursor(DictCursor)
else:
cursor = conn.cursor()
yield cursor
finally:
if cursor is not None:
cursor.close()
| 18,283 | 42.224586 | 112 | py |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.