repo | file | code | file_length | avg_line_length | max_line_length | extension_type
---|---|---|---|---|---|---|
airflow | airflow-main/airflow/providers/smtp/notifications/smtp.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from functools import cached_property
from typing import Any, Iterable
from airflow.exceptions import AirflowOptionalProviderFeatureException
try:
from airflow.notifications.basenotifier import BaseNotifier
except ImportError:
raise AirflowOptionalProviderFeatureException(
"Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
)
from airflow.providers.smtp.hooks.smtp import SmtpHook
class SmtpNotifier(BaseNotifier):
"""
SMTP Notifier.
:param smtp_conn_id: The :ref:`smtp connection id <howto/connection:smtp>`
that contains the information used to authenticate the client.
"""
template_fields = (
"from_email",
"to",
"subject",
"html_content",
"files",
"cc",
"bcc",
"mime_subtype",
"mime_charset",
"custom_headers",
)
def __init__(
self,
from_email: str | None,
to: str | Iterable[str],
subject: str,
html_content: str,
files: list[str] | None = None,
cc: str | Iterable[str] | None = None,
bcc: str | Iterable[str] | None = None,
mime_subtype: str = "mixed",
mime_charset: str = "utf-8",
custom_headers: dict[str, Any] | None = None,
smtp_conn_id: str = SmtpHook.default_conn_name,
):
super().__init__()
self.smtp_conn_id = smtp_conn_id
self.from_email = from_email
self.to = to
self.subject = subject
self.html_content = html_content
self.files = files
self.cc = cc
self.bcc = bcc
self.mime_subtype = mime_subtype
self.mime_charset = mime_charset
self.custom_headers = custom_headers
@cached_property
def hook(self) -> SmtpHook:
"""Smtp Events Hook."""
return SmtpHook(smtp_conn_id=self.smtp_conn_id)
def notify(self, context):
"""Send a email via smtp server."""
with self.hook as smtp:
smtp.send_email_smtp(
smtp_conn_id=self.smtp_conn_id,
from_email=self.from_email,
to=self.to,
subject=self.subject,
html_content=self.html_content,
files=self.files,
cc=self.cc,
bcc=self.bcc,
mime_subtype=self.mime_subtype,
mime_charset=self.mime_charset,
custom_headers=self.custom_headers,
)
send_smtp_notification = SmtpNotifier
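# Illustrative usage sketch (not part of the provider source): a minimal example of
# building the notifier so it can be attached to a DAG or task as ``on_failure_callback``.
# The connection id, addresses and templated strings below are hypothetical placeholders;
# no email is sent until Airflow invokes the notifier with a task context.
def _example_smtp_notifier() -> SmtpNotifier:
    return SmtpNotifier(
        from_email="alerts@example.com",  # hypothetical sender
        to="oncall@example.com",  # hypothetical recipient
        subject="[Airflow] {{ dag.dag_id }} failed",
        html_content="Task {{ ti.task_id }} failed in run {{ run_id }}.",
        smtp_conn_id=SmtpHook.default_conn_name,
    )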
| 3,374 | 30.542056 | 100 | py |
airflow | airflow-main/airflow/providers/smtp/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/smtp/operators/smtp.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any, Sequence
from airflow.models import BaseOperator
from airflow.providers.smtp.hooks.smtp import SmtpHook
from airflow.utils.context import Context
class EmailOperator(BaseOperator):
"""
Sends an email.
:param to: list of emails to send the email to. (templated)
:param from_email: email to send from. (templated)
:param subject: subject line for the email. (templated)
:param html_content: content of the email, html markup
is allowed. (templated)
:param files: file names to attach in email (templated)
:param cc: list of recipients to be added in CC field
:param bcc: list of recipients to be added in BCC field
:param mime_subtype: MIME sub content type
:param mime_charset: character set parameter added to the Content-Type
header.
:param custom_headers: additional headers to add to the MIME message.
"""
template_fields: Sequence[str] = ("to", "from_email", "subject", "html_content", "files")
template_fields_renderers = {"html_content": "html"}
template_ext: Sequence[str] = (".html",)
ui_color = "#e6faf9"
def __init__(
self,
*,
to: list[str] | str,
subject: str,
html_content: str,
from_email: str | None = None,
files: list | None = None,
cc: list[str] | str | None = None,
bcc: list[str] | str | None = None,
mime_subtype: str = "mixed",
mime_charset: str = "utf-8",
conn_id: str = "smtp_default",
custom_headers: dict[str, Any] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.to = to
self.subject = subject
self.html_content = html_content
self.from_email = from_email
self.files = files or []
self.cc = cc
self.bcc = bcc
self.mime_subtype = mime_subtype
self.mime_charset = mime_charset
self.conn_id = conn_id
self.custom_headers = custom_headers
def execute(self, context: Context):
with SmtpHook(smtp_conn_id=self.conn_id) as smtp_hook:
return smtp_hook.send_email_smtp(
to=self.to,
subject=self.subject,
html_content=self.html_content,
from_email=self.from_email,
files=self.files,
cc=self.cc,
bcc=self.bcc,
mime_subtype=self.mime_subtype,
mime_charset=self.mime_charset,
conn_id=self.conn_id,
custom_headers=self.custom_headers,
)
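# Illustrative usage sketch (not part of the provider source): instantiating the operator
# only defines the task; the SMTP call happens when Airflow runs ``execute``. The task id,
# recipients and connection id below are hypothetical placeholders.
def _example_email_operator() -> EmailOperator:
    return EmailOperator(
        task_id="send_daily_report",
        to=["team@example.com"],
        subject="Daily report for {{ ds }}",
        html_content="<p>All tasks finished for {{ ds }}.</p>",
        conn_id="smtp_default",
    )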
| 3,434 | 35.542553 | 93 | py |
airflow | airflow-main/airflow/providers/smtp/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/smtp/hooks/smtp.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Hook for sending emails over the SMTP protocol.
It uses the smtplib library that is already integrated in Python 3.
"""
from __future__ import annotations
import collections.abc
import os
import re
import smtplib
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from typing import Any, Iterable
from airflow.exceptions import AirflowException, AirflowNotFoundException
from airflow.hooks.base import BaseHook
from airflow.models.connection import Connection
class SmtpHook(BaseHook):
"""
This hook connects to a mail server by using the smtp protocol.
.. note:: Please call this Hook as context manager via `with`
to automatically open and close the connection to the mail server.
:param smtp_conn_id: The :ref:`smtp connection id <howto/connection:smtp>`
that contains the information used to authenticate the client.
"""
conn_name_attr = "smtp_conn_id"
default_conn_name = "smtp_default"
conn_type = "smtp"
hook_name = "SMTP"
def __init__(self, smtp_conn_id: str = default_conn_name) -> None:
super().__init__()
self.smtp_conn_id = smtp_conn_id
self.smtp_connection: Connection | None = None
self.smtp_client: smtplib.SMTP_SSL | smtplib.SMTP | None = None
def __enter__(self) -> SmtpHook:
return self.get_conn()
def __exit__(self, exc_type, exc_val, exc_tb):
self.smtp_client.close()
def get_conn(self) -> SmtpHook:
"""
Login to the smtp server.
.. note:: Please call this Hook as context manager via `with`
to automatically open and close the connection to the smtp server.
:return: an authorized SmtpHook object.
"""
if not self.smtp_client:
try:
self.smtp_connection = self.get_connection(self.smtp_conn_id)
except AirflowNotFoundException:
raise AirflowException("SMTP connection is not found.")
for attempt in range(1, self.smtp_retry_limit + 1):
try:
self.smtp_client = self._build_client()
except smtplib.SMTPServerDisconnected:
if attempt < self.smtp_retry_limit:
continue
raise AirflowException("Unable to connect to smtp server")
if self.smtp_starttls:
self.smtp_client.starttls()
if self.smtp_user and self.smtp_password:
self.smtp_client.login(self.smtp_user, self.smtp_password)
break
return self
def _build_client(self) -> smtplib.SMTP_SSL | smtplib.SMTP:
SMTP: type[smtplib.SMTP_SSL] | type[smtplib.SMTP]
if self.use_ssl:
SMTP = smtplib.SMTP_SSL
else:
SMTP = smtplib.SMTP
smtp_kwargs: dict[str, Any] = {"host": self.host}
if self.port:
smtp_kwargs["port"] = self.port
smtp_kwargs["timeout"] = self.timeout
return SMTP(**smtp_kwargs)
@classmethod
def get_connection_form_widgets(cls) -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import BooleanField, IntegerField, StringField
from wtforms.validators import NumberRange
return {
"from_email": StringField(lazy_gettext("From email"), widget=BS3TextFieldWidget()),
"timeout": IntegerField(
lazy_gettext("Connection timeout"),
validators=[NumberRange(min=0)],
widget=BS3TextFieldWidget(),
default=30,
),
"retry_limit": IntegerField(
lazy_gettext("Number of Retries"),
validators=[NumberRange(min=0)],
widget=BS3TextFieldWidget(),
default=5,
),
"disable_tls": BooleanField(lazy_gettext("Disable TLS"), default=False),
"disable_ssl": BooleanField(lazy_gettext("Disable SSL"), default=False),
}
def test_connection(self) -> tuple[bool, str]:
"""Test SMTP connectivity from UI."""
try:
smtp_client = self.get_conn().smtp_client
if smtp_client:
status = smtp_client.noop()[0]
if status == 250:
return True, "Connection successfully tested"
except Exception as e:
return False, str(e)
return False, "Failed to establish connection"
def send_email_smtp(
self,
*,
to: str | Iterable[str],
subject: str,
html_content: str,
from_email: str | None = None,
files: list[str] | None = None,
dryrun: bool = False,
cc: str | Iterable[str] | None = None,
bcc: str | Iterable[str] | None = None,
mime_subtype: str = "mixed",
mime_charset: str = "utf-8",
custom_headers: dict[str, Any] | None = None,
**kwargs,
) -> None:
"""Send an email with html content.
:param to: Recipient email address or list of addresses.
:param subject: Email subject.
:param html_content: Email body in HTML format.
:param from_email: Sender email address. If it's None, the hook will check if there is an email
provided in the connection, and raises an exception if not.
:param files: List of file paths to attach to the email.
:param dryrun: If True, the email will not be sent, but all other actions will be performed.
:param cc: Carbon copy recipient email address or list of addresses.
:param bcc: Blind carbon copy recipient email address or list of addresses.
:param mime_subtype: MIME subtype of the email.
:param mime_charset: MIME charset of the email.
:param custom_headers: Dictionary of custom headers to include in the email.
:param kwargs: Additional keyword arguments.
        >>> send_email_smtp(
        ...     to="test@example.com", subject="foo", html_content="<b>Foo</b> bar",
        ...     files=["/dev/null"], dryrun=True
        ... )
"""
if not self.smtp_client:
raise AirflowException("The 'smtp_client' should be initialized before!")
from_email = from_email or self.from_email
if not from_email:
raise AirflowException("You should provide `from_email` or define it in the connection.")
mime_msg, recipients = self._build_mime_message(
mail_from=from_email,
to=to,
subject=subject,
html_content=html_content,
files=files,
cc=cc,
bcc=bcc,
mime_subtype=mime_subtype,
mime_charset=mime_charset,
custom_headers=custom_headers,
)
if not dryrun:
for attempt in range(1, self.smtp_retry_limit + 1):
try:
self.smtp_client.sendmail(
from_addr=from_email, to_addrs=recipients, msg=mime_msg.as_string()
)
except smtplib.SMTPServerDisconnected as e:
if attempt < self.smtp_retry_limit:
continue
raise e
break
def _build_mime_message(
self,
mail_from: str | None,
to: str | Iterable[str],
subject: str,
html_content: str,
files: list[str] | None = None,
cc: str | Iterable[str] | None = None,
bcc: str | Iterable[str] | None = None,
mime_subtype: str = "mixed",
mime_charset: str = "utf-8",
custom_headers: dict[str, Any] | None = None,
) -> tuple[MIMEMultipart, list[str]]:
"""
Build a MIME message that can be used to send an email and returns a full list of recipients.
:param mail_from: Email address to set as the email's "From" field.
:param to: A string or iterable of strings containing email addresses
to set as the email's "To" field.
:param subject: The subject of the email.
:param html_content: The content of the email in HTML format.
:param files: A list of paths to files to be attached to the email.
:param cc: A string or iterable of strings containing email addresses
to set as the email's "CC" field.
:param bcc: A string or iterable of strings containing email addresses
to set as the email's "BCC" field.
:param mime_subtype: The subtype of the MIME message. Default: "mixed".
:param mime_charset: The charset of the email. Default: "utf-8".
:param custom_headers: Additional headers to add to the MIME message.
No validations are run on these values, and they should be able to be encoded.
:return: A tuple containing the email as a MIMEMultipart object and
a list of recipient email addresses.
"""
to = self._get_email_address_list(to)
msg = MIMEMultipart(mime_subtype)
msg["Subject"] = subject
msg["From"] = mail_from
msg["To"] = ", ".join(to)
recipients = to
if cc:
cc = self._get_email_address_list(cc)
msg["CC"] = ", ".join(cc)
recipients += cc
if bcc:
# don't add bcc in header
bcc = self._get_email_address_list(bcc)
recipients += bcc
msg["Date"] = formatdate(localtime=True)
mime_text = MIMEText(html_content, "html", mime_charset)
msg.attach(mime_text)
for fname in files or []:
basename = os.path.basename(fname)
with open(fname, "rb") as file:
part = MIMEApplication(file.read(), Name=basename)
part["Content-Disposition"] = f'attachment; filename="{basename}"'
part["Content-ID"] = f"<{basename}>"
msg.attach(part)
if custom_headers:
for header_key, header_value in custom_headers.items():
msg[header_key] = header_value
return msg, recipients
def _get_email_address_list(self, addresses: str | Iterable[str]) -> list[str]:
"""
Returns a list of email addresses from the provided input.
:param addresses: A string or iterable of strings containing email addresses.
:return: A list of email addresses.
:raises TypeError: If the input is not a string or iterable of strings.
"""
if isinstance(addresses, str):
return self._get_email_list_from_str(addresses)
elif isinstance(addresses, collections.abc.Iterable):
if not all(isinstance(item, str) for item in addresses):
raise TypeError("The items in your iterable must be strings.")
return list(addresses)
else:
raise TypeError(f"Unexpected argument type: Received '{type(addresses).__name__}'.")
def _get_email_list_from_str(self, addresses: str) -> list[str]:
"""
Extract a list of email addresses from a string.
The string can contain multiple email addresses separated by
any of the following delimiters: ',' or ';'.
:param addresses: A string containing one or more email addresses.
:return: A list of email addresses.
"""
pattern = r"\s*[,;]\s*"
return [address for address in re.split(pattern, addresses)]
@property
def conn(self) -> Connection:
if not self.smtp_connection:
raise AirflowException("The smtp connection should be loaded before!")
return self.smtp_connection
@property
def smtp_retry_limit(self) -> int:
return int(self.conn.extra_dejson.get("retry_limit", 5))
@property
def from_email(self) -> str | None:
return self.conn.extra_dejson.get("from_email")
@property
def smtp_user(self) -> str:
return self.conn.login
@property
def smtp_password(self) -> str:
return self.conn.password
@property
def smtp_starttls(self) -> bool:
return not bool(self.conn.extra_dejson.get("disable_tls", False))
@property
def host(self) -> str:
return self.conn.host
@property
def port(self) -> int:
return self.conn.port
@property
def timeout(self) -> int:
return int(self.conn.extra_dejson.get("timeout", 30))
@property
def use_ssl(self) -> bool:
return not bool(self.conn.extra_dejson.get("disable_ssl", False))
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "extra"],
"relabeling": {},
}
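# Illustrative usage sketch (not part of the provider source): the hook is meant to be used
# as a context manager so the SMTP connection is opened and closed automatically. The
# connection id and addresses are hypothetical, and ``dryrun=True`` builds the message
# without actually sending it.
def _example_send_with_smtp_hook() -> None:
    with SmtpHook(smtp_conn_id="smtp_default") as smtp:
        smtp.send_email_smtp(
            to="user@example.com",
            subject="Nightly export finished",
            html_content="<b>Done.</b>",
            from_email="noreply@example.com",
            dryrun=True,
        )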
| 13,784 | 36.767123 | 103 | py |
airflow | airflow-main/airflow/providers/zendesk/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "4.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-zendesk:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,532 | 35.5 | 116 | py |
airflow | airflow-main/airflow/providers/zendesk/hooks/zendesk.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from zenpy import Zenpy
from zenpy.lib.api import BaseApi
from zenpy.lib.api_objects import JobStatus, Ticket, TicketAudit
from zenpy.lib.generator import SearchResultGenerator
from airflow.hooks.base import BaseHook
class ZendeskHook(BaseHook):
"""
Interact with Zendesk. This hook uses the Zendesk conn_id.
:param zendesk_conn_id: The Airflow connection used for Zendesk credentials.
"""
conn_name_attr = "zendesk_conn_id"
default_conn_name = "zendesk_default"
conn_type = "zendesk"
hook_name = "Zendesk"
def __init__(self, zendesk_conn_id: str = default_conn_name) -> None:
super().__init__()
self.zendesk_conn_id = zendesk_conn_id
self.base_api: BaseApi | None = None
zenpy_client, url = self._init_conn()
self.zenpy_client = zenpy_client
self.__url = url
self.get = self.zenpy_client.users._get
def _init_conn(self) -> tuple[Zenpy, str]:
"""
Create the Zenpy Client for our Zendesk connection.
:return: zenpy.Zenpy client and the url for the API.
"""
conn = self.get_connection(self.zendesk_conn_id)
url = "https://" + conn.host
domain = conn.host
subdomain: str | None = None
if conn.host.count(".") >= 2:
dot_splitted_string = conn.host.rsplit(".", 2)
subdomain = dot_splitted_string[0]
domain = ".".join(dot_splitted_string[1:])
return Zenpy(domain=domain, subdomain=subdomain, email=conn.login, password=conn.password), url
def get_conn(self) -> Zenpy:
"""
Get the underlying Zenpy client.
:return: zenpy.Zenpy client.
"""
return self.zenpy_client
def get_ticket(self, ticket_id: int) -> Ticket:
"""
Retrieve ticket.
:return: Ticket object retrieved.
"""
return self.zenpy_client.tickets(id=ticket_id)
def search_tickets(self, **kwargs) -> SearchResultGenerator:
"""
Search tickets.
:param kwargs: (optional) Search fields given to the zenpy search method.
:return: SearchResultGenerator of Ticket objects.
"""
return self.zenpy_client.search(type="ticket", **kwargs)
def create_tickets(self, tickets: Ticket | list[Ticket], **kwargs) -> TicketAudit | JobStatus:
"""
Create tickets.
:param tickets: Ticket or List of Ticket to create.
:param kwargs: (optional) Additional fields given to the zenpy create method.
:return: A TicketAudit object containing information about the Ticket created.
When sending bulk request, returns a JobStatus object.
"""
return self.zenpy_client.tickets.create(tickets, **kwargs)
def update_tickets(self, tickets: Ticket | list[Ticket], **kwargs) -> TicketAudit | JobStatus:
"""
Update tickets.
:param tickets: Updated Ticket or List of Ticket object to update.
:param kwargs: (optional) Additional fields given to the zenpy update method.
:return: A TicketAudit object containing information about the Ticket updated.
When sending bulk request, returns a JobStatus object.
"""
return self.zenpy_client.tickets.update(tickets, **kwargs)
def delete_tickets(self, tickets: Ticket | list[Ticket], **kwargs) -> None:
"""
Delete tickets, returns nothing on success and raises APIException on failure.
:param tickets: Ticket or List of Ticket to delete.
:param kwargs: (optional) Additional fields given to the zenpy delete method.
:return:
"""
return self.zenpy_client.tickets.delete(tickets, **kwargs)
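# Illustrative usage sketch (not part of the provider source): a rough example of creating
# and then re-fetching a ticket through the hook. The connection id and ticket contents are
# hypothetical placeholders; for a single ticket, ``create_tickets`` returns a TicketAudit.
def _example_zendesk_ticket_roundtrip() -> Ticket:
    hook = ZendeskHook(zendesk_conn_id="zendesk_default")
    ticket = Ticket(subject="Broken widget", description="The widget stopped working.")
    audit = hook.create_tickets(ticket)
    return hook.get_ticket(ticket_id=audit.ticket.id)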
| 4,536 | 36.495868 | 103 | py |
airflow | airflow-main/airflow/providers/zendesk/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/facebook/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-facebook:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/facebook/ads/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/facebook/ads/hooks/ads.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Facebook Ads Reporting hooks."""
from __future__ import annotations
import time
from enum import Enum
from functools import cached_property
from typing import Any
from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.adreportrun import AdReportRun
from facebook_business.adobjects.adsinsights import AdsInsights
from facebook_business.api import FacebookAdsApi
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
class JobStatus(Enum):
"""Available options for facebook async task status."""
COMPLETED = "Job Completed"
STARTED = "Job Started"
RUNNING = "Job Running"
FAILED = "Job Failed"
SKIPPED = "Job Skipped"
class FacebookAdsReportingHook(BaseHook):
"""Facebook Ads API.
.. seealso::
For more information on the Facebook Ads API, take a look at the API docs:
https://developers.facebook.com/docs/marketing-apis/
:param facebook_conn_id: Airflow Facebook Ads connection ID
:param api_version: The version of Facebook API. Default to None. If it is None,
it will use the Facebook business SDK default version.
"""
conn_name_attr = "facebook_conn_id"
default_conn_name = "facebook_default"
conn_type = "facebook_social"
hook_name = "Facebook Ads"
def __init__(
self,
facebook_conn_id: str = default_conn_name,
api_version: str | None = None,
) -> None:
super().__init__()
self.facebook_conn_id = facebook_conn_id
self.api_version = api_version
self.client_required_fields = ["app_id", "app_secret", "access_token", "account_id"]
def _get_service(self) -> FacebookAdsApi:
"""Returns Facebook Ads Client using a service account."""
config = self.facebook_ads_config
return FacebookAdsApi.init(
app_id=config["app_id"],
app_secret=config["app_secret"],
access_token=config["access_token"],
api_version=self.api_version,
)
@cached_property
def multiple_accounts(self) -> bool:
"""Checks whether provided account_id in the Facebook Ads Connection is provided as a list."""
return isinstance(self.facebook_ads_config["account_id"], list)
@cached_property
def facebook_ads_config(self) -> dict:
"""Get the ``facebook_ads_config`` attribute.
This fetches Facebook Ads connection from meta database, and sets the
``facebook_ads_config`` attribute with returned config file.
"""
self.log.info("Fetching fb connection: %s", self.facebook_conn_id)
conn = self.get_connection(self.facebook_conn_id)
config = conn.extra_dejson
missing_keys = self.client_required_fields - config.keys()
if missing_keys:
message = f"{missing_keys} fields are missing"
raise AirflowException(message)
return config
def bulk_facebook_report(
self,
params: dict[str, Any] | None,
fields: list[str],
sleep_time: int = 5,
) -> list[AdsInsights] | dict[str, list[AdsInsights]]:
"""Pull data from Facebook Ads API regarding Account ID with matching return type.
The return type and value depends on the ``account_id`` configuration. If the
configuration is a str representing a single Account ID, the return value is the
list of reports for that ID. If the configuration is a list of str representing
multiple Account IDs, the return value is a dict of Account IDs and their
respective list of reports.
:param fields: List of fields that is obtained from Facebook. Found in AdsInsights.Field class.
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
:param params: Parameters that determine the query for Facebook
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
:param sleep_time: Time to sleep when async call is happening
:return: Facebook Ads API response,
converted to Facebook Ads Row objects regarding given Account ID type
"""
api = self._get_service()
if self.multiple_accounts:
all_insights = {}
for account_id in self.facebook_ads_config["account_id"]:
all_insights[account_id] = self._facebook_report(
account_id=account_id, api=api, params=params, fields=fields, sleep_time=sleep_time
)
self.log.info(
"%s Account Id used to extract data from Facebook Ads Iterators successfully", account_id
)
return all_insights
else:
return self._facebook_report(
account_id=self.facebook_ads_config["account_id"],
api=api,
params=params,
fields=fields,
sleep_time=sleep_time,
)
def _facebook_report(
self,
account_id: str,
api: FacebookAdsApi,
params: dict[str, Any] | None,
fields: list[str],
sleep_time: int = 5,
) -> list[AdsInsights]:
"""Pull data from the Facebook Ads API with given ``account_id``.
:param account_id: Facebook Account ID that holds ads information
https://developers.facebook.com/docs/marketing-api/reference/ads-insights/
:param api: FacebookAdsApi created in the hook
:param fields: List of fields that is obtained from Facebook. Found in AdsInsights.Field class.
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
:param params: Parameters that determine the query for Facebook
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
:param sleep_time: Time to sleep when async call is happening
"""
ad_account = AdAccount(account_id, api=api)
_async = ad_account.get_insights(params=params, fields=fields, is_async=True)
while True:
request = _async.api_get()
async_status = request[AdReportRun.Field.async_status]
percent = request[AdReportRun.Field.async_percent_completion]
self.log.info("%s %s completed, async_status: %s", percent, "%", async_status)
if async_status == JobStatus.COMPLETED.value:
self.log.info("Job run completed")
break
if async_status in [JobStatus.SKIPPED.value, JobStatus.FAILED.value]:
message = f"{async_status}. Please retry."
raise AirflowException(message)
time.sleep(sleep_time)
report_run_id = _async.api_get()["report_run_id"]
report_object = AdReportRun(report_run_id, api=api)
self.log.info("Extracting data from returned Facebook Ads Iterators")
insights = report_object.get_insights()
return list(insights)
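# Illustrative usage sketch (not part of the provider source): a rough example of pulling
# yesterday's ad-level report. The connection id is the provider default and the report
# fields/params are hypothetical examples of AdsInsights fields and Insights API parameters.
def _example_facebook_ads_report() -> list[AdsInsights] | dict[str, list[AdsInsights]]:
    hook = FacebookAdsReportingHook(facebook_conn_id="facebook_default")
    return hook.bulk_facebook_report(
        params={"level": "ad", "date_preset": "yesterday"},
        fields=[AdsInsights.Field.campaign_name, AdsInsights.Field.clicks],
    )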
| 7,802 | 41.178378 | 109 | py |
airflow | airflow-main/airflow/providers/facebook/ads/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/tabular/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "1.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-tabular:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,532 | 35.5 | 116 | py |
airflow | airflow-main/airflow/providers/tabular/hooks/tabular.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
import requests
from requests import HTTPError
from airflow.hooks.base import BaseHook
DEFAULT_TABULAR_URL = "https://api.tabulardata.io/ws/v1"
TOKENS_ENDPOINT = "oauth/tokens"
class TabularHook(BaseHook):
"""
This hook acts as a base hook for tabular services.
It offers the ability to generate temporary, short-lived
session tokens to use within Airflow submitted jobs.
:param tabular_conn_id: The :ref:`Tabular connection id<howto/connection:tabular>`
which refers to the information to connect to the Tabular OAuth service.
"""
conn_name_attr = "tabular_conn_id"
default_conn_name = "tabular_default"
conn_type = "tabular"
hook_name = "Tabular"
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema", "port"],
"relabeling": {
"host": "Base URL",
"login": "Client ID",
"password": "Client Secret",
},
"placeholders": {
"host": DEFAULT_TABULAR_URL,
"login": "client_id (token credentials auth)",
"password": "secret (token credentials auth)",
},
}
def __init__(self, tabular_conn_id: str = default_conn_name) -> None:
super().__init__()
self.conn_id = tabular_conn_id
def test_connection(self) -> tuple[bool, str]:
"""Test the Tabular connection."""
try:
self.get_conn()
return True, "Successfully fetched token from Tabular"
except HTTPError as e:
return False, f"HTTP Error: {e}: {e.response.text}"
except Exception as e:
return False, str(e)
def get_conn(self) -> str:
"""Obtain a short-lived access token via a client_id and client_secret."""
conn = self.get_connection(self.conn_id)
base_url = conn.host if conn.host else DEFAULT_TABULAR_URL
base_url = base_url.rstrip("/")
client_id = conn.login
client_secret = conn.password
data = {"client_id": client_id, "client_secret": client_secret, "grant_type": "client_credentials"}
response = requests.post(f"{base_url}/{TOKENS_ENDPOINT}", data=data)
response.raise_for_status()
return response.json()["access_token"]
def get_token_macro(self):
return f"{{{{ conn.{self.conn_id}.get_hook().get_conn() }}}}"
| 3,336 | 34.5 | 107 | py |
airflow | airflow-main/airflow/providers/tabular/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/sftp/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "4.4.0"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-sftp:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,529 | 35.428571 | 113 | py |
airflow | airflow-main/airflow/providers/sftp/decorators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/sftp/decorators/sensors/sftp.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Callable, Sequence
from airflow.decorators.base import TaskDecorator, get_unique_task_id, task_decorator_factory
from airflow.providers.sftp.sensors.sftp import SFTPSensor
class _DecoratedSFTPSensor(SFTPSensor):
"""
Wraps a Python callable and captures args/kwargs when called for execution.
:param python_callable: A reference to an object that is callable
:param task_id: task Id
:param op_args: a list of positional arguments that will get unpacked when
calling your callable (templated)
:param op_kwargs: a dictionary of keyword arguments that will get unpacked
in your function (templated)
:param kwargs_to_upstream: For certain operators, we might need to upstream certain arguments
that would otherwise be absorbed by the DecoratedOperator (for example python_callable for the
PythonOperator). This gives a user the option to upstream kwargs as needed.
"""
template_fields: Sequence[str] = ("op_args", "op_kwargs", *SFTPSensor.template_fields)
custom_operator_name = "@task.sftp_sensor"
    # Since we won't mutate the arguments, a shallow copy is sufficient;
    # some objects (e.g. protobuf messages) can't be deep-copied.
shallow_copy_attrs: Sequence[str] = ("python_callable",)
def __init__(
self,
*,
task_id: str,
**kwargs,
) -> None:
kwargs.pop("multiple_outputs")
kwargs["task_id"] = get_unique_task_id(task_id, kwargs.get("dag"), kwargs.get("task_group"))
super().__init__(**kwargs)
def sftp_sensor_task(python_callable: Callable | None = None, **kwargs) -> TaskDecorator:
"""
Wraps a function into an Airflow operator.
Accepts kwargs for operator kwarg. Can be reused in a single DAG.
:param python_callable: Function to decorate
"""
return task_decorator_factory(
python_callable=python_callable,
multiple_outputs=False,
decorated_operator_class=_DecoratedSFTPSensor,
**kwargs,
)
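# Illustrative usage sketch (not part of the provider source): how the decorator is typically
# applied in a DAG file. The remote path and connection id are hypothetical; calling
# ``_example_wait_for_upload()`` inside a ``with DAG(...)`` block creates the sensor task,
# and the decorated function runs as an extra check once the poked file exists.
@sftp_sensor_task(path="/uploads/{{ ds }}/data.csv", sftp_conn_id="sftp_default")
def _example_wait_for_upload() -> bool:
    return True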
| 2,862 | 38.219178 | 102 | py |
airflow | airflow-main/airflow/providers/sftp/decorators/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/sftp/operators/sftp.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains SFTP operator."""
from __future__ import annotations
import os
import warnings
from pathlib import Path
from typing import Any, Sequence
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.models import BaseOperator
from airflow.providers.sftp.hooks.sftp import SFTPHook
from airflow.providers.ssh.hooks.ssh import SSHHook
class SFTPOperation:
"""Operation that can be used with SFTP."""
PUT = "put"
GET = "get"
class SFTPOperator(BaseOperator):
"""
    SFTPOperator for transferring files from a remote host to the local machine, or vice versa.
    This operator uses ``sftp_hook`` to open an SFTP transport channel that serves as the
    basis for the file transfer.
:param ssh_conn_id: :ref:`ssh connection id<howto/connection:ssh>`
from airflow Connections. `ssh_conn_id` will be ignored if `ssh_hook`
or `sftp_hook` is provided.
:param sftp_hook: predefined SFTPHook to use
Either `sftp_hook` or `ssh_conn_id` needs to be provided.
:param ssh_hook: Deprecated - predefined SSHHook to use for remote execution
Use `sftp_hook` instead.
:param remote_host: remote host to connect (templated)
Nullable. If provided, it will replace the `remote_host` which was
defined in `sftp_hook`/`ssh_hook` or predefined in the connection of `ssh_conn_id`.
:param local_filepath: local file path or list of local file paths to get or put. (templated)
:param remote_filepath: remote file path or list of remote file paths to get or put. (templated)
:param operation: specify operation 'get' or 'put', defaults to put
:param confirm: specify if the SFTP operation should be confirmed, defaults to True
:param create_intermediate_dirs: create missing intermediate directories when
copying from remote to local and vice-versa. Default is False.
Example: The following task would copy ``file.txt`` to the remote host
at ``/tmp/tmp1/tmp2/`` while creating ``tmp``,``tmp1`` and ``tmp2`` if they
don't exist. If the parameter is not passed it would error as the directory
does not exist. ::
put_file = SFTPOperator(
task_id="test_sftp",
ssh_conn_id="ssh_default",
local_filepath="/tmp/file.txt",
remote_filepath="/tmp/tmp1/tmp2/file.txt",
operation="put",
create_intermediate_dirs=True,
dag=dag
)
"""
template_fields: Sequence[str] = ("local_filepath", "remote_filepath", "remote_host")
def __init__(
self,
*,
ssh_hook: SSHHook | None = None,
sftp_hook: SFTPHook | None = None,
ssh_conn_id: str | None = None,
remote_host: str | None = None,
local_filepath: str | list[str],
remote_filepath: str | list[str],
operation: str = SFTPOperation.PUT,
confirm: bool = True,
create_intermediate_dirs: bool = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.ssh_hook = ssh_hook
self.sftp_hook = sftp_hook
self.ssh_conn_id = ssh_conn_id
self.remote_host = remote_host
self.operation = operation
self.confirm = confirm
self.create_intermediate_dirs = create_intermediate_dirs
self.local_filepath = local_filepath
self.remote_filepath = remote_filepath
def execute(self, context: Any) -> str | list[str] | None:
if isinstance(self.local_filepath, str):
local_filepath_array = [self.local_filepath]
else:
local_filepath_array = self.local_filepath
if isinstance(self.remote_filepath, str):
remote_filepath_array = [self.remote_filepath]
else:
remote_filepath_array = self.remote_filepath
if len(local_filepath_array) != len(remote_filepath_array):
raise ValueError(
f"{len(local_filepath_array)} paths in local_filepath "
f"!= {len(remote_filepath_array)} paths in remote_filepath"
)
if not (self.operation.lower() == SFTPOperation.GET or self.operation.lower() == SFTPOperation.PUT):
raise TypeError(
f"Unsupported operation value {self.operation}, "
f"expected {SFTPOperation.GET} or {SFTPOperation.PUT}."
)
# TODO: remove support for ssh_hook in next major provider version in hook and operator
if self.ssh_hook is not None and self.sftp_hook is not None:
raise AirflowException(
"Both `ssh_hook` and `sftp_hook` are defined. Please use only one of them."
)
if self.ssh_hook is not None:
if not isinstance(self.ssh_hook, SSHHook):
self.log.info("ssh_hook is invalid. Trying ssh_conn_id to create SFTPHook.")
self.sftp_hook = SFTPHook(ssh_conn_id=self.ssh_conn_id)
if self.sftp_hook is None:
warnings.warn(
"Parameter `ssh_hook` is deprecated"
"Please use `sftp_hook` instead."
"The old parameter `ssh_hook` will be removed in a future version.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
self.sftp_hook = SFTPHook(ssh_hook=self.ssh_hook)
file_msg = None
try:
if self.ssh_conn_id:
if self.sftp_hook and isinstance(self.sftp_hook, SFTPHook):
self.log.info("ssh_conn_id is ignored when sftp_hook/ssh_hook is provided.")
else:
self.log.info(
"sftp_hook/ssh_hook not provided or invalid. Trying ssh_conn_id to create SFTPHook."
)
self.sftp_hook = SFTPHook(ssh_conn_id=self.ssh_conn_id)
if not self.sftp_hook:
raise AirflowException("Cannot operate without sftp_hook or ssh_conn_id.")
if self.remote_host is not None:
self.log.info(
"remote_host is provided explicitly. "
"It will replace the remote_host which was defined "
"in sftp_hook or predefined in connection of ssh_conn_id."
)
self.sftp_hook.remote_host = self.remote_host
for _local_filepath, _remote_filepath in zip(local_filepath_array, remote_filepath_array):
if self.operation.lower() == SFTPOperation.GET:
local_folder = os.path.dirname(_local_filepath)
if self.create_intermediate_dirs:
Path(local_folder).mkdir(parents=True, exist_ok=True)
file_msg = f"from {_remote_filepath} to {_local_filepath}"
self.log.info("Starting to transfer %s", file_msg)
self.sftp_hook.retrieve_file(_remote_filepath, _local_filepath)
else:
remote_folder = os.path.dirname(_remote_filepath)
if self.create_intermediate_dirs:
self.sftp_hook.create_directory(remote_folder)
file_msg = f"from {_local_filepath} to {_remote_filepath}"
self.log.info("Starting to transfer file %s", file_msg)
self.sftp_hook.store_file(_remote_filepath, _local_filepath, confirm=self.confirm)
except Exception as e:
raise AirflowException(f"Error while transferring {file_msg}, error: {str(e)}")
return self.local_filepath
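# Illustrative usage sketch (not part of the provider source): defines (but does not run) an
# upload task that creates missing remote directories first. The paths and connection id are
# hypothetical placeholders.
def _example_sftp_put() -> SFTPOperator:
    return SFTPOperator(
        task_id="upload_report",
        ssh_conn_id="sftp_default",
        local_filepath="/tmp/report.csv",
        remote_filepath="/data/incoming/{{ ds }}/report.csv",
        operation=SFTPOperation.PUT,
        create_intermediate_dirs=True,
    )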
| 8,458 | 43.287958 | 108 | py |
airflow | airflow-main/airflow/providers/sftp/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/sftp/hooks/sftp.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains SFTP hook."""
from __future__ import annotations
import datetime
import os
import stat
import warnings
from fnmatch import fnmatch
from typing import Any, Callable
import paramiko
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.providers.ssh.hooks.ssh import SSHHook
class SFTPHook(SSHHook):
"""Interact with SFTP.
This hook inherits the SSH hook. Please refer to SSH hook for the input
arguments.
:Pitfalls::
- In contrast with FTPHook describe_directory only returns size, type and
modify. It doesn't return unix.owner, unix.mode, perm, unix.group and
unique.
- retrieve_file and store_file only take a local full path and not a
buffer.
- If no mode is passed to create_directory it will be created with 777
permissions.
    Errors that may occur throughout are expected to be handled downstream.
For consistency reasons with SSHHook, the preferred parameter is "ssh_conn_id".
:param ssh_conn_id: The :ref:`sftp connection id<howto/connection:sftp>`
:param ssh_hook: Optional SSH hook (included to support passing of an SSH hook to the SFTP operator)
"""
conn_name_attr = "ssh_conn_id"
default_conn_name = "sftp_default"
conn_type = "sftp"
hook_name = "SFTP"
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
return {
"hidden_fields": ["schema"],
"relabeling": {
"login": "Username",
},
}
def __init__(
self,
ssh_conn_id: str | None = "sftp_default",
ssh_hook: SSHHook | None = None,
*args,
**kwargs,
) -> None:
self.conn: paramiko.SFTPClient | None = None
# TODO: remove support for ssh_hook when it is removed from SFTPOperator
self.ssh_hook = ssh_hook
if self.ssh_hook is not None:
warnings.warn(
"Parameter `ssh_hook` is deprecated and will be removed in a future version.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
if not isinstance(self.ssh_hook, SSHHook):
raise AirflowException(
f"ssh_hook must be an instance of SSHHook, but got {type(self.ssh_hook)}"
)
self.log.info("ssh_hook is provided. It will be used to generate SFTP connection.")
self.ssh_conn_id = self.ssh_hook.ssh_conn_id
return
ftp_conn_id = kwargs.pop("ftp_conn_id", None)
if ftp_conn_id:
warnings.warn(
"Parameter `ftp_conn_id` is deprecated. Please use `ssh_conn_id` instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
ssh_conn_id = ftp_conn_id
kwargs["ssh_conn_id"] = ssh_conn_id
self.ssh_conn_id = ssh_conn_id
super().__init__(*args, **kwargs)
def get_conn(self) -> paramiko.SFTPClient: # type: ignore[override]
"""Opens an SFTP connection to the remote host."""
if self.conn is None:
# TODO: remove support for ssh_hook when it is removed from SFTPOperator
if self.ssh_hook is not None:
self.conn = self.ssh_hook.get_conn().open_sftp()
else:
self.conn = super().get_conn().open_sftp()
return self.conn
def close_conn(self) -> None:
"""Closes the SFTP connection."""
if self.conn is not None:
self.conn.close()
self.conn = None
def describe_directory(self, path: str) -> dict[str, dict[str, str | int | None]]:
"""Get file information in a directory on the remote system.
        The return format is ``{filename: {attributes}}``. The remote system
        must support the MLSD command.
:param path: full path to the remote directory
"""
conn = self.get_conn()
flist = sorted(conn.listdir_attr(path), key=lambda x: x.filename)
files = {}
for f in flist:
modify = datetime.datetime.fromtimestamp(f.st_mtime).strftime("%Y%m%d%H%M%S") # type: ignore
files[f.filename] = {
"size": f.st_size,
"type": "dir" if stat.S_ISDIR(f.st_mode) else "file", # type: ignore
"modify": modify,
}
return files
def list_directory(self, path: str) -> list[str]:
"""List files in a directory on the remote system.
:param path: full path to the remote directory to list
"""
conn = self.get_conn()
files = sorted(conn.listdir(path))
return files
def mkdir(self, path: str, mode: int = 0o777) -> None:
"""Create a directory on the remote system.
The default mode is ``0o777``, but on some systems, the current umask
value may be first masked out.
:param path: full path to the remote directory to create
:param mode: int permissions of octal mode for directory
"""
conn = self.get_conn()
conn.mkdir(path, mode=mode)
def isdir(self, path: str) -> bool:
"""Check if the path provided is a directory.
:param path: full path to the remote directory to check
"""
conn = self.get_conn()
try:
result = stat.S_ISDIR(conn.stat(path).st_mode) # type: ignore
except OSError:
result = False
return result
def isfile(self, path: str) -> bool:
"""Check if the path provided is a file.
:param path: full path to the remote file to check
"""
conn = self.get_conn()
try:
result = stat.S_ISREG(conn.stat(path).st_mode) # type: ignore
except OSError:
result = False
return result
def create_directory(self, path: str, mode: int = 0o777) -> None:
"""Create a directory on the remote system.
The default mode is ``0o777``, but on some systems, the current umask
value may be first masked out. Different from :func:`.mkdir`, this
function attempts to create parent directories if needed, and returns
silently if the target directory already exists.
:param path: full path to the remote directory to create
:param mode: int permissions of octal mode for directory
"""
conn = self.get_conn()
if self.isdir(path):
self.log.info("%s already exists", path)
return
elif self.isfile(path):
raise AirflowException(f"{path} already exists and is a file")
else:
dirname, basename = os.path.split(path)
if dirname and not self.isdir(dirname):
self.create_directory(dirname, mode)
if basename:
self.log.info("Creating %s", path)
conn.mkdir(path, mode=mode)
def delete_directory(self, path: str) -> None:
"""Delete a directory on the remote system.
:param path: full path to the remote directory to delete
"""
conn = self.get_conn()
conn.rmdir(path)
def retrieve_file(self, remote_full_path: str, local_full_path: str) -> None:
"""Transfer the remote file to a local location.
If local_full_path is a string path, the file will be put
at that location.
:param remote_full_path: full path to the remote file
:param local_full_path: full path to the local file
"""
conn = self.get_conn()
conn.get(remote_full_path, local_full_path)
def store_file(self, remote_full_path: str, local_full_path: str, confirm: bool = True) -> None:
"""Transfer a local file to the remote location.
        If local_full_path is a string path, the file will be read
        from that location.
        :param remote_full_path: full path to the remote file
        :param local_full_path: full path to the local file
        :param confirm: whether to stat the file after the upload to confirm the transfer
"""
conn = self.get_conn()
conn.put(local_full_path, remote_full_path, confirm=confirm)
def delete_file(self, path: str) -> None:
"""Remove a file on the server.
:param path: full path to the remote file
"""
conn = self.get_conn()
conn.remove(path)
def get_mod_time(self, path: str) -> str:
"""Get an entry's modification time.
:param path: full path to the remote file
"""
conn = self.get_conn()
ftp_mdtm = conn.stat(path).st_mtime
return datetime.datetime.fromtimestamp(ftp_mdtm).strftime("%Y%m%d%H%M%S") # type: ignore
def path_exists(self, path: str) -> bool:
"""Whether a remote entity exists.
:param path: full path to the remote file or directory
"""
conn = self.get_conn()
try:
conn.stat(path)
except OSError:
return False
return True
@staticmethod
def _is_path_match(path: str, prefix: str | None = None, delimiter: str | None = None) -> bool:
"""Whether given path starts with ``prefix`` (if set) and ends with ``delimiter`` (if set).
:param path: path to be checked
        :param prefix: if set, the path is checked to start with this prefix
        :param delimiter: if set, the path is checked to end with this delimiter
:return: bool
"""
if prefix is not None and not path.startswith(prefix):
return False
if delimiter is not None and not path.endswith(delimiter):
return False
return True
def walktree(
self,
path: str,
fcallback: Callable[[str], Any | None],
dcallback: Callable[[str], Any | None],
ucallback: Callable[[str], Any | None],
recurse: bool = True,
) -> None:
"""Recursively descend, depth first, the directory tree at ``path``.
This calls discrete callback functions for each regular file, directory,
and unknown file type.
:param str path:
root of remote directory to descend, use '.' to start at
:attr:`.pwd`
:param callable fcallback:
callback function to invoke for a regular file.
(form: ``func(str)``)
:param callable dcallback:
callback function to invoke for a directory. (form: ``func(str)``)
:param callable ucallback:
callback function to invoke for an unknown file type.
(form: ``func(str)``)
:param bool recurse: *Default: True* - should it recurse
"""
conn = self.get_conn()
for entry in self.list_directory(path):
pathname = os.path.join(path, entry)
mode = conn.stat(pathname).st_mode
if stat.S_ISDIR(mode): # type: ignore
# It's a directory, call the dcallback function
dcallback(pathname)
if recurse:
# now, recurse into it
self.walktree(pathname, fcallback, dcallback, ucallback)
elif stat.S_ISREG(mode): # type: ignore
# It's a file, call the fcallback function
fcallback(pathname)
else:
# Unknown file type
ucallback(pathname)
def get_tree_map(
self, path: str, prefix: str | None = None, delimiter: str | None = None
) -> tuple[list[str], list[str], list[str]]:
"""Get tuple with recursive lists of files, directories and unknown paths.
It is possible to filter results by giving prefix and/or delimiter parameters.
:param path: path from which tree will be built
:param prefix: if set paths will be added if start with prefix
:param delimiter: if set paths will be added if end with delimiter
:return: tuple with list of files, dirs and unknown items
"""
files: list[str] = []
dirs: list[str] = []
unknowns: list[str] = []
def append_matching_path_callback(list_: list[str]) -> Callable:
return lambda item: list_.append(item) if self._is_path_match(item, prefix, delimiter) else None
self.walktree(
path=path,
fcallback=append_matching_path_callback(files),
dcallback=append_matching_path_callback(dirs),
ucallback=append_matching_path_callback(unknowns),
recurse=True,
)
return files, dirs, unknowns
    def test_connection(self) -> tuple[bool, str]:
        """Test the SFTP connection by normalizing the remote root path."""
try:
conn = self.get_conn()
conn.normalize(".")
return True, "Connection successfully tested"
except Exception as e:
return False, str(e)
def get_file_by_pattern(self, path, fnmatch_pattern) -> str:
"""Get the first matching file based on the given fnmatch type pattern.
:param path: path to be checked
:param fnmatch_pattern: The pattern that will be matched with `fnmatch`
:return: string containing the first found file, or an empty string if none matched
"""
for file in self.list_directory(path):
if fnmatch(file, fnmatch_pattern):
return file
return ""
def get_files_by_pattern(self, path, fnmatch_pattern) -> list[str]:
"""Get all matching files based on the given fnmatch type pattern.
:param path: path to be checked
:param fnmatch_pattern: The pattern that will be matched with `fnmatch`
:return: list of string containing the found files, or an empty list if none matched
"""
matched_files = []
for file in self.list_directory(path):
if fnmatch(file, fnmatch_pattern):
matched_files.append(file)
return matched_files
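# ---------------------------------------------------------------------------
# Editor's illustrative sketch, not part of the upstream provider source.
# It shows how SFTPHook could be used to download every CSV file from a remote
# directory; the connection id "sftp_default", the remote directory "/incoming"
# and the local target directory are assumptions chosen purely for illustration.
def _example_download_csv_files(local_dir: str = "/tmp/sftp-downloads") -> list[str]:
    hook = SFTPHook(ssh_conn_id="sftp_default")
    os.makedirs(local_dir, exist_ok=True)
    downloaded = []
    try:
        for name in hook.get_files_by_pattern("/incoming", "*.csv"):
            local_path = os.path.join(local_dir, name)
            hook.retrieve_file(f"/incoming/{name}", local_path)
            downloaded.append(local_path)
    finally:
        hook.close_conn()
    return downloaded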
| 14,745 | 35.773067 | 108 | py |
airflow | airflow-main/airflow/providers/sftp/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/sftp/sensors/sftp.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains SFTP sensor."""
from __future__ import annotations
import os
from datetime import datetime
from typing import TYPE_CHECKING, Any, Callable, Sequence
from paramiko.sftp import SFTP_NO_SUCH_FILE
from airflow.providers.sftp.hooks.sftp import SFTPHook
from airflow.sensors.base import BaseSensorOperator, PokeReturnValue
from airflow.utils.timezone import convert_to_utc
if TYPE_CHECKING:
from airflow.utils.context import Context
class SFTPSensor(BaseSensorOperator):
"""
Waits for a file or directory to be present on SFTP.
:param path: Remote file or directory path
:param file_pattern: The pattern that will be used to match the file (fnmatch format)
:param sftp_conn_id: The connection to run the sensor against
    :param newer_than: DateTime that the file or file path must be newer than; the comparison is inclusive
"""
template_fields: Sequence[str] = (
"path",
"newer_than",
)
def __init__(
self,
*,
path: str,
file_pattern: str = "",
newer_than: datetime | None = None,
sftp_conn_id: str = "sftp_default",
python_callable: Callable | None = None,
op_args: list | None = None,
op_kwargs: dict[str, Any] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.path = path
self.file_pattern = file_pattern
self.hook: SFTPHook | None = None
self.sftp_conn_id = sftp_conn_id
self.newer_than: datetime | None = newer_than
self.python_callable: Callable | None = python_callable
self.op_args = op_args or []
self.op_kwargs = op_kwargs or {}
def poke(self, context: Context) -> PokeReturnValue | bool:
self.hook = SFTPHook(self.sftp_conn_id)
self.log.info("Poking for %s, with pattern %s", self.path, self.file_pattern)
files_found = []
if self.file_pattern:
files_from_pattern = self.hook.get_files_by_pattern(self.path, self.file_pattern)
if files_from_pattern:
actual_files_to_check = [
os.path.join(self.path, file_from_pattern) for file_from_pattern in files_from_pattern
]
else:
return False
else:
actual_files_to_check = [self.path]
for actual_file_to_check in actual_files_to_check:
try:
mod_time = self.hook.get_mod_time(actual_file_to_check)
self.log.info("Found File %s last modified: %s", str(actual_file_to_check), str(mod_time))
except OSError as e:
if e.errno != SFTP_NO_SUCH_FILE:
raise e
continue
if self.newer_than:
_mod_time = convert_to_utc(datetime.strptime(mod_time, "%Y%m%d%H%M%S"))
_newer_than = convert_to_utc(self.newer_than)
if _newer_than <= _mod_time:
files_found.append(actual_file_to_check)
else:
files_found.append(actual_file_to_check)
self.hook.close_conn()
        if not files_found:
return False
if self.python_callable is not None:
if self.op_kwargs:
self.op_kwargs["files_found"] = files_found
callable_return = self.python_callable(*self.op_args, **self.op_kwargs)
return PokeReturnValue(
is_done=True,
xcom_value={"files_found": files_found, "decorator_return_value": callable_return},
)
return True
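# ---------------------------------------------------------------------------
# Editor's illustrative sketch, not part of the upstream provider source.
# It shows one way the sensor above could be configured to wait for CSV files
# modified within the last two hours; the connection id, path, pattern and
# cut-off are assumptions chosen purely for illustration.
def _example_wait_for_new_csv():
    from datetime import timedelta, timezone

    return SFTPSensor(
        task_id="wait_for_new_csv",
        sftp_conn_id="sftp_default",
        path="/incoming",
        file_pattern="*.csv",
        newer_than=datetime.now(tz=timezone.utc) - timedelta(hours=2),
        poke_interval=60,  # seconds between pokes
        timeout=60 * 60,  # give up after one hour
    )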
| 4,410 | 37.692982 | 109 | py |
airflow | airflow-main/airflow/providers/sftp/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/openfaas/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-openfaas:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/openfaas/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/openfaas/hooks/openfaas.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
import requests
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
OK_STATUS_CODE = 202
class OpenFaasHook(BaseHook):
"""
    Interact with OpenFaaS to query, deploy, invoke and update functions.
    :param function_name: Name of the function. Defaults to None.
    :param conn_id: OpenFaaS connection to use. Defaults to open_faas_default,
        for example host: http://openfaas.faas.com, Connection Type: Http.
"""
GET_FUNCTION = "/system/function/"
INVOKE_ASYNC_FUNCTION = "/async-function/"
INVOKE_FUNCTION = "/function/"
DEPLOY_FUNCTION = "/system/functions"
UPDATE_FUNCTION = "/system/functions"
def __init__(self, function_name=None, conn_id: str = "open_faas_default", *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.function_name = function_name
self.conn_id = conn_id
def get_conn(self):
conn = self.get_connection(self.conn_id)
return conn
def deploy_function(self, overwrite_function_if_exist: bool, body: dict[str, Any]) -> None:
"""Deploy OpenFaaS function."""
if overwrite_function_if_exist:
            self.log.info("Function %s already exists, going to update it", self.function_name)
self.update_function(body)
else:
url = self.get_conn().host + self.DEPLOY_FUNCTION
self.log.info("Deploying function %s", url)
response = requests.post(url, body)
if response.status_code != OK_STATUS_CODE:
self.log.error("Response status %d", response.status_code)
self.log.error("Failed to deploy")
raise AirflowException("failed to deploy")
else:
self.log.info("Function deployed %s", self.function_name)
    def invoke_async_function(self, body: dict[str, Any]) -> None:
        """Invoke the function asynchronously."""
url = self.get_conn().host + self.INVOKE_ASYNC_FUNCTION + self.function_name
self.log.info("Invoking function asynchronously %s", url)
response = requests.post(url, body)
if response.ok:
self.log.info("Invoked %s", self.function_name)
else:
self.log.error("Response status %d", response.status_code)
raise AirflowException("failed to invoke function")
    def invoke_function(self, body: dict[str, Any]) -> None:
        """Invoke the function synchronously; blocks until the function completes and returns."""
url = self.get_conn().host + self.INVOKE_FUNCTION + self.function_name
self.log.info("Invoking function synchronously %s", url)
response = requests.post(url, body)
if response.ok:
self.log.info("Invoked %s", self.function_name)
self.log.info("Response code %s", response.status_code)
self.log.info("Response %s", response.text)
else:
self.log.error("Response status %d", response.status_code)
raise AirflowException("failed to invoke function")
def update_function(self, body: dict[str, Any]) -> None:
"""Update OpenFaaS function."""
url = self.get_conn().host + self.UPDATE_FUNCTION
self.log.info("Updating function %s", url)
response = requests.put(url, body)
if response.status_code != OK_STATUS_CODE:
self.log.error("Response status %d", response.status_code)
self.log.error("Failed to update response %s", response.content.decode("utf-8"))
raise AirflowException("failed to update " + self.function_name)
else:
self.log.info("Function was updated")
def does_function_exist(self) -> bool:
"""Whether OpenFaaS function exists or not."""
url = self.get_conn().host + self.GET_FUNCTION + self.function_name
response = requests.get(url)
if response.ok:
return True
else:
self.log.error("Failed to find function %s", self.function_name)
return False
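# ---------------------------------------------------------------------------
# Editor's illustrative sketch, not part of the upstream provider source.
# It shows one way the hook above could be called from a task; the connection
# id "open_faas_default", the function name "resize-image" and the payload are
# assumptions chosen purely for illustration.
def _example_invoke_resize_function():
    hook = OpenFaasHook(function_name="resize-image", conn_id="open_faas_default")
    if not hook.does_function_exist():
        raise AirflowException("resize-image is not deployed")
    # Asynchronous call: returns as soon as the gateway accepts the request.
    hook.invoke_async_function(body={"image_url": "https://example.com/cat.png"})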
| 4,890 | 41.163793 | 104 | py |
airflow | airflow-main/airflow/providers/cloudant/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-cloudant:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/cloudant/hooks/cloudant.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for Cloudant."""
from __future__ import annotations
from typing import Any
from cloudant import cloudant # type: ignore[attr-defined]
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
class CloudantHook(BaseHook):
"""
Interact with Cloudant. This class is a thin wrapper around the cloudant python library.
.. seealso:: the latest documentation `here <https://python-cloudant.readthedocs.io/en/latest/>`_.
:param cloudant_conn_id: The connection id to authenticate and get a session object from cloudant.
"""
conn_name_attr = "cloudant_conn_id"
default_conn_name = "cloudant_default"
conn_type = "cloudant"
hook_name = "Cloudant"
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["port", "extra"],
"relabeling": {"host": "Account", "login": "Username (or API Key)", "schema": "Database"},
}
def __init__(self, cloudant_conn_id: str = default_conn_name) -> None:
super().__init__()
self.cloudant_conn_id = cloudant_conn_id
def get_conn(self) -> cloudant:
"""
Opens a connection to the cloudant service and closes it automatically if used as context manager.
.. note::
In the connection form:
- 'host' equals the 'Account' (optional)
- 'login' equals the 'Username (or API Key)' (required)
- 'password' equals the 'Password' (required)
:return: an authorized cloudant session context manager object.
"""
conn = self.get_connection(self.cloudant_conn_id)
self._validate_connection(conn)
cloudant_session = cloudant(user=conn.login, passwd=conn.password, account=conn.host)
return cloudant_session
def _validate_connection(self, conn: cloudant) -> None:
for conn_param in ["login", "password"]:
if not getattr(conn, conn_param):
raise AirflowException(f"missing connection parameter {conn_param}")
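# ---------------------------------------------------------------------------
# Editor's illustrative sketch, not part of the upstream provider source.
# It shows the documented context-manager usage of the session returned by
# get_conn(); the connection id, database name and document id are assumptions
# chosen purely for illustration.
def _example_read_document():
    hook = CloudantHook(cloudant_conn_id="cloudant_default")
    with hook.get_conn() as cloudant_session:
        database = cloudant_session["my_database"]
        return database["my_document_id"]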
| 2,898 | 35.696203 | 106 | py |
airflow | airflow-main/airflow/providers/cloudant/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/yandex/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
import airflow
__all__ = ["__version__"]
__version__ = "3.3.0"
if packaging.version.parse(airflow.version.version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-yandex:{__version__}` requires Apache Airflow 2.4.0+"
)
| 1,401 | 34.948718 | 101 | py |
airflow | airflow-main/airflow/providers/yandex/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/yandex/operators/yandexcloud_dataproc.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from dataclasses import dataclass
from typing import TYPE_CHECKING, Iterable, Sequence
from airflow.models import BaseOperator
from airflow.providers.yandex.hooks.yandexcloud_dataproc import DataprocHook
if TYPE_CHECKING:
from airflow.utils.context import Context
@dataclass
class InitializationAction:
"""Data for initialization action to be run at start of DataProc cluster."""
uri: str # Uri of the executable file
args: Sequence[str] # Arguments to the initialization action
timeout: int # Execution timeout
class DataprocCreateClusterOperator(BaseOperator):
"""Creates Yandex.Cloud Data Proc cluster.
:param folder_id: ID of the folder in which cluster should be created.
:param cluster_name: Cluster name. Must be unique inside the folder.
:param cluster_description: Cluster description.
:param cluster_image_version: Cluster image version. Use default.
:param ssh_public_keys: List of SSH public keys that will be deployed to created compute instances.
:param subnet_id: ID of the subnetwork. All Data Proc cluster nodes will use one subnetwork.
:param services: List of services that will be installed to the cluster. Possible options:
        HDFS, YARN, MAPREDUCE, HIVE, TEZ, ZOOKEEPER, HBASE, SQOOP, FLUME, SPARK, ZEPPELIN, OOZIE
:param s3_bucket: Yandex.Cloud S3 bucket to store cluster logs.
Jobs will not work if the bucket is not specified.
:param zone: Availability zone to create cluster in.
Currently there are ru-central1-a, ru-central1-b and ru-central1-c.
:param service_account_id: Service account id for the cluster.
Service account can be created inside the folder.
:param masternode_resource_preset: Resources preset (CPU+RAM configuration)
for the primary node of the cluster.
:param masternode_disk_size: Masternode storage size in GiB.
:param masternode_disk_type: Masternode storage type. Possible options: network-ssd, network-hdd.
:param datanode_resource_preset: Resources preset (CPU+RAM configuration)
for the data nodes of the cluster.
:param datanode_disk_size: Datanodes storage size in GiB.
:param datanode_disk_type: Datanodes storage type. Possible options: network-ssd, network-hdd.
:param computenode_resource_preset: Resources preset (CPU+RAM configuration)
for the compute nodes of the cluster.
:param computenode_disk_size: Computenodes storage size in GiB.
:param computenode_disk_type: Computenodes storage type. Possible options: network-ssd, network-hdd.
:param connection_id: ID of the Yandex.Cloud Airflow connection.
    :param computenode_max_hosts_count: Maximum number of nodes of compute autoscaling subcluster.
:param computenode_warmup_duration: The warmup time of the instance in seconds. During this time,
traffic is sent to the instance,
but instance metrics are not collected. In seconds.
:param computenode_stabilization_duration: Minimum amount of time in seconds for monitoring before
Instance Groups can reduce the number of instances in the group.
During this time, the group size doesn't decrease,
even if the new metric values indicate that it should. In seconds.
:param computenode_preemptible: Preemptible instances are stopped at least once every 24 hours,
and can be stopped at any time if their resources are needed by Compute.
:param computenode_cpu_utilization_target: Defines an autoscaling rule
based on the average CPU utilization of the instance group.
in percents. 10-100.
By default is not set and default autoscaling strategy is used.
:param computenode_decommission_timeout: Timeout to gracefully decommission nodes during downscaling.
In seconds
:param properties: Properties passed to main node software.
Docs: https://cloud.yandex.com/docs/data-proc/concepts/settings-list
:param enable_ui_proxy: Enable UI Proxy feature for forwarding Hadoop components web interfaces
Docs: https://cloud.yandex.com/docs/data-proc/concepts/ui-proxy
:param host_group_ids: Dedicated host groups to place VMs of cluster on.
Docs: https://cloud.yandex.com/docs/compute/concepts/dedicated-host
:param security_group_ids: User security groups.
Docs: https://cloud.yandex.com/docs/data-proc/concepts/network#security-groups
:param log_group_id: Id of log group to write logs. By default logs will be sent to default log group.
To disable cloud log sending set cluster property dataproc:disable_cloud_logging = true
Docs: https://cloud.yandex.com/docs/data-proc/concepts/logs
:param initialization_actions: Set of init-actions to run when cluster starts.
Docs: https://cloud.yandex.com/docs/data-proc/concepts/init-action
"""
def __init__(
self,
*,
folder_id: str | None = None,
cluster_name: str | None = None,
cluster_description: str | None = "",
cluster_image_version: str | None = None,
ssh_public_keys: str | Iterable[str] | None = None,
subnet_id: str | None = None,
services: Iterable[str] = ("HDFS", "YARN", "MAPREDUCE", "HIVE", "SPARK"),
s3_bucket: str | None = None,
zone: str = "ru-central1-b",
service_account_id: str | None = None,
masternode_resource_preset: str | None = None,
masternode_disk_size: int | None = None,
masternode_disk_type: str | None = None,
datanode_resource_preset: str | None = None,
datanode_disk_size: int | None = None,
datanode_disk_type: str | None = None,
datanode_count: int = 1,
computenode_resource_preset: str | None = None,
computenode_disk_size: int | None = None,
computenode_disk_type: str | None = None,
computenode_count: int = 0,
computenode_max_hosts_count: int | None = None,
computenode_measurement_duration: int | None = None,
computenode_warmup_duration: int | None = None,
computenode_stabilization_duration: int | None = None,
computenode_preemptible: bool = False,
computenode_cpu_utilization_target: int | None = None,
computenode_decommission_timeout: int | None = None,
connection_id: str | None = None,
properties: dict[str, str] | None = None,
enable_ui_proxy: bool = False,
host_group_ids: Iterable[str] | None = None,
security_group_ids: Iterable[str] | None = None,
log_group_id: str | None = None,
initialization_actions: Iterable[InitializationAction] | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.folder_id = folder_id
self.yandex_conn_id = connection_id
self.cluster_name = cluster_name
self.cluster_description = cluster_description
self.cluster_image_version = cluster_image_version
self.ssh_public_keys = ssh_public_keys
self.subnet_id = subnet_id
self.services = services
self.s3_bucket = s3_bucket
self.zone = zone
self.service_account_id = service_account_id
self.masternode_resource_preset = masternode_resource_preset
self.masternode_disk_size = masternode_disk_size
self.masternode_disk_type = masternode_disk_type
self.datanode_resource_preset = datanode_resource_preset
self.datanode_disk_size = datanode_disk_size
self.datanode_disk_type = datanode_disk_type
self.datanode_count = datanode_count
self.computenode_resource_preset = computenode_resource_preset
self.computenode_disk_size = computenode_disk_size
self.computenode_disk_type = computenode_disk_type
self.computenode_count = computenode_count
self.computenode_max_hosts_count = computenode_max_hosts_count
self.computenode_measurement_duration = computenode_measurement_duration
self.computenode_warmup_duration = computenode_warmup_duration
self.computenode_stabilization_duration = computenode_stabilization_duration
self.computenode_preemptible = computenode_preemptible
self.computenode_cpu_utilization_target = computenode_cpu_utilization_target
self.computenode_decommission_timeout = computenode_decommission_timeout
self.properties = properties
self.enable_ui_proxy = enable_ui_proxy
self.host_group_ids = host_group_ids
self.security_group_ids = security_group_ids
self.log_group_id = log_group_id
self.initialization_actions = initialization_actions
self.hook: DataprocHook | None = None
def execute(self, context: Context) -> dict:
self.hook = DataprocHook(
yandex_conn_id=self.yandex_conn_id,
)
operation_result = self.hook.client.create_cluster(
folder_id=self.folder_id,
cluster_name=self.cluster_name,
cluster_description=self.cluster_description,
cluster_image_version=self.cluster_image_version,
ssh_public_keys=self.ssh_public_keys,
subnet_id=self.subnet_id,
services=self.services,
s3_bucket=self.s3_bucket,
zone=self.zone,
service_account_id=self.service_account_id,
masternode_resource_preset=self.masternode_resource_preset,
masternode_disk_size=self.masternode_disk_size,
masternode_disk_type=self.masternode_disk_type,
datanode_resource_preset=self.datanode_resource_preset,
datanode_disk_size=self.datanode_disk_size,
datanode_disk_type=self.datanode_disk_type,
datanode_count=self.datanode_count,
computenode_resource_preset=self.computenode_resource_preset,
computenode_disk_size=self.computenode_disk_size,
computenode_disk_type=self.computenode_disk_type,
computenode_count=self.computenode_count,
computenode_max_hosts_count=self.computenode_max_hosts_count,
computenode_measurement_duration=self.computenode_measurement_duration,
computenode_warmup_duration=self.computenode_warmup_duration,
computenode_stabilization_duration=self.computenode_stabilization_duration,
computenode_preemptible=self.computenode_preemptible,
computenode_cpu_utilization_target=self.computenode_cpu_utilization_target,
computenode_decommission_timeout=self.computenode_decommission_timeout,
properties=self.properties,
enable_ui_proxy=self.enable_ui_proxy,
host_group_ids=self.host_group_ids,
security_group_ids=self.security_group_ids,
log_group_id=self.log_group_id,
initialization_actions=self.initialization_actions
and [
self.hook.sdk.wrappers.InitializationAction(
uri=init_action.uri,
args=init_action.args,
timeout=init_action.timeout,
)
for init_action in self.initialization_actions
],
)
cluster_id = operation_result.response.id
context["task_instance"].xcom_push(key="cluster_id", value=cluster_id)
# Deprecated
context["task_instance"].xcom_push(key="yandexcloud_connection_id", value=self.yandex_conn_id)
return cluster_id
@property
def cluster_id(self):
return self.output
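# ---------------------------------------------------------------------------
# Editor's illustrative sketch, not part of the upstream provider source.
# It shows a minimal cluster-creation task built from the operator above; the
# connection id, folder id, subnet id, bucket name and sizing values are
# assumptions chosen purely for illustration.
def _example_create_cluster_task():
    return DataprocCreateClusterOperator(
        task_id="create_cluster",
        connection_id="yandexcloud_default",
        folder_id="b1gexamplefolderid00",
        cluster_name="airflow-demo-cluster",
        s3_bucket="dataproc-logs-bucket",
        zone="ru-central1-b",
        subnet_id="e9bexamplesubnetid00",
        services=("HDFS", "YARN", "SPARK"),
        masternode_resource_preset="s2.small",
        masternode_disk_size=20,
        datanode_resource_preset="s2.small",
        datanode_disk_size=20,
        datanode_count=2,
    )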
class DataprocBaseOperator(BaseOperator):
"""Base class for DataProc operators working with given cluster.
    :param yandex_conn_id: ID of the Yandex.Cloud Airflow connection.
    :param cluster_id: ID of the cluster to operate on. (templated)
"""
template_fields: Sequence[str] = ("cluster_id",)
def __init__(self, *, yandex_conn_id: str | None = None, cluster_id: str | None = None, **kwargs) -> None:
super().__init__(**kwargs)
self.cluster_id = cluster_id
self.yandex_conn_id = yandex_conn_id
def _setup(self, context: Context) -> DataprocHook:
if self.cluster_id is None:
self.cluster_id = context["task_instance"].xcom_pull(key="cluster_id")
if self.yandex_conn_id is None:
xcom_yandex_conn_id = context["task_instance"].xcom_pull(key="yandexcloud_connection_id")
if xcom_yandex_conn_id:
warnings.warn("Implicit pass of `yandex_conn_id` is deprecated, please pass it explicitly")
self.yandex_conn_id = xcom_yandex_conn_id
return DataprocHook(yandex_conn_id=self.yandex_conn_id)
def execute(self, context: Context):
raise NotImplementedError()
class DataprocDeleteClusterOperator(DataprocBaseOperator):
"""Deletes Yandex.Cloud Data Proc cluster.
:param connection_id: ID of the Yandex.Cloud Airflow connection.
:param cluster_id: ID of the cluster to remove. (templated)
"""
def __init__(self, *, connection_id: str | None = None, cluster_id: str | None = None, **kwargs) -> None:
super().__init__(yandex_conn_id=connection_id, cluster_id=cluster_id, **kwargs)
def execute(self, context: Context) -> None:
hook = self._setup(context)
hook.client.delete_cluster(self.cluster_id)
class DataprocCreateHiveJobOperator(DataprocBaseOperator):
"""Runs Hive job in Data Proc cluster.
:param query: Hive query.
:param query_file_uri: URI of the script that contains Hive queries. Can be placed in HDFS or S3.
:param properties: A mapping of property names to values, used to configure Hive.
:param script_variables: Mapping of query variable names to values.
:param continue_on_failure: Whether to continue executing queries if a query fails.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if it's specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
"""
def __init__(
self,
*,
query: str | None = None,
query_file_uri: str | None = None,
script_variables: dict[str, str] | None = None,
continue_on_failure: bool = False,
properties: dict[str, str] | None = None,
name: str = "Hive job",
cluster_id: str | None = None,
connection_id: str | None = None,
**kwargs,
) -> None:
super().__init__(yandex_conn_id=connection_id, cluster_id=cluster_id, **kwargs)
self.query = query
self.query_file_uri = query_file_uri
self.script_variables = script_variables
self.continue_on_failure = continue_on_failure
self.properties = properties
self.name = name
def execute(self, context: Context) -> None:
hook = self._setup(context)
hook.client.create_hive_job(
query=self.query,
query_file_uri=self.query_file_uri,
script_variables=self.script_variables,
continue_on_failure=self.continue_on_failure,
properties=self.properties,
name=self.name,
cluster_id=self.cluster_id,
)
class DataprocCreateMapReduceJobOperator(DataprocBaseOperator):
"""Runs Mapreduce job in Data Proc cluster.
:param main_jar_file_uri: URI of jar file with job.
Can be placed in HDFS or S3. Can be specified instead of main_class.
:param main_class: Name of the main class of the job. Can be specified instead of main_jar_file_uri.
:param file_uris: URIs of files used in the job. Can be placed in HDFS or S3.
:param archive_uris: URIs of archive files used in the job. Can be placed in HDFS or S3.
:param jar_file_uris: URIs of JAR files used in the job. Can be placed in HDFS or S3.
:param properties: Properties for the job.
:param args: Arguments to be passed to the job.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if it's specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
"""
def __init__(
self,
*,
main_class: str | None = None,
main_jar_file_uri: str | None = None,
jar_file_uris: Iterable[str] | None = None,
archive_uris: Iterable[str] | None = None,
file_uris: Iterable[str] | None = None,
args: Iterable[str] | None = None,
properties: dict[str, str] | None = None,
name: str = "Mapreduce job",
cluster_id: str | None = None,
connection_id: str | None = None,
**kwargs,
) -> None:
super().__init__(yandex_conn_id=connection_id, cluster_id=cluster_id, **kwargs)
self.main_class = main_class
self.main_jar_file_uri = main_jar_file_uri
self.jar_file_uris = jar_file_uris
self.archive_uris = archive_uris
self.file_uris = file_uris
self.args = args
self.properties = properties
self.name = name
def execute(self, context: Context) -> None:
hook = self._setup(context)
hook.client.create_mapreduce_job(
main_class=self.main_class,
main_jar_file_uri=self.main_jar_file_uri,
jar_file_uris=self.jar_file_uris,
archive_uris=self.archive_uris,
file_uris=self.file_uris,
args=self.args,
properties=self.properties,
name=self.name,
cluster_id=self.cluster_id,
)
class DataprocCreateSparkJobOperator(DataprocBaseOperator):
"""Runs Spark job in Data Proc cluster.
:param main_jar_file_uri: URI of jar file with job. Can be placed in HDFS or S3.
:param main_class: Name of the main class of the job.
:param file_uris: URIs of files used in the job. Can be placed in HDFS or S3.
:param archive_uris: URIs of archive files used in the job. Can be placed in HDFS or S3.
:param jar_file_uris: URIs of JAR files used in the job. Can be placed in HDFS or S3.
:param properties: Properties for the job.
:param args: Arguments to be passed to the job.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if it's specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
:param packages: List of maven coordinates of jars to include on the driver and executor classpaths.
:param repositories: List of additional remote repositories to search for the maven coordinates
given with --packages.
:param exclude_packages: List of groupId:artifactId, to exclude while resolving the dependencies
provided in --packages to avoid dependency conflicts.
"""
def __init__(
self,
*,
main_class: str | None = None,
main_jar_file_uri: str | None = None,
jar_file_uris: Iterable[str] | None = None,
archive_uris: Iterable[str] | None = None,
file_uris: Iterable[str] | None = None,
args: Iterable[str] | None = None,
properties: dict[str, str] | None = None,
name: str = "Spark job",
cluster_id: str | None = None,
connection_id: str | None = None,
packages: Iterable[str] | None = None,
repositories: Iterable[str] | None = None,
exclude_packages: Iterable[str] | None = None,
**kwargs,
) -> None:
super().__init__(yandex_conn_id=connection_id, cluster_id=cluster_id, **kwargs)
self.main_class = main_class
self.main_jar_file_uri = main_jar_file_uri
self.jar_file_uris = jar_file_uris
self.archive_uris = archive_uris
self.file_uris = file_uris
self.args = args
self.properties = properties
self.name = name
self.packages = packages
self.repositories = repositories
self.exclude_packages = exclude_packages
def execute(self, context: Context) -> None:
hook = self._setup(context)
hook.client.create_spark_job(
main_class=self.main_class,
main_jar_file_uri=self.main_jar_file_uri,
jar_file_uris=self.jar_file_uris,
archive_uris=self.archive_uris,
file_uris=self.file_uris,
args=self.args,
properties=self.properties,
packages=self.packages,
repositories=self.repositories,
exclude_packages=self.exclude_packages,
name=self.name,
cluster_id=self.cluster_id,
)
class DataprocCreatePysparkJobOperator(DataprocBaseOperator):
"""Runs Pyspark job in Data Proc cluster.
:param main_python_file_uri: URI of python file with job. Can be placed in HDFS or S3.
:param python_file_uris: URIs of python files used in the job. Can be placed in HDFS or S3.
:param file_uris: URIs of files used in the job. Can be placed in HDFS or S3.
:param archive_uris: URIs of archive files used in the job. Can be placed in HDFS or S3.
:param jar_file_uris: URIs of JAR files used in the job. Can be placed in HDFS or S3.
:param properties: Properties for the job.
:param args: Arguments to be passed to the job.
:param name: Name of the job. Used for labeling.
:param cluster_id: ID of the cluster to run job in.
Will try to take the ID from Dataproc Hook object if it's specified. (templated)
:param connection_id: ID of the Yandex.Cloud Airflow connection.
:param packages: List of maven coordinates of jars to include on the driver and executor classpaths.
:param repositories: List of additional remote repositories to search for the maven coordinates
given with --packages.
:param exclude_packages: List of groupId:artifactId, to exclude while resolving the dependencies
provided in --packages to avoid dependency conflicts.
"""
def __init__(
self,
*,
main_python_file_uri: str | None = None,
python_file_uris: Iterable[str] | None = None,
jar_file_uris: Iterable[str] | None = None,
archive_uris: Iterable[str] | None = None,
file_uris: Iterable[str] | None = None,
args: Iterable[str] | None = None,
properties: dict[str, str] | None = None,
name: str = "Pyspark job",
cluster_id: str | None = None,
connection_id: str | None = None,
packages: Iterable[str] | None = None,
repositories: Iterable[str] | None = None,
exclude_packages: Iterable[str] | None = None,
**kwargs,
) -> None:
super().__init__(yandex_conn_id=connection_id, cluster_id=cluster_id, **kwargs)
self.main_python_file_uri = main_python_file_uri
self.python_file_uris = python_file_uris
self.jar_file_uris = jar_file_uris
self.archive_uris = archive_uris
self.file_uris = file_uris
self.args = args
self.properties = properties
self.name = name
self.packages = packages
self.repositories = repositories
self.exclude_packages = exclude_packages
def execute(self, context: Context) -> None:
hook = self._setup(context)
hook.client.create_pyspark_job(
main_python_file_uri=self.main_python_file_uri,
python_file_uris=self.python_file_uris,
jar_file_uris=self.jar_file_uris,
archive_uris=self.archive_uris,
file_uris=self.file_uris,
args=self.args,
properties=self.properties,
packages=self.packages,
repositories=self.repositories,
exclude_packages=self.exclude_packages,
name=self.name,
cluster_id=self.cluster_id,
)
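# ---------------------------------------------------------------------------
# Editor's illustrative sketch, not part of the upstream provider source.
# It shows how the operators above could be chained in a DAG: the create
# operator pushes the cluster id to XCom, and the job and delete operators pick
# it up automatically when no cluster_id is passed. The DAG id, file URI and
# connection id are assumptions chosen purely for illustration.
def _example_dataproc_dag():
    from datetime import datetime

    from airflow import DAG

    with DAG(
        dag_id="example_yandex_dataproc_sketch",
        start_date=datetime(2023, 1, 1),
        schedule=None,
        catchup=False,
    ) as dag:
        create_cluster = DataprocCreateClusterOperator(
            task_id="create_cluster",
            connection_id="yandexcloud_default",
            # Folder id and public SSH key fall back to the connection defaults.
        )
        run_job = DataprocCreatePysparkJobOperator(
            task_id="run_wordcount",
            main_python_file_uri="s3a://my-bucket/jobs/wordcount.py",
        )
        delete_cluster = DataprocDeleteClusterOperator(
            task_id="delete_cluster",
            trigger_rule="all_done",  # tear the cluster down even if the job fails
        )
        create_cluster >> run_job >> delete_cluster
    return dag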
| 25,440 | 47.36692 | 110 | py |
airflow | airflow-main/airflow/providers/yandex/hooks/yandex.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import warnings
from typing import Any
import yandexcloud
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
class YandexCloudBaseHook(BaseHook):
"""
A base hook for Yandex.Cloud related tasks.
:param yandex_conn_id: The connection ID to use when fetching connection info.
"""
conn_name_attr = "yandex_conn_id"
default_conn_name = "yandexcloud_default"
conn_type = "yandexcloud"
hook_name = "Yandex Cloud"
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import PasswordField, StringField
return {
"service_account_json": PasswordField(
lazy_gettext("Service account auth JSON"),
widget=BS3PasswordFieldWidget(),
description="Service account auth JSON. Looks like "
'{"id", "...", "service_account_id": "...", "private_key": "..."}. '
"Will be used instead of OAuth token and SA JSON file path field if specified.",
),
"service_account_json_path": StringField(
lazy_gettext("Service account auth JSON file path"),
widget=BS3TextFieldWidget(),
description="Service account auth JSON file path. File content looks like "
'{"id", "...", "service_account_id": "...", "private_key": "..."}. '
"Will be used instead of OAuth token if specified.",
),
"oauth": PasswordField(
lazy_gettext("OAuth Token"),
widget=BS3PasswordFieldWidget(),
description="User account OAuth token. "
"Either this or service account JSON must be specified.",
),
"folder_id": StringField(
lazy_gettext("Default folder ID"),
widget=BS3TextFieldWidget(),
description="Optional. This folder will be used "
"to create all new clusters and nodes by default",
),
"public_ssh_key": StringField(
lazy_gettext("Public SSH key"),
widget=BS3TextFieldWidget(),
description="Optional. This key will be placed to all created Compute nodes"
"to let you have a root shell there",
),
"endpoint": StringField(
lazy_gettext("API endpoint"),
widget=BS3TextFieldWidget(),
description="Optional. Specify an API endpoint. Leave blank to use default.",
),
}
@classmethod
def provider_user_agent(cls) -> str | None:
"""Construct User-Agent from Airflow core & provider package versions."""
import airflow
from airflow.providers_manager import ProvidersManager
try:
manager = ProvidersManager()
provider_name = manager.hooks[cls.conn_type].package_name # type: ignore[union-attr]
provider = manager.providers[provider_name]
return f"apache-airflow/{airflow.__version__} {provider_name}/{provider.version}"
except KeyError:
            warnings.warn(f"Hook '{cls.hook_name}' info is not initialized in airflow.ProvidersManager")
return None
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["host", "schema", "login", "password", "port", "extra"],
"relabeling": {},
}
def __init__(
self,
# Connection id is deprecated. Use yandex_conn_id instead
connection_id: str | None = None,
yandex_conn_id: str | None = None,
default_folder_id: str | None = None,
default_public_ssh_key: str | None = None,
) -> None:
super().__init__()
if connection_id:
warnings.warn(
"Using `connection_id` is deprecated. Please use `yandex_conn_id` parameter.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
self.connection_id = yandex_conn_id or connection_id or self.default_conn_name
self.connection = self.get_connection(self.connection_id)
self.extras = self.connection.extra_dejson
credentials = self._get_credentials()
sdk_config = self._get_endpoint()
self.sdk = yandexcloud.SDK(user_agent=self.provider_user_agent(), **sdk_config, **credentials)
self.default_folder_id = default_folder_id or self._get_field("folder_id", False)
self.default_public_ssh_key = default_public_ssh_key or self._get_field("public_ssh_key", False)
self.client = self.sdk.client
def _get_credentials(self) -> dict[str, Any]:
service_account_json_path = self._get_field("service_account_json_path", False)
service_account_json = self._get_field("service_account_json", False)
oauth_token = self._get_field("oauth", False)
if not (service_account_json or oauth_token or service_account_json_path):
raise AirflowException(
"No credentials are found in connection. Specify either service account "
"authentication JSON or user OAuth token in Yandex.Cloud connection"
)
if service_account_json_path:
with open(service_account_json_path) as infile:
service_account_json = infile.read()
if service_account_json:
service_account_key = json.loads(service_account_json)
return {"service_account_key": service_account_key}
else:
return {"token": oauth_token}
def _get_endpoint(self) -> dict[str, str]:
sdk_config = {}
endpoint = self._get_field("endpoint", None)
if endpoint:
sdk_config["endpoint"] = endpoint
return sdk_config
def _get_field(self, field_name: str, default: Any = None) -> Any:
"""Get field from extra, first checking short name, then for backcompat we check for prefixed name."""
if not hasattr(self, "extras"):
return default
backcompat_prefix = "extra__yandexcloud__"
if field_name.startswith("extra__"):
raise ValueError(
f"Got prefixed name {field_name}; please remove the '{backcompat_prefix}' prefix "
"when using this method."
)
if field_name in self.extras:
return self.extras[field_name]
prefixed_name = f"{backcompat_prefix}{field_name}"
if prefixed_name in self.extras:
return self.extras[prefixed_name]
return default
| 7,755 | 42.819209 | 110 | py |
airflow | airflow-main/airflow/providers/yandex/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/yandex/hooks/yandexcloud_dataproc.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook
class DataprocHook(YandexCloudBaseHook):
"""
A base hook for Yandex.Cloud Data Proc.
:param yandex_conn_id: The connection ID to use when fetching connection info.
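
    A minimal usage sketch (the connection id value is an illustrative assumption):

    .. code-block:: python

        hook = DataprocHook(yandex_conn_id="yandexcloud_default")
        dataproc = hook.client  # Yandex.Cloud SDK Dataproc wrapper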
"""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.cluster_id = None
self.client = self.sdk.wrappers.Dataproc(
default_folder_id=self.default_folder_id,
default_public_ssh_key=self.default_public_ssh_key,
)
| 1,379 | 37.333333 | 82 | py |
airflow | airflow-main/airflow/providers/neo4j/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.3.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-neo4j:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,530 | 35.452381 | 114 | py |
airflow | airflow-main/airflow/providers/neo4j/operators/neo4j.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
from airflow.models import BaseOperator
from airflow.providers.neo4j.hooks.neo4j import Neo4jHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class Neo4jOperator(BaseOperator):
"""
Executes sql code in a specific Neo4j database.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:Neo4jOperator`
:param sql: the sql code to be executed. Can receive a str representing a
sql statement
:param neo4j_conn_id: Reference to :ref:`Neo4j connection id <howto/connection:neo4j>`.
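
    A minimal usage sketch (the connection id and Cypher query are illustrative
    assumptions):

    .. code-block:: python

        run_query = Neo4jOperator(
            task_id="run_neo4j_query",
            neo4j_conn_id="neo4j_default",
            sql="MATCH (n) RETURN n LIMIT 5",
        )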
"""
template_fields: Sequence[str] = ("sql",)
def __init__(
self,
*,
sql: str,
neo4j_conn_id: str = "neo4j_default",
parameters: Iterable | Mapping | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.neo4j_conn_id = neo4j_conn_id
self.sql = sql
self.parameters = parameters
def execute(self, context: Context) -> None:
self.log.info("Executing: %s", self.sql)
hook = Neo4jHook(conn_id=self.neo4j_conn_id)
hook.run(self.sql)
| 2,052 | 32.655738 | 91 | py |
airflow | airflow-main/airflow/providers/neo4j/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/neo4j/hooks/neo4j.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module allows to connect to a Neo4j database."""
from __future__ import annotations
from typing import Any
from urllib.parse import urlsplit
from neo4j import Driver, GraphDatabase
from airflow.hooks.base import BaseHook
from airflow.models import Connection
class Neo4jHook(BaseHook):
"""
Interact with Neo4j.
Performs a connection to Neo4j and runs the query.
:param neo4j_conn_id: Reference to :ref:`Neo4j connection id <howto/connection:neo4j>`.
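
    A minimal usage sketch (the connection id and query are illustrative assumptions):

    .. code-block:: python

        hook = Neo4jHook(conn_id="neo4j_default")
        records = hook.run("MATCH (n) RETURN n LIMIT 5")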
"""
conn_name_attr = "neo4j_conn_id"
default_conn_name = "neo4j_default"
conn_type = "neo4j"
hook_name = "Neo4j"
def __init__(self, conn_id: str = default_conn_name, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.neo4j_conn_id = conn_id
self.connection = kwargs.pop("connection", None)
self.client: Driver | None = None
def get_conn(self) -> Driver:
"""Function that initiates a new Neo4j connection with username, password and database schema."""
if self.client is not None:
return self.client
self.connection = self.get_connection(self.neo4j_conn_id)
uri = self.get_uri(self.connection)
self.log.info("URI: %s", uri)
is_encrypted = self.connection.extra_dejson.get("encrypted", False)
self.client = self.get_client(self.connection, is_encrypted, uri)
return self.client
def get_client(self, conn: Connection, encrypted: bool, uri: str) -> Driver:
"""
        Function to determine the relevant driver based on extras.
:param conn: Connection object.
:param encrypted: boolean if encrypted connection or not.
:param uri: uri string for connection.
:return: Driver
"""
parsed_uri = urlsplit(uri)
kwargs: dict[str, Any] = {}
if parsed_uri.scheme in ["bolt", "neo4j"]:
kwargs["encrypted"] = encrypted
return GraphDatabase.driver(uri, auth=(conn.login, conn.password), **kwargs)
def get_uri(self, conn: Connection) -> str:
"""
Build the uri based on extras.
        - Default - uses bolt scheme (bolt://)
- neo4j_scheme - neo4j://
- certs_self_signed - neo4j+ssc://
- certs_trusted_ca - neo4j+s://
:param conn: connection object.
:return: uri
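
        For example, with ``neo4j_scheme`` and ``certs_trusted_ca`` both set in the
        connection extras, a host of ``example-host`` and no explicit port resolve to
        ``neo4j+s://example-host:7687`` (illustrative values).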
"""
use_neo4j_scheme = conn.extra_dejson.get("neo4j_scheme", False)
scheme = "neo4j" if use_neo4j_scheme else "bolt"
# Self signed certificates
ssc = conn.extra_dejson.get("certs_self_signed", False)
# Only certificates signed by CA.
trusted_ca = conn.extra_dejson.get("certs_trusted_ca", False)
encryption_scheme = ""
if ssc:
encryption_scheme = "+ssc"
elif trusted_ca:
encryption_scheme = "+s"
return f"{scheme}{encryption_scheme}://{conn.host}:{7687 if conn.port is None else conn.port}"
def run(self, query) -> list[Any]:
"""
Function to create a neo4j session and execute the query in the session.
:param query: Neo4j query
:return: Result
"""
driver = self.get_conn()
if not self.connection.schema:
with driver.session() as session:
result = session.run(query)
return result.data()
else:
with driver.session(database=self.connection.schema) as session:
result = session.run(query)
return result.data()
| 4,313 | 33.238095 | 105 | py |
airflow | airflow-main/airflow/providers/neo4j/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/singularity/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-singularity:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,536 | 35.595238 | 120 | py |
airflow | airflow-main/airflow/providers/singularity/operators/singularity.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import ast
import os
import shutil
from typing import TYPE_CHECKING, Any, Sequence
from spython.main import Client
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class SingularityOperator(BaseOperator):
"""
Execute a command inside a Singularity container.
    Singularity has a more seamless connection to the host than Docker, so
    no special binds are needed to make content in the user $HOME and
    temporary directories available inside the container. If custom binds are
    needed, they can be passed via the ``volumes`` parameter.
:param image: Singularity image or URI from which to create the container.
:param auto_remove: Delete the container when the process exits.
The default is False.
:param command: Command to be run in the container. (templated)
:param start_command: Start command to pass to the container instance.
:param environment: Environment variables to set in the container. (templated)
:param working_dir: Set a working directory for the instance.
:param force_pull: Pull the image on every run. Default is False.
:param volumes: List of volumes to mount into the container, e.g.
``['/host/path:/container/path', '/host/path2:/container/path2']``.
:param options: Other flags (list) to provide to the instance start.
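
    A minimal usage sketch (the image URI and command are illustrative assumptions):

    .. code-block:: python

        singularity_task = SingularityOperator(
            task_id="run_in_singularity",
            image="docker://alpine:3.18",
            command="echo hello",
        )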
"""
template_fields: Sequence[str] = (
"command",
"environment",
)
template_ext: Sequence[str] = (
".sh",
".bash",
)
template_fields_renderers = {"command": "bash", "environment": "json"}
def __init__(
self,
*,
image: str,
command: str | ast.AST,
start_command: str | list[str] | None = None,
environment: dict[str, Any] | None = None,
pull_folder: str | None = None,
working_dir: str | None = None,
force_pull: bool | None = False,
volumes: list[str] | None = None,
options: list[str] | None = None,
auto_remove: bool | None = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.auto_remove = auto_remove
self.command = command
self.start_command = start_command
self.environment = environment or {}
self.force_pull = force_pull
self.image = image
self.instance = None
self.options = options or []
self.pull_folder = pull_folder
self.volumes = volumes or []
self.working_dir = working_dir
self.cli = None
self.container = None
def execute(self, context: Context) -> None:
self.log.info("Preparing Singularity container %s", self.image)
self.cli = Client
if not self.command:
raise AirflowException("You must define a command.")
# Pull the container if asked, and ensure not a binary file
if self.force_pull and not os.path.exists(self.image):
self.log.info("Pulling container %s", self.image)
image = self.cli.pull( # type: ignore[attr-defined]
self.image, stream=True, pull_folder=self.pull_folder
)
            # When streaming, the pull returns the image path together with output lines to log
if isinstance(image, list):
lines = image.pop()
image = image[0]
for line in lines:
self.log.info(line)
# Update the image to be a filepath on the system
self.image = image
# Prepare list of binds
for bind in self.volumes:
self.options += ["--bind", bind]
# Does the user want a custom working directory?
if self.working_dir is not None:
self.options += ["--workdir", self.working_dir]
# Export environment before instance is run
for enkey, envar in self.environment.items():
self.log.debug("Exporting %s=%s", envar, enkey)
os.putenv(enkey, envar)
os.environ[enkey] = envar
# Create a container instance
self.log.debug("Options include: %s", self.options)
self.instance = self.cli.instance( # type: ignore[attr-defined]
self.image, options=self.options, args=self.start_command, start=False
)
self.instance.start() # type: ignore[attr-defined]
self.log.info(self.instance.cmd) # type: ignore[attr-defined]
self.log.info("Created instance %s from %s", self.instance, self.image)
self.log.info("Running command %s", self._get_command())
self.cli.quiet = True # type: ignore[attr-defined]
result = self.cli.execute( # type: ignore[attr-defined]
self.instance, self._get_command(), return_result=True
)
# Stop the instance
self.log.info("Stopping instance %s", self.instance)
self.instance.stop() # type: ignore[attr-defined]
        if self.auto_remove and os.path.exists(self.image):
            shutil.rmtree(self.image)
# If the container failed, raise the exception
if result["return_code"] != 0:
message = result["message"]
raise AirflowException(f"Singularity failed: {message}")
self.log.info("Output from command %s", result["message"])
def _get_command(self) -> Any | None:
if self.command is not None and self.command.strip().find("[") == 0: # type: ignore
commands = ast.literal_eval(self.command)
else:
commands = self.command
return commands
def on_kill(self) -> None:
if self.instance is not None:
self.log.info("Stopping Singularity instance")
self.instance.stop()
# If an image exists, clean it up
        if self.auto_remove and os.path.exists(self.image):
            shutil.rmtree(self.image)
| 6,935 | 36.491892 | 92 | py |
airflow | airflow-main/airflow/providers/singularity/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/arangodb/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "2.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-arangodb:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,533 | 35.52381 | 117 | py |
airflow | airflow-main/airflow/providers/arangodb/operators/arangodb.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Callable, Sequence
from airflow.models import BaseOperator
from airflow.providers.arangodb.hooks.arangodb import ArangoDBHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class AQLOperator(BaseOperator):
"""
    Executes AQL query in an ArangoDB database.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:AQLOperator`
    :param query: the AQL query to be executed. Can receive a str representing an
        AQL statement, or a path to a ``.sql`` file containing the query
:param result_processor: function to further process the Result from ArangoDB
:param arangodb_conn_id: Reference to :ref:`ArangoDB connection id <howto/connection:arangodb>`.
"""
template_fields: Sequence[str] = ("query",)
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {"query": "sql"}
def __init__(
self,
*,
query: str,
arangodb_conn_id: str = "arangodb_default",
result_processor: Callable | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.arangodb_conn_id = arangodb_conn_id
self.query = query
self.result_processor = result_processor
def execute(self, context: Context):
self.log.info("Executing: %s", self.query)
hook = ArangoDBHook(arangodb_conn_id=self.arangodb_conn_id)
result = hook.query(self.query)
if self.result_processor:
self.result_processor(result)
| 2,403 | 35.424242 | 100 | py |
airflow | airflow-main/airflow/providers/arangodb/operators/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/arangodb/hooks/arangodb.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module allows connecting to a ArangoDB."""
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING, Any
from arango import AQLQueryExecuteError, ArangoClient as ArangoDBClient
from airflow import AirflowException
from airflow.hooks.base import BaseHook
if TYPE_CHECKING:
from arango.cursor import Cursor
from arango.database import StandardDatabase
from airflow.models import Connection
class ArangoDBHook(BaseHook):
"""
Interact with ArangoDB.
Performs a connection to ArangoDB and retrieves client.
:param arangodb_conn_id: Reference to :ref:`ArangoDB connection id <howto/connection:arangodb>`.
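
    A minimal usage sketch (the connection id and AQL query are illustrative
    assumptions):

    .. code-block:: python

        hook = ArangoDBHook(arangodb_conn_id="arangodb_default")
        cursor = hook.query("FOR doc IN students RETURN doc", count=True)
        print(cursor.count())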
"""
conn_name_attr = "arangodb_conn_id"
default_conn_name = "arangodb_default"
conn_type = "arangodb"
hook_name = "ArangoDB"
def __init__(self, arangodb_conn_id: str = default_conn_name, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.arangodb_conn_id = arangodb_conn_id
@cached_property
def client(self) -> ArangoDBClient:
"""Initiates a new ArangoDB connection (cached)."""
return ArangoDBClient(hosts=self.hosts)
@cached_property
def db_conn(self) -> StandardDatabase:
"""Connect to an ArangoDB database and return the database API wrapper."""
return self.client.db(name=self.database, username=self.username, password=self.password)
@cached_property
def _conn(self) -> Connection:
return self.get_connection(self.arangodb_conn_id)
@property
def hosts(self) -> list[str]:
if not self._conn.host:
raise AirflowException(f"No ArangoDB Host(s) provided in connection: {self.arangodb_conn_id!r}.")
return self._conn.host.split(",")
@property
def database(self) -> str:
if not self._conn.schema:
raise AirflowException(f"No ArangoDB Database provided in connection: {self.arangodb_conn_id!r}.")
return self._conn.schema
@property
def username(self) -> str:
if not self._conn.login:
raise AirflowException(f"No ArangoDB Username provided in connection: {self.arangodb_conn_id!r}.")
return self._conn.login
@property
def password(self) -> str:
return self._conn.password or ""
def get_conn(self) -> ArangoDBClient:
"""Function that initiates a new ArangoDB connection (cached)."""
return self.client
def query(self, query, **kwargs) -> Cursor:
"""
Function to create an ArangoDB session and execute the AQL query in the session.
:param query: AQL query
"""
try:
if self.db_conn:
result = self.db_conn.aql.execute(query, **kwargs)
if TYPE_CHECKING:
assert isinstance(result, Cursor)
return result
else:
raise AirflowException(
f"Failed to execute AQLQuery, error connecting to database: {self.database}"
)
except AQLQueryExecuteError as error:
raise AirflowException(f"Failed to execute AQLQuery, error: {str(error)}")
def create_collection(self, name):
if not self.db_conn.has_collection(name):
self.db_conn.create_collection(name)
return True
else:
self.log.info("Collection already exists: %s", name)
return False
def create_database(self, name):
if not self.db_conn.has_database(name):
self.db_conn.create_database(name)
return True
else:
self.log.info("Database already exists: %s", name)
return False
def create_graph(self, name):
if not self.db_conn.has_graph(name):
self.db_conn.create_graph(name)
return True
else:
self.log.info("Graph already exists: %s", name)
return False
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
return {
"hidden_fields": ["port", "extra"],
"relabeling": {
"host": "ArangoDB Host URL or comma separated list of URLs (coordinators in a cluster)",
"schema": "ArangoDB Database",
"login": "ArangoDB Username",
"password": "ArangoDB Password",
},
"placeholders": {
"host": 'eg."http://127.0.0.1:8529" or "http://127.0.0.1:8529,http://127.0.0.1:8530"'
" (coordinators in a cluster)",
"schema": "_system",
"login": "root",
"password": "password",
},
}
| 5,494 | 34.451613 | 110 | py |
airflow | airflow-main/airflow/providers/arangodb/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/arangodb/sensors/arangodb.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.providers.arangodb.hooks.arangodb import ArangoDBHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class AQLSensor(BaseSensorOperator):
"""
Checks for the existence of a document which matches the given query in ArangoDB.
:param collection: Target DB collection.
    :param query: The query to poke, or a path to a ``.sql`` file containing the query
:param arangodb_conn_id: The :ref:`ArangoDB connection id <howto/connection:arangodb>` to use
when connecting to ArangoDB.
:param arangodb_db: Target ArangoDB name.
"""
template_fields: Sequence[str] = ("query",)
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {"query": "sql"}
def __init__(self, *, query: str, arangodb_conn_id: str = "arangodb_default", **kwargs) -> None:
super().__init__(**kwargs)
self.arangodb_conn_id = arangodb_conn_id
self.query = query
def poke(self, context: Context) -> bool:
self.log.info("Sensor running the following query: %s", self.query)
hook = ArangoDBHook(self.arangodb_conn_id)
records = hook.query(self.query, count=True).count()
self.log.info("Total records found: %d", records)
        return records != 0
| 2,178 | 38.618182 | 100 | py |
airflow | airflow-main/airflow/providers/arangodb/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/arangodb/example_dags/example_arangodb.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from datetime import datetime
from airflow.models.dag import DAG
from airflow.providers.arangodb.operators.arangodb import AQLOperator
from airflow.providers.arangodb.sensors.arangodb import AQLSensor
dag = DAG(
"example_arangodb_operator",
start_date=datetime(2021, 1, 1),
tags=["example"],
catchup=False,
)
# [START howto_aql_sensor_arangodb]
sensor = AQLSensor(
task_id="aql_sensor",
query="FOR doc IN students FILTER doc.name == 'judy' RETURN doc",
timeout=60,
poke_interval=10,
dag=dag,
)
# [END howto_aql_sensor_arangodb]
# [START howto_aql_sensor_template_file_arangodb]
sensor2 = AQLSensor(
task_id="aql_sensor_template_file",
query="search_judy.sql",
timeout=60,
poke_interval=10,
dag=dag,
)
# [END howto_aql_sensor_template_file_arangodb]
# [START howto_aql_operator_arangodb]
operator = AQLOperator(
task_id="aql_operator",
query="FOR doc IN students RETURN doc",
dag=dag,
result_processor=lambda cursor: print([document["name"] for document in cursor]),
)
# [END howto_aql_operator_arangodb]
# [START howto_aql_operator_template_file_arangodb]
operator2 = AQLOperator(
task_id="aql_operator_template_file",
dag=dag,
result_processor=lambda cursor: print([document["name"] for document in cursor]),
query="search_all.sql",
)
# [END howto_aql_operator_template_file_arangodb]
| 2,210 | 27.346154 | 85 | py |
airflow | airflow-main/airflow/providers/arangodb/example_dags/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/python_kubernetes_script.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utilities for using the kubernetes decorator."""
from __future__ import annotations
import os
from collections import deque
import jinja2
def _balance_parens(after_decorator):
num_paren = 1
after_decorator = deque(after_decorator)
after_decorator.popleft()
while num_paren:
current = after_decorator.popleft()
if current == "(":
num_paren = num_paren + 1
elif current == ")":
num_paren = num_paren - 1
return "".join(after_decorator)
def remove_task_decorator(python_source: str, task_decorator_name: str) -> str:
"""
Removes @task.kubernetes or similar as well as @setup and @teardown.
:param python_source: python source code
:param task_decorator_name: the task decorator name
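
    For illustration (the decorated function shown is an assumption), passing the
    source of::

        @task.kubernetes(image="python:3.10")
        def f():
            return 1

    together with ``"@task.kubernetes"`` returns the same source with the decorator
    removed.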
"""
def _remove_task_decorator(py_source, decorator_name):
if decorator_name not in py_source:
            return py_source
        split = py_source.split(decorator_name)
before_decorator, after_decorator = split[0], split[1]
if after_decorator[0] == "(":
after_decorator = _balance_parens(after_decorator)
if after_decorator[0] == "\n":
after_decorator = after_decorator[1:]
return before_decorator + after_decorator
decorators = ["@setup", "@teardown", task_decorator_name]
for decorator in decorators:
python_source = _remove_task_decorator(python_source, decorator)
return python_source
def write_python_script(
jinja_context: dict,
filename: str,
render_template_as_native_obj: bool = False,
):
"""
Renders the python script to a file to execute in the virtual environment.
:param jinja_context: The jinja context variables to unpack and replace with its placeholders in the
template file.
:param filename: The name of the file to dump the rendered script to.
:param render_template_as_native_obj: If ``True``, rendered Jinja template would be converted
to a native Python object
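
    A minimal call sketch (the context keys mirror those built by the kubernetes task
    decorator; the values here are illustrative):

    .. code-block:: python

        write_python_script(
            jinja_context={
                "op_args": [],
                "op_kwargs": {},
                "pickling_library": "pickle",
                "python_callable": "f",
                "python_callable_source": "def f(): return 1",
                "string_args_global": False,
            },
            filename="/tmp/script.py",
        )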
"""
template_loader = jinja2.FileSystemLoader(searchpath=os.path.dirname(__file__))
template_env: jinja2.Environment
if render_template_as_native_obj:
template_env = jinja2.nativetypes.NativeEnvironment(
loader=template_loader, undefined=jinja2.StrictUndefined
)
else:
template_env = jinja2.Environment(loader=template_loader, undefined=jinja2.StrictUndefined)
template = template_env.get_template("python_kubernetes_script.jinja2")
template.stream(**jinja_context).dump(filename)
| 3,345 | 36.595506 | 104 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "7.3.0"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-cncf-kubernetes:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,540 | 35.690476 | 124 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Executes task in a Kubernetes POD."""
from __future__ import annotations
from kubernetes.client import ApiClient, models as k8s
from airflow.exceptions import AirflowException
def _convert_kube_model_object(obj, new_class):
convert_op = getattr(obj, "to_k8s_client_obj", None)
if callable(convert_op):
return obj.to_k8s_client_obj()
elif isinstance(obj, new_class):
return obj
else:
raise AirflowException(f"Expected {new_class}, got {type(obj)}")
def _convert_from_dict(obj, new_class):
if isinstance(obj, new_class):
return obj
elif isinstance(obj, dict):
api_client = ApiClient()
return api_client._ApiClient__deserialize_model(obj, new_class)
else:
raise AirflowException(f"Expected dict or {new_class}, got {type(obj)}")
def convert_volume(volume) -> k8s.V1Volume:
"""Converts an airflow Volume object into a k8s.V1Volume.
:param volume:
"""
return _convert_kube_model_object(volume, k8s.V1Volume)
def convert_volume_mount(volume_mount) -> k8s.V1VolumeMount:
"""Converts an airflow VolumeMount object into a k8s.V1VolumeMount.
:param volume_mount:
"""
return _convert_kube_model_object(volume_mount, k8s.V1VolumeMount)
def convert_port(port) -> k8s.V1ContainerPort:
"""Converts an airflow Port object into a k8s.V1ContainerPort.
:param port:
"""
return _convert_kube_model_object(port, k8s.V1ContainerPort)
def convert_env_vars(env_vars) -> list[k8s.V1EnvVar]:
"""Converts a dictionary into a list of env_vars.
:param env_vars:
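
    For example, ``{"FOO": "bar"}`` becomes ``[k8s.V1EnvVar(name="FOO", value="bar")]``,
    while a list of ``k8s.V1EnvVar`` objects is returned unchanged.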
"""
if isinstance(env_vars, dict):
res = []
for k, v in env_vars.items():
res.append(k8s.V1EnvVar(name=k, value=v))
return res
elif isinstance(env_vars, list):
return env_vars
else:
raise AirflowException(f"Expected dict or list, got {type(env_vars)}")
def convert_pod_runtime_info_env(pod_runtime_info_envs) -> k8s.V1EnvVar:
"""Converts a PodRuntimeInfoEnv into an k8s.V1EnvVar.
:param pod_runtime_info_envs:
"""
return _convert_kube_model_object(pod_runtime_info_envs, k8s.V1EnvVar)
def convert_image_pull_secrets(image_pull_secrets) -> list[k8s.V1LocalObjectReference]:
"""Converts a PodRuntimeInfoEnv into an k8s.V1EnvVar.
:param image_pull_secrets:
"""
if isinstance(image_pull_secrets, str):
secrets = image_pull_secrets.split(",")
return [k8s.V1LocalObjectReference(name=secret) for secret in secrets]
else:
return image_pull_secrets
def convert_configmap(configmaps) -> k8s.V1EnvFromSource:
"""Converts a str into an k8s.V1EnvFromSource.
:param configmaps:
"""
return k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmaps))
def convert_affinity(affinity) -> k8s.V1Affinity:
"""Converts a dict into an k8s.V1Affinity."""
return _convert_from_dict(affinity, k8s.V1Affinity)
def convert_toleration(toleration) -> k8s.V1Toleration:
"""Converts a dict into an k8s.V1Toleration."""
return _convert_from_dict(toleration, k8s.V1Toleration)
| 3,914 | 31.355372 | 88 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/backcompat/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/decorators/kubernetes.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import base64
import inspect
import os
import pickle
import uuid
from shlex import quote
from tempfile import TemporaryDirectory
from textwrap import dedent
from typing import TYPE_CHECKING, Callable, Sequence
import dill
from kubernetes.client import models as k8s
from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory
from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
from airflow.providers.cncf.kubernetes.python_kubernetes_script import (
remove_task_decorator,
write_python_script,
)
if TYPE_CHECKING:
from airflow.utils.context import Context
_PYTHON_SCRIPT_ENV = "__PYTHON_SCRIPT"
_PYTHON_INPUT_ENV = "__PYTHON_INPUT"
def _generate_decoded_command(env_var: str, file: str) -> str:
return (
f'python -c "import base64, os;'
rf"x = base64.b64decode(os.environ[\"{env_var}\"]);"
rf'f = open(\"{file}\", \"wb\"); f.write(x); f.close()"'
)
def _read_file_contents(filename: str) -> str:
with open(filename, "rb") as script_file:
return base64.b64encode(script_file.read()).decode("utf-8")
class _KubernetesDecoratedOperator(DecoratedOperator, KubernetesPodOperator):
custom_operator_name = "@task.kubernetes"
# `cmds` and `arguments` are used internally by the operator
template_fields: Sequence[str] = tuple(
{"op_args", "op_kwargs", *KubernetesPodOperator.template_fields} - {"cmds", "arguments"}
)
# Since we won't mutate the arguments, we should just do the shallow copy
# there are some cases we can't deepcopy the objects (e.g protobuf).
shallow_copy_attrs: Sequence[str] = ("python_callable",)
def __init__(self, namespace: str = "default", use_dill: bool = False, **kwargs) -> None:
self.use_dill = use_dill
super().__init__(
namespace=namespace,
name=kwargs.pop("name", f"k8s_airflow_pod_{uuid.uuid4().hex}"),
cmds=["placeholder-command"],
**kwargs,
)
# TODO: Remove me once this provider min supported Airflow version is 2.6
def get_python_source(self):
raw_source = inspect.getsource(self.python_callable)
res = dedent(raw_source)
res = remove_task_decorator(res, self.custom_operator_name)
return res
def _generate_cmds(self) -> list[str]:
script_filename = "/tmp/script.py"
input_filename = "/tmp/script.in"
output_filename = "/airflow/xcom/return.json"
write_local_script_file_cmd = (
f"{_generate_decoded_command(quote(_PYTHON_SCRIPT_ENV), quote(script_filename))}"
)
write_local_input_file_cmd = (
f"{_generate_decoded_command(quote(_PYTHON_INPUT_ENV), quote(input_filename))}"
)
make_xcom_dir_cmd = "mkdir -p /airflow/xcom"
exec_python_cmd = f"python {script_filename} {input_filename} {output_filename}"
return [
"bash",
"-cx",
" && ".join(
[
write_local_script_file_cmd,
write_local_input_file_cmd,
make_xcom_dir_cmd,
exec_python_cmd,
]
),
]
def execute(self, context: Context):
with TemporaryDirectory(prefix="venv") as tmp_dir:
pickling_library = dill if self.use_dill else pickle
script_filename = os.path.join(tmp_dir, "script.py")
input_filename = os.path.join(tmp_dir, "script.in")
with open(input_filename, "wb") as file:
pickling_library.dump({"args": self.op_args, "kwargs": self.op_kwargs}, file)
py_source = self.get_python_source()
jinja_context = {
"op_args": self.op_args,
"op_kwargs": self.op_kwargs,
"pickling_library": pickling_library.__name__,
"python_callable": self.python_callable.__name__,
"python_callable_source": py_source,
"string_args_global": False,
}
write_python_script(jinja_context=jinja_context, filename=script_filename)
self.env_vars = [
*self.env_vars,
k8s.V1EnvVar(name=_PYTHON_SCRIPT_ENV, value=_read_file_contents(script_filename)),
k8s.V1EnvVar(name=_PYTHON_INPUT_ENV, value=_read_file_contents(input_filename)),
]
self.cmds = self._generate_cmds()
return super().execute(context)
def kubernetes_task(
python_callable: Callable | None = None,
multiple_outputs: bool | None = None,
**kwargs,
) -> TaskDecorator:
"""Kubernetes operator decorator.
This wraps a function to be executed in K8s using KubernetesPodOperator.
    Also accepts any argument that KubernetesPodOperator will via ``kwargs``. Can be
reused in a single DAG.
:param python_callable: Function to decorate
:param multiple_outputs: if set, function return value will be
unrolled to multiple XCom values. Dict will unroll to xcom values with
keys as XCom keys. Defaults to False.
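
    A minimal usage sketch (the image and namespace values are illustrative
    assumptions):

    .. code-block:: python

        from airflow.decorators import task


        @task.kubernetes(image="python:3.10-slim", namespace="default")
        def my_task():
            return 42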
"""
return task_decorator_factory(
python_callable=python_callable,
multiple_outputs=multiple_outputs,
decorated_operator_class=_KubernetesDecoratedOperator,
**kwargs,
)
| 6,178 | 36.448485 | 98 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/decorators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/operators/resource.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Manage a Kubernetes Resource."""
from __future__ import annotations
from functools import cached_property
import yaml
from kubernetes.client import ApiClient
from kubernetes.utils import create_from_yaml
from airflow.models import BaseOperator
from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
from airflow.providers.cncf.kubernetes.utils.delete_from import delete_from_yaml
__all__ = ["KubernetesCreateResourceOperator", "KubernetesDeleteResourceOperator"]
class KubernetesResourceBaseOperator(BaseOperator):
"""
Abstract base class for all Kubernetes Resource operators.
:param yaml_conf: string. Contains the kubernetes resources to Create or Delete
:param namespace: string. Contains the namespace to create all resources inside.
The namespace must preexist otherwise the resource creation will fail.
If the API object in the yaml file already contains a namespace definition then
this parameter has no effect.
:param kubernetes_conn_id: The :ref:`kubernetes connection id <howto/connection:kubernetes>`
for the Kubernetes cluster.
:param in_cluster: run kubernetes client with in_cluster configuration.
:param cluster_context: context that points to kubernetes cluster.
Ignored when in_cluster is True. If None, current-context is used.
:param config_file: The path to the Kubernetes config file. (templated)
If not specified, default value is ``~/.kube/config``
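
    A minimal usage sketch for the ``KubernetesCreateResourceOperator`` subclass below
    (the manifest file name is an illustrative assumption):

    .. code-block:: python

        with open("configmap.yaml") as f:
            manifest = f.read()

        create_resource = KubernetesCreateResourceOperator(
            task_id="create_configmap",
            yaml_conf=manifest,
        )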
"""
template_fields = ("yaml_conf",)
template_fields_renderers = {"yaml_conf": "yaml"}
def __init__(
self,
*,
yaml_conf: str,
namespace: str | None = None,
kubernetes_conn_id: str | None = KubernetesHook.default_conn_name,
**kwargs,
) -> None:
super().__init__(**kwargs)
self._namespace = namespace
self.kubernetes_conn_id = kubernetes_conn_id
self.yaml_conf = yaml_conf
@cached_property
def client(self) -> ApiClient:
return self.hook.api_client
@cached_property
def hook(self) -> KubernetesHook:
hook = KubernetesHook(conn_id=self.kubernetes_conn_id)
return hook
def get_namespace(self) -> str:
if self._namespace:
return self._namespace
else:
return self.hook.get_namespace() or "default"
class KubernetesCreateResourceOperator(KubernetesResourceBaseOperator):
"""Create a resource in a kubernetes."""
def execute(self, context) -> None:
create_from_yaml(
k8s_client=self.client,
yaml_objects=yaml.safe_load_all(self.yaml_conf),
namespace=self.get_namespace(),
)
class KubernetesDeleteResourceOperator(KubernetesResourceBaseOperator):
"""Delete a resource in a kubernetes."""
def execute(self, context) -> None:
delete_from_yaml(
k8s_client=self.client,
yaml_objects=yaml.safe_load_all(self.yaml_conf),
namespace=self.get_namespace(),
)
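# ---------------------------------------------------------------------------
# Illustrative usage sketch: a minimal example of how the two operators above
# might be paired to create and later remove the same resource. The task ids,
# namespace and ConfigMap manifest are assumptions made purely for the example.
# ---------------------------------------------------------------------------
def _example_resource_tasks():  # pragma: no cover - documentation sketch only
    example_conf = (
        "apiVersion: v1\n"
        "kind: ConfigMap\n"
        "metadata:\n"
        "  name: example-config\n"
        "data:\n"
        "  example.key: example-value\n"
    )
    create = KubernetesCreateResourceOperator(
        task_id="create_configmap",
        yaml_conf=example_conf,
        namespace="default",
    )
    delete = KubernetesDeleteResourceOperator(
        task_id="delete_configmap",
        yaml_conf=example_conf,
        namespace="default",
    )
    return create, delete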
| 3,827 | 35.807692 | 96 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import datetime
from typing import TYPE_CHECKING, Sequence
from kubernetes.watch import Watch
from airflow import AirflowException
from airflow.models import BaseOperator
from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook, _load_body_to_dict
if TYPE_CHECKING:
from airflow.utils.context import Context
class SparkKubernetesOperator(BaseOperator):
"""
    Creates a sparkApplication object in a kubernetes cluster.
.. seealso::
For more detail about Spark Application Object have a look at the reference:
https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/v1beta2-1.1.0-2.4.5/docs/api-docs.md#sparkapplication
:param application_file: Defines Kubernetes 'custom_resource_definition' of 'sparkApplication' as either a
path to a '.yaml' file, '.json' file, YAML string or JSON string.
:param namespace: kubernetes namespace to put sparkApplication
:param kubernetes_conn_id: The :ref:`kubernetes connection id <howto/connection:kubernetes>`
        for the Kubernetes cluster.
:param api_group: kubernetes api group of sparkApplication
:param api_version: kubernetes api version of sparkApplication
:param watch: whether to watch the job status and logs or not
"""
template_fields: Sequence[str] = ("application_file", "namespace")
template_ext: Sequence[str] = (".yaml", ".yml", ".json")
ui_color = "#f4a460"
def __init__(
self,
*,
application_file: str,
namespace: str | None = None,
kubernetes_conn_id: str = "kubernetes_default",
api_group: str = "sparkoperator.k8s.io",
api_version: str = "v1beta2",
in_cluster: bool | None = None,
cluster_context: str | None = None,
config_file: str | None = None,
watch: bool = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.namespace = namespace
self.kubernetes_conn_id = kubernetes_conn_id
self.api_group = api_group
self.api_version = api_version
self.plural = "sparkapplications"
self.application_file = application_file
self.in_cluster = in_cluster
self.cluster_context = cluster_context
self.config_file = config_file
self.watch = watch
self.hook = KubernetesHook(
conn_id=self.kubernetes_conn_id,
in_cluster=self.in_cluster,
config_file=self.config_file,
cluster_context=self.cluster_context,
)
def execute(self, context: Context):
body = _load_body_to_dict(self.application_file)
name = body["metadata"]["name"]
namespace = self.namespace or self.hook.get_namespace()
response = None
is_job_created = False
if self.watch:
try:
namespace_event_stream = Watch().stream(
self.hook.core_v1_client.list_namespaced_event,
namespace=namespace,
watch=True,
field_selector=f"involvedObject.kind=SparkApplication,involvedObject.name={name}",
)
response = self.hook.create_custom_object(
group=self.api_group,
version=self.api_version,
plural=self.plural,
body=body,
namespace=namespace,
)
is_job_created = True
for event in namespace_event_stream:
obj = event["object"]
if event["object"].last_timestamp >= datetime.datetime.strptime(
response["metadata"]["creationTimestamp"], "%Y-%m-%dT%H:%M:%S%z"
):
self.log.info(obj.message)
if obj.reason == "SparkDriverRunning":
pod_log_stream = Watch().stream(
self.hook.core_v1_client.read_namespaced_pod_log,
name=f"{name}-driver",
namespace=namespace,
timestamps=True,
)
for line in pod_log_stream:
self.log.info(line)
elif obj.reason in [
"SparkApplicationSubmissionFailed",
"SparkApplicationFailed",
"SparkApplicationDeleted",
]:
is_job_created = False
raise AirflowException(obj.message)
elif obj.reason == "SparkApplicationCompleted":
break
else:
continue
except Exception:
if is_job_created:
self.on_kill()
raise
else:
response = self.hook.create_custom_object(
group=self.api_group,
version=self.api_version,
plural=self.plural,
body=body,
namespace=namespace,
)
return response
def on_kill(self) -> None:
body = _load_body_to_dict(self.application_file)
name = body["metadata"]["name"]
namespace = self.namespace or self.hook.get_namespace()
self.hook.delete_custom_object(
group=self.api_group,
version=self.api_version,
plural=self.plural,
namespace=namespace,
name=name,
)
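# ---------------------------------------------------------------------------
# Illustrative usage sketch: the task id, file name and namespace below are
# assumptions; the '.yaml' file is expected to contain a SparkApplication
# custom resource, which the operator submits and, with watch=True, follows
# until completion.
# ---------------------------------------------------------------------------
def _example_spark_kubernetes_task() -> SparkKubernetesOperator:  # pragma: no cover
    return SparkKubernetesOperator(
        task_id="submit_spark_pi",
        application_file="spark-pi.yaml",
        namespace="spark-jobs",
        kubernetes_conn_id="kubernetes_default",
        watch=True,
    )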
| 6,484 | 38.30303 | 127 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/operators/pod.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Executes task in a Kubernetes POD."""
from __future__ import annotations
import json
import logging
import re
import secrets
import string
import warnings
from collections.abc import Container
from contextlib import AbstractContextManager
from functools import cached_property
from typing import TYPE_CHECKING, Any, Iterable, Sequence
from kubernetes.client import CoreV1Api, models as k8s
from slugify import slugify
from urllib3.exceptions import HTTPError
from airflow.configuration import conf
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
from airflow.kubernetes import pod_generator
from airflow.kubernetes.pod_generator import PodGenerator
from airflow.kubernetes.secret import Secret
from airflow.models import BaseOperator
from airflow.providers.cncf.kubernetes.backcompat.backwards_compat_converters import (
convert_affinity,
convert_configmap,
convert_env_vars,
convert_image_pull_secrets,
convert_pod_runtime_info_env,
convert_port,
convert_toleration,
convert_volume,
convert_volume_mount,
)
from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
from airflow.providers.cncf.kubernetes.triggers.pod import KubernetesPodTrigger
from airflow.providers.cncf.kubernetes.utils import xcom_sidecar # type: ignore[attr-defined]
from airflow.providers.cncf.kubernetes.utils.pod_manager import (
OnFinishAction,
PodLaunchFailedException,
PodManager,
PodOperatorHookProtocol,
PodPhase,
get_container_termination_message,
)
from airflow.settings import pod_mutation_hook
from airflow.typing_compat import Literal
from airflow.utils import yaml
from airflow.utils.helpers import prune_dict, validate_key
from airflow.utils.timezone import utcnow
from airflow.version import version as airflow_version
if TYPE_CHECKING:
import jinja2
from airflow.utils.context import Context
alphanum_lower = string.ascii_lowercase + string.digits
KUBE_CONFIG_ENV_VAR = "KUBECONFIG"
def _rand_str(num):
"""Generate random lowercase alphanumeric string of length num.
TODO: when min airflow version >= 2.5, delete this function and import from kubernetes_helper_functions.
:meta private:
"""
return "".join(secrets.choice(alphanum_lower) for _ in range(num))
def _add_pod_suffix(*, pod_name, rand_len=8, max_len=253):
"""Add random string to pod name while staying under max len.
TODO: when min airflow version >= 2.5, delete this function and import from kubernetes_helper_functions.
:meta private:
"""
suffix = "-" + _rand_str(rand_len)
return pod_name[: max_len - len(suffix)].strip("-.") + suffix
def _create_pod_id(
dag_id: str | None = None,
task_id: str | None = None,
*,
max_length: int = 80,
unique: bool = True,
) -> str:
"""
    Generate a unique pod ID given a dag_id and/or task_id.
TODO: when min airflow version >= 2.5, delete this function and import from kubernetes_helper_functions.
:param dag_id: DAG ID
:param task_id: Task ID
:param max_length: max number of characters
:param unique: whether a random string suffix should be added
:return: A valid identifier for a kubernetes pod name
"""
if not (dag_id or task_id):
raise ValueError("Must supply either dag_id or task_id.")
name = ""
if dag_id:
name += dag_id
if task_id:
if name:
name += "-"
name += task_id
base_name = slugify(name, lowercase=True)[:max_length].strip(".-")
if unique:
return _add_pod_suffix(pod_name=base_name, max_len=max_length)
else:
return base_name
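# ---------------------------------------------------------------------------
# Illustrative sketch of the kind of names the helpers above produce. The
# dag_id and task_id values are assumptions chosen for the example.
# ---------------------------------------------------------------------------
def _example_pod_ids() -> list[str]:  # pragma: no cover - documentation sketch only
    return [
        # "my-dag-my-task" (slugified, truncated to 80 chars, no random suffix)
        _create_pod_id(dag_id="my_dag", task_id="my_task", unique=False),
        # "my-dag-my-task-<8 random lowercase alphanumeric chars>"
        _create_pod_id(dag_id="my_dag", task_id="my_task", unique=True),
    ]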
class PodReattachFailure(AirflowException):
"""When we expect to be able to find a pod but cannot."""
class KubernetesPodOperator(BaseOperator):
"""
Execute a task in a Kubernetes Pod.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:KubernetesPodOperator`
.. note::
If you use `Google Kubernetes Engine <https://cloud.google.com/kubernetes-engine/>`__
and Airflow is not running in the same cluster, consider using
:class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator`, which
simplifies the authorization process.
:param kubernetes_conn_id: The :ref:`kubernetes connection id <howto/connection:kubernetes>`
for the Kubernetes cluster.
:param namespace: the namespace to run within kubernetes.
:param image: Docker image you wish to launch. Defaults to hub.docker.com,
but fully qualified URLS will point to custom repositories. (templated)
:param name: name of the pod in which the task will run, will be used (plus a random
suffix if random_name_suffix is True) to generate a pod id (DNS-1123 subdomain,
containing only [a-z0-9.-]).
:param random_name_suffix: if True, will generate a random suffix.
:param cmds: entrypoint of the container. (templated)
        The docker image's entrypoint is used if this is not provided.
:param arguments: arguments of the entrypoint. (templated)
The docker image's CMD is used if this is not provided.
:param ports: ports for the launched pod.
:param volume_mounts: volumeMounts for the launched pod.
:param volumes: volumes for the launched pod. Includes ConfigMaps and PersistentVolumes.
:param env_vars: Environment variables initialized in the container. (templated)
:param env_from: (Optional) List of sources to populate environment variables in the container.
:param secrets: Kubernetes secrets to inject in the container.
They can be exposed as environment vars or files in a volume.
:param in_cluster: run kubernetes client with in_cluster configuration.
:param cluster_context: context that points to kubernetes cluster.
Ignored when in_cluster is True. If None, current-context is used.
:param reattach_on_restart: if the worker dies while the pod is running, reattach and monitor
during the next try. If False, always create a new pod for each try.
:param labels: labels to apply to the Pod. (templated)
:param startup_timeout_seconds: timeout in seconds to startup the pod.
:param get_logs: get the stdout of the base container as logs of the tasks.
:param container_logs: list of containers whose logs will be published to stdout
Takes a sequence of containers, a single container name or True. If True,
all the containers logs are published. Works in conjunction with get_logs param.
The default value is the base container.
:param image_pull_policy: Specify a policy to cache or always pull an image.
:param annotations: non-identifying metadata you can attach to the Pod.
Can be a large range of data, and can include characters
that are not permitted by labels.
:param container_resources: resources for the launched pod. (templated)
:param affinity: affinity scheduling rules for the launched pod.
:param config_file: The path to the Kubernetes config file. (templated)
If not specified, default value is ``~/.kube/config``
:param node_selector: A dict containing a group of scheduling rules.
:param image_pull_secrets: Any image pull secrets to be given to the pod.
If more than one secret is required, provide a
comma separated list: secret_a,secret_b
:param service_account_name: Name of the service account
:param hostnetwork: If True enable host networking on the pod.
:param tolerations: A list of kubernetes tolerations.
:param security_context: security options the pod should run with (PodSecurityContext).
:param container_security_context: security options the container should run with.
:param dnspolicy: dnspolicy for the pod.
:param dns_config: dns configuration (ip addresses, searches, options) for the pod.
:param hostname: hostname for the pod.
:param subdomain: subdomain for the pod.
:param schedulername: Specify a schedulername for the pod
:param full_pod_spec: The complete podSpec
:param init_containers: init container for the launched Pod
:param log_events_on_failure: Log the pod's events if a failure occurs
:param do_xcom_push: If True, the content of the file
/airflow/xcom/return.json in the container will also be pushed to an
XCom when the container completes.
:param pod_template_file: path to pod template file (templated)
:param priority_class_name: priority class name for the launched Pod
:param pod_runtime_info_envs: (Optional) A list of environment variables,
to be set in the container.
:param termination_grace_period: Termination grace period if task killed in UI,
defaults to kubernetes default
:param configmaps: (Optional) A list of names of config maps from which it collects ConfigMaps
to populate the environment variables with. The contents of the target
ConfigMap's Data field will represent the key-value pairs as environment variables.
Extends env_from.
:param skip_on_exit_code: If task exits with this exit code, leave the task
in ``skipped`` state (default: None). If set to ``None``, any non-zero
exit code will be treated as a failure.
:param base_container_name: The name of the base container in the pod. This container's logs
will appear as part of this task's logs if get_logs is True. Defaults to None. If None,
will consult the class variable BASE_CONTAINER_NAME (which defaults to "base") for the base
container name to use.
:param deferrable: Run operator in the deferrable mode.
:param poll_interval: Polling period in seconds to check for the status. Used only in deferrable mode.
:param log_pod_spec_on_failure: Log the pod's specification if a failure occurs
:param on_finish_action: What to do when the pod reaches its final state, or the execution is interrupted.
If "delete_pod", the pod will be deleted regardless it's state; if "delete_succeeded_pod",
only succeeded pod will be deleted. You can set to "keep_pod" to keep the pod.
:param is_delete_operator_pod: What to do when the pod reaches its final
state, or the execution is interrupted. If True (default), delete the
pod; if False, leave the pod.
Deprecated - use `on_finish_action` instead.
"""
# This field can be overloaded at the instance level via base_container_name
BASE_CONTAINER_NAME = "base"
POD_CHECKED_KEY = "already_checked"
POST_TERMINATION_TIMEOUT = 120
template_fields: Sequence[str] = (
"image",
"cmds",
"arguments",
"env_vars",
"labels",
"config_file",
"pod_template_file",
"namespace",
"container_resources",
"volumes",
"volume_mounts",
)
template_fields_renderers = {"env_vars": "py"}
def __init__(
self,
*,
kubernetes_conn_id: str | None = KubernetesHook.default_conn_name,
namespace: str | None = None,
image: str | None = None,
name: str | None = None,
random_name_suffix: bool = True,
cmds: list[str] | None = None,
arguments: list[str] | None = None,
ports: list[k8s.V1ContainerPort] | None = None,
volume_mounts: list[k8s.V1VolumeMount] | None = None,
volumes: list[k8s.V1Volume] | None = None,
env_vars: list[k8s.V1EnvVar] | None = None,
env_from: list[k8s.V1EnvFromSource] | None = None,
secrets: list[Secret] | None = None,
in_cluster: bool | None = None,
cluster_context: str | None = None,
labels: dict | None = None,
reattach_on_restart: bool = True,
startup_timeout_seconds: int = 120,
get_logs: bool = True,
container_logs: Iterable[str] | str | Literal[True] = BASE_CONTAINER_NAME,
image_pull_policy: str | None = None,
annotations: dict | None = None,
container_resources: k8s.V1ResourceRequirements | None = None,
affinity: k8s.V1Affinity | None = None,
config_file: str | None = None,
node_selector: dict | None = None,
image_pull_secrets: list[k8s.V1LocalObjectReference] | None = None,
service_account_name: str | None = None,
hostnetwork: bool = False,
tolerations: list[k8s.V1Toleration] | None = None,
security_context: dict | None = None,
container_security_context: dict | None = None,
dnspolicy: str | None = None,
dns_config: k8s.V1PodDNSConfig | None = None,
hostname: str | None = None,
subdomain: str | None = None,
schedulername: str | None = None,
full_pod_spec: k8s.V1Pod | None = None,
init_containers: list[k8s.V1Container] | None = None,
log_events_on_failure: bool = False,
do_xcom_push: bool = False,
pod_template_file: str | None = None,
priority_class_name: str | None = None,
pod_runtime_info_envs: list[k8s.V1EnvVar] | None = None,
termination_grace_period: int | None = None,
configmaps: list[str] | None = None,
skip_on_exit_code: int | Container[int] | None = None,
base_container_name: str | None = None,
deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
poll_interval: float = 2,
log_pod_spec_on_failure: bool = True,
on_finish_action: str = "delete_pod",
is_delete_operator_pod: None | bool = None,
**kwargs,
) -> None:
# TODO: remove in provider 6.0.0 release. This is a mitigate step to advise users to switch to the
# container_resources parameter.
if isinstance(kwargs.get("resources"), k8s.V1ResourceRequirements):
raise AirflowException(
"Specifying resources for the launched pod with 'resources' is deprecated. "
"Use 'container_resources' instead."
)
# TODO: remove in provider 6.0.0 release. This is a mitigate step to advise users to switch to the
# node_selector parameter.
if "node_selectors" in kwargs:
raise ValueError(
"Param `node_selectors` supplied. This param is no longer supported. "
"Use `node_selector` instead."
)
super().__init__(**kwargs)
self.kubernetes_conn_id = kubernetes_conn_id
self.do_xcom_push = do_xcom_push
self.image = image
self.namespace = namespace
self.cmds = cmds or []
self.arguments = arguments or []
self.labels = labels or {}
self.startup_timeout_seconds = startup_timeout_seconds
self.env_vars = convert_env_vars(env_vars) if env_vars else []
if pod_runtime_info_envs:
self.env_vars.extend([convert_pod_runtime_info_env(p) for p in pod_runtime_info_envs])
self.env_from = env_from or []
if configmaps:
self.env_from.extend([convert_configmap(c) for c in configmaps])
self.ports = [convert_port(p) for p in ports] if ports else []
self.volume_mounts = [convert_volume_mount(v) for v in volume_mounts] if volume_mounts else []
self.volumes = [convert_volume(volume) for volume in volumes] if volumes else []
self.secrets = secrets or []
self.in_cluster = in_cluster
self.cluster_context = cluster_context
self.reattach_on_restart = reattach_on_restart
self.get_logs = get_logs
self.container_logs = container_logs
if self.container_logs == KubernetesPodOperator.BASE_CONTAINER_NAME:
self.container_logs = (
base_container_name if base_container_name else KubernetesPodOperator.BASE_CONTAINER_NAME
)
self.image_pull_policy = image_pull_policy
self.node_selector = node_selector or {}
self.annotations = annotations or {}
self.affinity = convert_affinity(affinity) if affinity else {}
self.container_resources = container_resources
self.config_file = config_file
self.image_pull_secrets = convert_image_pull_secrets(image_pull_secrets) if image_pull_secrets else []
self.service_account_name = service_account_name
self.hostnetwork = hostnetwork
self.tolerations = (
[convert_toleration(toleration) for toleration in tolerations] if tolerations else []
)
self.security_context = security_context or {}
self.container_security_context = container_security_context
self.dnspolicy = dnspolicy
self.dns_config = dns_config
self.hostname = hostname
self.subdomain = subdomain
self.schedulername = schedulername
self.full_pod_spec = full_pod_spec
self.init_containers = init_containers or []
self.log_events_on_failure = log_events_on_failure
self.priority_class_name = priority_class_name
self.pod_template_file = pod_template_file
self.name = self._set_name(name)
self.random_name_suffix = random_name_suffix
self.termination_grace_period = termination_grace_period
self.pod_request_obj: k8s.V1Pod | None = None
self.pod: k8s.V1Pod | None = None
self.skip_on_exit_code = (
skip_on_exit_code
if isinstance(skip_on_exit_code, Container)
else [skip_on_exit_code]
if skip_on_exit_code
else []
)
self.base_container_name = base_container_name or self.BASE_CONTAINER_NAME
self.deferrable = deferrable
self.poll_interval = poll_interval
self.remote_pod: k8s.V1Pod | None = None
self.log_pod_spec_on_failure = log_pod_spec_on_failure
if is_delete_operator_pod is not None:
warnings.warn(
"`is_delete_operator_pod` parameter is deprecated, please use `on_finish_action`",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
self.on_finish_action = (
OnFinishAction.DELETE_POD if is_delete_operator_pod else OnFinishAction.KEEP_POD
)
self.is_delete_operator_pod = is_delete_operator_pod
else:
self.on_finish_action = OnFinishAction(on_finish_action)
self.is_delete_operator_pod = self.on_finish_action == OnFinishAction.DELETE_POD
self._config_dict: dict | None = None # TODO: remove it when removing convert_config_file_to_dict
@cached_property
def _incluster_namespace(self):
from pathlib import Path
path = Path("/var/run/secrets/kubernetes.io/serviceaccount/namespace")
return path.exists() and path.read_text() or None
def _render_nested_template_fields(
self,
content: Any,
context: Context,
jinja_env: jinja2.Environment,
seen_oids: set,
) -> None:
if id(content) not in seen_oids:
template_fields: tuple | None
if isinstance(content, k8s.V1EnvVar):
template_fields = ("value", "name")
elif isinstance(content, k8s.V1ResourceRequirements):
template_fields = ("limits", "requests")
elif isinstance(content, k8s.V1Volume):
template_fields = ("name", "persistent_volume_claim")
elif isinstance(content, k8s.V1VolumeMount):
template_fields = ("name",)
elif isinstance(content, k8s.V1PersistentVolumeClaimVolumeSource):
template_fields = ("claim_name",)
else:
template_fields = None
if template_fields:
seen_oids.add(id(content))
self._do_render_template_fields(content, template_fields, context, jinja_env, seen_oids)
return
super()._render_nested_template_fields(content, context, jinja_env, seen_oids)
@staticmethod
def _get_ti_pod_labels(context: Context | None = None, include_try_number: bool = True) -> dict[str, str]:
"""
Generate labels for the pod to track the pod in case of Operator crash.
:param context: task context provided by airflow DAG
:return: dict
"""
if not context:
return {}
ti = context["ti"]
run_id = context["run_id"]
labels = {
"dag_id": ti.dag_id,
"task_id": ti.task_id,
"run_id": run_id,
"kubernetes_pod_operator": "True",
}
map_index = ti.map_index
if map_index >= 0:
labels["map_index"] = map_index
if include_try_number:
labels.update(try_number=ti.try_number)
        # In the case of sub dags, it is useful to also record the parent dag id
if context["dag"].parent_dag:
labels["parent_dag_id"] = context["dag"].parent_dag.dag_id
# Ensure that label is valid for Kube,
# and if not truncate/remove invalid chars and replace with short hash.
for label_id, label in labels.items():
safe_label = pod_generator.make_safe_label_value(str(label))
labels[label_id] = safe_label
return labels
@cached_property
def pod_manager(self) -> PodManager:
return PodManager(kube_client=self.client)
@cached_property
def hook(self) -> PodOperatorHookProtocol:
hook = KubernetesHook(
conn_id=self.kubernetes_conn_id,
in_cluster=self.in_cluster,
config_file=self.config_file,
cluster_context=self.cluster_context,
)
return hook
@cached_property
def client(self) -> CoreV1Api:
return self.hook.core_v1_client
def find_pod(self, namespace: str, context: Context, *, exclude_checked: bool = True) -> k8s.V1Pod | None:
"""Returns an already-running pod for this task instance if one exists."""
label_selector = self._build_find_pod_label_selector(context, exclude_checked=exclude_checked)
pod_list = self.client.list_namespaced_pod(
namespace=namespace,
label_selector=label_selector,
).items
pod = None
num_pods = len(pod_list)
if num_pods > 1:
raise AirflowException(f"More than one pod running with labels {label_selector}")
elif num_pods == 1:
pod = pod_list[0]
self.log.info("Found matching pod %s with labels %s", pod.metadata.name, pod.metadata.labels)
self.log.info("`try_number` of task_instance: %s", context["ti"].try_number)
self.log.info("`try_number` of pod: %s", pod.metadata.labels["try_number"])
return pod
def get_or_create_pod(self, pod_request_obj: k8s.V1Pod, context: Context) -> k8s.V1Pod:
if self.reattach_on_restart:
pod = self.find_pod(self.namespace or pod_request_obj.metadata.namespace, context=context)
if pod:
return pod
self.log.debug("Starting pod:\n%s", yaml.safe_dump(pod_request_obj.to_dict()))
self.pod_manager.create_pod(pod=pod_request_obj)
return pod_request_obj
def await_pod_start(self, pod: k8s.V1Pod):
try:
self.pod_manager.await_pod_start(pod=pod, startup_timeout=self.startup_timeout_seconds)
except PodLaunchFailedException:
if self.log_events_on_failure:
for event in self.pod_manager.read_pod_events(pod).items:
self.log.error("Pod Event: %s - %s", event.reason, event.message)
raise
def extract_xcom(self, pod: k8s.V1Pod):
"""Retrieves xcom value and kills xcom sidecar container."""
result = self.pod_manager.extract_xcom(pod)
if isinstance(result, str) and result.rstrip() == "__airflow_xcom_result_empty__":
self.log.info("xcom result file is empty.")
return None
else:
self.log.info("xcom result: \n%s", result)
return json.loads(result)
def execute(self, context: Context):
"""Based on the deferrable parameter runs the pod asynchronously or synchronously."""
if self.deferrable:
self.execute_async(context)
else:
return self.execute_sync(context)
def execute_sync(self, context: Context):
try:
self.pod_request_obj = self.build_pod_request_obj(context)
self.pod = self.get_or_create_pod( # must set `self.pod` for `on_kill`
pod_request_obj=self.pod_request_obj,
context=context,
)
# push to xcom now so that if there is an error we still have the values
ti = context["ti"]
ti.xcom_push(key="pod_name", value=self.pod.metadata.name)
ti.xcom_push(key="pod_namespace", value=self.pod.metadata.namespace)
# get remote pod for use in cleanup methods
self.remote_pod = self.find_pod(self.pod.metadata.namespace, context=context)
self.await_pod_start(pod=self.pod)
if self.get_logs:
self.pod_manager.fetch_requested_container_logs(
pod=self.pod,
container_logs=self.container_logs,
follow_logs=True,
)
else:
self.pod_manager.await_container_completion(
pod=self.pod, container_name=self.base_container_name
)
if self.do_xcom_push:
self.pod_manager.await_xcom_sidecar_container_start(pod=self.pod)
result = self.extract_xcom(pod=self.pod)
self.remote_pod = self.pod_manager.await_pod_completion(self.pod)
finally:
self.cleanup(
pod=self.pod or self.pod_request_obj,
remote_pod=self.remote_pod,
)
if self.do_xcom_push:
return result
def execute_async(self, context: Context):
self.pod_request_obj = self.build_pod_request_obj(context)
self.pod = self.get_or_create_pod( # must set `self.pod` for `on_kill`
pod_request_obj=self.pod_request_obj,
context=context,
)
self.invoke_defer_method()
def invoke_defer_method(self):
"""Method to easily redefine triggers which are being used in child classes."""
trigger_start_time = utcnow()
self.defer(
trigger=KubernetesPodTrigger(
pod_name=self.pod.metadata.name,
pod_namespace=self.pod.metadata.namespace,
trigger_start_time=trigger_start_time,
kubernetes_conn_id=self.kubernetes_conn_id,
cluster_context=self.cluster_context,
config_file=self.config_file,
in_cluster=self.in_cluster,
poll_interval=self.poll_interval,
get_logs=self.get_logs,
startup_timeout=self.startup_timeout_seconds,
base_container_name=self.base_container_name,
on_finish_action=self.on_finish_action.value,
),
method_name="execute_complete",
)
def execute_complete(self, context: Context, event: dict, **kwargs):
pod = None
try:
pod = self.hook.get_pod(
event["name"],
event["namespace"],
)
if event["status"] in ("error", "failed", "timeout"):
# fetch some logs when pod is failed
if self.get_logs:
self.write_logs(pod)
raise AirflowException(event["message"])
elif event["status"] == "success":
ti = context["ti"]
ti.xcom_push(key="pod_name", value=pod.metadata.name)
ti.xcom_push(key="pod_namespace", value=pod.metadata.namespace)
# fetch some logs when pod is executed successfully
if self.get_logs:
self.write_logs(pod)
if self.do_xcom_push:
xcom_sidecar_output = self.extract_xcom(pod=pod)
return xcom_sidecar_output
finally:
pod = self.pod_manager.await_pod_completion(pod)
if pod is not None:
self.post_complete_action(
pod=pod,
remote_pod=pod,
)
def write_logs(self, pod: k8s.V1Pod):
try:
logs = self.pod_manager.read_pod_logs(
pod=pod,
container_name=self.base_container_name,
follow=False,
)
for raw_line in logs:
line = raw_line.decode("utf-8", errors="backslashreplace").rstrip("\n")
self.log.info("Container logs: %s", line)
except HTTPError as e:
self.log.warning(
"Reading of logs interrupted with error %r; will retry. "
"Set log level to DEBUG for traceback.",
e,
)
def post_complete_action(self, *, pod, remote_pod, **kwargs):
"""Actions that must be done after operator finishes logic of the deferrable_execution."""
self.cleanup(
pod=pod,
remote_pod=remote_pod,
)
def cleanup(self, pod: k8s.V1Pod, remote_pod: k8s.V1Pod):
pod_phase = remote_pod.status.phase if hasattr(remote_pod, "status") else None
        # if the pod fails or succeeds, but we don't want to delete it
if pod_phase != PodPhase.SUCCEEDED or self.on_finish_action == OnFinishAction.KEEP_POD:
self.patch_already_checked(remote_pod, reraise=False)
if pod_phase != PodPhase.SUCCEEDED:
if self.log_events_on_failure:
self._read_pod_events(pod, reraise=False)
self.process_pod_deletion(remote_pod, reraise=False)
error_message = get_container_termination_message(remote_pod, self.base_container_name)
if self.skip_on_exit_code is not None:
container_statuses = (
remote_pod.status.container_statuses if remote_pod and remote_pod.status else None
) or []
base_container_status = next(
(x for x in container_statuses if x.name == self.base_container_name), None
)
exit_code = (
base_container_status.last_state.terminated.exit_code
if base_container_status
and base_container_status.last_state
and base_container_status.last_state.terminated
else None
)
if exit_code in self.skip_on_exit_code:
raise AirflowSkipException(
f"Pod {pod and pod.metadata.name} returned exit code "
f"{self.skip_on_exit_code}. Skipping."
)
raise AirflowException(
"\n".join(
filter(
None,
[
f"Pod {pod and pod.metadata.name} returned a failure.",
error_message if isinstance(error_message, str) else None,
f"remote_pod: {remote_pod}" if self.log_pod_spec_on_failure else None,
],
)
)
)
else:
self.process_pod_deletion(remote_pod, reraise=False)
def _read_pod_events(self, pod, *, reraise=True):
"""Will fetch and emit events from pod."""
with _optionally_suppress(reraise=reraise):
for event in self.pod_manager.read_pod_events(pod).items:
self.log.error("Pod Event: %s - %s", event.reason, event.message)
def process_pod_deletion(self, pod: k8s.V1Pod, *, reraise=True):
with _optionally_suppress(reraise=reraise):
if pod is not None:
should_delete_pod = (self.on_finish_action == OnFinishAction.DELETE_POD) or (
self.on_finish_action == OnFinishAction.DELETE_SUCCEEDED_POD
and pod.status.phase == PodPhase.SUCCEEDED
)
if should_delete_pod:
self.log.info("Deleting pod: %s", pod.metadata.name)
self.pod_manager.delete_pod(pod)
else:
self.log.info("Skipping deleting pod: %s", pod.metadata.name)
def _build_find_pod_label_selector(self, context: Context | None = None, *, exclude_checked=True) -> str:
labels = self._get_ti_pod_labels(context, include_try_number=False)
label_strings = [f"{label_id}={label}" for label_id, label in sorted(labels.items())]
labels_value = ",".join(label_strings)
if exclude_checked:
labels_value += f",{self.POD_CHECKED_KEY}!=True"
labels_value += ",!airflow-worker"
return labels_value
@staticmethod
def _set_name(name: str | None) -> str | None:
if name is not None:
validate_key(name, max_length=220)
return re.sub(r"[^a-z0-9-]+", "-", name.lower())
return None
def patch_already_checked(self, pod: k8s.V1Pod, *, reraise=True):
"""Add an "already checked" annotation to ensure we don't reattach on retries."""
with _optionally_suppress(reraise=reraise):
self.client.patch_namespaced_pod(
name=pod.metadata.name,
namespace=pod.metadata.namespace,
body={"metadata": {"labels": {self.POD_CHECKED_KEY: "True"}}},
)
def on_kill(self) -> None:
if self.pod:
pod = self.pod
kwargs = dict(
name=pod.metadata.name,
namespace=pod.metadata.namespace,
)
if self.termination_grace_period is not None:
kwargs.update(grace_period_seconds=self.termination_grace_period)
self.client.delete_namespaced_pod(**kwargs)
def build_pod_request_obj(self, context: Context | None = None) -> k8s.V1Pod:
"""
Returns V1Pod object based on pod template file, full pod spec, and other operator parameters.
The V1Pod attributes are derived (in order of precedence) from operator params, full pod spec, pod
template file.
"""
self.log.debug("Creating pod for KubernetesPodOperator task %s", self.task_id)
if self.pod_template_file:
self.log.debug("Pod template file found, will parse for base pod")
pod_template = pod_generator.PodGenerator.deserialize_model_file(self.pod_template_file)
if self.full_pod_spec:
pod_template = PodGenerator.reconcile_pods(pod_template, self.full_pod_spec)
elif self.full_pod_spec:
pod_template = self.full_pod_spec
else:
pod_template = k8s.V1Pod(metadata=k8s.V1ObjectMeta())
pod = k8s.V1Pod(
api_version="v1",
kind="Pod",
metadata=k8s.V1ObjectMeta(
namespace=self.namespace,
labels=self.labels,
name=self.name,
annotations=self.annotations,
),
spec=k8s.V1PodSpec(
node_selector=self.node_selector,
affinity=self.affinity,
tolerations=self.tolerations,
init_containers=self.init_containers,
containers=[
k8s.V1Container(
image=self.image,
name=self.base_container_name,
command=self.cmds,
ports=self.ports,
image_pull_policy=self.image_pull_policy,
resources=self.container_resources,
volume_mounts=self.volume_mounts,
args=self.arguments,
env=self.env_vars,
env_from=self.env_from,
security_context=self.container_security_context,
)
],
image_pull_secrets=self.image_pull_secrets,
service_account_name=self.service_account_name,
host_network=self.hostnetwork,
hostname=self.hostname,
subdomain=self.subdomain,
security_context=self.security_context,
dns_policy=self.dnspolicy,
dns_config=self.dns_config,
scheduler_name=self.schedulername,
restart_policy="Never",
priority_class_name=self.priority_class_name,
volumes=self.volumes,
),
)
pod = PodGenerator.reconcile_pods(pod_template, pod)
if not pod.metadata.name:
pod.metadata.name = _create_pod_id(
task_id=self.task_id, unique=self.random_name_suffix, max_length=80
)
elif self.random_name_suffix:
# user has supplied pod name, we're just adding suffix
pod.metadata.name = _add_pod_suffix(pod_name=pod.metadata.name)
if not pod.metadata.namespace:
hook_namespace = self.hook.get_namespace()
pod_namespace = self.namespace or hook_namespace or self._incluster_namespace or "default"
pod.metadata.namespace = pod_namespace
for secret in self.secrets:
self.log.debug("Adding secret to task %s", self.task_id)
pod = secret.attach_to_pod(pod)
if self.do_xcom_push:
self.log.debug("Adding xcom sidecar to task %s", self.task_id)
pod = xcom_sidecar.add_xcom_sidecar(pod)
labels = self._get_ti_pod_labels(context)
self.log.info("Building pod %s with labels: %s", pod.metadata.name, labels)
# Merge Pod Identifying labels with labels passed to operator
pod.metadata.labels.update(labels)
# Add Airflow Version to the label
# And a label to identify that pod is launched by KubernetesPodOperator
pod.metadata.labels.update(
{
"airflow_version": airflow_version.replace("+", "-"),
"airflow_kpo_in_cluster": str(self.hook.is_in_cluster),
}
)
pod_mutation_hook(pod)
return pod
def dry_run(self) -> None:
"""
Prints out the pod definition that would be created by this operator.
Does not include labels specific to the task instance (since there isn't
one in a dry_run) and excludes all empty elements.
"""
pod = self.build_pod_request_obj()
print(yaml.dump(prune_dict(pod.to_dict(), mode="strict")))
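# ---------------------------------------------------------------------------
# Illustrative usage sketch: image, task id, command and resource values are
# assumptions; see the class docstring above for the full parameter reference.
# ---------------------------------------------------------------------------
def _example_kubernetes_pod_task() -> KubernetesPodOperator:  # pragma: no cover
    return KubernetesPodOperator(
        task_id="echo_hello",
        name="echo-hello",
        image="alpine:3.18",
        cmds=["sh", "-c"],
        arguments=["echo hello from the pod"],
        labels={"example": "true"},
        get_logs=True,
        on_finish_action="delete_pod",
        container_resources=k8s.V1ResourceRequirements(
            requests={"cpu": "100m", "memory": "128Mi"},
            limits={"cpu": "500m", "memory": "256Mi"},
        ),
    )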
class _optionally_suppress(AbstractContextManager):
"""
Returns context manager that will swallow and log exceptions.
    By default it swallows descendants of Exception, but you can provide other classes through
the vararg ``exceptions``.
Suppression behavior can be disabled with reraise=True.
:meta private:
"""
def __init__(self, *exceptions, reraise=False):
self._exceptions = exceptions or (Exception,)
self.reraise = reraise
self.exception = None
def __enter__(self):
return self
def __exit__(self, exctype, excinst, exctb):
error = exctype is not None
matching_error = error and issubclass(exctype, self._exceptions)
if error and not matching_error:
return False
elif matching_error and self.reraise:
return False
elif matching_error:
self.exception = excinst
logger = logging.getLogger(__name__)
logger.exception(excinst)
return True
else:
return True
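# ---------------------------------------------------------------------------
# Illustrative sketch of how the context manager above is typically used in
# cleanup paths, where a secondary failure should be logged but must not mask
# the primary error. The ValueError here is an assumption for the example.
# ---------------------------------------------------------------------------
def _example_optionally_suppress(reraise: bool = False) -> None:  # pragma: no cover
    with _optionally_suppress(ValueError, reraise=reraise):
        # With reraise=False the ValueError is logged and swallowed; with
        # reraise=True it propagates to the caller instead.
        raise ValueError("simulated cleanup failure")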
| 40,898 | 42.463337 | 110 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.cncf.kubernetes.operators.pod` instead."""
from __future__ import annotations
import warnings
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.cncf.kubernetes.operators.pod import * # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.cncf.kubernetes.operators.pod` instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
| 1,262 | 39.741935 | 107 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/triggers/pod.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import asyncio
import warnings
from asyncio import CancelledError
from datetime import datetime
from enum import Enum
from typing import Any, AsyncIterator
import pytz
from kubernetes_asyncio.client.models import V1Pod
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.cncf.kubernetes.hooks.kubernetes import AsyncKubernetesHook
from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction, PodPhase
from airflow.triggers.base import BaseTrigger, TriggerEvent
class ContainerState(str, Enum):
"""
Possible container states.
See https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-phase.
"""
WAITING = "waiting"
RUNNING = "running"
TERMINATED = "terminated"
FAILED = "failed"
UNDEFINED = "undefined"
class KubernetesPodTrigger(BaseTrigger):
"""
KubernetesPodTrigger run on the trigger worker to check the state of Pod.
:param pod_name: The name of the pod.
:param pod_namespace: The namespace of the pod.
:param kubernetes_conn_id: The :ref:`kubernetes connection id <howto/connection:kubernetes>`
for the Kubernetes cluster.
:param cluster_context: Context that points to kubernetes cluster.
:param config_file: Path to kubeconfig file.
:param poll_interval: Polling period in seconds to check for the status.
:param trigger_start_time: time in Datetime format when the trigger was started
:param in_cluster: run kubernetes client with in_cluster configuration.
:param get_logs: get the stdout of the container as logs of the tasks.
:param startup_timeout: timeout in seconds to start up the pod.
:param on_finish_action: What to do when the pod reaches its final state, or the execution is interrupted.
If "delete_pod", the pod will be deleted regardless it's state; if "delete_succeeded_pod",
only succeeded pod will be deleted. You can set to "keep_pod" to keep the pod.
:param should_delete_pod: What to do when the pod reaches its final
state, or the execution is interrupted. If True (default), delete the
pod; if False, leave the pod.
Deprecated - use `on_finish_action` instead.
"""
def __init__(
self,
pod_name: str,
pod_namespace: str,
trigger_start_time: datetime,
base_container_name: str,
kubernetes_conn_id: str | None = None,
poll_interval: float = 2,
cluster_context: str | None = None,
config_file: str | None = None,
in_cluster: bool | None = None,
get_logs: bool = True,
startup_timeout: int = 120,
on_finish_action: str = "delete_pod",
should_delete_pod: bool | None = None,
):
super().__init__()
self.pod_name = pod_name
self.pod_namespace = pod_namespace
self.trigger_start_time = trigger_start_time
self.base_container_name = base_container_name
self.kubernetes_conn_id = kubernetes_conn_id
self.poll_interval = poll_interval
self.cluster_context = cluster_context
self.config_file = config_file
self.in_cluster = in_cluster
self.get_logs = get_logs
self.startup_timeout = startup_timeout
if should_delete_pod is not None:
warnings.warn(
"`should_delete_pod` parameter is deprecated, please use `on_finish_action`",
AirflowProviderDeprecationWarning,
)
self.on_finish_action = (
OnFinishAction.DELETE_POD if should_delete_pod else OnFinishAction.KEEP_POD
)
self.should_delete_pod = should_delete_pod
else:
self.on_finish_action = OnFinishAction(on_finish_action)
self.should_delete_pod = self.on_finish_action == OnFinishAction.DELETE_POD
self._hook: AsyncKubernetesHook | None = None
self._since_time = None
def serialize(self) -> tuple[str, dict[str, Any]]:
"""Serializes KubernetesCreatePodTrigger arguments and classpath."""
return (
"airflow.providers.cncf.kubernetes.triggers.pod.KubernetesPodTrigger",
{
"pod_name": self.pod_name,
"pod_namespace": self.pod_namespace,
"base_container_name": self.base_container_name,
"kubernetes_conn_id": self.kubernetes_conn_id,
"poll_interval": self.poll_interval,
"cluster_context": self.cluster_context,
"config_file": self.config_file,
"in_cluster": self.in_cluster,
"get_logs": self.get_logs,
"startup_timeout": self.startup_timeout,
"trigger_start_time": self.trigger_start_time,
"should_delete_pod": self.should_delete_pod,
"on_finish_action": self.on_finish_action.value,
},
)
async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override]
"""Gets current pod status and yields a TriggerEvent."""
hook = self._get_async_hook()
self.log.info("Checking pod %r in namespace %r.", self.pod_name, self.pod_namespace)
while True:
try:
pod = await hook.get_pod(
name=self.pod_name,
namespace=self.pod_namespace,
)
pod_status = pod.status.phase
self.log.debug("Pod %s status: %s", self.pod_name, pod_status)
container_state = self.define_container_state(pod)
self.log.debug("Container %s status: %s", self.base_container_name, container_state)
if container_state == ContainerState.TERMINATED:
yield TriggerEvent(
{
"name": self.pod_name,
"namespace": self.pod_namespace,
"status": "success",
"message": "All containers inside pod have started successfully.",
}
)
return
elif self.should_wait(pod_phase=pod_status, container_state=container_state):
self.log.info("Container is not completed and still working.")
if pod_status == PodPhase.PENDING and container_state == ContainerState.UNDEFINED:
delta = datetime.now(tz=pytz.UTC) - self.trigger_start_time
if delta.total_seconds() >= self.startup_timeout:
message = (
f"Pod took longer than {self.startup_timeout} seconds to start. "
"Check the pod events in kubernetes to determine why."
)
yield TriggerEvent(
{
"name": self.pod_name,
"namespace": self.pod_namespace,
"status": "timeout",
"message": message,
}
)
return
self.log.info("Sleeping for %s seconds.", self.poll_interval)
await asyncio.sleep(self.poll_interval)
else:
yield TriggerEvent(
{
"name": self.pod_name,
"namespace": self.pod_namespace,
"status": "failed",
"message": pod.status.message,
}
)
return
except CancelledError:
# That means that task was marked as failed
if self.get_logs:
self.log.info("Outputting container logs...")
await self._get_async_hook().read_logs(
name=self.pod_name,
namespace=self.pod_namespace,
)
if self.on_finish_action == OnFinishAction.DELETE_POD:
self.log.info("Deleting pod...")
await self._get_async_hook().delete_pod(
name=self.pod_name,
namespace=self.pod_namespace,
)
yield TriggerEvent(
{
"name": self.pod_name,
"namespace": self.pod_namespace,
"status": "cancelled",
"message": "Pod execution was cancelled",
}
)
return
except Exception as e:
self.log.exception("Exception occurred while checking pod phase:")
yield TriggerEvent(
{
"name": self.pod_name,
"namespace": self.pod_namespace,
"status": "error",
"message": str(e),
}
)
return
def _get_async_hook(self) -> AsyncKubernetesHook:
if self._hook is None:
self._hook = AsyncKubernetesHook(
conn_id=self.kubernetes_conn_id,
in_cluster=self.in_cluster,
config_file=self.config_file,
cluster_context=self.cluster_context,
)
return self._hook
def define_container_state(self, pod: V1Pod) -> ContainerState:
pod_containers = pod.status.container_statuses
if pod_containers is None:
return ContainerState.UNDEFINED
container = [c for c in pod_containers if c.name == self.base_container_name][0]
for state in (ContainerState.RUNNING, ContainerState.WAITING, ContainerState.TERMINATED):
state_obj = getattr(container.state, state)
if state_obj is not None:
if state != ContainerState.TERMINATED:
return state
else:
return ContainerState.TERMINATED if state_obj.exit_code == 0 else ContainerState.FAILED
return ContainerState.UNDEFINED
@staticmethod
def should_wait(pod_phase: PodPhase, container_state: ContainerState) -> bool:
return (
container_state == ContainerState.WAITING
or container_state == ContainerState.RUNNING
or (container_state == ContainerState.UNDEFINED and pod_phase == PodPhase.PENDING)
)
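# ---------------------------------------------------------------------------
# Illustrative sketch of how an operator might construct this trigger when
# deferring. The pod name, namespace and poll interval are assumptions;
# KubernetesPodOperator builds an equivalent trigger in its
# `invoke_defer_method`.
# ---------------------------------------------------------------------------
def _example_pod_trigger() -> KubernetesPodTrigger:  # pragma: no cover
    return KubernetesPodTrigger(
        pod_name="echo-hello-abc12345",
        pod_namespace="default",
        trigger_start_time=datetime.now(tz=pytz.UTC),
        base_container_name="base",
        kubernetes_conn_id="kubernetes_default",
        poll_interval=5,
        get_logs=True,
        on_finish_action="delete_pod",
    )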
| 11,511 | 42.116105 | 110 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/triggers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/triggers/kubernetes_pod.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.cncf.kubernetes.triggers.pod` instead."""
from __future__ import annotations
import warnings
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.cncf.kubernetes.triggers.pod import * # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.cncf.kubernetes.triggers.pod` instead.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
| 1,259 | 39.645161 | 106 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/hooks/kubernetes.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import tempfile
from functools import cached_property
from typing import TYPE_CHECKING, Any, Generator
from asgiref.sync import sync_to_async
from kubernetes import client, config, watch
from kubernetes.client.models import V1Pod
from kubernetes.config import ConfigException
from kubernetes_asyncio import client as async_client, config as async_config
from urllib3.exceptions import HTTPError
from airflow.exceptions import AirflowException, AirflowNotFoundException
from airflow.hooks.base import BaseHook
from airflow.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
from airflow.models import Connection
from airflow.providers.cncf.kubernetes.utils.pod_manager import PodOperatorHookProtocol
from airflow.utils import yaml
LOADING_KUBE_CONFIG_FILE_RESOURCE = "Loading Kubernetes configuration file kube_config from {}..."
def _load_body_to_dict(body: str) -> dict:
try:
body_dict = yaml.safe_load(body)
except yaml.YAMLError as e:
raise AirflowException(f"Exception when loading resource definition: {e}\n")
return body_dict
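# ---------------------------------------------------------------------------
# Illustrative sketch: the helper above turns an inline YAML manifest into a
# dict before it is handed to the Kubernetes API. The manifest content is an
# assumption made for the example.
# ---------------------------------------------------------------------------
def _example_load_body() -> dict:  # pragma: no cover - documentation sketch only
    manifest = "apiVersion: v1\nkind: Namespace\nmetadata:\n  name: example\n"
    return _load_body_to_dict(manifest)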
class KubernetesHook(BaseHook, PodOperatorHookProtocol):
"""
Creates Kubernetes API connection.
- use in cluster configuration by using extra field ``in_cluster`` in connection
- use custom config by providing path to the file using extra field ``kube_config_path`` in connection
- use custom configuration by providing content of kubeconfig file via
extra field ``kube_config`` in connection
- use default config by providing no extras
    This hook checks for configuration options in the above order. Once an option is present, it will
    use that configuration.
.. seealso::
For more information about Kubernetes connection:
:doc:`/connections/kubernetes`
:param conn_id: The :ref:`kubernetes connection <howto/connection:kubernetes>`
to Kubernetes cluster.
:param client_configuration: Optional dictionary of client configuration params.
Passed on to kubernetes client.
    :param cluster_context: Optionally specify a context to use (e.g. if you have multiple
        contexts in your kubeconfig).
:param config_file: Path to kubeconfig file.
:param in_cluster: Set to ``True`` if running from within a kubernetes cluster.
:param disable_verify_ssl: Set to ``True`` if SSL verification should be disabled.
:param disable_tcp_keepalive: Set to ``True`` if you want to disable keepalive logic.
"""
conn_name_attr = "kubernetes_conn_id"
default_conn_name = "kubernetes_default"
conn_type = "kubernetes"
hook_name = "Kubernetes Cluster Connection"
DEFAULT_NAMESPACE = "default"
@staticmethod
def get_connection_form_widgets() -> dict[str, Any]:
"""Returns connection widgets to add to connection form."""
from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
from flask_babel import lazy_gettext
from wtforms import BooleanField, StringField
return {
"in_cluster": BooleanField(lazy_gettext("In cluster configuration")),
"kube_config_path": StringField(lazy_gettext("Kube config path"), widget=BS3TextFieldWidget()),
"kube_config": StringField(
lazy_gettext("Kube config (JSON format)"), widget=BS3TextFieldWidget()
),
"namespace": StringField(lazy_gettext("Namespace"), widget=BS3TextFieldWidget()),
"cluster_context": StringField(lazy_gettext("Cluster context"), widget=BS3TextFieldWidget()),
"disable_verify_ssl": BooleanField(lazy_gettext("Disable SSL")),
"disable_tcp_keepalive": BooleanField(lazy_gettext("Disable TCP keepalive")),
}
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["host", "schema", "login", "password", "port", "extra"],
"relabeling": {},
}
def __init__(
self,
conn_id: str | None = default_conn_name,
client_configuration: client.Configuration | None = None,
cluster_context: str | None = None,
config_file: str | None = None,
in_cluster: bool | None = None,
disable_verify_ssl: bool | None = None,
disable_tcp_keepalive: bool | None = None,
) -> None:
super().__init__()
self.conn_id = conn_id
self.client_configuration = client_configuration
self.cluster_context = cluster_context
self.config_file = config_file
self.in_cluster = in_cluster
self.disable_verify_ssl = disable_verify_ssl
self.disable_tcp_keepalive = disable_tcp_keepalive
self._is_in_cluster: bool | None = None
@staticmethod
def _coalesce_param(*params):
for param in params:
if param is not None:
return param
@classmethod
def get_connection(cls, conn_id: str) -> Connection:
"""
Return requested connection.
        If the connection is missing and conn_id is "kubernetes_default", return an empty connection
        so that the hook will default to cluster-derived credentials.
"""
try:
return super().get_connection(conn_id)
except AirflowNotFoundException:
if conn_id == cls.default_conn_name:
return Connection(conn_id=cls.default_conn_name)
else:
raise
@cached_property
def conn_extras(self):
if self.conn_id:
connection = self.get_connection(self.conn_id)
extras = connection.extra_dejson
else:
extras = {}
return extras
def _get_field(self, field_name):
"""
Handles backcompat for extra fields.
Prior to Airflow 2.3, in order to make use of UI customizations for extra fields,
we needed to store them with the prefix ``extra__kubernetes__``. This method
handles the backcompat, i.e. if the extra dict contains prefixed fields.
"""
if field_name.startswith("extra__"):
raise ValueError(
f"Got prefixed name {field_name}; please remove the 'extra__kubernetes__' prefix "
f"when using this method."
)
if field_name in self.conn_extras:
return self.conn_extras[field_name] or None
prefixed_name = f"extra__kubernetes__{field_name}"
return self.conn_extras.get(prefixed_name) or None
def get_conn(self) -> client.ApiClient:
"""Returns kubernetes api session for use with requests."""
in_cluster = self._coalesce_param(self.in_cluster, self._get_field("in_cluster"))
cluster_context = self._coalesce_param(self.cluster_context, self._get_field("cluster_context"))
kubeconfig_path = self._coalesce_param(self.config_file, self._get_field("kube_config_path"))
kubeconfig = self._get_field("kube_config")
num_selected_configuration = len([o for o in [in_cluster, kubeconfig, kubeconfig_path] if o])
if num_selected_configuration > 1:
raise AirflowException(
"Invalid connection configuration. Options kube_config_path, "
"kube_config, in_cluster are mutually exclusive. "
"You can only use one option at a time."
)
disable_verify_ssl = self._coalesce_param(
self.disable_verify_ssl, _get_bool(self._get_field("disable_verify_ssl"))
)
disable_tcp_keepalive = self._coalesce_param(
self.disable_tcp_keepalive, _get_bool(self._get_field("disable_tcp_keepalive"))
)
if disable_verify_ssl is True:
_disable_verify_ssl()
if disable_tcp_keepalive is not True:
_enable_tcp_keepalive()
if in_cluster:
self.log.debug("loading kube_config from: in_cluster configuration")
self._is_in_cluster = True
config.load_incluster_config()
return client.ApiClient()
if kubeconfig_path is not None:
self.log.debug("loading kube_config from: %s", kubeconfig_path)
self._is_in_cluster = False
config.load_kube_config(
config_file=kubeconfig_path,
client_configuration=self.client_configuration,
context=cluster_context,
)
return client.ApiClient()
if kubeconfig is not None:
with tempfile.NamedTemporaryFile() as temp_config:
self.log.debug("loading kube_config from: connection kube_config")
temp_config.write(kubeconfig.encode())
temp_config.flush()
self._is_in_cluster = False
config.load_kube_config(
config_file=temp_config.name,
client_configuration=self.client_configuration,
context=cluster_context,
)
return client.ApiClient()
return self._get_default_client(cluster_context=cluster_context)
def _get_default_client(self, *, cluster_context: str | None = None) -> client.ApiClient:
# if we get here, then no configuration has been supplied
# we should try in_cluster since that's most likely
# but failing that just load assuming a kubeconfig file
# in the default location
try:
config.load_incluster_config(client_configuration=self.client_configuration)
self._is_in_cluster = True
except ConfigException:
self.log.debug("loading kube_config from: default file")
self._is_in_cluster = False
config.load_kube_config(
client_configuration=self.client_configuration,
context=cluster_context,
)
return client.ApiClient()
@property
def is_in_cluster(self) -> bool:
"""Expose whether the hook is configured with ``load_incluster_config`` or not."""
if self._is_in_cluster is not None:
return self._is_in_cluster
self.api_client # so we can determine if we are in_cluster or not
if TYPE_CHECKING:
assert self._is_in_cluster is not None
return self._is_in_cluster
@cached_property
def api_client(self) -> client.ApiClient:
"""Cached Kubernetes API client."""
return self.get_conn()
@cached_property
def core_v1_client(self) -> client.CoreV1Api:
return client.CoreV1Api(api_client=self.api_client)
@cached_property
def custom_object_client(self) -> client.CustomObjectsApi:
return client.CustomObjectsApi(api_client=self.api_client)
def create_custom_object(
self, group: str, version: str, plural: str, body: str | dict, namespace: str | None = None
):
"""
Creates custom resource definition object in Kubernetes.
:param group: api group
:param version: api version
:param plural: api plural
:param body: crd object definition
:param namespace: kubernetes namespace
"""
api: client.CustomObjectsApi = self.custom_object_client
if isinstance(body, str):
body_dict = _load_body_to_dict(body)
else:
body_dict = body
response = api.create_namespaced_custom_object(
group=group,
version=version,
namespace=namespace or self.get_namespace() or self.DEFAULT_NAMESPACE,
plural=plural,
body=body_dict,
)
self.log.debug("Response: %s", response)
return response
def get_custom_object(
self, group: str, version: str, plural: str, name: str, namespace: str | None = None
):
"""
Get custom resource definition object from Kubernetes.
:param group: api group
:param version: api version
:param plural: api plural
:param name: crd object name
:param namespace: kubernetes namespace
"""
api = client.CustomObjectsApi(self.api_client)
response = api.get_namespaced_custom_object(
group=group,
version=version,
namespace=namespace or self.get_namespace() or self.DEFAULT_NAMESPACE,
plural=plural,
name=name,
)
return response
def delete_custom_object(
self, group: str, version: str, plural: str, name: str, namespace: str | None = None, **kwargs
):
"""
Delete custom resource definition object from Kubernetes.
:param group: api group
:param version: api version
:param plural: api plural
:param name: crd object name
:param namespace: kubernetes namespace
"""
api = client.CustomObjectsApi(self.api_client)
return api.delete_namespaced_custom_object(
group=group,
version=version,
namespace=namespace or self.get_namespace() or self.DEFAULT_NAMESPACE,
plural=plural,
name=name,
**kwargs,
)
def get_namespace(self) -> str | None:
"""Returns the namespace that defined in the connection."""
if self.conn_id:
return self._get_field("namespace")
return None
def get_pod_log_stream(
self,
pod_name: str,
container: str | None = "",
namespace: str | None = None,
) -> tuple[watch.Watch, Generator[str, None, None]]:
"""
Retrieves a log stream for a container in a kubernetes pod.
:param pod_name: pod name
:param container: container name
:param namespace: kubernetes namespace
"""
watcher = watch.Watch()
return (
watcher,
watcher.stream(
self.core_v1_client.read_namespaced_pod_log,
name=pod_name,
container=container,
namespace=namespace or self.get_namespace() or self.DEFAULT_NAMESPACE,
),
)
def get_pod_logs(
self,
pod_name: str,
container: str | None = "",
namespace: str | None = None,
):
"""
Retrieves a container's log from the specified pod.
:param pod_name: pod name
:param container: container name
:param namespace: kubernetes namespace
"""
return self.core_v1_client.read_namespaced_pod_log(
name=pod_name,
container=container,
_preload_content=False,
namespace=namespace or self.get_namespace() or self.DEFAULT_NAMESPACE,
)
def get_pod(self, name: str, namespace: str) -> V1Pod:
"""Read pod object from kubernetes API."""
return self.core_v1_client.read_namespaced_pod(
name=name,
namespace=namespace,
)
def get_namespaced_pod_list(
self,
label_selector: str | None = "",
namespace: str | None = None,
watch: bool = False,
**kwargs,
):
"""
        Retrieves a list of pods that belong to the given (or default) kubernetes namespace.
:param label_selector: A selector to restrict the list of returned objects by their labels
:param namespace: kubernetes namespace
:param watch: Watch for changes to the described resources and return them as a stream
"""
return self.core_v1_client.list_namespaced_pod(
namespace=namespace or self.get_namespace() or self.DEFAULT_NAMESPACE,
watch=watch,
label_selector=label_selector,
_preload_content=False,
**kwargs,
)
def _get_bool(val) -> bool | None:
"""Converts val to bool if can be done with certainty; if we cannot infer intention we return None."""
if isinstance(val, bool):
return val
elif isinstance(val, str):
if val.strip().lower() == "true":
return True
elif val.strip().lower() == "false":
return False
return None
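# --- Illustrative usage sketch (added for this document; not part of the upstream module) ---
# A minimal example, under assumed names, of how KubernetesHook can be used to read a pod's
# phase. The connection id "kubernetes_default", pod name "my-pod" and namespace "default"
# are assumptions for illustration only.
def _example_read_pod_phase() -> str:
    hook = KubernetesHook(conn_id="kubernetes_default")
    # get_pod() wraps CoreV1Api.read_namespaced_pod and returns a V1Pod object.
    pod = hook.get_pod(name="my-pod", namespace="default")
    return pod.status.phase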
class AsyncKubernetesHook(KubernetesHook):
"""Hook to use Kubernetes SDK asynchronously."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._extras: dict | None = None
async def _load_config(self):
"""Returns Kubernetes API session for use with requests."""
in_cluster = self._coalesce_param(self.in_cluster, await self._get_field("in_cluster"))
cluster_context = self._coalesce_param(self.cluster_context, await self._get_field("cluster_context"))
kubeconfig_path = self._coalesce_param(self.config_file, await self._get_field("kube_config_path"))
kubeconfig = await self._get_field("kube_config")
num_selected_configuration = len([o for o in [in_cluster, kubeconfig, kubeconfig_path] if o])
if num_selected_configuration > 1:
raise AirflowException(
"Invalid connection configuration. Options kube_config_path, "
"kube_config, in_cluster are mutually exclusive. "
"You can only use one option at a time."
)
if in_cluster:
self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("within a pod"))
self._is_in_cluster = True
async_config.load_incluster_config()
return async_client.ApiClient()
if kubeconfig_path:
self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("kube_config"))
self._is_in_cluster = False
await async_config.load_kube_config(
config_file=kubeconfig_path,
client_configuration=self.client_configuration,
context=cluster_context,
)
return async_client.ApiClient()
if kubeconfig is not None:
with tempfile.NamedTemporaryFile() as temp_config:
self.log.debug(
"Reading kubernetes configuration file from connection "
"object and writing temporary config file with its content",
)
temp_config.write(kubeconfig.encode())
temp_config.flush()
self._is_in_cluster = False
await async_config.load_kube_config(
config_file=temp_config.name,
client_configuration=self.client_configuration,
context=cluster_context,
)
return async_client.ApiClient()
self.log.debug(LOADING_KUBE_CONFIG_FILE_RESOURCE.format("default configuration file"))
await async_config.load_kube_config(
client_configuration=self.client_configuration,
context=cluster_context,
)
async def get_conn_extras(self) -> dict:
if self._extras is None:
if self.conn_id:
connection = await sync_to_async(self.get_connection)(self.conn_id)
self._extras = connection.extra_dejson
else:
self._extras = {}
return self._extras
async def _get_field(self, field_name):
if field_name.startswith("extra__"):
raise ValueError(
f"Got prefixed name {field_name}; please remove the 'extra__kubernetes__' prefix "
"when using this method."
)
extras = await self.get_conn_extras()
if field_name in extras:
return extras.get(field_name)
prefixed_name = f"extra__kubernetes__{field_name}"
return extras.get(prefixed_name)
@contextlib.asynccontextmanager
async def get_conn(self) -> async_client.ApiClient:
kube_client = None
try:
kube_client = await self._load_config() or async_client.ApiClient()
yield kube_client
finally:
if kube_client is not None:
await kube_client.close()
async def get_pod(self, name: str, namespace: str) -> V1Pod:
"""
Gets pod's object.
:param name: Name of the pod.
:param namespace: Name of the pod's namespace.
"""
async with self.get_conn() as connection:
v1_api = async_client.CoreV1Api(connection)
pod: V1Pod = await v1_api.read_namespaced_pod(
name=name,
namespace=namespace,
)
return pod
async def delete_pod(self, name: str, namespace: str):
"""
Deletes pod's object.
:param name: Name of the pod.
:param namespace: Name of the pod's namespace.
"""
async with self.get_conn() as connection:
try:
v1_api = async_client.CoreV1Api(connection)
await v1_api.delete_namespaced_pod(
name=name, namespace=namespace, body=client.V1DeleteOptions()
)
except async_client.ApiException as e:
# If the pod is already deleted
if e.status != 404:
raise
async def read_logs(self, name: str, namespace: str):
"""
Reads logs inside the pod while starting containers inside.
        All the logs are output together with their timestamps so they can be tracked
        after the execution of the pod is completed. The method is used for async output
        of the logs only when the pod failed its execution or the task was cancelled by
        the user.
:param name: Name of the pod.
:param namespace: Name of the pod's namespace.
"""
async with self.get_conn() as connection:
try:
v1_api = async_client.CoreV1Api(connection)
logs = await v1_api.read_namespaced_pod_log(
name=name,
namespace=namespace,
follow=False,
timestamps=True,
)
logs = logs.splitlines()
for line in logs:
self.log.info("Container logs from %s", line)
return logs
except HTTPError:
self.log.exception("There was an error reading the kubernetes API.")
raise
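# --- Illustrative usage sketch (added for this document; not part of the upstream module) ---
# A minimal sketch of calling AsyncKubernetesHook from an async context (for example inside a
# trigger). The pod name and namespace are assumptions for illustration only.
async def _example_read_pod_logs_async() -> list[str]:
    hook = AsyncKubernetesHook(conn_id="kubernetes_default")
    # read_logs() logs every line with its timestamp and returns the list of lines.
    return await hook.read_logs(name="my-pod", namespace="default")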
| 23,125 | 37.737018 | 110 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/hooks/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/utils/pod_manager.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Launches PODs."""
from __future__ import annotations
import enum
import json
import logging
import math
import time
import warnings
from collections.abc import Iterable
from contextlib import closing, suppress
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Generator, cast
import pendulum
import tenacity
from kubernetes import client, watch
from kubernetes.client.models.v1_container_status import V1ContainerStatus
from kubernetes.client.models.v1_pod import V1Pod
from kubernetes.client.rest import ApiException
from kubernetes.stream import stream as kubernetes_stream
from pendulum import DateTime
from pendulum.parsing.exceptions import ParserError
from tenacity import before_log
from urllib3.exceptions import HTTPError as BaseHTTPError
from urllib3.response import HTTPResponse
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.kubernetes.pod_generator import PodDefaults
from airflow.typing_compat import Literal, Protocol
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.timezone import utcnow
if TYPE_CHECKING:
from kubernetes.client.models.core_v1_event_list import CoreV1EventList
class PodLaunchFailedException(AirflowException):
"""When pod launching fails in KubernetesPodOperator."""
def should_retry_start_pod(exception: BaseException) -> bool:
"""Check if an Exception indicates a transient error and warrants retrying."""
if isinstance(exception, ApiException):
return exception.status == 409
return False
class PodPhase:
"""
Possible pod phases.
See https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-phase.
"""
PENDING = "Pending"
RUNNING = "Running"
FAILED = "Failed"
SUCCEEDED = "Succeeded"
terminal_states = {FAILED, SUCCEEDED}
class PodOperatorHookProtocol(Protocol):
"""
Protocol to define methods relied upon by KubernetesPodOperator.
Subclasses of KubernetesPodOperator, such as GKEStartPodOperator, may use
hooks that don't extend KubernetesHook. We use this protocol to document the
methods used by KPO and ensure that these methods exist on such other hooks.
"""
@property
def core_v1_client(self) -> client.CoreV1Api:
"""Get authenticated CoreV1Api object."""
@property
def is_in_cluster(self) -> bool:
"""Expose whether the hook is configured with ``load_incluster_config`` or not."""
def get_pod(self, name: str, namespace: str) -> V1Pod:
"""Read pod object from kubernetes API."""
def get_namespace(self) -> str | None:
"""Returns the namespace that defined in the connection."""
def get_container_status(pod: V1Pod, container_name: str) -> V1ContainerStatus | None:
"""Retrieves container status."""
container_statuses = pod.status.container_statuses if pod and pod.status else None
if container_statuses:
# In general the variable container_statuses can store multiple items matching different containers.
# The following generator expression yields all items that have name equal to the container_name.
# The function next() here calls the generator to get only the first value. If there's nothing found
# then None is returned.
return next((x for x in container_statuses if x.name == container_name), None)
return None
def container_is_running(pod: V1Pod, container_name: str) -> bool:
"""
Examines V1Pod ``pod`` to determine whether ``container_name`` is running.
If that container is present and running, returns True. Returns False otherwise.
"""
container_status = get_container_status(pod, container_name)
if not container_status:
return False
return container_status.state.running is not None
def container_is_completed(pod: V1Pod, container_name: str) -> bool:
"""
Examines V1Pod ``pod`` to determine whether ``container_name`` is completed.
If that container is present and completed, returns True. Returns False otherwise.
"""
container_status = get_container_status(pod, container_name)
if not container_status:
return False
return container_status.state.terminated is not None
def container_is_terminated(pod: V1Pod, container_name: str) -> bool:
"""
Examines V1Pod ``pod`` to determine whether ``container_name`` is terminated.
If that container is present and terminated, returns True. Returns False otherwise.
"""
container_statuses = pod.status.container_statuses if pod and pod.status else None
if not container_statuses:
return False
container_status = next((x for x in container_statuses if x.name == container_name), None)
if not container_status:
return False
return container_status.state.terminated is not None
def get_container_termination_message(pod: V1Pod, container_name: str):
with suppress(AttributeError, TypeError):
container_statuses = pod.status.container_statuses
container_status = next((x for x in container_statuses if x.name == container_name), None)
return container_status.state.terminated.message if container_status else None
class PodLogsConsumer:
"""
Responsible for pulling pod logs from a stream with checking a container status before reading data.
This class is a workaround for the issue https://github.com/apache/airflow/issues/23497.
:param response: HTTP response with logs
:param pod: Pod instance from Kubernetes client
:param pod_manager: Pod manager instance
:param container_name: Name of the container that we're reading logs from
:param post_termination_timeout: (Optional) The period of time in seconds representing for how long time
logs are available after the container termination.
:param read_pod_cache_timeout: (Optional) The container's status cache lifetime.
The container status is cached to reduce API calls.
:meta private:
"""
def __init__(
self,
response: HTTPResponse,
pod: V1Pod,
pod_manager: PodManager,
container_name: str,
post_termination_timeout: int = 120,
read_pod_cache_timeout: int = 120,
):
self.response = response
self.pod = pod
self.pod_manager = pod_manager
self.container_name = container_name
self.post_termination_timeout = post_termination_timeout
self.last_read_pod_at = None
self.read_pod_cache = None
self.read_pod_cache_timeout = read_pod_cache_timeout
def __iter__(self) -> Generator[bytes, None, None]:
r"""The generator yields log items divided by the '\n' symbol."""
incomplete_log_item: list[bytes] = []
if self.logs_available():
for data_chunk in self.response.stream(amt=None, decode_content=True):
if b"\n" in data_chunk:
log_items = data_chunk.split(b"\n")
yield from self._extract_log_items(incomplete_log_item, log_items)
incomplete_log_item = self._save_incomplete_log_item(log_items[-1])
else:
incomplete_log_item.append(data_chunk)
if not self.logs_available():
break
if incomplete_log_item:
yield b"".join(incomplete_log_item)
@staticmethod
def _extract_log_items(incomplete_log_item: list[bytes], log_items: list[bytes]):
yield b"".join(incomplete_log_item) + log_items[0] + b"\n"
for x in log_items[1:-1]:
yield x + b"\n"
@staticmethod
def _save_incomplete_log_item(sub_chunk: bytes):
        return [sub_chunk] if sub_chunk else []
def logs_available(self):
remote_pod = self.read_pod()
if container_is_running(pod=remote_pod, container_name=self.container_name):
return True
container_status = get_container_status(pod=remote_pod, container_name=self.container_name)
state = container_status.state if container_status else None
terminated = state.terminated if state else None
if terminated:
termination_time = terminated.finished_at
if termination_time:
return termination_time + timedelta(seconds=self.post_termination_timeout) > utcnow()
return False
def read_pod(self):
_now = utcnow()
if (
self.read_pod_cache is None
or self.last_read_pod_at + timedelta(seconds=self.read_pod_cache_timeout) < _now
):
self.read_pod_cache = self.pod_manager.read_pod(self.pod)
self.last_read_pod_at = _now
return self.read_pod_cache
@dataclass
class PodLoggingStatus:
"""Used for returning the status of the pod and last log time when exiting from `fetch_container_logs`."""
running: bool
last_log_time: DateTime | None
class PodManager(LoggingMixin):
"""Create, monitor, and otherwise interact with Kubernetes pods for use with the KubernetesPodOperator."""
def __init__(
self,
kube_client: client.CoreV1Api,
):
"""
Creates the launcher.
:param kube_client: kubernetes client
"""
super().__init__()
self._client = kube_client
self._watch = watch.Watch()
def run_pod_async(self, pod: V1Pod, **kwargs) -> V1Pod:
"""Runs POD asynchronously."""
sanitized_pod = self._client.api_client.sanitize_for_serialization(pod)
json_pod = json.dumps(sanitized_pod, indent=2)
self.log.debug("Pod Creation Request: \n%s", json_pod)
try:
resp = self._client.create_namespaced_pod(
body=sanitized_pod, namespace=pod.metadata.namespace, **kwargs
)
self.log.debug("Pod Creation Response: %s", resp)
except Exception as e:
self.log.exception(
"Exception when attempting to create Namespaced Pod: %s", str(json_pod).replace("\n", " ")
)
raise e
return resp
def delete_pod(self, pod: V1Pod) -> None:
"""Deletes POD."""
try:
self._client.delete_namespaced_pod(
pod.metadata.name, pod.metadata.namespace, body=client.V1DeleteOptions()
)
except ApiException as e:
# If the pod is already deleted
if e.status != 404:
raise
@tenacity.retry(
stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_random_exponential(),
reraise=True,
retry=tenacity.retry_if_exception(should_retry_start_pod),
)
def create_pod(self, pod: V1Pod) -> V1Pod:
"""Launches the pod asynchronously."""
return self.run_pod_async(pod)
def await_pod_start(self, pod: V1Pod, startup_timeout: int = 120) -> None:
"""
Waits for the pod to reach phase other than ``Pending``.
        :param pod: pod spec that will be monitored
:param startup_timeout: Timeout (in seconds) for startup of the pod
(if pod is pending for too long, fails task)
:return:
"""
curr_time = datetime.now()
while True:
remote_pod = self.read_pod(pod)
if remote_pod.status.phase != PodPhase.PENDING:
break
self.log.warning("Pod not yet started: %s", pod.metadata.name)
delta = datetime.now() - curr_time
if delta.total_seconds() >= startup_timeout:
msg = (
f"Pod took longer than {startup_timeout} seconds to start. "
"Check the pod events in kubernetes to determine why."
)
raise PodLaunchFailedException(msg)
time.sleep(1)
def follow_container_logs(self, pod: V1Pod, container_name: str) -> PodLoggingStatus:
warnings.warn(
"Method `follow_container_logs` is deprecated. Use `fetch_container_logs` instead"
"with option `follow=True`.",
AirflowProviderDeprecationWarning,
)
return self.fetch_container_logs(pod=pod, container_name=container_name, follow=True)
def fetch_container_logs(
self,
pod: V1Pod,
container_name: str,
*,
follow=False,
since_time: DateTime | None = None,
post_termination_timeout: int = 120,
) -> PodLoggingStatus:
"""
Follows the logs of container and streams to airflow logging.
Returns when container exits.
Between when the pod starts and logs being available, there might be a delay due to CSR not approved
and signed yet. In such situation, ApiException is thrown. This is why we are retrying on this
specific exception.
"""
@tenacity.retry(
retry=tenacity.retry_if_exception_type(ApiException),
stop=tenacity.stop_after_attempt(10),
wait=tenacity.wait_fixed(1),
before=before_log(self.log, logging.INFO),
)
def consume_logs(
*, since_time: DateTime | None = None, follow: bool = True, termination_timeout: int = 120
) -> DateTime | None:
"""
Tries to follow container logs until container completes.
            For a long-running container, sometimes the log read may be interrupted.
            Errors of this kind are suppressed.
Returns the last timestamp observed in logs.
"""
timestamp = None
try:
logs = self.read_pod_logs(
pod=pod,
container_name=container_name,
timestamps=True,
since_seconds=(
math.ceil((pendulum.now() - since_time).total_seconds()) if since_time else None
),
follow=follow,
post_termination_timeout=termination_timeout,
)
for raw_line in logs:
line = raw_line.decode("utf-8", errors="backslashreplace")
timestamp, message = self.parse_log_line(line)
self.log.info("[%s] %s", container_name, message)
except BaseHTTPError as e:
self.log.warning(
"Reading of logs interrupted for container %r with error %r; will retry. "
"Set log level to DEBUG for traceback.",
container_name,
e,
)
self.log.debug(
"Traceback for interrupted logs read for pod %r",
pod.metadata.name,
exc_info=True,
)
return timestamp or since_time
# note: `read_pod_logs` follows the logs, so we shouldn't necessarily *need* to
# loop as we do here. But in a long-running process we might temporarily lose connectivity.
# So the looping logic is there to let us resume following the logs.
last_log_time = since_time
while True:
last_log_time = consume_logs(
since_time=last_log_time, follow=follow, termination_timeout=post_termination_timeout
)
if not self.container_is_running(pod, container_name=container_name):
return PodLoggingStatus(running=False, last_log_time=last_log_time)
if not follow:
return PodLoggingStatus(running=True, last_log_time=last_log_time)
else:
self.log.warning(
"Pod %s log read interrupted but container %s still running",
pod.metadata.name,
container_name,
)
time.sleep(1)
def fetch_requested_container_logs(
self, pod: V1Pod, container_logs: Iterable[str] | str | Literal[True], follow_logs=False
) -> list[PodLoggingStatus]:
"""
        Follow the logs of containers in the specified pod and publish them to airflow logging.
Returns when all the containers exit.
"""
pod_logging_statuses = []
all_containers = self.get_container_names(pod)
if len(all_containers) == 0:
self.log.error("Could not retrieve containers for the pod: %s", pod.metadata.name)
else:
if isinstance(container_logs, str):
# fetch logs only for requested container if only one container is provided
if container_logs in all_containers:
status = self.fetch_container_logs(
pod=pod, container_name=container_logs, follow=follow_logs
)
pod_logging_statuses.append(status)
else:
self.log.error(
"container %s whose logs were requested not found in the pod %s",
container_logs,
pod.metadata.name,
)
elif isinstance(container_logs, bool):
# if True is provided, get logs for all the containers
if container_logs is True:
for container_name in all_containers:
status = self.fetch_container_logs(
pod=pod, container_name=container_name, follow=follow_logs
)
pod_logging_statuses.append(status)
else:
self.log.error(
"False is not a valid value for container_logs",
)
else:
# if a sequence of containers are provided, iterate for every container in the pod
if isinstance(container_logs, Iterable):
for container in container_logs:
if container in all_containers:
status = self.fetch_container_logs(
pod=pod, container_name=container, follow=follow_logs
)
pod_logging_statuses.append(status)
else:
self.log.error(
"Container %s whose logs were requests not found in the pod %s",
container,
pod.metadata.name,
)
else:
self.log.error(
"Invalid type %s specified for container names input parameter", type(container_logs)
)
return pod_logging_statuses
def await_container_completion(self, pod: V1Pod, container_name: str) -> None:
"""
Waits for the given container in the given pod to be completed.
:param pod: pod spec that will be monitored
:param container_name: name of the container within the pod to monitor
"""
while True:
remote_pod = self.read_pod(pod)
terminated = container_is_completed(remote_pod, container_name)
if terminated:
break
self.log.info("Waiting for container '%s' state to be completed", container_name)
time.sleep(1)
def await_pod_completion(self, pod: V1Pod) -> V1Pod:
"""
Monitors a pod and returns the final state.
:param pod: pod spec that will be monitored
        :return: the final ``V1Pod`` once it reaches a terminal phase
"""
while True:
remote_pod = self.read_pod(pod)
if remote_pod.status.phase in PodPhase.terminal_states:
break
self.log.info("Pod %s has phase %s", pod.metadata.name, remote_pod.status.phase)
time.sleep(2)
return remote_pod
def parse_log_line(self, line: str) -> tuple[DateTime | None, str]:
"""
        Parse a K8s log line and return the timestamp together with the message.
:param line: k8s log line
:return: timestamp and log message
"""
split_at = line.find(" ")
if split_at == -1:
self.log.error(
"Error parsing timestamp (no timestamp in message %r). "
"Will continue execution but won't update timestamp",
line,
)
return None, line
timestamp = line[:split_at]
message = line[split_at + 1 :].rstrip()
try:
last_log_time = cast(DateTime, pendulum.parse(timestamp))
except ParserError:
self.log.error("Error parsing timestamp. Will continue execution but won't update timestamp")
return None, line
return last_log_time, message
def container_is_running(self, pod: V1Pod, container_name: str) -> bool:
"""Reads pod and checks if container is running."""
remote_pod = self.read_pod(pod)
return container_is_running(pod=remote_pod, container_name=container_name)
def container_is_terminated(self, pod: V1Pod, container_name: str) -> bool:
"""Reads pod and checks if container is terminated."""
remote_pod = self.read_pod(pod)
return container_is_terminated(pod=remote_pod, container_name=container_name)
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod_logs(
self,
pod: V1Pod,
container_name: str,
tail_lines: int | None = None,
timestamps: bool = False,
since_seconds: int | None = None,
follow=True,
post_termination_timeout: int = 120,
) -> PodLogsConsumer:
"""Reads log from the POD."""
additional_kwargs = {}
if since_seconds:
additional_kwargs["since_seconds"] = since_seconds
if tail_lines:
additional_kwargs["tail_lines"] = tail_lines
try:
logs = self._client.read_namespaced_pod_log(
name=pod.metadata.name,
namespace=pod.metadata.namespace,
container=container_name,
follow=follow,
timestamps=timestamps,
_preload_content=False,
**additional_kwargs,
)
except BaseHTTPError:
self.log.exception("There was an error reading the kubernetes API.")
raise
return PodLogsConsumer(
response=logs,
pod=pod,
pod_manager=self,
container_name=container_name,
post_termination_timeout=post_termination_timeout,
)
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def get_container_names(self, pod: V1Pod) -> list[str]:
"""Return container names from the POD except for the airflow-xcom-sidecar container."""
pod_info = self.read_pod(pod)
return [
container_spec.name
for container_spec in pod_info.spec.containers
if container_spec.name != PodDefaults.SIDECAR_CONTAINER_NAME
]
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod_events(self, pod: V1Pod) -> CoreV1EventList:
"""Reads events from the POD."""
try:
return self._client.list_namespaced_event(
namespace=pod.metadata.namespace, field_selector=f"involvedObject.name={pod.metadata.name}"
)
except BaseHTTPError as e:
raise AirflowException(f"There was an error reading the kubernetes API: {e}")
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod(self, pod: V1Pod) -> V1Pod:
"""Read POD information."""
try:
return self._client.read_namespaced_pod(pod.metadata.name, pod.metadata.namespace)
except BaseHTTPError as e:
raise AirflowException(f"There was an error reading the kubernetes API: {e}")
def await_xcom_sidecar_container_start(self, pod: V1Pod) -> None:
self.log.info("Checking if xcom sidecar container is started.")
warned = False
while True:
if self.container_is_running(pod, PodDefaults.SIDECAR_CONTAINER_NAME):
self.log.info("The xcom sidecar container is started.")
break
if not warned:
self.log.warning("The xcom sidecar container is not yet started.")
warned = True
time.sleep(1)
def extract_xcom(self, pod: V1Pod) -> str:
"""Retrieves XCom value and kills xcom sidecar container."""
try:
result = self.extract_xcom_json(pod)
return result
finally:
self.extract_xcom_kill(pod)
@tenacity.retry(
stop=tenacity.stop_after_attempt(5),
wait=tenacity.wait_exponential(multiplier=1, min=4, max=10),
reraise=True,
)
def extract_xcom_json(self, pod: V1Pod) -> str:
"""Retrieves XCom value and also checks if xcom json is valid."""
with closing(
kubernetes_stream(
self._client.connect_get_namespaced_pod_exec,
pod.metadata.name,
pod.metadata.namespace,
container=PodDefaults.SIDECAR_CONTAINER_NAME,
command=["/bin/sh"],
stdin=True,
stdout=True,
stderr=True,
tty=False,
_preload_content=False,
)
) as resp:
result = self._exec_pod_command(
resp,
f"if [ -s {PodDefaults.XCOM_MOUNT_PATH}/return.json ]; then cat {PodDefaults.XCOM_MOUNT_PATH}/return.json; else echo __airflow_xcom_result_empty__; fi", # noqa
)
if result and result.rstrip() != "__airflow_xcom_result_empty__":
# Note: result string is parsed to check if its valid json.
# This function still returns a string which is converted into json in the calling method.
json.loads(result)
if result is None:
raise AirflowException(f"Failed to extract xcom from pod: {pod.metadata.name}")
return result
@tenacity.retry(
stop=tenacity.stop_after_attempt(5),
wait=tenacity.wait_exponential(multiplier=1, min=4, max=10),
reraise=True,
)
def extract_xcom_kill(self, pod: V1Pod):
"""Kills xcom sidecar container."""
with closing(
kubernetes_stream(
self._client.connect_get_namespaced_pod_exec,
pod.metadata.name,
pod.metadata.namespace,
container=PodDefaults.SIDECAR_CONTAINER_NAME,
command=["/bin/sh"],
stdin=True,
stdout=True,
stderr=True,
tty=False,
_preload_content=False,
)
) as resp:
self._exec_pod_command(resp, "kill -s SIGINT 1")
def _exec_pod_command(self, resp, command: str) -> str | None:
res = None
if resp.is_open():
self.log.info("Running command... %s\n", command)
resp.write_stdin(command + "\n")
while resp.is_open():
resp.update(timeout=1)
while resp.peek_stdout():
res = res + resp.read_stdout() if res else resp.read_stdout()
error_res = None
while resp.peek_stderr():
error_res = error_res + resp.read_stderr() if error_res else resp.read_stderr()
if error_res:
self.log.info("stderr from command: %s", error_res)
break
if res:
return res
return res
class OnFinishAction(enum.Enum):
"""Action to take when the pod finishes."""
KEEP_POD = "keep_pod"
DELETE_POD = "delete_pod"
DELETE_SUCCEEDED_POD = "delete_succeeded_pod"
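# --- Illustrative usage sketch (added for this document; not part of the upstream module) ---
# A minimal sketch of the typical PodManager life cycle as driven by KubernetesPodOperator:
# create the pod, wait for it to leave the Pending phase, stream the logs of one container and
# wait for a terminal phase. The container name "base" is an assumption for illustration only.
def _example_run_pod(kube_client: client.CoreV1Api, pod: V1Pod) -> V1Pod:
    pod_manager = PodManager(kube_client=kube_client)
    created_pod = pod_manager.create_pod(pod)
    pod_manager.await_pod_start(created_pod, startup_timeout=120)
    pod_manager.fetch_container_logs(pod=created_pod, container_name="base", follow=True)
    return pod_manager.await_pod_completion(created_pod)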
| 28,931 | 38.85124 | 176 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/utils/xcom_sidecar.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Attach a sidecar container that blocks the pod from completing until Airflow pulls result data."""
from __future__ import annotations
import copy
from kubernetes.client import models as k8s
class PodDefaults:
"""Static defaults for Pods."""
XCOM_MOUNT_PATH = "/airflow/xcom"
SIDECAR_CONTAINER_NAME = "airflow-xcom-sidecar"
XCOM_CMD = 'trap "exit 0" INT; while true; do sleep 1; done;'
VOLUME_MOUNT = k8s.V1VolumeMount(name="xcom", mount_path=XCOM_MOUNT_PATH)
VOLUME = k8s.V1Volume(name="xcom", empty_dir=k8s.V1EmptyDirVolumeSource())
SIDECAR_CONTAINER = k8s.V1Container(
name=SIDECAR_CONTAINER_NAME,
command=["sh", "-c", XCOM_CMD],
image="alpine",
volume_mounts=[VOLUME_MOUNT],
resources=k8s.V1ResourceRequirements(
requests={
"cpu": "1m",
"memory": "10Mi",
},
),
)
def add_xcom_sidecar(
pod: k8s.V1Pod,
*,
sidecar_container_image: str | None = None,
sidecar_container_resources: k8s.V1ResourceRequirements | dict | None = None,
) -> k8s.V1Pod:
"""Adds sidecar."""
pod_cp = copy.deepcopy(pod)
pod_cp.spec.volumes = pod.spec.volumes or []
pod_cp.spec.volumes.insert(0, PodDefaults.VOLUME)
pod_cp.spec.containers[0].volume_mounts = pod_cp.spec.containers[0].volume_mounts or []
pod_cp.spec.containers[0].volume_mounts.insert(0, PodDefaults.VOLUME_MOUNT)
sidecar = copy.deepcopy(PodDefaults.SIDECAR_CONTAINER)
sidecar.image = sidecar_container_image or PodDefaults.SIDECAR_CONTAINER.image
if sidecar_container_resources:
sidecar.resources = sidecar_container_resources
pod_cp.spec.containers.append(sidecar)
return pod_cp
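# --- Illustrative usage sketch (added for this document; not part of the upstream module) ---
# A minimal example of wrapping an existing pod definition with the xcom sidecar. The image tag
# and resource values below are assumptions for illustration only.
def _example_add_sidecar(pod: k8s.V1Pod) -> k8s.V1Pod:
    return add_xcom_sidecar(
        pod,
        sidecar_container_image="alpine:3.18",
        sidecar_container_resources=k8s.V1ResourceRequirements(requests={"cpu": "1m", "memory": "10Mi"}),
    )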
| 2,519 | 37.181818 | 101 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/utils/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
__all__ = ["xcom_sidecar", "pod_manager"]
| 863 | 42.2 | 62 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/utils/delete_from.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# from https://github.com/tomplus/kubernetes_asyncio/pull/239/files
from __future__ import annotations
import re
from kubernetes import client
from kubernetes.client import ApiClient
DEFAULT_DELETION_BODY = client.V1DeleteOptions(
propagation_policy="Background",
grace_period_seconds=5,
)
def delete_from_dict(k8s_client, data, body, namespace, verbose=False, **kwargs):
api_exceptions = []
if "List" in data["kind"]:
kind = data["kind"].replace("List", "")
for yml_doc in data["items"]:
if kind != "":
yml_doc["apiVersion"] = data["apiVersion"]
yml_doc["kind"] = kind
try:
_delete_from_yaml_single_item(
k8s_client=k8s_client,
yml_document=yml_doc,
verbose=verbose,
namespace=namespace,
body=body,
**kwargs,
)
except client.rest.ApiException as api_exception:
api_exceptions.append(api_exception)
else:
try:
_delete_from_yaml_single_item(
k8s_client=k8s_client,
yml_document=data,
verbose=verbose,
namespace=namespace,
body=body,
**kwargs,
)
except client.rest.ApiException as api_exception:
api_exceptions.append(api_exception)
if api_exceptions:
raise FailToDeleteError(api_exceptions)
def delete_from_yaml(
*,
k8s_client: ApiClient,
yaml_objects=None,
verbose: bool = False,
namespace: str = "default",
body: dict | None = None,
**kwargs,
):
for yml_document in yaml_objects:
if yml_document is None:
continue
else:
delete_from_dict(
k8s_client=k8s_client,
data=yml_document,
body=body,
namespace=namespace,
verbose=verbose,
**kwargs,
)
def _delete_from_yaml_single_item(
*,
k8s_client: ApiClient,
yml_document: dict,
verbose: bool = False,
namespace: str = "default",
body: dict | None = None,
**kwargs,
):
if body is None:
body = DEFAULT_DELETION_BODY
# get group and version from apiVersion
group, _, version = yml_document["apiVersion"].partition("/")
if version == "":
version = group
group = "core"
# Take care for the case e.g. api_type is "apiextensions.k8s.io"
# Only replace the last instance
group = "".join(group.rsplit(".k8s.io", 1))
# convert group name from DNS subdomain format to
# python class name convention
group = "".join(word.capitalize() for word in group.split("."))
fcn_to_call = f"{group}{version.capitalize()}Api"
k8s_api = getattr(client, fcn_to_call)(k8s_client)
# Replace CamelCased action_type into snake_case
kind = yml_document["kind"]
kind = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", kind)
kind = re.sub("([a-z0-9])([A-Z])", r"\1_\2", kind).lower()
# Decide which namespace we are going to use for deleting the object
# IMPORTANT: the docs namespace takes precedence over the namespace in args
# create_from_yaml_single_item have same behaviour
if "namespace" in yml_document["metadata"]:
namespace = yml_document["metadata"]["namespace"]
name = yml_document["metadata"]["name"]
# Expect the user to delete namespaced objects more often
resp: client.V1Status
if hasattr(k8s_api, f"delete_namespaced_{kind}"):
resp = getattr(k8s_api, f"delete_namespaced_{kind}")(
name=name, namespace=namespace, body=body, **kwargs
)
else:
resp = getattr(k8s_api, f"delete_{kind}")(name=name, body=body, **kwargs)
if verbose:
print(f"{kind} deleted. status='{str(resp.status)}'")
return resp
class FailToDeleteError(Exception):
"""For handling error if an error occurred when handling a yaml file during deletion of the resource."""
def __init__(self, api_exceptions: list):
self.api_exceptions = api_exceptions
def __str__(self):
msg = ""
for api_exception in self.api_exceptions:
msg += f"Error from server ({api_exception.reason}):{api_exception.body}\n"
return msg
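# --- Illustrative usage sketch (added for this document; not part of the upstream module) ---
# A minimal example of deleting resources previously created from a manifest file. The file path
# and namespace are assumptions; yaml.safe_load_all() yields one dict per YAML document, which is
# the shape delete_from_yaml() expects.
def _example_delete_from_file(k8s_client: ApiClient, path: str = "/tmp/resources.yaml") -> None:
    import yaml

    with open(path) as manifest:
        yaml_objects = list(yaml.safe_load_all(manifest))
    delete_from_yaml(k8s_client=k8s_client, yaml_objects=yaml_objects, namespace="default")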
| 5,176 | 32.4 | 108 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from kubernetes import client
from airflow.exceptions import AirflowException
from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
from airflow.sensors.base import BaseSensorOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class SparkKubernetesSensor(BaseSensorOperator):
"""
Checks sparkApplication object in kubernetes cluster.
.. seealso::
For more detail about Spark Application Object have a look at the reference:
https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/v1beta2-1.1.0-2.4.5/docs/api-docs.md#sparkapplication
:param application_name: spark Application resource name
:param namespace: the kubernetes namespace where the sparkApplication reside in
:param container_name: the kubernetes container name where the sparkApplication reside in
:param kubernetes_conn_id: The :ref:`kubernetes connection<howto/connection:kubernetes>`
to Kubernetes cluster.
:param attach_log: determines whether logs for driver pod should be appended to the sensor log
:param api_group: kubernetes api group of sparkApplication
:param api_version: kubernetes api version of sparkApplication
"""
template_fields: Sequence[str] = ("application_name", "namespace")
FAILURE_STATES = ("FAILED", "UNKNOWN")
SUCCESS_STATES = ("COMPLETED",)
def __init__(
self,
*,
application_name: str,
attach_log: bool = False,
namespace: str | None = None,
container_name: str = "spark-kubernetes-driver",
kubernetes_conn_id: str = "kubernetes_default",
api_group: str = "sparkoperator.k8s.io",
api_version: str = "v1beta2",
**kwargs,
) -> None:
super().__init__(**kwargs)
self.application_name = application_name
self.attach_log = attach_log
self.namespace = namespace
self.container_name = container_name
self.kubernetes_conn_id = kubernetes_conn_id
self.hook = KubernetesHook(conn_id=self.kubernetes_conn_id)
self.api_group = api_group
self.api_version = api_version
def _log_driver(self, application_state: str, response: dict) -> None:
if not self.attach_log:
return
status_info = response["status"]
if "driverInfo" not in status_info:
return
driver_info = status_info["driverInfo"]
if "podName" not in driver_info:
return
driver_pod_name = driver_info["podName"]
namespace = response["metadata"]["namespace"]
log_method = self.log.error if application_state in self.FAILURE_STATES else self.log.info
try:
log = ""
for line in self.hook.get_pod_logs(
driver_pod_name, namespace=namespace, container=self.container_name
):
log += line.decode()
log_method(log)
except client.rest.ApiException as e:
self.log.warning(
"Could not read logs for pod %s. It may have been disposed.\n"
"Make sure timeToLiveSeconds is set on your SparkApplication spec.\n"
"underlying exception: %s",
driver_pod_name,
e,
)
def poke(self, context: Context) -> bool:
self.log.info("Poking: %s", self.application_name)
response = self.hook.get_custom_object(
group=self.api_group,
version=self.api_version,
plural="sparkapplications",
name=self.application_name,
namespace=self.namespace,
)
try:
application_state = response["status"]["applicationState"]["state"]
except KeyError:
return False
if self.attach_log and application_state in self.FAILURE_STATES + self.SUCCESS_STATES:
self._log_driver(application_state, response)
if application_state in self.FAILURE_STATES:
raise AirflowException(f"Spark application failed with state: {application_state}")
elif application_state in self.SUCCESS_STATES:
self.log.info("Spark application ended successfully")
return True
else:
self.log.info("Spark application is still in state: %s", application_state)
return False
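# --- Illustrative usage sketch (added for this document; not part of the upstream module) ---
# A minimal example of how this sensor is typically declared in a DAG, waiting for a
# SparkApplication submitted earlier (for example by SparkKubernetesOperator). The task id,
# application name and namespace are assumptions for illustration only.
def _example_build_sensor() -> SparkKubernetesSensor:
    return SparkKubernetesSensor(
        task_id="watch_spark_app",
        application_name="my-spark-app",
        namespace="spark-jobs",
        kubernetes_conn_id="kubernetes_default",
        attach_log=True,
    )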
| 5,226 | 40.15748 | 127 | py |
airflow | airflow-main/airflow/providers/cncf/kubernetes/sensors/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/discord/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.3.0"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-discord:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,532 | 35.5 | 116 | py |
airflow | airflow-main/airflow/providers/discord/notifications/discord.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from functools import cached_property
from airflow.exceptions import AirflowOptionalProviderFeatureException
try:
from airflow.notifications.basenotifier import BaseNotifier
except ImportError:
raise AirflowOptionalProviderFeatureException(
"Failed to import BaseNotifier. This feature is only available in Airflow versions >= 2.6.0"
)
from airflow.providers.discord.hooks.discord_webhook import DiscordWebhookHook
ICON_URL: str = "https://raw.githubusercontent.com/apache/airflow/main/airflow/www/static/pin_100.png"
class DiscordNotifier(BaseNotifier):
"""
Discord BaseNotifier.
:param discord_conn_id: Http connection ID with host as "https://discord.com/api/" and
default webhook endpoint in the extra field in the form of
{"webhook_endpoint": "webhooks/{webhook.id}/{webhook.token}"}
:param text: The content of the message
:param username: The username to send the message as. Optional
:param avatar_url: The URL of the avatar to use for the message. Optional
:param tts: Text to speech.
"""
# A property that specifies the attributes that can be templated.
template_fields = ("discord_conn_id", "text", "username", "avatar_url", "tts")
def __init__(
self,
discord_conn_id: str = "discord_webhook_default",
text: str = "This is a default message",
username: str = "Airflow",
avatar_url: str = ICON_URL,
tts: bool = False,
):
super().__init__()
self.discord_conn_id = discord_conn_id
self.text = text
self.username = username
self.avatar_url = avatar_url
# If you're having problems with tts not being recognized in __init__(),
# you can define that after instantiating the class
self.tts = tts
@cached_property
def hook(self) -> DiscordWebhookHook:
"""Discord Webhook Hook."""
return DiscordWebhookHook(http_conn_id=self.discord_conn_id)
def notify(self, context):
"""Send a message to a Discord channel."""
self.hook.username = self.username
self.hook.message = self.text
self.hook.avatar_url = self.avatar_url
self.hook.tts = self.tts
self.hook.execute()
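# --- Illustrative usage sketch (added for this document; not part of the upstream module) ---
# A minimal example of building the notifier, e.g. for use as an on_failure_callback on a DAG or
# task (available in Airflow >= 2.6, where BaseNotifier exists). The connection id and message
# are assumptions for illustration only.
def _example_failure_notifier() -> DiscordNotifier:
    return DiscordNotifier(
        discord_conn_id="discord_webhook_default",
        text="A task has failed",
        username="Airflow",
    )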
| 3,105 | 36.421687 | 102 | py |
airflow | airflow-main/airflow/providers/discord/notifications/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/discord/operators/discord_webhook.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.exceptions import AirflowException
from airflow.providers.discord.hooks.discord_webhook import DiscordWebhookHook
from airflow.providers.http.operators.http import SimpleHttpOperator
if TYPE_CHECKING:
from airflow.utils.context import Context
class DiscordWebhookOperator(SimpleHttpOperator):
"""
This operator allows you to post messages to Discord using incoming webhooks.
Takes a Discord connection ID with a default relative webhook endpoint. The
default endpoint can be overridden using the webhook_endpoint parameter
(https://discordapp.com/developers/docs/resources/webhook).
Each Discord webhook can be pre-configured to use a specific username and
avatar_url. You can override these defaults in this operator.
:param http_conn_id: Http connection ID with host as "https://discord.com/api/" and
default webhook endpoint in the extra field in the form of
{"webhook_endpoint": "webhooks/{webhook.id}/{webhook.token}"}
:param webhook_endpoint: Discord webhook endpoint in the form of
"webhooks/{webhook.id}/{webhook.token}" (templated)
:param message: The message you want to send to your Discord channel
(max 2000 characters). (templated)
:param username: Override the default username of the webhook. (templated)
:param avatar_url: Override the default avatar of the webhook
:param tts: Is a text-to-speech message
:param proxy: Proxy to use to make the Discord webhook call
"""
template_fields: Sequence[str] = ("username", "message", "webhook_endpoint")
def __init__(
self,
*,
http_conn_id: str | None = None,
webhook_endpoint: str | None = None,
message: str = "",
username: str | None = None,
avatar_url: str | None = None,
tts: bool = False,
proxy: str | None = None,
**kwargs,
) -> None:
super().__init__(endpoint=webhook_endpoint, **kwargs)
if not http_conn_id:
raise AirflowException("No valid Discord http_conn_id supplied.")
self.http_conn_id = http_conn_id
self.webhook_endpoint = webhook_endpoint
self.message = message
self.username = username
self.avatar_url = avatar_url
self.tts = tts
self.proxy = proxy
self.hook: DiscordWebhookHook | None = None
def execute(self, context: Context) -> None:
"""Call the DiscordWebhookHook to post message."""
self.hook = DiscordWebhookHook(
self.http_conn_id,
self.webhook_endpoint,
self.message,
self.username,
self.avatar_url,
self.tts,
self.proxy,
)
self.hook.execute()
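# --- Usage sketch (illustrative only, not part of the provider) ---
# Minimal DAG wiring for the operator above. The connection id, DAG id, and
# the webhook id/token are placeholders; a real connection should point at
# "https://discord.com/api/" with a {"webhook_endpoint": ...} extra, or the
# endpoint can be passed explicitly as done here.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(dag_id="discord_webhook_demo", start_date=datetime(2023, 1, 1), schedule=None):
        DiscordWebhookOperator(
            task_id="notify_channel",
            http_conn_id="discord_webhook_default",
            # Explicit endpoint overrides the one stored in the connection extras.
            webhook_endpoint="webhooks/1234567890/abc-DEF_ghi",  # dummy id/token
            message="Hello from Airflow!",
            username="airflow-bot",
        )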
| 3,689 | 38.255319 | 87 | py |
airflow | airflow-main/airflow/providers/discord/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/discord/hooks/discord_webhook.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import re
from typing import Any
from airflow.exceptions import AirflowException
from airflow.providers.http.hooks.http import HttpHook
class DiscordWebhookHook(HttpHook):
"""
This hook allows you to post messages to Discord using incoming webhooks.
Takes a Discord connection ID with a default relative webhook endpoint. The
default endpoint can be overridden using the webhook_endpoint parameter
(https://discordapp.com/developers/docs/resources/webhook).
Each Discord webhook can be pre-configured to use a specific username and
avatar_url. You can override these defaults in this hook.
:param http_conn_id: Http connection ID with host as "https://discord.com/api/" and
default webhook endpoint in the extra field in the form of
{"webhook_endpoint": "webhooks/{webhook.id}/{webhook.token}"}
:param webhook_endpoint: Discord webhook endpoint in the form of
"webhooks/{webhook.id}/{webhook.token}"
:param message: The message you want to send to your Discord channel
(max 2000 characters)
:param username: Override the default username of the webhook
:param avatar_url: Override the default avatar of the webhook
:param tts: Is a text-to-speech message
:param proxy: Proxy to use to make the Discord webhook call
"""
conn_name_attr = "http_conn_id"
default_conn_name = "discord_default"
conn_type = "discord"
hook_name = "Discord"
def __init__(
self,
http_conn_id: str | None = None,
webhook_endpoint: str | None = None,
message: str = "",
username: str | None = None,
avatar_url: str | None = None,
tts: bool = False,
proxy: str | None = None,
*args: Any,
**kwargs: Any,
) -> None:
super().__init__(*args, **kwargs)
self.http_conn_id: Any = http_conn_id
self.webhook_endpoint = self._get_webhook_endpoint(http_conn_id, webhook_endpoint)
self.message = message
self.username = username
self.avatar_url = avatar_url
self.tts = tts
self.proxy = proxy
def _get_webhook_endpoint(self, http_conn_id: str | None, webhook_endpoint: str | None) -> str:
"""
Return the default webhook endpoint or override if a webhook_endpoint is manually supplied.
:param http_conn_id: The provided connection ID
:param webhook_endpoint: The manually provided webhook endpoint
:return: Webhook endpoint (str) to use
"""
if webhook_endpoint:
endpoint = webhook_endpoint
elif http_conn_id:
conn = self.get_connection(http_conn_id)
extra = conn.extra_dejson
endpoint = extra.get("webhook_endpoint", "")
else:
raise AirflowException(
"Cannot get webhook endpoint: No valid Discord webhook endpoint or http_conn_id supplied."
)
# make sure endpoint matches the expected Discord webhook format
if not re.match("^webhooks/[0-9]+/[a-zA-Z0-9_-]+$", endpoint):
raise AirflowException(
'Expected Discord webhook endpoint in the form of "webhooks/{webhook.id}/{webhook.token}".'
)
return endpoint
def _build_discord_payload(self) -> str:
"""
Combine all relevant parameters into a valid Discord JSON payload.
:return: Discord payload (str) to send
"""
payload: dict[str, Any] = {}
if self.username:
payload["username"] = self.username
if self.avatar_url:
payload["avatar_url"] = self.avatar_url
payload["tts"] = self.tts
if len(self.message) <= 2000:
payload["content"] = self.message
else:
raise AirflowException("Discord message length must be 2000 or fewer characters.")
return json.dumps(payload)
def execute(self) -> None:
"""Execute the Discord webhook call."""
proxies = {}
if self.proxy:
# we only need https proxy for Discord
proxies = {"https": self.proxy}
discord_payload = self._build_discord_payload()
self.run(
endpoint=self.webhook_endpoint,
data=discord_payload,
headers={"Content-type": "application/json"},
extra_options={"proxies": proxies},
)
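# --- Usage sketch (illustrative only, not part of the provider) ---
# Direct hook usage outside an operator context. The connection id and the
# webhook id/token below are placeholders.
if __name__ == "__main__":
    hook = DiscordWebhookHook(
        http_conn_id="discord_webhook_default",
        webhook_endpoint="webhooks/1234567890/abc-DEF_ghi",  # dummy id/token
        message="Deployment finished",
        username="airflow-bot",
    )
    # Builds the JSON payload and POSTs it to the configured webhook endpoint.
    hook.execute()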
| 5,304 | 36.359155 | 107 | py |
airflow | airflow-main/airflow/providers/discord/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/ssh/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.7.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-ssh:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,528 | 35.404762 | 112 | py |
airflow | airflow-main/airflow/providers/ssh/operators/ssh.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from base64 import b64encode
from typing import TYPE_CHECKING, Sequence
from airflow.configuration import conf
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.models import BaseOperator
from airflow.utils.types import NOTSET, ArgNotSet
if TYPE_CHECKING:
from paramiko.client import SSHClient
from airflow.providers.ssh.hooks.ssh import SSHHook
class SSHOperator(BaseOperator):
"""
    SSHOperator to execute commands on a given remote host using the ssh_hook.
:param ssh_hook: predefined ssh_hook to use for remote execution.
Either `ssh_hook` or `ssh_conn_id` needs to be provided.
:param ssh_conn_id: :ref:`ssh connection id<howto/connection:ssh>`
from airflow Connections. `ssh_conn_id` will be ignored if
`ssh_hook` is provided.
:param remote_host: remote host to connect (templated)
Nullable. If provided, it will replace the `remote_host` which was
defined in `ssh_hook` or predefined in the connection of `ssh_conn_id`.
:param command: command to execute on remote host. (templated)
:param conn_timeout: timeout (in seconds) for maintaining the connection. The default is 10 seconds.
Nullable. If provided, it will replace the `conn_timeout` which was
predefined in the connection of `ssh_conn_id`.
:param cmd_timeout: timeout (in seconds) for executing the command. The default is 10 seconds.
Nullable, `None` means no timeout. If provided, it will replace the `cmd_timeout`
which was predefined in the connection of `ssh_conn_id`.
:param environment: a dict of shell environment variables. Note that the
server will reject them silently if `AcceptEnv` is not set in SSH config. (templated)
:param get_pty: request a pseudo-terminal from the server. Set to ``True``
to have the remote process killed upon task timeout.
The default is ``False`` but note that `get_pty` is forced to ``True``
when the `command` starts with ``sudo``.
    :param banner_timeout: timeout to wait for banner from the server in seconds

    If *do_xcom_push* is *True*, the numeric exit code emitted by
    the ssh session is pushed to XCom under key ``ssh_exit``.
"""
template_fields: Sequence[str] = ("command", "environment", "remote_host")
template_ext: Sequence[str] = (
".sh",
".bash",
".csh",
".zsh",
".dash",
".ksh",
)
template_fields_renderers = {
"command": "bash",
"environment": "python",
}
def __init__(
self,
*,
ssh_hook: SSHHook | None = None,
ssh_conn_id: str | None = None,
remote_host: str | None = None,
command: str | None = None,
conn_timeout: int | None = None,
cmd_timeout: int | ArgNotSet | None = NOTSET,
environment: dict | None = None,
get_pty: bool = False,
banner_timeout: float = 30.0,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.ssh_hook = ssh_hook
self.ssh_conn_id = ssh_conn_id
self.remote_host = remote_host
self.command = command
self.conn_timeout = conn_timeout
self.cmd_timeout = cmd_timeout
self.environment = environment
self.get_pty = get_pty
self.banner_timeout = banner_timeout
def get_hook(self) -> SSHHook:
from airflow.providers.ssh.hooks.ssh import SSHHook
if self.ssh_conn_id:
if self.ssh_hook and isinstance(self.ssh_hook, SSHHook):
self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
else:
self.log.info("ssh_hook is not provided or invalid. Trying ssh_conn_id to create SSHHook.")
self.ssh_hook = SSHHook(
ssh_conn_id=self.ssh_conn_id,
conn_timeout=self.conn_timeout,
cmd_timeout=self.cmd_timeout,
banner_timeout=self.banner_timeout,
)
if not self.ssh_hook:
raise AirflowException("Cannot operate without ssh_hook or ssh_conn_id.")
if self.remote_host is not None:
self.log.info(
"remote_host is provided explicitly. "
"It will replace the remote_host which was defined "
"in ssh_hook or predefined in connection of ssh_conn_id."
)
self.ssh_hook.remote_host = self.remote_host
return self.ssh_hook
def get_ssh_client(self) -> SSHClient:
# Remember to use context manager or call .close() on this when done
self.log.info("Creating ssh_client")
return self.get_hook().get_conn()
def exec_ssh_client_command(self, ssh_client: SSHClient, command: str):
warnings.warn(
"exec_ssh_client_command method on SSHOperator is deprecated, call "
"`ssh_hook.exec_ssh_client_command` instead",
AirflowProviderDeprecationWarning,
)
assert self.ssh_hook
return self.ssh_hook.exec_ssh_client_command(
ssh_client, command, timeout=self.cmd_timeout, environment=self.environment, get_pty=self.get_pty
)
def raise_for_status(self, exit_status: int, stderr: bytes, context=None) -> None:
if context and self.do_xcom_push:
ti = context.get("task_instance")
ti.xcom_push(key="ssh_exit", value=exit_status)
if exit_status != 0:
raise AirflowException(f"SSH operator error: exit status = {exit_status}")
def run_ssh_client_command(self, ssh_client: SSHClient, command: str, context=None) -> bytes:
assert self.ssh_hook
exit_status, agg_stdout, agg_stderr = self.ssh_hook.exec_ssh_client_command(
ssh_client, command, timeout=self.cmd_timeout, environment=self.environment, get_pty=self.get_pty
)
self.raise_for_status(exit_status, agg_stderr, context=context)
return agg_stdout
def execute(self, context=None) -> bytes | str:
result: bytes | str
if self.command is None:
raise AirflowException("SSH operator error: SSH command not specified. Aborting.")
# Forcing get_pty to True if the command begins with "sudo".
self.get_pty = self.command.startswith("sudo") or self.get_pty
with self.get_ssh_client() as ssh_client:
result = self.run_ssh_client_command(ssh_client, self.command, context=context)
enable_pickling = conf.getboolean("core", "enable_xcom_pickling")
if not enable_pickling:
result = b64encode(result).decode("utf-8")
return result
def tunnel(self) -> None:
"""Get ssh tunnel."""
ssh_client = self.ssh_hook.get_conn() # type: ignore[union-attr]
ssh_client.get_transport()
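# --- Usage sketch (illustrative only, not part of the provider) ---
# Minimal DAG wiring for SSHOperator. The connection id is the hook's default;
# the command and timeout values are arbitrary examples.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(dag_id="ssh_operator_demo", start_date=datetime(2023, 1, 1), schedule=None):
        SSHOperator(
            task_id="print_uptime",
            ssh_conn_id="ssh_default",
            command="uptime",
            cmd_timeout=60,  # give up on the command after 60 seconds
            get_pty=False,
        )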
| 7,718 | 41.180328 | 109 | py |
airflow | airflow-main/airflow/providers/ssh/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/ssh/hooks/ssh.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hook for SSH connections."""
from __future__ import annotations
import os
import warnings
from base64 import decodebytes
from functools import cached_property
from io import StringIO
from select import select
from typing import Any, Sequence
import paramiko
from paramiko.config import SSH_PORT
from sshtunnel import SSHTunnelForwarder
from tenacity import Retrying, stop_after_attempt, wait_fixed, wait_random
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
from airflow.hooks.base import BaseHook
from airflow.utils.platform import getuser
from airflow.utils.types import NOTSET, ArgNotSet
TIMEOUT_DEFAULT = 10
CMD_TIMEOUT = 10
class SSHHook(BaseHook):
"""Execute remote commands with Paramiko.
.. seealso:: https://github.com/paramiko/paramiko
    This hook also lets you create an SSH tunnel and serves as a basis for SFTP file transfer.
:param ssh_conn_id: :ref:`ssh connection id<howto/connection:ssh>` from airflow
Connections from where all the required parameters can be fetched like
username, password or key_file, though priority is given to the
params passed during init.
:param remote_host: remote host to connect
:param username: username to connect to the remote_host
:param password: password of the username to connect to the remote_host
:param key_file: path to key file to use to connect to the remote_host
:param port: port of remote host to connect (Default is paramiko SSH_PORT)
:param conn_timeout: timeout (in seconds) for the attempt to connect to the remote_host.
The default is 10 seconds. If provided, it will replace the `conn_timeout` which was
predefined in the connection of `ssh_conn_id`.
:param timeout: (Deprecated). timeout for the attempt to connect to the remote_host.
Use conn_timeout instead.
:param cmd_timeout: timeout (in seconds) for executing the command. The default is 10 seconds.
Nullable, `None` means no timeout. If provided, it will replace the `cmd_timeout`
which was predefined in the connection of `ssh_conn_id`.
:param keepalive_interval: send a keepalive packet to remote host every
keepalive_interval seconds
:param banner_timeout: timeout to wait for banner from the server in seconds
:param disabled_algorithms: dictionary mapping algorithm type to an
iterable of algorithm identifiers, which will be disabled for the
lifetime of the transport
:param ciphers: list of ciphers to use in order of preference
"""
# List of classes to try loading private keys as, ordered (roughly) by most common to least common
_pkey_loaders: Sequence[type[paramiko.PKey]] = (
paramiko.RSAKey,
paramiko.ECDSAKey,
paramiko.Ed25519Key,
paramiko.DSSKey,
)
_host_key_mappings = {
"rsa": paramiko.RSAKey,
"dss": paramiko.DSSKey,
"ecdsa": paramiko.ECDSAKey,
"ed25519": paramiko.Ed25519Key,
}
conn_name_attr = "ssh_conn_id"
default_conn_name = "ssh_default"
conn_type = "ssh"
hook_name = "SSH"
@staticmethod
def get_ui_field_behaviour() -> dict[str, Any]:
"""Returns custom field behaviour."""
return {
"hidden_fields": ["schema"],
"relabeling": {
"login": "Username",
},
}
def __init__(
self,
ssh_conn_id: str | None = None,
remote_host: str = "",
username: str | None = None,
password: str | None = None,
key_file: str | None = None,
port: int | None = None,
timeout: int | None = None,
conn_timeout: int | None = None,
cmd_timeout: int | ArgNotSet | None = NOTSET,
keepalive_interval: int = 30,
banner_timeout: float = 30.0,
disabled_algorithms: dict | None = None,
ciphers: list[str] | None = None,
) -> None:
super().__init__()
self.ssh_conn_id = ssh_conn_id
self.remote_host = remote_host
self.username = username
self.password = password
self.key_file = key_file
self.pkey = None
self.port = port
self.timeout = timeout
self.conn_timeout = conn_timeout
self.cmd_timeout = cmd_timeout
self.keepalive_interval = keepalive_interval
self.banner_timeout = banner_timeout
self.disabled_algorithms = disabled_algorithms
self.ciphers = ciphers
self.host_proxy_cmd = None
# Default values, overridable from Connection
self.compress = True
self.no_host_key_check = True
self.allow_host_key_change = False
self.host_key = None
self.look_for_keys = True
# Placeholder for deprecated __enter__
self.client: paramiko.SSHClient | None = None
# Use connection to override defaults
if self.ssh_conn_id is not None:
conn = self.get_connection(self.ssh_conn_id)
if self.username is None:
self.username = conn.login
if self.password is None:
self.password = conn.password
if not self.remote_host:
self.remote_host = conn.host
if self.port is None:
self.port = conn.port
if conn.extra is not None:
extra_options = conn.extra_dejson
if "key_file" in extra_options and self.key_file is None:
self.key_file = extra_options.get("key_file")
private_key = extra_options.get("private_key")
private_key_passphrase = extra_options.get("private_key_passphrase")
if private_key:
self.pkey = self._pkey_from_private_key(private_key, passphrase=private_key_passphrase)
if "timeout" in extra_options:
warnings.warn(
"Extra option `timeout` is deprecated."
"Please use `conn_timeout` instead."
"The old option `timeout` will be removed in a future version.",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
self.timeout = int(extra_options["timeout"])
if "conn_timeout" in extra_options and self.conn_timeout is None:
self.conn_timeout = int(extra_options["conn_timeout"])
if "cmd_timeout" in extra_options and self.cmd_timeout is NOTSET:
if extra_options["cmd_timeout"]:
self.cmd_timeout = int(extra_options["cmd_timeout"])
else:
self.cmd_timeout = None
if "compress" in extra_options and str(extra_options["compress"]).lower() == "false":
self.compress = False
host_key = extra_options.get("host_key")
no_host_key_check = extra_options.get("no_host_key_check")
if no_host_key_check is not None:
no_host_key_check = str(no_host_key_check).lower() == "true"
if host_key is not None and no_host_key_check:
raise ValueError("Must check host key when provided")
self.no_host_key_check = no_host_key_check
if (
"allow_host_key_change" in extra_options
and str(extra_options["allow_host_key_change"]).lower() == "true"
):
self.allow_host_key_change = True
if (
"look_for_keys" in extra_options
and str(extra_options["look_for_keys"]).lower() == "false"
):
self.look_for_keys = False
if "disabled_algorithms" in extra_options:
self.disabled_algorithms = extra_options.get("disabled_algorithms")
if "ciphers" in extra_options:
self.ciphers = extra_options.get("ciphers")
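                # host_key may be given as "<type> <base64-key>" (only types prefixed
                # with "ssh-" are detected here) or as a bare base64 key, which is
                # then assumed to be an RSA key.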
if host_key is not None:
if host_key.startswith("ssh-"):
key_type, host_key = host_key.split(None)[:2]
key_constructor = self._host_key_mappings[key_type[4:]]
else:
key_constructor = paramiko.RSAKey
decoded_host_key = decodebytes(host_key.encode("utf-8"))
self.host_key = key_constructor(data=decoded_host_key)
self.no_host_key_check = False
if self.timeout:
warnings.warn(
"Parameter `timeout` is deprecated."
"Please use `conn_timeout` instead."
"The old option `timeout` will be removed in a future version.",
AirflowProviderDeprecationWarning,
stacklevel=1,
)
if self.conn_timeout is None:
self.conn_timeout = self.timeout if self.timeout else TIMEOUT_DEFAULT
if self.cmd_timeout is NOTSET:
self.cmd_timeout = CMD_TIMEOUT
if self.pkey and self.key_file:
raise AirflowException(
"Params key_file and private_key both provided. Must provide no more than one."
)
if not self.remote_host:
raise AirflowException("Missing required param: remote_host")
# Auto detecting username values from system
if not self.username:
self.log.debug(
"username to ssh to host: %s is not specified for connection id"
" %s. Using system's default provided by getpass.getuser()",
self.remote_host,
self.ssh_conn_id,
)
self.username = getuser()
user_ssh_config_filename = os.path.expanduser("~/.ssh/config")
if os.path.isfile(user_ssh_config_filename):
ssh_conf = paramiko.SSHConfig()
with open(user_ssh_config_filename) as config_fd:
ssh_conf.parse(config_fd)
host_info = ssh_conf.lookup(self.remote_host)
if host_info and host_info.get("proxycommand"):
self.host_proxy_cmd = host_info["proxycommand"]
if not (self.password or self.key_file):
if host_info and host_info.get("identityfile"):
self.key_file = host_info["identityfile"][0]
self.port = self.port or SSH_PORT
@cached_property
def host_proxy(self) -> paramiko.ProxyCommand | None:
cmd = self.host_proxy_cmd
return paramiko.ProxyCommand(cmd) if cmd else None
def get_conn(self) -> paramiko.SSHClient:
"""Opens an SSH connection to the remote host."""
self.log.debug("Creating SSH client for conn_id: %s", self.ssh_conn_id)
client = paramiko.SSHClient()
if self.allow_host_key_change:
self.log.warning(
"Remote Identification Change is not verified. "
"This won't protect against Man-In-The-Middle attacks"
)
# to avoid BadHostKeyException, skip loading host keys
client.set_missing_host_key_policy(paramiko.MissingHostKeyPolicy)
else:
client.load_system_host_keys()
if self.no_host_key_check:
self.log.warning("No Host Key Verification. This won't protect against Man-In-The-Middle attacks")
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# to avoid BadHostKeyException, skip loading and saving host keys
known_hosts = os.path.expanduser("~/.ssh/known_hosts")
if not self.allow_host_key_change and os.path.isfile(known_hosts):
client.load_host_keys(known_hosts)
elif self.host_key is not None:
# Get host key from connection extra if it not set or None then we fallback to system host keys
client_host_keys = client.get_host_keys()
if self.port == SSH_PORT:
client_host_keys.add(self.remote_host, self.host_key.get_name(), self.host_key)
else:
client_host_keys.add(
f"[{self.remote_host}]:{self.port}", self.host_key.get_name(), self.host_key
)
connect_kwargs: dict[str, Any] = dict(
hostname=self.remote_host,
username=self.username,
timeout=self.conn_timeout,
compress=self.compress,
port=self.port,
sock=self.host_proxy,
look_for_keys=self.look_for_keys,
banner_timeout=self.banner_timeout,
)
if self.password:
password = self.password.strip()
connect_kwargs.update(password=password)
if self.pkey:
connect_kwargs.update(pkey=self.pkey)
if self.key_file:
connect_kwargs.update(key_filename=self.key_file)
if self.disabled_algorithms:
connect_kwargs.update(disabled_algorithms=self.disabled_algorithms)
log_before_sleep = lambda retry_state: self.log.info(
"Failed to connect. Sleeping before retry attempt %d", retry_state.attempt_number
)
for attempt in Retrying(
reraise=True,
wait=wait_fixed(3) + wait_random(0, 2),
stop=stop_after_attempt(3),
before_sleep=log_before_sleep,
):
with attempt:
client.connect(**connect_kwargs)
if self.keepalive_interval:
# MyPy check ignored because "paramiko" isn't well-typed. The `client.get_transport()` returns
# type "Transport | None" and item "None" has no attribute "set_keepalive".
client.get_transport().set_keepalive(self.keepalive_interval) # type: ignore[union-attr]
if self.ciphers:
# MyPy check ignored because "paramiko" isn't well-typed. The `client.get_transport()` returns
# type "Transport | None" and item "None" has no method `get_security_options`".
client.get_transport().get_security_options().ciphers = self.ciphers # type: ignore[union-attr]
self.client = client
return client
def __enter__(self) -> SSHHook:
warnings.warn(
"The contextmanager of SSHHook is deprecated."
"Please use get_conn() as a contextmanager instead."
"This method will be removed in Airflow 2.0",
category=AirflowProviderDeprecationWarning,
)
return self
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
if self.client is not None:
self.client.close()
self.client = None
def get_tunnel(
self, remote_port: int, remote_host: str = "localhost", local_port: int | None = None
) -> SSHTunnelForwarder:
"""Create a tunnel between two hosts.
This is conceptually similar to ``ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>``.
:param remote_port: The remote port to create a tunnel to
:param remote_host: The remote host to create a tunnel to (default localhost)
:param local_port: The local port to attach the tunnel to
:return: sshtunnel.SSHTunnelForwarder object
"""
if local_port:
local_bind_address: tuple[str, int] | tuple[str] = ("localhost", local_port)
else:
local_bind_address = ("localhost",)
tunnel_kwargs = dict(
ssh_port=self.port,
ssh_username=self.username,
ssh_pkey=self.key_file or self.pkey,
ssh_proxy=self.host_proxy,
local_bind_address=local_bind_address,
remote_bind_address=(remote_host, remote_port),
logger=self.log,
)
if self.password:
password = self.password.strip()
tunnel_kwargs.update(
ssh_password=password,
)
else:
tunnel_kwargs.update(
host_pkey_directories=None,
)
client = SSHTunnelForwarder(self.remote_host, **tunnel_kwargs)
return client
def create_tunnel(
self, local_port: int, remote_port: int, remote_host: str = "localhost"
) -> SSHTunnelForwarder:
"""Create a tunnel for SSH connection [Deprecated].
:param local_port: local port number
:param remote_port: remote port number
:param remote_host: remote host
"""
warnings.warn(
"SSHHook.create_tunnel is deprecated, Please"
"use get_tunnel() instead. But please note that the"
"order of the parameters have changed"
"This method will be removed in Airflow 2.0",
category=AirflowProviderDeprecationWarning,
)
return self.get_tunnel(remote_port, remote_host, local_port)
def _pkey_from_private_key(self, private_key: str, passphrase: str | None = None) -> paramiko.PKey:
"""Create an appropriate Paramiko key for a given private key.
:param private_key: string containing private key
:return: ``paramiko.PKey`` appropriate for given key
:raises AirflowException: if key cannot be read
"""
if len(private_key.split("\n", 2)) < 2:
raise AirflowException("Key must have BEGIN and END header/footer on separate lines.")
for pkey_class in self._pkey_loaders:
try:
key = pkey_class.from_private_key(StringIO(private_key), password=passphrase)
# Test it actually works. If Paramiko loads an openssh generated key, sometimes it will
# happily load it as the wrong type, only to fail when actually used.
key.sign_ssh_data(b"")
return key
except (paramiko.ssh_exception.SSHException, ValueError):
continue
raise AirflowException(
"Private key provided cannot be read by paramiko."
"Ensure key provided is valid for one of the following"
"key formats: RSA, DSS, ECDSA, or Ed25519"
)
def exec_ssh_client_command(
self,
ssh_client: paramiko.SSHClient,
command: str,
get_pty: bool,
environment: dict | None,
timeout: int | ArgNotSet | None = NOTSET,
) -> tuple[int, bytes, bytes]:
self.log.info("Running command: %s", command)
cmd_timeout: int | None
if not isinstance(timeout, ArgNotSet):
cmd_timeout = timeout
elif not isinstance(self.cmd_timeout, ArgNotSet):
cmd_timeout = self.cmd_timeout
else:
cmd_timeout = CMD_TIMEOUT
del timeout # Too easy to confuse with "timedout" below.
# set timeout taken as params
stdin, stdout, stderr = ssh_client.exec_command(
command=command,
get_pty=get_pty,
timeout=cmd_timeout,
environment=environment,
)
# get channels
channel = stdout.channel
# closing stdin
stdin.close()
channel.shutdown_write()
agg_stdout = b""
agg_stderr = b""
# capture any initial output in case channel is closed already
stdout_buffer_length = len(stdout.channel.in_buffer)
if stdout_buffer_length > 0:
agg_stdout += stdout.channel.recv(stdout_buffer_length)
timedout = False
# read from both stdout and stderr
while not channel.closed or channel.recv_ready() or channel.recv_stderr_ready():
readq, _, _ = select([channel], [], [], cmd_timeout)
if cmd_timeout is not None:
timedout = len(readq) == 0
for recv in readq:
if recv.recv_ready():
output = stdout.channel.recv(len(recv.in_buffer))
agg_stdout += output
for line in output.decode("utf-8", "replace").strip("\n").splitlines():
self.log.info(line)
if recv.recv_stderr_ready():
output = stderr.channel.recv_stderr(len(recv.in_stderr_buffer))
agg_stderr += output
for line in output.decode("utf-8", "replace").strip("\n").splitlines():
self.log.warning(line)
if (
stdout.channel.exit_status_ready()
and not stderr.channel.recv_stderr_ready()
and not stdout.channel.recv_ready()
) or timedout:
stdout.channel.shutdown_read()
try:
stdout.channel.close()
except Exception:
# there is a race that when shutdown_read has been called and when
# you try to close the connection, the socket is already closed
# We should ignore such errors (but we should log them with warning)
self.log.warning("Ignoring exception on close", exc_info=True)
break
stdout.close()
stderr.close()
if timedout:
raise AirflowException("SSH command timed out")
exit_status = stdout.channel.recv_exit_status()
return exit_status, agg_stdout, agg_stderr
def test_connection(self) -> tuple[bool, str]:
"""Test the ssh connection by execute remote bash commands."""
try:
with self.get_conn() as conn:
conn.exec_command("pwd")
return True, "Connection successfully tested"
except Exception as e:
return False, str(e)
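# --- Usage sketch (illustrative only, not part of the provider) ---
# Direct hook usage: run a remote command and open a local port forward.
# The connection id, command, hosts and ports are placeholders.
if __name__ == "__main__":
    hook = SSHHook(ssh_conn_id="ssh_default", cmd_timeout=30)
    # Run a single command and inspect exit status and captured output.
    with hook.get_conn() as ssh_client:
        exit_status, stdout, stderr = hook.exec_ssh_client_command(
            ssh_client, "uname -a", get_pty=False, environment=None
        )
        print(exit_status, stdout.decode())
    # Forward local port 13306 to port 3306 on a host reachable from the SSH server,
    # i.e. roughly `ssh -L 13306:localhost:3306 <remote_host>`.
    with hook.get_tunnel(remote_port=3306, remote_host="localhost", local_port=13306) as tunnel:
        print("tunnel listening on local port", tunnel.local_bind_port)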
| 22,522 | 39.219643 | 110 | py |
airflow | airflow-main/airflow/providers/ssh/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/mysql/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "5.1.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-mysql:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,530 | 35.452381 | 114 | py |
airflow | airflow-main/airflow/providers/mysql/transfers/s3_to_mysql.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.mysql.hooks.mysql import MySqlHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class S3ToMySqlOperator(BaseOperator):
"""
Loads a file from S3 into a MySQL table.
:param s3_source_key: The path to the file (S3 key) that will be loaded into MySQL.
:param mysql_table: The MySQL table into where the data will be sent.
:param mysql_duplicate_key_handling: Specify what should happen to duplicate data.
You can choose either `IGNORE` or `REPLACE`.
.. seealso::
https://dev.mysql.com/doc/refman/8.0/en/load-data.html#load-data-duplicate-key-handling
:param mysql_extra_options: MySQL options to specify exactly how to load the data.
:param aws_conn_id: The S3 connection that contains the credentials to the S3 Bucket.
:param mysql_conn_id: Reference to :ref:`mysql connection id <howto/connection:mysql>`.
"""
template_fields: Sequence[str] = (
"s3_source_key",
"mysql_table",
)
template_ext: Sequence[str] = ()
ui_color = "#f4a460"
def __init__(
self,
*,
s3_source_key: str,
mysql_table: str,
mysql_duplicate_key_handling: str = "IGNORE",
mysql_extra_options: str | None = None,
aws_conn_id: str = "aws_default",
mysql_conn_id: str = "mysql_default",
**kwargs,
) -> None:
super().__init__(**kwargs)
self.s3_source_key = s3_source_key
self.mysql_table = mysql_table
self.mysql_duplicate_key_handling = mysql_duplicate_key_handling
self.mysql_extra_options = mysql_extra_options or ""
self.aws_conn_id = aws_conn_id
self.mysql_conn_id = mysql_conn_id
def execute(self, context: Context) -> None:
"""
Executes the transfer operation from S3 to MySQL.
:param context: The context that is being provided when executing.
"""
self.log.info("Loading %s to MySql table %s...", self.s3_source_key, self.mysql_table)
s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
file = s3_hook.download_file(key=self.s3_source_key)
try:
mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id)
mysql.bulk_load_custom(
table=self.mysql_table,
tmp_file=file,
duplicate_key_handling=self.mysql_duplicate_key_handling,
extra_options=self.mysql_extra_options,
)
finally:
# Remove file downloaded from s3 to be idempotent.
os.remove(file)
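# --- Usage sketch (illustrative only, not part of the provider) ---
# Minimal DAG wiring. The S3 key, table name, LOAD DATA options, and the
# connection ids are placeholders; whether a full s3:// URL is accepted for
# s3_source_key depends on the S3Hook.download_file behaviour in your
# provider version.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(dag_id="s3_to_mysql_demo", start_date=datetime(2023, 1, 1), schedule=None):
        S3ToMySqlOperator(
            task_id="load_orders",
            s3_source_key="s3://my-bucket/exports/orders.tsv",
            mysql_table="staging.orders",
            mysql_duplicate_key_handling="REPLACE",
            # Appended to the LOAD DATA statement built by bulk_load_custom.
            mysql_extra_options="FIELDS TERMINATED BY '\\t' IGNORE 1 LINES",
            aws_conn_id="aws_default",
            mysql_conn_id="mysql_default",
        )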
| 3,558 | 36.861702 | 99 | py |
airflow | airflow-main/airflow/providers/mysql/transfers/trino_to_mysql.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.mysql.hooks.mysql import MySqlHook
from airflow.providers.trino.hooks.trino import TrinoHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class TrinoToMySqlOperator(BaseOperator):
"""
Moves data from Trino to MySQL.
Note that for now the data is loaded into memory before being pushed
to MySQL, so this operator should be used for smallish amount of data.
:param sql: SQL query to execute against Trino. (templated)
:param mysql_table: target MySQL table, use dot notation to target a
specific database. (templated)
:param mysql_conn_id: Reference to :ref:`mysql connection id <howto/connection:mysql>`.
:param trino_conn_id: source trino connection
:param mysql_preoperator: sql statement to run against mysql prior to
        import, typically used to truncate or delete in place
of the data coming in, allowing the task to be idempotent (running
the task twice won't double load data). (templated)
"""
template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator")
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {
"sql": "sql",
"mysql_preoperator": "mysql",
}
ui_color = "#a0e08c"
def __init__(
self,
*,
sql: str,
mysql_table: str,
trino_conn_id: str = "trino_default",
mysql_conn_id: str = "mysql_default",
mysql_preoperator: str | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.sql = sql
self.mysql_table = mysql_table
self.mysql_conn_id = mysql_conn_id
self.mysql_preoperator = mysql_preoperator
self.trino_conn_id = trino_conn_id
def execute(self, context: Context) -> None:
trino = TrinoHook(trino_conn_id=self.trino_conn_id)
self.log.info("Extracting data from Trino: %s", self.sql)
results = trino.get_records(self.sql)
mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id)
if self.mysql_preoperator:
self.log.info("Running MySQL preoperator")
self.log.info(self.mysql_preoperator)
mysql.run(self.mysql_preoperator)
self.log.info("Inserting rows into MySQL")
mysql.insert_rows(table=self.mysql_table, rows=results)
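# --- Usage sketch (illustrative only, not part of the provider) ---
# Minimal DAG wiring. The query, table names, and connection ids are placeholders.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(dag_id="trino_to_mysql_demo", start_date=datetime(2023, 1, 1), schedule=None):
        TrinoToMySqlOperator(
            task_id="copy_daily_totals",
            sql="SELECT day, total FROM analytics.daily_totals",
            mysql_table="reporting.daily_totals",
            # Truncate first so reruns stay idempotent, as the docstring suggests.
            mysql_preoperator="TRUNCATE TABLE reporting.daily_totals",
            trino_conn_id="trino_default",
            mysql_conn_id="mysql_default",
        )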
| 3,251 | 36.813953 | 91 | py |
airflow | airflow-main/airflow/providers/mysql/transfers/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 785 | 45.235294 | 62 | py |
airflow | airflow-main/airflow/providers/mysql/transfers/vertica_to_mysql.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import csv
from contextlib import closing
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Sequence
import MySQLdb
from airflow.models import BaseOperator
from airflow.providers.mysql.hooks.mysql import MySqlHook
from airflow.providers.vertica.hooks.vertica import VerticaHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class VerticaToMySqlOperator(BaseOperator):
"""
Moves data from Vertica to MySQL.
:param sql: SQL query to execute against the Vertica database. (templated)
:param vertica_conn_id: source Vertica connection
:param mysql_table: target MySQL table, use dot notation to target a
specific database. (templated)
:param mysql_conn_id: Reference to :ref:`mysql connection id <howto/connection:mysql>`.
:param mysql_preoperator: sql statement to run against MySQL prior to
        import, typically used to truncate or delete in place of the data
coming in, allowing the task to be idempotent (running the task
twice won't double load data). (templated)
:param mysql_postoperator: sql statement to run against MySQL after the
import, typically used to move data from staging to production
and issue cleanup commands. (templated)
:param bulk_load: flag to use bulk_load option. This loads MySQL directly
from a tab-delimited text file using the LOAD DATA LOCAL INFILE command. The MySQL
server must support loading local files via this command (it is disabled by default).
"""
template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator", "mysql_postoperator")
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {
"sql": "sql",
"mysql_preoperator": "mysql",
"mysql_postoperator": "mysql",
}
ui_color = "#a0e08c"
def __init__(
self,
sql: str,
mysql_table: str,
vertica_conn_id: str = "vertica_default",
mysql_conn_id: str = "mysql_default",
mysql_preoperator: str | None = None,
mysql_postoperator: str | None = None,
bulk_load: bool = False,
*args,
**kwargs,
) -> None:
super().__init__(*args, **kwargs)
self.sql = sql
self.mysql_table = mysql_table
self.mysql_conn_id = mysql_conn_id
self.mysql_preoperator = mysql_preoperator
self.mysql_postoperator = mysql_postoperator
self.vertica_conn_id = vertica_conn_id
self.bulk_load = bulk_load
def execute(self, context: Context):
vertica = VerticaHook(vertica_conn_id=self.vertica_conn_id)
mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id, local_infile=self.bulk_load)
if self.bulk_load:
self._bulk_load_transfer(mysql, vertica)
else:
self._non_bulk_load_transfer(mysql, vertica)
if self.mysql_postoperator:
self.log.info("Running MySQL postoperator...")
mysql.run(self.mysql_postoperator)
self.log.info("Done")
def _non_bulk_load_transfer(self, mysql, vertica):
with closing(vertica.get_conn()) as conn:
with closing(conn.cursor()) as cursor:
cursor.execute(self.sql)
selected_columns = [d.name for d in cursor.description]
self.log.info("Selecting rows from Vertica...")
self.log.info(self.sql)
result = cursor.fetchall()
count = len(result)
self.log.info("Selected rows from Vertica %s", count)
self._run_preoperator(mysql)
try:
self.log.info("Inserting rows into MySQL...")
mysql.insert_rows(table=self.mysql_table, rows=result, target_fields=selected_columns)
self.log.info("Inserted rows into MySQL %s", count)
except (MySQLdb.Error, MySQLdb.Warning):
self.log.info("Inserted rows into MySQL 0")
raise
def _bulk_load_transfer(self, mysql, vertica):
count = 0
with closing(vertica.get_conn()) as conn:
with closing(conn.cursor()) as cursor:
cursor.execute(self.sql)
selected_columns = [d.name for d in cursor.description]
with NamedTemporaryFile("w", encoding="utf-8") as tmpfile:
self.log.info("Selecting rows from Vertica to local file %s...", tmpfile.name)
self.log.info(self.sql)
csv_writer = csv.writer(tmpfile, delimiter="\t")
for row in cursor.iterate():
csv_writer.writerow(row)
count += 1
tmpfile.flush()
self._run_preoperator(mysql)
try:
self.log.info("Bulk inserting rows into MySQL...")
with closing(mysql.get_conn()) as conn:
with closing(conn.cursor()) as cursor:
cursor.execute(
f"LOAD DATA LOCAL INFILE '{tmpfile.name}' "
f"INTO TABLE {self.mysql_table} "
f"LINES TERMINATED BY '\r\n' ({', '.join(selected_columns)})"
)
conn.commit()
tmpfile.close()
self.log.info("Inserted rows into MySQL %s", count)
except (MySQLdb.Error, MySQLdb.Warning):
self.log.info("Inserted rows into MySQL 0")
raise
def _run_preoperator(self, mysql):
if self.mysql_preoperator:
self.log.info("Running MySQL preoperator...")
mysql.run(self.mysql_preoperator)
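# --- Usage sketch (illustrative only, not part of the provider) ---
# Minimal DAG wiring for the bulk-load path. Connection ids, query, and table
# names are placeholders; bulk_load=True requires LOAD DATA LOCAL INFILE to be
# enabled on the MySQL server, as noted in the class docstring.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(dag_id="vertica_to_mysql_demo", start_date=datetime(2023, 1, 1), schedule=None):
        VerticaToMySqlOperator(
            task_id="copy_events",
            sql="SELECT id, event_name, event_ts FROM public.events",
            mysql_table="staging.events",
            mysql_preoperator="TRUNCATE TABLE staging.events",
            mysql_postoperator="INSERT INTO prod.events SELECT * FROM staging.events",
            bulk_load=True,
            vertica_conn_id="vertica_default",
            mysql_conn_id="mysql_default",
        )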
| 6,455 | 39.603774 | 102 | py |
airflow | airflow-main/airflow/providers/mysql/transfers/presto_to_mysql.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Sequence
from airflow.models import BaseOperator
from airflow.providers.mysql.hooks.mysql import MySqlHook
from airflow.providers.presto.hooks.presto import PrestoHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class PrestoToMySqlOperator(BaseOperator):
"""
Moves data from Presto to MySQL.
Note that for now the data is loaded into memory before being pushed
to MySQL, so this operator should be used for smallish amount of data.
:param sql: SQL query to execute against Presto. (templated)
:param mysql_table: target MySQL table, use dot notation to target a
specific database. (templated)
:param mysql_conn_id: Reference to :ref:`mysql connection id <howto/connection:mysql>`.
:param presto_conn_id: source presto connection
:param mysql_preoperator: sql statement to run against mysql prior to
        import, typically used to truncate or delete in place
of the data coming in, allowing the task to be idempotent (running
the task twice won't double load data). (templated)
"""
template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator")
template_ext: Sequence[str] = (".sql",)
template_fields_renderers = {
"sql": "sql",
"mysql_preoperator": "mysql",
}
ui_color = "#a0e08c"
def __init__(
self,
*,
sql: str,
mysql_table: str,
presto_conn_id: str = "presto_default",
mysql_conn_id: str = "mysql_default",
mysql_preoperator: str | None = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.sql = sql
self.mysql_table = mysql_table
self.mysql_conn_id = mysql_conn_id
self.mysql_preoperator = mysql_preoperator
self.presto_conn_id = presto_conn_id
def execute(self, context: Context) -> None:
presto = PrestoHook(presto_conn_id=self.presto_conn_id)
self.log.info("Extracting data from Presto: %s", self.sql)
results = presto.get_records(self.sql)
mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id)
if self.mysql_preoperator:
self.log.info("Running MySQL preoperator")
self.log.info(self.mysql_preoperator)
mysql.run(self.mysql_preoperator)
self.log.info("Inserting rows into MySQL")
mysql.insert_rows(table=self.mysql_table, rows=results)
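# --- Usage sketch (illustrative only, not part of the provider) ---
# Minimal DAG wiring; mirrors the Trino transfer above since the data is also
# fetched into memory first. Query, table, and connection ids are placeholders.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(dag_id="presto_to_mysql_demo", start_date=datetime(2023, 1, 1), schedule=None):
        PrestoToMySqlOperator(
            task_id="copy_user_counts",
            sql="SELECT country, COUNT(*) AS users FROM hive.web.users GROUP BY country",
            mysql_table="reporting.user_counts",
            mysql_preoperator="DELETE FROM reporting.user_counts",
            presto_conn_id="presto_default",
            mysql_conn_id="mysql_default",
        )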
| 3,269 | 37.023256 | 91 | py |
airflow | airflow-main/airflow/providers/mysql/operators/mysql.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import warnings
from typing import Sequence
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
class MySqlOperator(SQLExecuteQueryOperator):
"""
Executes sql code in a specific MySQL database.
This class is deprecated.
Please use :class:`airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:MySqlOperator`
:param sql: the sql code to be executed. Can receive a str representing a
sql statement, a list of str (sql statements), or reference to a template file.
        Template references are recognized by str ending in '.sql'
(templated)
:param mysql_conn_id: Reference to :ref:`mysql connection id <howto/connection:mysql>`.
:param parameters: (optional) the parameters to render the SQL query with.
        Template references are recognized by str ending in '.json'
(templated)
:param autocommit: if True, each command is automatically committed.
(default value: False)
:param database: name of database which overwrite defined one in connection
"""
template_fields: Sequence[str] = ("sql", "parameters")
template_fields_renderers = {
"sql": "mysql",
"parameters": "json",
}
template_ext: Sequence[str] = (".sql", ".json")
ui_color = "#ededed"
def __init__(
self, *, mysql_conn_id: str = "mysql_default", database: str | None = None, **kwargs
) -> None:
if database is not None:
hook_params = kwargs.pop("hook_params", {})
kwargs["hook_params"] = {"schema": database, **hook_params}
super().__init__(conn_id=mysql_conn_id, **kwargs)
warnings.warn(
"""This class is deprecated.
Please use `airflow.providers.common.sql.operators.sql.SQLExecuteQueryOperator`.
Also, you can provide `hook_params={'schema': <database>}`.""",
AirflowProviderDeprecationWarning,
stacklevel=2,
)
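# --- Migration sketch (illustrative only, not part of the provider) ---
# Equivalent task expressed with SQLExecuteQueryOperator, as the deprecation
# warning above recommends. The connection id, schema, and query are placeholders.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG
    with DAG(dag_id="mysql_operator_migration_demo", start_date=datetime(2023, 1, 1), schedule=None):
        # Old style; still works but emits AirflowProviderDeprecationWarning.
        MySqlOperator(
            task_id="old_style",
            mysql_conn_id="mysql_default",
            database="analytics",
            sql="SELECT COUNT(*) FROM orders",
        )
        # Recommended replacement: pass the schema through hook_params.
        SQLExecuteQueryOperator(
            task_id="new_style",
            conn_id="mysql_default",
            hook_params={"schema": "analytics"},
            sql="SELECT COUNT(*) FROM orders",
        )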
| 2,987 | 38.84 | 92 | py |
airflow | airflow-main/airflow/providers/mysql/operators/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/mysql/hooks/mysql.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module allows to connect to a MySQL database."""
from __future__ import annotations
import json
import logging
from typing import TYPE_CHECKING, Any, Union
from airflow.exceptions import AirflowOptionalProviderFeatureException
from airflow.models import Connection
from airflow.providers.common.sql.hooks.sql import DbApiHook
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
try:
from mysql.connector.abstracts import MySQLConnectionAbstract
except ModuleNotFoundError:
logger.warning("The package 'mysql-connector-python' is not installed. Import skipped")
from MySQLdb.connections import Connection as MySQLdbConnection
MySQLConnectionTypes = Union["MySQLdbConnection", "MySQLConnectionAbstract"]
class MySqlHook(DbApiHook):
"""
Interact with MySQL.
You can specify charset in the extra field of your connection
as ``{"charset": "utf8"}``. Also you can choose cursor as
``{"cursor": "SSCursor"}``. Refer to the MySQLdb.cursors for more details.
Note: For AWS IAM authentication, use iam in the extra connection parameters
and set it to true. Leave the password field empty. This will use the
"aws_default" connection to get the temporary token unless you override
in extras.
extras example: ``{"iam":true, "aws_conn_id":"my_aws_conn"}``
    You can also add the "local_infile" parameter to determine whether the local_infile feature of
    the MySQL client should be enabled (it is disabled by default).
:param schema: The MySQL database schema to connect to.
:param connection: The :ref:`MySQL connection id <howto/connection:mysql>` used for MySQL credentials.
:param local_infile: Boolean flag determining if local_infile should be used
"""
conn_name_attr = "mysql_conn_id"
default_conn_name = "mysql_default"
conn_type = "mysql"
hook_name = "MySQL"
supports_autocommit = True
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.schema = kwargs.pop("schema", None)
self.connection = kwargs.pop("connection", None)
self.local_infile = kwargs.pop("local_infile", False)
def set_autocommit(self, conn: MySQLConnectionTypes, autocommit: bool) -> None:
"""
Set *autocommit*.
*mysqlclient* uses an *autocommit* method rather than an *autocommit*
property, so we need to override this to support it.
:param conn: connection to set autocommit setting
:param autocommit: autocommit setting
"""
if hasattr(conn.__class__, "autocommit") and isinstance(conn.__class__.autocommit, property):
conn.autocommit = autocommit
else:
conn.autocommit(autocommit) # type: ignore[operator]
def get_autocommit(self, conn: MySQLConnectionTypes) -> bool:
"""
Whether *autocommit* is active.
        *mysqlclient* uses a *get_autocommit* method rather than an *autocommit*
property, so we need to override this to support it.
:param conn: connection to get autocommit setting from.
:return: connection autocommit setting
"""
if hasattr(conn.__class__, "autocommit") and isinstance(conn.__class__.autocommit, property):
return conn.autocommit
else:
return conn.get_autocommit() # type: ignore[union-attr]
def _get_conn_config_mysql_client(self, conn: Connection) -> dict:
conn_config = {
"user": conn.login,
"passwd": conn.password or "",
"host": conn.host or "localhost",
"db": self.schema or conn.schema or "",
}
# check for authentication via AWS IAM
if conn.extra_dejson.get("iam", False):
conn_config["passwd"], conn.port = self.get_iam_token(conn)
conn_config["read_default_group"] = "enable-cleartext-plugin"
conn_config["port"] = int(conn.port) if conn.port else 3306
if conn.extra_dejson.get("charset", False):
conn_config["charset"] = conn.extra_dejson["charset"]
if conn_config["charset"].lower() in ("utf8", "utf-8"):
conn_config["use_unicode"] = True
if conn.extra_dejson.get("cursor", False):
import MySQLdb.cursors
if (conn.extra_dejson["cursor"]).lower() == "sscursor":
conn_config["cursorclass"] = MySQLdb.cursors.SSCursor
elif (conn.extra_dejson["cursor"]).lower() == "dictcursor":
conn_config["cursorclass"] = MySQLdb.cursors.DictCursor
elif (conn.extra_dejson["cursor"]).lower() == "ssdictcursor":
conn_config["cursorclass"] = MySQLdb.cursors.SSDictCursor
if conn.extra_dejson.get("ssl", False):
# SSL parameter for MySQL has to be a dictionary and in case
# of extra/dejson we can get string if extra is passed via
# URL parameters
dejson_ssl = conn.extra_dejson["ssl"]
if isinstance(dejson_ssl, str):
dejson_ssl = json.loads(dejson_ssl)
conn_config["ssl"] = dejson_ssl
if conn.extra_dejson.get("ssl_mode", False):
conn_config["ssl_mode"] = conn.extra_dejson["ssl_mode"]
if conn.extra_dejson.get("unix_socket"):
conn_config["unix_socket"] = conn.extra_dejson["unix_socket"]
if self.local_infile:
conn_config["local_infile"] = 1
return conn_config
def _get_conn_config_mysql_connector_python(self, conn: Connection) -> dict:
conn_config = {
"user": conn.login,
"password": conn.password or "",
"host": conn.host or "localhost",
"database": self.schema or conn.schema or "",
"port": int(conn.port) if conn.port else 3306,
}
if self.local_infile:
conn_config["allow_local_infile"] = True
# Ref: https://dev.mysql.com/doc/connector-python/en/connector-python-connectargs.html
for key, value in conn.extra_dejson.items():
if key.startswith("ssl_"):
conn_config[key] = value
return conn_config
def get_conn(self) -> MySQLConnectionTypes:
"""
Connection to a MySQL database.
Establishes a connection to a mysql database
by extracting the connection configuration from the Airflow connection.
.. note:: By default it connects to the database via the mysqlclient library.
But you can also choose the mysql-connector-python library which lets you connect through ssl
without any further ssl parameters required.
:return: a mysql connection object
"""
conn = self.connection or self.get_connection(getattr(self, self.conn_name_attr))
client_name = conn.extra_dejson.get("client", "mysqlclient")
if client_name == "mysqlclient":
import MySQLdb
conn_config = self._get_conn_config_mysql_client(conn)
return MySQLdb.connect(**conn_config)
if client_name == "mysql-connector-python":
try:
import mysql.connector
except ModuleNotFoundError:
raise AirflowOptionalProviderFeatureException(
"The pip package 'mysql-connector-python' is not installed, therefore the connection "
"wasn't established. Please, consider using default driver or pip install the package "
"'mysql-connector-python'. Warning! It might cause dependency conflicts."
)
conn_config = self._get_conn_config_mysql_connector_python(conn)
return mysql.connector.connect(**conn_config)
raise ValueError("Unknown MySQL client name provided!")
def bulk_load(self, table: str, tmp_file: str) -> None:
"""Load a tab-delimited file into a database table."""
conn = self.get_conn()
cur = conn.cursor()
cur.execute(
f"""
LOAD DATA LOCAL INFILE '{tmp_file}'
INTO TABLE {table}
"""
)
conn.commit()
conn.close() # type: ignore[misc]
def bulk_dump(self, table: str, tmp_file: str) -> None:
"""Dump a database table into a tab-delimited file."""
conn = self.get_conn()
cur = conn.cursor()
cur.execute(
f"""
SELECT * INTO OUTFILE '{tmp_file}'
FROM {table}
"""
)
conn.commit()
conn.close() # type: ignore[misc]
@staticmethod
def _serialize_cell(cell: object, conn: Connection | None = None) -> Any:
"""
Convert argument to a literal.
The package MySQLdb converts an argument to a literal
        when arguments are passed separately to execute. Hence, this method does nothing.
:param cell: The cell to insert into the table
:param conn: The database connection
:return: The same cell
"""
return cell
def get_iam_token(self, conn: Connection) -> tuple[str, int]:
"""
Retrieve a temporary password to connect to MySQL.
        Uses AwsBaseHook to retrieve a temporary password to connect to MySQL.
        Port is required; if none is provided, the default 3306 is used.
"""
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
aws_conn_id = conn.extra_dejson.get("aws_conn_id", "aws_default")
aws_hook = AwsBaseHook(aws_conn_id, client_type="rds")
if conn.port is None:
port = 3306
else:
port = conn.port
client = aws_hook.get_conn()
token = client.generate_db_auth_token(conn.host, port, conn.login)
return token, port
def bulk_load_custom(
self, table: str, tmp_file: str, duplicate_key_handling: str = "IGNORE", extra_options: str = ""
) -> None:
"""
A more configurable way to load local data from a file into the database.
        .. warning:: According to the MySQL docs, using this function is a
`security risk <https://dev.mysql.com/doc/refman/8.0/en/load-data-local.html>`_.
If you want to use it anyway you can do so by setting a client-side + server-side option.
This depends on the mysql client library used.
        :param table: The table into which the file will be loaded.
:param tmp_file: The file (name) that contains the data.
:param duplicate_key_handling: Specify what should happen to duplicate data.
You can choose either `IGNORE` or `REPLACE`.
.. seealso::
https://dev.mysql.com/doc/refman/8.0/en/load-data.html#load-data-duplicate-key-handling
:param extra_options: More sql options to specify exactly how to load the data.
.. seealso:: https://dev.mysql.com/doc/refman/8.0/en/load-data.html
"""
conn = self.get_conn()
cursor = conn.cursor()
cursor.execute(
f"""
LOAD DATA LOCAL INFILE '{tmp_file}'
{duplicate_key_handling}
INTO TABLE {table}
{extra_options}
"""
)
cursor.close()
conn.commit()
conn.close() # type: ignore[misc]
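# --- Editor's illustrative sketch (not part of the upstream file) ---
# A minimal, hedged example of driving the hook defined above. The connection id
# "mysql_default", the schema, table name and file path are assumptions for
# illustration; a matching Airflow connection must exist for this to run.
if __name__ == "__main__":
    hook = MySqlHook(mysql_conn_id="mysql_default", schema="example_db", local_infile=True)

    # Plain SQL via the DbApiHook interface that MySqlHook inherits.
    for row in hook.get_records("SELECT id, msg FROM example_table LIMIT 5"):
        print(row)

    # Configurable local-infile load; IGNORE keeps existing rows on duplicate keys.
    hook.bulk_load_custom(
        table="example_table",
        tmp_file="/tmp/example_table.tsv",
        duplicate_key_handling="IGNORE",
        extra_options="FIELDS TERMINATED BY '\\t'",
    )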
| 12,081 | 39.139535 | 107 | py |
airflow | airflow-main/airflow/providers/mysql/hooks/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
| 787 | 42.777778 | 62 | py |
airflow | airflow-main/airflow/providers/plexus/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
#
# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
#
from __future__ import annotations
import packaging.version
__all__ = ["__version__"]
__version__ = "3.2.1"
try:
from airflow import __version__ as airflow_version
except ImportError:
from airflow.version import version as airflow_version
if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
raise RuntimeError(
f"The package `apache-airflow-providers-plexus:{__version__}` requires Apache Airflow 2.4.0+" # NOQA: E501
)
| 1,531 | 35.47619 | 115 | py |