Init main app view - redirect to FAB. | def init_flash_views(app):
"""Init main app view - redirect to FAB."""
from airflow.www.blueprints import routes
app.register_blueprint(routes) |
Initialize Web UI views. | def init_appbuilder_views(app):
"""Initialize Web UI views."""
from airflow.models import import_all_models
import_all_models()
from airflow.www import views
appbuilder = app.appbuilder
# Remove the session from scoped_session registry to avoid
# reusing a session with a disconnected connection
appbuilder.session.remove()
appbuilder.add_view_no_menu(views.AutocompleteView())
appbuilder.add_view_no_menu(views.Airflow())
appbuilder.add_view(
views.DagRunModelView,
permissions.RESOURCE_DAG_RUN,
category=permissions.RESOURCE_BROWSE_MENU,
category_icon="fa-globe",
)
appbuilder.add_view(
views.JobModelView, permissions.RESOURCE_JOB, category=permissions.RESOURCE_BROWSE_MENU
)
appbuilder.add_view(
views.LogModelView, permissions.RESOURCE_AUDIT_LOG, category=permissions.RESOURCE_BROWSE_MENU
)
appbuilder.add_view(
views.VariableModelView, permissions.RESOURCE_VARIABLE, category=permissions.RESOURCE_ADMIN_MENU
)
appbuilder.add_view(
views.TaskInstanceModelView,
permissions.RESOURCE_TASK_INSTANCE,
category=permissions.RESOURCE_BROWSE_MENU,
)
appbuilder.add_view(
views.TaskRescheduleModelView,
permissions.RESOURCE_TASK_RESCHEDULE,
category=permissions.RESOURCE_BROWSE_MENU,
)
appbuilder.add_view(
views.TriggerModelView,
permissions.RESOURCE_TRIGGER,
category=permissions.RESOURCE_BROWSE_MENU,
)
appbuilder.add_view(
views.ConfigurationView,
permissions.RESOURCE_CONFIG,
category=permissions.RESOURCE_ADMIN_MENU,
category_icon="fa-user",
)
appbuilder.add_view(
views.ConnectionModelView, permissions.RESOURCE_CONNECTION, category=permissions.RESOURCE_ADMIN_MENU
)
appbuilder.add_view(
views.SlaMissModelView, permissions.RESOURCE_SLA_MISS, category=permissions.RESOURCE_BROWSE_MENU
)
appbuilder.add_view(
views.PluginView, permissions.RESOURCE_PLUGIN, category=permissions.RESOURCE_ADMIN_MENU
)
appbuilder.add_view(
views.ProviderView, permissions.RESOURCE_PROVIDER, category=permissions.RESOURCE_ADMIN_MENU
)
appbuilder.add_view(
views.PoolModelView, permissions.RESOURCE_POOL, category=permissions.RESOURCE_ADMIN_MENU
)
appbuilder.add_view(
views.XComModelView, permissions.RESOURCE_XCOM, category=permissions.RESOURCE_ADMIN_MENU
)
appbuilder.add_view(
views.DagDependenciesView,
permissions.RESOURCE_DAG_DEPENDENCIES,
category=permissions.RESOURCE_BROWSE_MENU,
)
# Use add_view_no_menu so the item position can be controlled explicitly.
# The corresponding link is added in extensions.init_appbuilder_links.init_appbuilder_links
appbuilder.add_view_no_menu(views.RedocView)
# Development views
appbuilder.add_view_no_menu(views.DevView)
appbuilder.add_view_no_menu(views.DocsView) |
Integrate Flask and FAB with plugins. | def init_plugins(app):
"""Integrate Flask and FAB with plugins."""
from airflow import plugins_manager
plugins_manager.initialize_web_ui_plugins()
appbuilder = app.appbuilder
for view in plugins_manager.flask_appbuilder_views:
name = view.get("name")
if name:
log.debug("Adding view %s with menu", name)
appbuilder.add_view(view["view"], name, category=view["category"])
else:
# if 'name' key is missing, intent is to add view without menu
log.debug("Adding view %s without menu", str(type(view["view"])))
appbuilder.add_view_no_menu(view["view"])
for menu_link in sorted(
plugins_manager.flask_appbuilder_menu_links, key=lambda x: (x.get("category", ""), x["name"])
):
log.debug("Adding menu link %s to %s", menu_link["name"], menu_link["href"])
appbuilder.add_link(**menu_link)
for blue_print in plugins_manager.flask_blueprints:
log.debug("Adding blueprint %s:%s", blue_print["name"], blue_print["blueprint"].import_name)
app.register_blueprint(blue_print["blueprint"]) |
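For context, a minimal sketch of the structures plugins_manager is expected to expose to the loop above — the plugin view class and all names below are hypothetical, not taken from a real plugin:
# Hypothetical sketch of plugins_manager entries consumed by init_plugins above.
from flask_appbuilder import BaseView, expose

class MyPluginView(BaseView):
    default_view = "index"

    @expose("/")
    def index(self):
        return "hello from a plugin view"

# Shape of plugins_manager.flask_appbuilder_views entries; the "name", "category" and "view"
# keys are the ones read in the loop above:
example_view = {"name": "My Plugin View", "category": "My Plugin", "view": MyPluginView()}
# Shape of plugins_manager.flask_appbuilder_menu_links entries (keys forwarded to appbuilder.add_link):
example_link = {"name": "Project Docs", "href": "https://example.com/docs", "category": "My Plugin"}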
Add custom error handlers. | def init_error_handlers(app: Flask):
"""Add custom error handlers."""
from airflow.www import views
app.register_error_handler(500, views.show_traceback)
app.register_error_handler(404, views.not_found) |
Add CORS response headers. | def set_cors_headers_on_response(response):
"""Add CORS response headers."""
allow_headers = conf.get("api", "access_control_allow_headers")
allow_methods = conf.get("api", "access_control_allow_methods")
allow_origins = conf.get("api", "access_control_allow_origins")
if allow_headers:
response.headers["Access-Control-Allow-Headers"] = allow_headers
if allow_methods:
response.headers["Access-Control-Allow-Methods"] = allow_methods
if allow_origins == "*":
response.headers["Access-Control-Allow-Origin"] = "*"
elif allow_origins:
allowed_origins = allow_origins.split(" ")
origin = request.environ.get("HTTP_ORIGIN", allowed_origins[0])
if origin in allowed_origins:
response.headers["Access-Control-Allow-Origin"] = origin
return response |
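A minimal standalone sketch of how such a function is typically attached so it runs on every response — it assumes a plain Flask app rather than the Airflow app factory, with hard-coded values standing in for the [api] configuration:
# Standalone illustration only - values are hard-coded instead of read from Airflow config.
from flask import Flask

app = Flask(__name__)

@app.after_request
def _add_cors_headers(response):
    response.headers["Access-Control-Allow-Headers"] = "Content-Type"
    response.headers["Access-Control-Allow-Methods"] = "GET, POST"
    response.headers["Access-Control-Allow-Origin"] = "*"
    return response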
Add error handlers for 404 and 405 errors for existing API paths. | def init_api_error_handlers(app: Flask) -> None:
"""Add error handlers for 404 and 405 errors for existing API paths."""
from airflow.www import views
@app.errorhandler(404)
def _handle_api_not_found(ex):
if any(request.path.startswith(p) for p in base_paths):
# 404 errors are never handled at the blueprint level
# unless raised from a view function, so actual 404 errors
# (i.e. requests for which no route is defined) need to be
# handled here at the application level
return common_error_handler(ex)
else:
return views.not_found(ex)
@app.errorhandler(405)
def _handle_method_not_allowed(ex):
if any(request.path.startswith(p) for p in base_paths):
return common_error_handler(ex)
else:
return views.method_not_allowed(ex)
app.register_error_handler(ProblemException, common_error_handler) |
Initialize Stable API. | def init_api_connexion(app: Flask) -> None:
"""Initialize Stable API."""
base_path = "/api/v1"
base_paths.append(base_path)
with ROOT_APP_DIR.joinpath("api_connexion", "openapi", "v1.yaml").open() as f:
specification = safe_load(f)
api_bp = FlaskApi(
specification=specification,
resolver=_LazyResolver(),
base_path=base_path,
options={"swagger_ui": SWAGGER_ENABLED, "swagger_path": SWAGGER_BUNDLE.__fspath__()},
strict_validation=True,
validate_responses=True,
validator_map={"body": _CustomErrorRequestBodyValidator},
).blueprint
api_bp.after_request(set_cors_headers_on_response)
app.register_blueprint(api_bp)
app.extensions["csrf"].exempt(api_bp) |
Initialize Internal API. | def init_api_internal(app: Flask, standalone_api: bool = False) -> None:
"""Initialize Internal API."""
if not standalone_api and not conf.getboolean("webserver", "run_internal_api", fallback=False):
return
base_paths.append("/internal_api/v1")
with ROOT_APP_DIR.joinpath("api_internal", "openapi", "internal_api_v1.yaml").open() as f:
specification = safe_load(f)
api_bp = FlaskApi(
specification=specification,
base_path="/internal_api/v1",
options={"swagger_ui": SWAGGER_ENABLED, "swagger_path": SWAGGER_BUNDLE.__fspath__()},
strict_validation=True,
validate_responses=True,
).blueprint
api_bp.after_request(set_cors_headers_on_response)
app.register_blueprint(api_bp)
app.after_request_funcs.setdefault(api_bp.name, []).append(set_cors_headers_on_response)
app.extensions["csrf"].exempt(api_bp) |
Initialize Experimental API. | def init_api_experimental(app):
"""Initialize Experimental API."""
if not conf.getboolean("api", "enable_experimental_api", fallback=False):
return
from airflow.www.api.experimental import endpoints
warnings.warn(
"The experimental REST API is deprecated. Please migrate to the stable REST API. "
"Please note that the experimental API do not have access control. "
"The authenticated user has full access.",
RemovedInAirflow3Warning,
stacklevel=2,
)
base_paths.append("/api/experimental")
app.register_blueprint(endpoints.api_experimental, url_prefix="/api/experimental")
app.extensions["csrf"].exempt(endpoints.api_experimental) |
Initialize the API offered by the auth manager. | def init_api_auth_provider(app):
"""Initialize the API offered by the auth manager."""
auth_mgr = get_auth_manager()
blueprint = auth_mgr.get_api_endpoints()
if blueprint:
base_paths.append(blueprint.url_prefix)
app.register_blueprint(blueprint)
app.extensions["csrf"].exempt(blueprint) |
Handle X-Forwarded-* headers and base_url support. | def init_wsgi_middleware(flask_app: Flask) -> None:
"""Handle X-Forwarded-* headers and base_url support."""
webserver_base_url = conf.get_mandatory_value("webserver", "BASE_URL", fallback="")
if webserver_base_url.endswith("/"):
raise AirflowConfigException("webserver.base_url conf cannot have a trailing slash.")
# Apply DispatcherMiddleware
base_url = urlsplit(webserver_base_url)[2]
if not base_url or base_url == "/":
base_url = ""
if base_url:
wsgi_app = DispatcherMiddleware(_root_app, mounts={base_url: flask_app.wsgi_app})
flask_app.wsgi_app = wsgi_app # type: ignore[assignment]
# Apply ProxyFix middleware
if conf.getboolean("webserver", "ENABLE_PROXY_FIX"):
flask_app.wsgi_app = ProxyFix( # type: ignore
flask_app.wsgi_app,
x_for=conf.getint("webserver", "PROXY_FIX_X_FOR", fallback=1),
x_proto=conf.getint("webserver", "PROXY_FIX_X_PROTO", fallback=1),
x_host=conf.getint("webserver", "PROXY_FIX_X_HOST", fallback=1),
x_port=conf.getint("webserver", "PROXY_FIX_X_PORT", fallback=1),
x_prefix=conf.getint("webserver", "PROXY_FIX_X_PREFIX", fallback=1),
) |
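A minimal standalone sketch of the same DispatcherMiddleware / ProxyFix layering — the Flask app, the mount prefix and the root fallback below are placeholders, not the Airflow wiring:
# Standalone sketch; the app, prefix and fallback below are illustrative placeholders.
from flask import Flask
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from werkzeug.middleware.proxy_fix import ProxyFix

flask_app = Flask(__name__)

def _root_fallback(environ, start_response):
    # Requests outside the mounted prefix fall through to this tiny WSGI app.
    start_response("404 Not Found", [("Content-Type", "text/plain")])
    return [b"Nothing is mounted at this path"]

# Mount the app under the path part of a hypothetical base_url such as https://host/myorg/airflow.
flask_app.wsgi_app = DispatcherMiddleware(_root_fallback, mounts={"/myorg/airflow": flask_app.wsgi_app})
# Trust one hop of X-Forwarded-* headers, mirroring the fallback values used above.
flask_app.wsgi_app = ProxyFix(flask_app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1, x_prefix=1)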
Renders template based on its name. Reads the template from <name>.jinja2 in current dir.
:param template_name: name of the template to use
:param context: Jinja2 context
:param autoescape: Whether to autoescape HTML
:param keep_trailing_newline: Whether to keep the newline in rendered output
:return: rendered template | def render_template(
template_name: str,
context: dict[str, Any],
autoescape: bool = False,
keep_trailing_newline: bool = False,
) -> str:
"""
Renders template based on its name. Reads the template from <name>.jinja2 in current dir.
:param template_name: name of the template to use
:param context: Jinja2 context
:param autoescape: Whether to autoescape HTML
:param keep_trailing_newline: Whether to keep the newline in rendered output
:return: rendered template
"""
import jinja2
template_loader = jinja2.FileSystemLoader(searchpath=MY_DIR_PATH)
template_env = jinja2.Environment(
loader=template_loader,
undefined=jinja2.StrictUndefined,
autoescape=autoescape,
keep_trailing_newline=keep_trailing_newline,
)
template = template_env.get_template(f"{template_name}.jinja2")
content: str = template.render(context)
return content |
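A hedged usage sketch of the same Jinja2 setup — a throwaway temporary directory stands in for MY_DIR_PATH and the template name is made up:
# Usage sketch: a temporary directory stands in for MY_DIR_PATH; the template is made up.
import tempfile
from pathlib import Path

import jinja2

with tempfile.TemporaryDirectory() as tmp_dir:
    Path(tmp_dir, "greeting.jinja2").write_text("Hello {{ name }}!")
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(searchpath=tmp_dir),
        undefined=jinja2.StrictUndefined,
    )
    print(env.get_template("greeting.jinja2").render({"name": "Airflow"}))  # Hello Airflow!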
Get git command to run for the current repo from the current folder (which is the package folder).
:param verbose: whether to print verbose info while getting the command
:param from_commit: if present - base commit from which to start the log from
:param to_commit: if present - final commit which should be the end of the log
:return: git command to run | def get_git_log_command(
verbose: bool, from_commit: str | None = None, to_commit: str | None = None
) -> list[str]:
"""
Get git command to run for the current repo from the current folder (which is the package folder).
:param verbose: whether to print verbose info while getting the command
:param from_commit: if present - base commit from which to start the log from
:param to_commit: if present - final commit which should be the end of the log
:return: git command to run
"""
git_cmd = [
"git",
"log",
"--pretty=format:%H %h %cd %s",
"--date=short",
]
if from_commit and to_commit:
git_cmd.append(f"{from_commit}...{to_commit}")
elif from_commit:
git_cmd.append(from_commit)
git_cmd.extend(["--", "."])
if verbose:
console.print(f"Command to run: '{' '.join(git_cmd)}'")
return git_cmd |
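A short sketch (assuming it is run from inside a git checkout) of feeding such a command to subprocess and splitting the output into commit fields:
# Sketch: run the generated git log command and parse one commit per output line.
# Assumes the current working directory is inside a git repository.
import subprocess

git_cmd = ["git", "log", "--pretty=format:%H %h %cd %s", "--date=short", "--", "."]
result = subprocess.run(git_cmd, capture_output=True, text=True, check=True)
for line in result.stdout.splitlines():
    full_hash, short_hash, date, subject = line.split(" ", maxsplit=3)
    print(date, short_hash, subject)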
Use this tool to verify that all expected packages are present in Apache Airflow svn.
In case of providers, it will generate Dockerfile.pmc that you can use
to verify that all packages are installable.
In case of providers, you should update `packages.txt` file with list of packages
that you expect to find (copy-paste the list from VOTE thread).
Example usages:
python check_files.py airflow -p ~/code/airflow_svn -v 1.10.15rc1
python check_files.py upgrade_check -p ~/code/airflow_svn -v 1.3.0rc2
python check_files.py providers -p ~/code/airflow_svn | def cli():
"""
Use this tool to verify that all expected packages are present in Apache Airflow svn.
In case of providers, it will generate Dockerfile.pmc that you can use
to verify that all packages are installable.
In case of providers, you should update `packages.txt` file with list of packages
that you expect to find (copy-paste the list from VOTE thread).
Example usages:
python check_files.py airflow -p ~/code/airflow_svn -v 1.10.15rc1
python check_files.py upgrade_check -p ~/code/airflow_svn -v 1.3.0rc2
python check_files.py providers -p ~/code/airflow_svn
""" |
Passes if all present | def test_check_release_pass():
"""Passes if all present"""
files = [
"apache_airflow-2.8.1-py3-none-any.whl",
"apache_airflow-2.8.1-py3-none-any.whl.asc",
"apache_airflow-2.8.1-py3-none-any.whl.sha512",
"apache-airflow-2.8.1-source.tar.gz",
"apache-airflow-2.8.1-source.tar.gz.asc",
"apache-airflow-2.8.1-source.tar.gz.sha512",
"apache_airflow-2.8.1.tar.gz",
"apache_airflow-2.8.1.tar.gz.asc",
"apache_airflow-2.8.1.tar.gz.sha512",
]
assert check_release(files, version="2.8.1rc2") == [] |
Fails if missing one | def test_check_release_fail():
"""Fails if missing one"""
files = [
"apache_airflow-2.8.1-py3-none-any.whl",
"apache_airflow-2.8.1-py3-none-any.whl.asc",
"apache_airflow-2.8.1-py3-none-any.whl.sha512",
"apache-airflow-2.8.1-source.tar.gz",
"apache-airflow-2.8.1-source.tar.gz.asc",
"apache-airflow-2.8.1-source.tar.gz.sha512",
"apache_airflow-2.8.1.tar.gz.asc",
"apache_airflow-2.8.1.tar.gz.sha512",
]
missing_files = check_release(files, version="2.8.1rc2")
assert missing_files == ["apache_airflow-2.8.1.tar.gz"] |
Passes if all present | def test_check_providers_pass(monkeypatch, tmp_path):
"""Passes if all present"""
monkeypatch.chdir(tmp_path)
(tmp_path / "packages.txt").write_text(
"https://pypi.org/project/apache-airflow-providers-airbyte/3.1.0rc1/\n"
"https://pypi.org/project/apache-airflow-providers-foo-bar/9.6.42rc2/\n"
)
files = [
"apache_airflow_providers_airbyte-3.1.0.tar.gz",
"apache_airflow_providers_airbyte-3.1.0.tar.gz.asc",
"apache_airflow_providers_airbyte-3.1.0.tar.gz.sha512",
"apache_airflow_providers_airbyte-3.1.0-py3-none-any.whl",
"apache_airflow_providers_airbyte-3.1.0-py3-none-any.whl.asc",
"apache_airflow_providers_airbyte-3.1.0-py3-none-any.whl.sha512",
"apache_airflow_providers_foo_bar-9.6.42.tar.gz",
"apache_airflow_providers_foo_bar-9.6.42.tar.gz.asc",
"apache_airflow_providers_foo_bar-9.6.42.tar.gz.sha512",
"apache_airflow_providers_foo_bar-9.6.42-py3-none-any.whl",
"apache_airflow_providers_foo_bar-9.6.42-py3-none-any.whl.asc",
"apache_airflow_providers_foo_bar-9.6.42-py3-none-any.whl.sha512",
]
assert check_providers(files) == [] |
Fails if some files are missing | def test_check_providers_failure(monkeypatch, tmp_path):
"""Fails if some files are missing"""
monkeypatch.chdir(tmp_path)
(tmp_path / "packages.txt").write_text(
"https://pypi.org/project/apache-airflow-providers-spam-egg/1.2.3rc4/\n"
)
files = [
"apache_airflow_providers_spam_egg-1.2.3.tar.gz",
"apache_airflow_providers_spam_egg-1.2.3.tar.gz.sha512",
"apache_airflow_providers_spam_egg-1.2.3-py3-none-any.whl",
"apache_airflow_providers_spam_egg-1.2.3-py3-none-any.whl.asc",
]
assert sorted(check_providers(files)) == [
"apache_airflow_providers_spam_egg-1.2.3-py3-none-any.whl.sha512",
"apache_airflow_providers_spam_egg-1.2.3.tar.gz.asc",
] |
Renders template based on its name. Reads the template from <name> file in the current dir.
:param template_name: name of the template to use
:param context: Jinja2 context
:param autoescape: Whether to autoescape HTML
:param keep_trailing_newline: Whether to keep the newline in rendered output
:return: rendered template | def render_template_file(
template_name: str,
context: dict[str, Any],
autoescape: bool = True,
keep_trailing_newline: bool = False,
) -> str:
"""
Renders template based on its name. Reads the template from <name> file in the current dir.
:param template_name: name of the template to use
:param context: Jinja2 context
:param autoescape: Whether to autoescape HTML
:param keep_trailing_newline: Whether to keep the newline in rendered output
:return: rendered template
"""
import jinja2
template_loader = jinja2.FileSystemLoader(searchpath=MY_DIR_PATH)
template_env = jinja2.Environment(
loader=template_loader,
undefined=jinja2.StrictUndefined,
autoescape=autoescape,
keep_trailing_newline=keep_trailing_newline,
)
template = template_env.get_template(template_name)
content: str = template.render(context)
return content |
Renders template from the given template string.
:param template_string: string of the template to use
:param context: Jinja2 context
:param autoescape: Whether to autoescape HTML
:param keep_trailing_newline: Whether to keep the newline in rendered output
:return: rendered template | def render_template_string(
template_string: str,
context: dict[str, Any],
autoescape: bool = True,
keep_trailing_newline: bool = False,
) -> str:
"""
Renders template from the given template string.
:param template_string: string of the template to use
:param context: Jinja2 context
:param autoescape: Whether to autoescape HTML
:param keep_trailing_newline: Whether to keep the newline in rendered output
:return: rendered template
"""
import jinja2
template = jinja2.Environment(
loader=BaseLoader(),
undefined=jinja2.StrictUndefined,
autoescape=autoescape,
keep_trailing_newline=keep_trailing_newline,
).from_string(template_string)
content: str = template.render(context)
return content |
Split a comma-separated string into a list | def string_comma_to_list(message: str) -> list[str]:
"""
Split a comma-separated string into a list
"""
return message.split(",") if message else [] |
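For example:
# Usage of string_comma_to_list as defined above.
assert string_comma_to_list("a@example.com,b@example.com") == ["a@example.com", "b@example.com"]
assert string_comma_to_list("") == []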
Send a simple text email (SMTP) | def send_email(
smtp_server: str,
smtp_port: int,
username: str,
password: str,
sender_email: str,
receiver_email: str | list,
message: str,
):
"""
Send a simple text email (SMTP)
"""
context = ssl.create_default_context()
with smtplib.SMTP(smtp_server, smtp_port) as server:
server.starttls(context=context)
server.login(username, password)
server.sendmail(sender_email, receiver_email, message) |
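A hedged usage sketch with placeholder SMTP settings and addresses — nothing here reflects real infrastructure; arguments are passed positionally in the order of the signature above:
# Placeholder values only - server, port, credentials and addresses are illustrative.
message = "Subject: [VOTE] Release Apache Airflow X.Y.Z\n\nHello, please vote on the candidate."
send_email(
    "smtp.example.com",   # smtp_server
    587,                  # smtp_port
    "release-manager",    # username
    "app-password",       # password - never hard-code real credentials
    "rm@example.com",     # sender_email
    ["dev@example.com"],  # receiver_email
    message,
)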
Simple render template based on named parameters
:param template_file: The template file location
:param kwargs: Named parameters to use when rendering the template
:return: Rendered template | def render_template(template_file: str, **kwargs) -> str:
"""
Simple render template based on named parameters
:param template_file: The template file location
:param kwargs: Named parameters to use when rendering the template
:return: Rendered template
"""
dir_path = os.path.dirname(os.path.realpath(__file__))
template = jinja2.Template(open(os.path.join(dir_path, template_file)).read())
return template.render(kwargs) |
Show message on the Command Line | def show_message(entity: str, message: str):
"""
Show message on the Command Line
"""
width, _ = shutil.get_terminal_size()
click.secho("-" * width, fg="blue")
click.secho(f"{entity} Message:", fg="bright_red", bold=True)
click.secho("-" * width, fg="blue")
click.echo(message)
click.secho("-" * width, fg="blue") |
Send email using SMTP | def inter_send_email(
username: str, password: str, sender_email: str, receiver_email: str | list, message: str
):
"""
Send email using SMTP
"""
show_message("SMTP", message)
click.confirm("Is the Email message ok?", abort=True)
try:
send_email(
SMTP_SERVER,
SMTP_PORT,
username,
password,
sender_email,
receiver_email,
message,
)
click.secho("✅ Email sent successfully", fg="green")
except smtplib.SMTPAuthenticationError:
sys.exit("SMTP User authentication error, Email not sent!")
except Exception as e:
sys.exit(f"SMTP exception {e}") |
🚀 CLI to send emails for the following:
* Voting thread for the rc
* Result of the voting for the rc
* Announcing that the new version has been released | def cli(
ctx,
apache_email: str,
apache_username: str,
apache_password: str,
version: str,
version_rc: str,
name: str,
):
"""
🚀 CLI to send emails for the following:
\b
* Voting thread for the rc
* Result of the voting for the rc
* Announcing that the new version has been released
"""
base_parameters = BaseParameters(
name, apache_email, apache_username, apache_password, version, version_rc
)
base_parameters.template_arguments["version"] = base_parameters.version
base_parameters.template_arguments["version_rc"] = base_parameters.version_rc
base_parameters.template_arguments["sender_email"] = base_parameters.email
base_parameters.template_arguments["release_manager"] = base_parameters.name
ctx.obj = base_parameters |
Send email calling for Votes on RC | def vote(base_parameters, receiver_email: str):
"""
Send email calling for Votes on RC
"""
template_file = "templates/vote_email.j2"
base_parameters.template_arguments["receiver_email"] = receiver_email
message = render_template(template_file, **base_parameters.template_arguments)
inter_send_email(
base_parameters.username,
base_parameters.password,
base_parameters.template_arguments["sender_email"],
base_parameters.template_arguments["receiver_email"],
message,
)
if click.confirm("Show Slack message for announcement?", default=True):
base_parameters.template_arguments["slack_rc"] = False
slack_msg = render_template("templates/slack.j2", **base_parameters.template_arguments)
show_message("Slack", slack_msg) |
Send email with results of voting on RC | def result(
base_parameters,
receiver_email: str,
vote_bindings: str,
vote_nonbindings: str,
vote_negatives: str,
):
"""
Send email with results of voting on RC
"""
template_file = "templates/result_email.j2"
base_parameters.template_arguments["receiver_email"] = receiver_email
base_parameters.template_arguments["vote_bindings"] = string_comma_to_list(vote_bindings)
base_parameters.template_arguments["vote_nonbindings"] = string_comma_to_list(vote_nonbindings)
base_parameters.template_arguments["vote_negatives"] = string_comma_to_list(vote_negatives)
message = render_template(template_file, **base_parameters.template_arguments)
inter_send_email(
base_parameters.username,
base_parameters.password,
base_parameters.template_arguments["sender_email"],
base_parameters.template_arguments["receiver_email"],
message,
) |
Send email to announce release of the new version | def announce(base_parameters, receiver_email: str):
"""
Send email to announce release of the new version
"""
receiver_emails: list[str] = string_comma_to_list(receiver_email)
template_file = "templates/announce_email.j2"
base_parameters.template_arguments["receiver_email"] = receiver_emails
message = render_template(template_file, **base_parameters.template_arguments)
inter_send_email(
base_parameters.username,
base_parameters.password,
base_parameters.template_arguments["sender_email"],
base_parameters.template_arguments["receiver_email"],
message,
)
if click.confirm("Show Slack message for announcement?", default=True):
base_parameters.template_arguments["slack_rc"] = False
slack_msg = render_template("templates/slack.j2", **base_parameters.template_arguments)
show_message("Slack", slack_msg)
if click.confirm("Show Twitter message for announcement?", default=True):
twitter_msg = render_template("templates/twitter.j2", **base_parameters.template_arguments)
show_message("Twitter", twitter_msg) |
Parses config_template.yaml new format and returns config_options | def parse_config_template_new_format(config_content: str) -> set[tuple[str, str, str]]:
"""
Parses config_template.yaml new format and returns config_options
"""
config_sections = yaml.safe_load(config_content)
return {
(config_section_name, config_option_name, config_option_value["version_added"])
for config_section_name, config_section_value in config_sections.items()
for config_option_name, config_option_value in config_section_value["options"].items()
} |
Parses config_template.yaml old format and returns config_options | def parse_config_template_old_format(config_content: str) -> set[tuple[str, str, str]]:
"""
Parses config_template.yaml old format and returns config_options
"""
config_sections = yaml.safe_load(config_content)
return {
(
config_section["name"],
config_option["name"],
config_option.get("version_added"),
)
for config_section in config_sections
for config_option in config_section["options"]
} |
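A small sketch contrasting the two shapes parsed above — the YAML snippets are made up for illustration, not copied from config_template.yaml:
# Illustrative-only YAML snippets showing the two layouts handled by the parsers above.
import yaml

new_format = """
core:
  options:
    parallelism:
      version_added: 1.0.0
"""
old_format = """
- name: core
  options:
    - name: parallelism
      version_added: 1.0.0
"""
# New format: a mapping keyed by section name; old format: a list of section mappings.
assert isinstance(yaml.safe_load(new_format), dict)
assert isinstance(yaml.safe_load(old_format), list)
# Both parsers above reduce these examples to the same set: {("core", "parallelism", "1.0.0")}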
Check whether a CI image build is needed. Returns False if we should not continue. | def check_if_image_building_is_needed(ci_image_params: BuildCiParams, output: Output | None) -> bool:
"""Check whether a CI image build is needed. Returns False if we should not continue."""
result = run_command(
["docker", "inspect", ci_image_params.airflow_image_name_with_tag],
capture_output=True,
text=True,
check=False,
)
if result.returncode != 0:
return True
if not ci_image_params.force_build and not ci_image_params.upgrade_to_newer_dependencies:
if not should_we_run_the_build(build_ci_params=ci_image_params):
return False
return True |
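A standalone sketch of just the "does this image already exist locally?" check performed above; the image name is a placeholder and docker is assumed to be installed:
# Sketch of the local-image check: docker inspect exits non-zero when the image is absent.
import subprocess

result = subprocess.run(
    ["docker", "inspect", "example-image:latest"],  # placeholder image reference
    capture_output=True,
    text=True,
    check=False,
)
image_missing_locally = result.returncode != 0
print("build needed:", image_missing_locally)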
Build CI image. Include building multiple images for all python versions. | def build(
additional_airflow_extras: str | None,
additional_dev_apt_command: str | None,
additional_dev_apt_deps: str | None,
additional_dev_apt_env: str | None,
additional_pip_install_flags: str | None,
additional_python_deps: str | None,
airflow_constraints_location: str | None,
airflow_constraints_mode: str,
airflow_constraints_reference: str,
build_progress: str,
build_timeout_minutes: int | None,
builder: str,
commit_sha: str | None,
debian_version: str,
debug_resources: bool,
dev_apt_command: str | None,
dev_apt_deps: str | None,
docker_cache: str,
docker_host: str | None,
eager_upgrade_additional_requirements: str | None,
github_repository: str,
github_token: str | None,
image_tag: str,
include_success_outputs,
install_mysql_client_type: str,
parallelism: int,
platform: str | None,
prepare_buildx_cache: bool,
push: bool,
python: str,
python_image: str | None,
python_versions: str,
run_in_parallel: bool,
skip_cleanup: bool,
tag_as_latest: bool,
upgrade_on_failure: bool,
upgrade_to_newer_dependencies: bool,
use_uv: bool,
uv_http_timeout: int,
version_suffix_for_pypi: str,
):
"""Build CI image. Include building multiple images for all python versions."""
def run_build(ci_image_params: BuildCiParams) -> None:
return_code, info = run_build_ci_image(
ci_image_params=ci_image_params,
param_description=ci_image_params.python + ":" + ci_image_params.platform,
output=None,
)
if return_code != 0:
get_console().print(f"[error]Error when building image! {info}")
sys.exit(return_code)
if build_timeout_minutes:
pid = os.fork()
if pid:
# Parent process - send signal to process group of the child process
handler: Callable[..., tuple[Any, Any]] = partial(build_timout_handler, pid)
# kill the child process group when we exit early - for example when we are Ctrl-C-ed
atexit.register(kill_process_group, pid)
signal.signal(signal.SIGALRM, handler)
signal.alarm(build_timeout_minutes * 60)
child_pid, status = os.waitpid(pid, 0)
exit_code = get_exitcode(status)
if exit_code:
get_console().print(f"[error]Exiting with exit code {exit_code}")
else:
get_console().print(f"[success]Exiting with exit code {exit_code}")
sys.exit(exit_code)
else:
# turn us into a process group leader
os.setpgid(0, 0)
perform_environment_checks()
check_remote_ghcr_io_commands()
fix_group_permissions()
base_build_params = BuildCiParams(
additional_airflow_extras=additional_airflow_extras,
additional_dev_apt_command=additional_dev_apt_command,
additional_dev_apt_env=additional_dev_apt_env,
additional_pip_install_flags=additional_pip_install_flags,
additional_python_deps=additional_python_deps,
airflow_constraints_location=airflow_constraints_location,
airflow_constraints_mode=airflow_constraints_mode,
airflow_constraints_reference=airflow_constraints_reference,
build_progress=build_progress,
builder=builder,
commit_sha=commit_sha,
debian_version=debian_version,
dev_apt_command=dev_apt_command,
dev_apt_deps=dev_apt_deps,
docker_cache=docker_cache,
docker_host=docker_host,
eager_upgrade_additional_requirements=eager_upgrade_additional_requirements,
force_build=True,
github_repository=github_repository,
github_token=github_token,
image_tag=image_tag,
install_mysql_client_type=install_mysql_client_type,
prepare_buildx_cache=prepare_buildx_cache,
push=push,
python=python,
python_image=python_image,
tag_as_latest=tag_as_latest,
upgrade_on_failure=upgrade_on_failure,
upgrade_to_newer_dependencies=upgrade_to_newer_dependencies,
use_uv=use_uv,
uv_http_timeout=uv_http_timeout,
version_suffix_for_pypi=version_suffix_for_pypi,
)
if platform:
base_build_params.platform = platform
if additional_dev_apt_deps:
# For CI image we only set additional_dev_apt_deps when we explicitly pass it
base_build_params.additional_dev_apt_deps = additional_dev_apt_deps
if run_in_parallel:
params_list: list[BuildCiParams] = []
if prepare_buildx_cache:
platforms_list = base_build_params.platform.split(",")
for platform in platforms_list:
build_params = deepcopy(base_build_params)
build_params.platform = platform
params_list.append(build_params)
prepare_for_building_ci_image(params=params_list[0])
run_build_in_parallel(
image_params_list=params_list,
params_description_list=platforms_list,
include_success_outputs=include_success_outputs,
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
)
else:
python_version_list = get_python_version_list(python_versions)
for python in python_version_list:
build_params = deepcopy(base_build_params)
build_params.python = python
params_list.append(build_params)
prepare_for_building_ci_image(params=params_list[0])
run_build_in_parallel(
image_params_list=params_list,
params_description_list=python_version_list,
include_success_outputs=include_success_outputs,
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
)
else:
prepare_for_building_ci_image(params=base_build_params)
run_build(ci_image_params=base_build_params) |
Pull and optionally verify CI images - possibly in parallel for all Python versions. | def pull(
python: str,
run_in_parallel: bool,
python_versions: str,
github_token: str,
parallelism: int,
skip_cleanup: bool,
debug_resources: bool,
include_success_outputs: bool,
image_tag: str,
wait_for_image: bool,
tag_as_latest: bool,
verify: bool,
github_repository: str,
extra_pytest_args: tuple,
):
"""Pull and optionally verify CI images - possibly in parallel for all Python versions."""
perform_environment_checks()
check_remote_ghcr_io_commands()
if run_in_parallel:
python_version_list = get_python_version_list(python_versions)
ci_image_params_list = [
BuildCiParams(
image_tag=image_tag,
python=python,
github_repository=github_repository,
github_token=github_token,
)
for python in python_version_list
]
run_pull_in_parallel(
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
include_success_outputs=include_success_outputs,
image_params_list=ci_image_params_list,
python_version_list=python_version_list,
verify=verify,
wait_for_image=wait_for_image,
tag_as_latest=tag_as_latest,
extra_pytest_args=extra_pytest_args if extra_pytest_args is not None else (),
)
else:
image_params = BuildCiParams(
image_tag=image_tag,
python=python,
github_repository=github_repository,
github_token=github_token,
)
return_code, info = run_pull_image(
image_params=image_params,
output=None,
wait_for_image=wait_for_image,
tag_as_latest=tag_as_latest,
)
if return_code != 0:
get_console().print(f"[error]There was an error when pulling CI image: {info}[/]")
sys.exit(return_code) |
Verify CI image. | def verify(
python: str,
python_versions: str,
image_name: str,
image_tag: str | None,
pull: bool,
github_token: str,
github_repository: str,
extra_pytest_args: tuple[str, ...],
run_in_parallel: bool,
parallelism: int,
skip_cleanup: bool,
debug_resources: bool,
include_success_outputs: bool,
):
"""Verify CI image."""
perform_environment_checks()
check_remote_ghcr_io_commands()
if (pull or image_name) and run_in_parallel:
get_console().print(
"[error]You cannot use --pull,--image-name and --run-in-parallel at the same time. Exiting[/]"
)
sys.exit(1)
if run_in_parallel:
base_build_params = BuildCiParams(
python=python,
github_repository=github_repository,
image_tag=image_tag,
)
python_version_list = get_python_version_list(python_versions)
params_list: list[BuildCiParams] = []
for python in python_version_list:
build_params = deepcopy(base_build_params)
build_params.python = python
params_list.append(build_params)
run_verify_in_parallel(
image_params_list=params_list,
python_version_list=python_version_list,
extra_pytest_args=extra_pytest_args,
include_success_outputs=include_success_outputs,
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
)
else:
if image_name is None:
build_params = BuildCiParams(
python=python,
image_tag=image_tag,
github_repository=github_repository,
github_token=github_token,
)
image_name = build_params.airflow_image_name_with_tag
if pull:
check_remote_ghcr_io_commands()
command_to_run = ["docker", "pull", image_name]
run_command(command_to_run, check=True)
get_console().print(f"[info]Verifying CI image: {image_name}[/]")
return_code, info = verify_an_image(
image_name=image_name,
output=None,
image_type="CI",
slim_image=False,
extra_pytest_args=extra_pytest_args,
)
sys.exit(return_code) |
Check if we should run the build based on what files have been modified since the last build and the
answer from the user.
* If build is needed, the user is asked for confirmation
* If the branch is not rebased it warns the user to rebase (to make sure latest remote cache is useful)
* Builds Image/Skips/Quits depending on the answer
:param build_ci_params: parameters for the build | def should_we_run_the_build(build_ci_params: BuildCiParams) -> bool:
"""
Check if we should run the build based on what files have been modified since the last build and the
answer from the user.
* If build is needed, the user is asked for confirmation
* If the branch is not rebased it warns the user to rebase (to make sure latest remote cache is useful)
* Builds Image/Skips/Quits depending on the answer
:param build_ci_params: parameters for the build
"""
# We import those locally so that click autocomplete works
from inputimeout import TimeoutOccurred
if not md5sum_check_if_build_is_needed(
build_ci_params=build_ci_params,
md5sum_cache_dir=build_ci_params.md5sum_cache_dir,
skip_provider_dependencies_check=build_ci_params.skip_provider_dependencies_check,
):
return False
try:
answer = user_confirm(
message="Do you want to build the image (this works best when you have good connection and "
"can take usually from 20 seconds to few minutes depending how old your image is)?",
timeout=STANDARD_TIMEOUT,
default_answer=Answer.NO,
)
if answer == Answer.YES:
if is_repo_rebased(build_ci_params.github_repository, build_ci_params.airflow_branch):
return True
else:
get_console().print(
"\n[warning]This might take a lot of time (more than 10 minutes) even if you have "
"a good network connection. We think you should attempt to rebase first.[/]\n"
)
answer = user_confirm(
"But if you really, really want - you can attempt it. Are you really sure?",
timeout=STANDARD_TIMEOUT,
default_answer=Answer.NO,
)
if answer == Answer.YES:
return True
else:
get_console().print(
f"[info]Please rebase your code to latest {build_ci_params.airflow_branch} "
"before continuing.[/]\nCheck this link to find out how "
"https://github.com/apache/airflow/blob/main/contributing-docs/11_working_with_git.rst\n"
)
get_console().print("[error]Exiting the process[/]\n")
sys.exit(1)
elif answer == Answer.NO:
instruct_build_image(build_ci_params.python)
return False
else: # users_status == Answer.QUIT:
get_console().print("\n[warning]Quitting the process[/]\n")
sys.exit()
except TimeoutOccurred:
get_console().print("\nTimeout. Considering your response as No\n")
instruct_build_image(build_ci_params.python)
return False
except Exception as e:
get_console().print(f"\nTerminating the process on {e}")
sys.exit(1) |
Builds CI image:
* fixes group permissions for files (to improve caching when umask is 002)
* converts all the parameters received via kwargs into BuildCIParams (including cache)
* prints info about the image to build
* logs in to the docker registry on CI if the build cache is being prepared
* removes "tag" for the previously built image so that inline cache uses only the remote image
* constructs the docker-compose command to run based on the parameters passed
* runs the build command
* updates cached information that the build completed and saves checksums of all files
for a quick future check whether the build is needed
:param ci_image_params: CI image parameters
:param param_description: description of the parameter used
:param output: output redirection | def run_build_ci_image(
ci_image_params: BuildCiParams,
param_description: str,
output: Output | None,
) -> tuple[int, str]:
"""
Builds CI image:
* fixes group permissions for files (to improve caching when umask is 002)
* converts all the parameters received via kwargs into BuildCIParams (including cache)
* prints info about the image to build
* logs in to the docker registry on CI if the build cache is being prepared
* removes "tag" for the previously built image so that inline cache uses only the remote image
* constructs the docker-compose command to run based on the parameters passed
* runs the build command
* updates cached information that the build completed and saves checksums of all files
for a quick future check whether the build is needed
:param ci_image_params: CI image parameters
:param param_description: description of the parameter used
:param output: output redirection
"""
if (
ci_image_params.is_multi_platform()
and not ci_image_params.push
and not ci_image_params.prepare_buildx_cache
):
get_console(output=output).print(
"\n[red]You cannot use multi-platform build without using --push flag or "
"preparing buildx cache![/]\n"
)
return 1, "Error: building multi-platform image without --push."
if get_verbose() or get_dry_run():
get_console(output=output).print(
f"\n[info]Building CI image of airflow from {AIRFLOW_SOURCES_ROOT}: {param_description}[/]\n"
)
if ci_image_params.prepare_buildx_cache:
build_command_result = build_cache(
image_params=ci_image_params,
output=output,
)
else:
env = get_docker_build_env(ci_image_params)
subprocess.run(
[
sys.executable,
os.fspath(
AIRFLOW_SOURCES_ROOT
/ "scripts"
/ "ci"
/ "pre_commit"
/ "update_providers_dependencies.py"
),
],
check=False,
)
get_console(output=output).print(f"\n[info]Building CI Image for {param_description}\n")
build_command_result = run_command(
prepare_docker_build_command(
image_params=ci_image_params,
),
cwd=AIRFLOW_SOURCES_ROOT,
text=True,
check=False,
env=env,
output=output,
)
if build_command_result.returncode != 0 and not ci_image_params.upgrade_to_newer_dependencies:
if ci_image_params.upgrade_on_failure:
ci_image_params.upgrade_to_newer_dependencies = True
get_console().print(
"[warning]Attempting to build with --upgrade-to-newer-dependencies on failure"
)
build_command_result = run_command(
prepare_docker_build_command(
image_params=ci_image_params,
),
cwd=AIRFLOW_SOURCES_ROOT,
env=env,
text=True,
check=False,
output=output,
)
else:
get_console().print(
"[warning]Your image build failed. It could be caused by conflicting dependencies."
)
get_console().print(
"[info]Run `breeze ci-image build --upgrade-to-newer-dependencies` to upgrade them.\n"
)
if build_command_result.returncode == 0:
if ci_image_params.tag_as_latest:
build_command_result = tag_image_as_latest(image_params=ci_image_params, output=output)
if ci_image_params.preparing_latest_image():
if get_dry_run():
get_console(output=output).print(
"[info]Not updating build hash because we are in `dry_run` mode.[/]"
)
else:
mark_image_as_refreshed(ci_image_params)
return build_command_result.returncode, f"Image build: {param_description}" |
Rebuilds CI image if needed and user confirms it.
:param command_params: parameters of the command to execute | def rebuild_or_pull_ci_image_if_needed(command_params: ShellParams | BuildCiParams) -> None:
"""
Rebuilds CI image if needed and user confirms it.
:param command_params: parameters of the command to execute
"""
build_ci_image_check_cache = Path(
BUILD_CACHE_DIR, command_params.airflow_branch, f".built_{command_params.python}"
)
ci_image_params = BuildCiParams(
builder=command_params.builder,
docker_host=command_params.docker_host,
force_build=command_params.force_build,
github_repository=command_params.github_repository,
image_tag=command_params.image_tag,
platform=command_params.platform,
python=command_params.python,
skip_image_upgrade_check=command_params.skip_image_upgrade_check,
skip_provider_dependencies_check=command_params.skip_provider_dependencies_check,
upgrade_to_newer_dependencies=False,
warn_image_upgrade_needed=command_params.warn_image_upgrade_needed,
# upgrade on failure is disabled on CI but enabled locally, to make sure we are not
# accidentally upgrading dependencies on CI
upgrade_on_failure=not os.environ.get("CI", ""),
)
if command_params.image_tag is not None and command_params.image_tag != "latest":
return_code, message = run_pull_image(
image_params=ci_image_params,
output=None,
wait_for_image=True,
tag_as_latest=False,
)
if return_code != 0:
get_console().print(f"[error]Pulling image with {command_params.image_tag} failed! {message}[/]")
sys.exit(return_code)
return
if build_ci_image_check_cache.exists():
if get_verbose():
get_console().print(f"[info]{command_params.image_type} image already built locally.[/]")
else:
get_console().print(
f"[warning]{command_params.image_type} image for Python {command_params.python} "
f"was never built locally or was deleted. Forcing build.[/]"
)
ci_image_params.force_build = True
if check_if_image_building_is_needed(
ci_image_params=ci_image_params,
output=None,
):
run_build_ci_image(
ci_image_params=ci_image_params, param_description=ci_image_params.python, output=None
) |
Determine which constraints reference to use.
When use-airflow-version is branch or version, we derive the constraints branch from it, unless
someone specified the constraints branch explicitly.
:param airflow_constraints_reference: the constraint reference specified (or default)
:param use_airflow_version: which airflow version we are installing
:return: the actual constraints reference to use | def _determine_constraint_branch_used(airflow_constraints_reference: str, use_airflow_version: str | None):
"""
Determine which constraints reference to use.
When use-airflow-version is branch or version, we derive the constraints branch from it, unless
someone specified the constraints branch explicitly.
:param airflow_constraints_reference: the constraint reference specified (or default)
:param use_airflow_version: which airflow version we are installing
:return: the actual constraints reference to use
"""
if (
use_airflow_version
and airflow_constraints_reference == DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
and re.match(r"[0-9]+\.[0-9]+\.[0-9]+[0-9a-z.]*|main|v[0-9]_.*", use_airflow_version)
):
get_console().print(
f"[info]Using constraints for {use_airflow_version} - matching airflow version used."
)
return f"constraints-{use_airflow_version}"
return airflow_constraints_reference |
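A minimal sketch of just the version-matching rule used above — the candidate values and the fallback reference are illustrative:
# Sketch of the regex gate above: a release-like version, "main" or a vN_* branch derives its own
# constraints reference; anything else keeps the explicitly passed reference ("constraints-main" here).
import re

_PATTERN = r"[0-9]+\.[0-9]+\.[0-9]+[0-9a-z.]*|main|v[0-9]_.*"
for candidate in ["2.8.1", "2.9.0rc1", "main", "v2_8_test", "some-feature-branch"]:
    derived = f"constraints-{candidate}" if re.match(_PATTERN, candidate) else "constraints-main"
    print(candidate, "->", derived)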
Enter breeze environment. This is the default command used when no other is selected. | def shell(
airflow_constraints_location: str,
airflow_constraints_mode: str,
airflow_constraints_reference: str,
airflow_extras: str,
airflow_skip_constraints: bool,
backend: str,
builder: str,
celery_broker: str,
celery_flower: bool,
database_isolation: bool,
db_reset: bool,
downgrade_sqlalchemy: bool,
downgrade_pendulum: bool,
docker_host: str | None,
executor: str,
extra_args: tuple,
force_build: bool,
forward_credentials: bool,
github_repository: str,
image_tag: str | None,
include_mypy_volume: bool,
install_selected_providers: str,
integration: tuple[str, ...],
max_time: int | None,
mount_sources: str,
mysql_version: str,
package_format: str,
platform: str | None,
postgres_version: str,
project_name: str,
providers_constraints_location: str,
providers_constraints_mode: str,
providers_constraints_reference: str,
providers_skip_constraints: bool,
pydantic: str,
python: str,
quiet: bool,
restart: bool,
run_db_tests_only: bool,
skip_environment_initialization: bool,
skip_db_tests: bool,
skip_image_upgrade_check: bool,
standalone_dag_processor: bool,
tty: str,
upgrade_boto: bool,
use_airflow_version: str | None,
use_packages_from_dist: bool,
use_uv: bool,
uv_http_timeout: int,
verbose_commands: bool,
warn_image_upgrade_needed: bool,
):
"""Enter breeze environment. this is the default command use when no other is selected."""
if (get_verbose() or get_dry_run()) and not quiet:
get_console().print("\n[success]Welcome to breeze.py[/]\n")
get_console().print(f"\n[success]Root of Airflow Sources = {AIRFLOW_SOURCES_ROOT}[/]\n")
if max_time:
TimerThread(max_time=max_time).start()
set_forced_answer("yes")
airflow_constraints_reference = _determine_constraint_branch_used(
airflow_constraints_reference, use_airflow_version
)
shell_params = ShellParams(
airflow_constraints_location=airflow_constraints_location,
airflow_constraints_mode=airflow_constraints_mode,
airflow_constraints_reference=airflow_constraints_reference,
airflow_extras=airflow_extras,
airflow_skip_constraints=airflow_skip_constraints,
backend=backend,
builder=builder,
celery_broker=celery_broker,
celery_flower=celery_flower,
database_isolation=database_isolation,
db_reset=db_reset,
downgrade_sqlalchemy=downgrade_sqlalchemy,
downgrade_pendulum=downgrade_pendulum,
docker_host=docker_host,
executor=executor,
extra_args=extra_args if not max_time else ["exit"],
force_build=force_build,
forward_credentials=forward_credentials,
github_repository=github_repository,
image_tag=image_tag,
include_mypy_volume=include_mypy_volume,
install_selected_providers=install_selected_providers,
install_airflow_with_constraints=True,
integration=integration,
mount_sources=mount_sources,
mysql_version=mysql_version,
package_format=package_format,
platform=platform,
postgres_version=postgres_version,
project_name=project_name,
providers_constraints_location=providers_constraints_location,
providers_constraints_mode=providers_constraints_mode,
providers_constraints_reference=providers_constraints_reference,
providers_skip_constraints=providers_skip_constraints,
pydantic=pydantic,
python=python,
quiet=quiet,
restart=restart,
run_db_tests_only=run_db_tests_only,
skip_db_tests=skip_db_tests,
skip_image_upgrade_check=skip_image_upgrade_check,
skip_environment_initialization=skip_environment_initialization,
standalone_dag_processor=standalone_dag_processor,
tty=tty,
upgrade_boto=upgrade_boto,
use_airflow_version=use_airflow_version,
use_packages_from_dist=use_packages_from_dist,
use_uv=use_uv,
uv_http_timeout=uv_http_timeout,
verbose_commands=verbose_commands,
warn_image_upgrade_needed=warn_image_upgrade_needed,
)
rebuild_or_pull_ci_image_if_needed(command_params=shell_params)
result = enter_shell(shell_params=shell_params)
fix_ownership_using_docker()
sys.exit(result.returncode) |
Enter breeze environment and start all Airflow components in a tmux session.
Compile assets if the contents of the www directory changed. | def start_airflow(
airflow_constraints_mode: str,
airflow_constraints_location: str,
airflow_constraints_reference: str,
airflow_extras: str,
airflow_skip_constraints: bool,
backend: str,
builder: str,
celery_broker: str,
celery_flower: bool,
database_isolation: bool,
db_reset: bool,
dev_mode: bool,
docker_host: str | None,
executor: str,
extra_args: tuple,
force_build: bool,
forward_credentials: bool,
github_repository: str,
image_tag: str | None,
integration: tuple[str, ...],
install_selected_providers: str,
load_default_connections: bool,
load_example_dags: bool,
mount_sources: str,
mysql_version: str,
package_format: str,
platform: str | None,
postgres_version: str,
project_name: str,
providers_constraints_location: str,
providers_constraints_mode: str,
providers_constraints_reference: str,
providers_skip_constraints: bool,
python: str,
restart: bool,
skip_assets_compilation: bool,
standalone_dag_processor: bool,
use_airflow_version: str | None,
use_packages_from_dist: bool,
use_uv: bool,
uv_http_timeout: int,
):
"""
Enter breeze environment and start all Airflow components in a tmux session.
Compile assets if the contents of the www directory changed.
"""
if dev_mode and skip_assets_compilation:
get_console().print(
"[warning]You cannot skip asset compilation in dev mode! Assets will be compiled!"
)
skip_assets_compilation = False
if use_airflow_version is None and not skip_assets_compilation:
run_compile_www_assets(dev=dev_mode, run_in_background=True, force_clean=False)
airflow_constraints_reference = _determine_constraint_branch_used(
airflow_constraints_reference, use_airflow_version
)
shell_params = ShellParams(
airflow_constraints_location=airflow_constraints_location,
airflow_constraints_mode=airflow_constraints_mode,
airflow_constraints_reference=airflow_constraints_reference,
airflow_extras=airflow_extras,
airflow_skip_constraints=airflow_skip_constraints,
backend=backend,
builder=builder,
celery_broker=celery_broker,
celery_flower=celery_flower,
database_isolation=database_isolation,
db_reset=db_reset,
dev_mode=dev_mode,
docker_host=docker_host,
executor=executor,
extra_args=extra_args,
force_build=force_build,
forward_credentials=forward_credentials,
github_repository=github_repository,
image_tag=image_tag,
integration=integration,
install_selected_providers=install_selected_providers,
install_airflow_with_constraints=True,
load_default_connections=load_default_connections,
load_example_dags=load_example_dags,
mount_sources=mount_sources,
mysql_version=mysql_version,
package_format=package_format,
platform=platform,
postgres_version=postgres_version,
project_name=project_name,
providers_constraints_location=providers_constraints_location,
providers_constraints_mode=providers_constraints_mode,
providers_constraints_reference=providers_constraints_reference,
providers_skip_constraints=providers_skip_constraints,
python=python,
restart=restart,
standalone_dag_processor=standalone_dag_processor,
start_airflow=True,
use_airflow_version=use_airflow_version,
use_packages_from_dist=use_packages_from_dist,
use_uv=use_uv,
uv_http_timeout=uv_http_timeout,
)
rebuild_or_pull_ci_image_if_needed(command_params=shell_params)
result = enter_shell(shell_params=shell_params)
fix_ownership_using_docker()
sys.exit(result.returncode) |
Build documents. | def build_docs(
builder: str,
clean_build: bool,
docs_only: bool,
github_repository: str,
include_not_ready_providers: bool,
include_removed_providers: bool,
one_pass_only: bool,
package_filter: tuple[str, ...],
package_list: str,
spellcheck_only: bool,
doc_packages: tuple[str, ...],
):
"""
Build documents.
"""
perform_environment_checks()
fix_ownership_using_docker()
cleanup_python_generated_files()
build_params = BuildCiParams(
github_repository=github_repository, python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION, builder=builder
)
rebuild_or_pull_ci_image_if_needed(command_params=build_params)
if clean_build:
docs_dir = AIRFLOW_SOURCES_ROOT / "docs"
for dir_name in ["_build", "_doctrees", "_inventory_cache", "_api"]:
for directory in docs_dir.rglob(dir_name):
get_console().print(f"[info]Removing {directory}")
shutil.rmtree(directory, ignore_errors=True)
docs_list_as_tuple: tuple[str, ...] = ()
if package_list and len(package_list):
get_console().print(f"\n[info]Populating provider list from PACKAGE_LIST env as {package_list}")
# Override doc_packages with values from PACKAGE_LIST
docs_list_as_tuple = tuple(package_list.split(","))
if doc_packages and docs_list_as_tuple:
get_console().print(
f"[warning]Both package arguments and --package-list / PACKAGE_LIST passed. "
f"Overriding to {docs_list_as_tuple}"
)
doc_packages = docs_list_as_tuple or doc_packages
doc_builder = DocBuildParams(
package_filter=package_filter,
docs_only=docs_only,
spellcheck_only=spellcheck_only,
one_pass_only=one_pass_only,
short_doc_packages=expand_all_provider_packages(
short_doc_packages=doc_packages,
include_removed=include_removed_providers,
include_not_ready=include_not_ready_providers,
),
)
cmd = "/opt/airflow/scripts/in_container/run_docs_build.sh " + " ".join(
[shlex.quote(arg) for arg in doc_builder.args_doc_builder]
)
shell_params = ShellParams(
github_repository=github_repository,
python=DEFAULT_PYTHON_MAJOR_MINOR_VERSION,
)
result = execute_command_in_shell(shell_params, project_name="docs", command=cmd)
fix_ownership_using_docker()
if result.returncode == 0:
get_console().print(
"[info]To view the built documentation, you have two options:\n\n"
"1. Start the webserver in breeze and access the built docs at "
"http://localhost:28080/docs/\n"
"2. Alternatively, you can run ./docs/start_doc_server.sh for a lighter resource option and view"
"the built docs at http://localhost:8000"
)
sys.exit(result.returncode) |
Build Production image. Include building multiple images for all or selected Python versions sequentially. | def build(
additional_airflow_extras: str | None,
additional_dev_apt_command: str | None,
additional_dev_apt_deps: str | None,
additional_dev_apt_env: str | None,
additional_pip_install_flags: str | None,
additional_python_deps: str | None,
additional_runtime_apt_command: str | None,
additional_runtime_apt_deps: str | None,
additional_runtime_apt_env: str | None,
airflow_constraints_location: str | None,
airflow_constraints_mode: str,
airflow_constraints_reference: str | None,
airflow_extras: str,
build_progress: str,
builder: str,
cleanup_context: bool,
commit_sha: str | None,
debian_version: str,
debug_resources: bool,
dev_apt_command: str | None,
dev_apt_deps: str | None,
disable_airflow_repo_cache: bool,
disable_mssql_client_installation: bool,
disable_mysql_client_installation: bool,
disable_postgres_client_installation: bool,
docker_cache: str,
docker_host: str | None,
github_repository: str,
github_token: str | None,
image_tag: str,
include_success_outputs,
install_airflow_reference: str | None,
install_airflow_version: str | None,
install_mysql_client_type: str,
install_packages_from_context: bool,
installation_method: str,
parallelism: int,
platform: str | None,
prepare_buildx_cache: bool,
push: bool,
python: str,
python_image: str | None,
python_versions: str,
run_in_parallel: bool,
runtime_apt_command: str | None,
runtime_apt_deps: str | None,
skip_cleanup: bool,
tag_as_latest: bool,
use_constraints_for_context_packages: bool,
use_uv: bool,
uv_http_timeout: int,
version_suffix_for_pypi: str,
):
"""
Build Production image. Include building multiple images for all or selected Python versions sequentially.
"""
def run_build(prod_image_params: BuildProdParams) -> None:
return_code, info = run_build_production_image(
output=None,
param_description=prod_image_params.python + prod_image_params.platform,
prod_image_params=prod_image_params,
)
if return_code != 0:
get_console().print(f"[error]Error when building image! {info}")
sys.exit(return_code)
perform_environment_checks()
check_remote_ghcr_io_commands()
base_build_params = BuildProdParams(
additional_airflow_extras=additional_airflow_extras,
additional_dev_apt_command=additional_dev_apt_command,
additional_dev_apt_deps=additional_dev_apt_deps,
additional_dev_apt_env=additional_dev_apt_env,
additional_pip_install_flags=additional_pip_install_flags,
additional_python_deps=additional_python_deps,
additional_runtime_apt_command=additional_runtime_apt_command,
additional_runtime_apt_deps=additional_runtime_apt_deps,
additional_runtime_apt_env=additional_runtime_apt_env,
airflow_constraints_location=airflow_constraints_location,
airflow_constraints_mode=airflow_constraints_mode,
airflow_constraints_reference=airflow_constraints_reference,
airflow_extras=airflow_extras,
build_progress=build_progress,
builder=builder,
cleanup_context=cleanup_context,
commit_sha=commit_sha,
debian_version=debian_version,
dev_apt_command=dev_apt_command,
dev_apt_deps=dev_apt_deps,
docker_host=docker_host,
disable_airflow_repo_cache=disable_airflow_repo_cache,
disable_mssql_client_installation=disable_mssql_client_installation,
disable_mysql_client_installation=disable_mysql_client_installation,
disable_postgres_client_installation=disable_postgres_client_installation,
docker_cache=docker_cache,
github_repository=github_repository,
github_token=github_token,
image_tag=image_tag,
install_airflow_reference=install_airflow_reference,
install_airflow_version=install_airflow_version,
install_mysql_client_type=install_mysql_client_type,
install_packages_from_context=install_packages_from_context,
installation_method=installation_method,
prepare_buildx_cache=prepare_buildx_cache,
push=push,
python=python,
python_image=python_image,
runtime_apt_command=runtime_apt_command,
runtime_apt_deps=runtime_apt_deps,
tag_as_latest=tag_as_latest,
use_constraints_for_context_packages=use_constraints_for_context_packages,
use_uv=use_uv,
uv_http_timeout=uv_http_timeout,
version_suffix_for_pypi=version_suffix_for_pypi,
)
if platform:
base_build_params.platform = platform
fix_group_permissions()
if run_in_parallel:
params_list: list[BuildProdParams] = []
if prepare_buildx_cache:
platforms_list = base_build_params.platform.split(",")
for platform in platforms_list:
build_params = deepcopy(base_build_params)
build_params.platform = platform
params_list.append(build_params)
prepare_for_building_prod_image(params=params_list[0])
run_build_in_parallel(
image_params_list=params_list,
params_description_list=platforms_list,
include_success_outputs=include_success_outputs,
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
)
else:
python_version_list = get_python_version_list(python_versions)
for python in python_version_list:
params = deepcopy(base_build_params)
params.python = python
params_list.append(params)
prepare_for_building_prod_image(params=params_list[0])
run_build_in_parallel(
image_params_list=params_list,
params_description_list=python_version_list,
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
include_success_outputs=include_success_outputs,
)
else:
prepare_for_building_prod_image(params=base_build_params)
run_build(prod_image_params=base_build_params) |
Pull and optionally verify Production images - possibly in parallel for all Python versions. | def pull_prod_image(
python: str,
run_in_parallel: bool,
parallelism: int,
skip_cleanup: bool,
debug_resources: bool,
include_success_outputs,
python_versions: str,
github_token: str,
image_tag: str,
wait_for_image: bool,
tag_as_latest: bool,
verify: bool,
github_repository: str,
extra_pytest_args: tuple,
):
"""Pull and optionally verify Production images - possibly in parallel for all Python versions."""
perform_environment_checks()
check_remote_ghcr_io_commands()
if run_in_parallel:
python_version_list = get_python_version_list(python_versions)
prod_image_params_list = [
BuildProdParams(
image_tag=image_tag,
python=python,
github_repository=github_repository,
github_token=github_token,
)
for python in python_version_list
]
run_pull_in_parallel(
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
include_success_outputs=include_success_outputs,
image_params_list=prod_image_params_list,
python_version_list=python_version_list,
verify=verify,
wait_for_image=wait_for_image,
tag_as_latest=tag_as_latest,
extra_pytest_args=extra_pytest_args if extra_pytest_args is not None else (),
)
else:
image_params = BuildProdParams(
image_tag=image_tag, python=python, github_repository=github_repository, github_token=github_token
)
return_code, info = run_pull_image(
image_params=image_params,
output=None,
wait_for_image=wait_for_image,
tag_as_latest=tag_as_latest,
poll_time_seconds=10.0,
)
if return_code != 0:
get_console().print(f"[error]There was an error when pulling PROD image: {info}[/]")
sys.exit(return_code) |
Verify Production image. | def verify(
python: str,
python_versions: str,
github_repository: str,
image_name: str,
image_tag: str | None,
pull: bool,
slim_image: bool,
github_token: str,
extra_pytest_args: tuple,
run_in_parallel: bool,
parallelism: int,
skip_cleanup: bool,
debug_resources: bool,
include_success_outputs: bool,
):
"""Verify Production image."""
perform_environment_checks()
check_remote_ghcr_io_commands()
if (pull or image_name) and run_in_parallel:
get_console().print(
"[error]You cannot use --pull,--image-name and --run-in-parallel at the same time. Exiting[/]"
)
sys.exit(1)
if run_in_parallel:
base_build_params = BuildProdParams(
python=python,
github_repository=github_repository,
image_tag=image_tag,
)
python_version_list = get_python_version_list(python_versions)
params_list: list[BuildProdParams] = []
for python in python_version_list:
build_params = deepcopy(base_build_params)
build_params.python = python
params_list.append(build_params)
run_verify_in_parallel(
image_params_list=params_list,
python_version_list=python_version_list,
extra_pytest_args=extra_pytest_args,
include_success_outputs=include_success_outputs,
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
)
else:
if image_name is None:
build_params = BuildProdParams(
python=python,
image_tag=image_tag,
github_repository=github_repository,
github_token=github_token,
)
image_name = build_params.airflow_image_name_with_tag
if pull:
check_remote_ghcr_io_commands()
command_to_run = ["docker", "pull", image_name]
run_command(command_to_run, check=True)
get_console().print(f"[info]Verifying PROD image: {image_name}[/]")
return_code, info = verify_an_image(
image_name=image_name,
output=None,
image_type="PROD",
extra_pytest_args=extra_pytest_args,
slim_image=slim_image,
)
sys.exit(return_code) |
Cleans up docker context files folder - leaving only .README.md there. | def clean_docker_context_files():
"""
Cleans up docker context files folder - leaving only .README.md there.
"""
if get_verbose() or get_dry_run():
get_console().print("[info]Cleaning docker-context-files[/]")
if get_dry_run():
return
context_files_to_delete = DOCKER_CONTEXT_DIR.rglob("*")
for file_to_delete in context_files_to_delete:
if file_to_delete.name != ".README.md":
file_to_delete.unlink(missing_ok=True) |
Quick check: if we want to install from docker-context-files, we expect some packages there; if
we don't, we don't expect them, and they might invalidate the Docker cache.
This method exits with an error if what we see is unexpected for the given operation.
:param install_packages_from_context: whether we want to install from docker-context-files | def check_docker_context_files(install_packages_from_context: bool):
"""
Quick check: if we want to install from docker-context-files, we expect some packages there; if
we don't, we don't expect them, and they might invalidate the Docker cache.
This method exits with an error if what we see is unexpected for the given operation.
:param install_packages_from_context: whether we want to install from docker-context-files
"""
context_file = DOCKER_CONTEXT_DIR.rglob("*")
any_context_files = any(
context.is_file()
and context.name not in (".README.md", ".DS_Store")
and not context.parent.name.startswith("constraints")
for context in context_file
)
if not any_context_files and install_packages_from_context:
get_console().print("[warning]\nERROR! You want to install packages from docker-context-files")
get_console().print("[warning]\n but there are no packages to install in this folder.")
sys.exit(1)
elif any_context_files and not install_packages_from_context:
get_console().print(
"[warning]\n ERROR! There are some extra files in docker-context-files except README.md"
)
get_console().print("[warning]\nAnd you did not choose --install-packages-from-context flag")
get_console().print(
"[warning]\nThis might result in unnecessary cache invalidation and long build times"
)
get_console().print("[warning]Please restart the command with --cleanup-context switch\n")
sys.exit(1) |
Builds PROD image:
* fixes group permissions for files (to improve caching when umask is 002)
* converts all the parameters received via kwargs into BuildProdParams (including cache)
* prints info about the image to build
* removes docker-context-files if requested
* performs a quick check whether the expected files are present in docker-context-files
* logs in to the docker registry on CI if the build cache is being prepared
* removes the "tag" for the previously built image so that the inline cache uses only the remote image
* constructs the docker build command to run based on the parameters passed
* runs the build command
* updates cached information that the build completed and saves checksums of all files
for quick future check if the build is needed
:param prod_image_params: PROD image parameters
:param param_description: description of the parameters
:param output: output redirection | def run_build_production_image(
prod_image_params: BuildProdParams,
param_description: str,
output: Output | None,
) -> tuple[int, str]:
"""
Builds PROD image:
* fixes group permissions for files (to improve caching when umask is 002)
* converts all the parameters received via kwargs into BuildProdParams (including cache)
* prints info about the image to build
* removes docker-context-files if requested
* performs a quick check whether the expected files are present in docker-context-files
* logs in to the docker registry on CI if the build cache is being prepared
* removes the "tag" for the previously built image so that the inline cache uses only the remote image
* constructs the docker build command to run based on the parameters passed
* runs the build command
* updates cached information that the build completed and saves checksums of all files
for quick future check if the build is needed
:param prod_image_params: PROD image parameters
:param param_description: description of the parameters
:param output: output redirection
"""
if (
prod_image_params.is_multi_platform()
and not prod_image_params.push
and not prod_image_params.prepare_buildx_cache
):
get_console(output=output).print(
"\n[red]You cannot use multi-platform build without using --push flag"
" or preparing buildx cache![/]\n"
)
return 1, "Error: building multi-platform image without --push."
get_console(output=output).print(f"\n[info]Building PROD Image for {param_description}\n")
if prod_image_params.prepare_buildx_cache:
build_command_result = build_cache(image_params=prod_image_params, output=output)
else:
env = get_docker_build_env(prod_image_params)
build_command_result = run_command(
prepare_docker_build_command(
image_params=prod_image_params,
),
cwd=AIRFLOW_SOURCES_ROOT,
check=False,
env=env,
text=True,
output=output,
)
if build_command_result.returncode == 0 and prod_image_params.tag_as_latest:
build_command_result = tag_image_as_latest(image_params=prod_image_params, output=output)
return build_command_result.returncode, f"Image build: {param_description}" |
Run generate constraints in parallel | def run_generate_constraints_in_parallel(
shell_params_list: list[ShellParams],
python_version_list: list[str],
include_success_outputs: bool,
parallelism: int,
skip_cleanup: bool,
debug_resources: bool,
):
"""Run generate constraints in parallel"""
with ci_group(f"Constraints for {python_version_list}"):
all_params = [
f"Constraints {shell_params.airflow_constraints_mode}:{shell_params.python}"
for shell_params in shell_params_list
]
with run_with_pool(
parallelism=parallelism,
all_params=all_params,
debug_resources=debug_resources,
progress_matcher=GenericRegexpProgressMatcher(
regexp=CONSTRAINT_PROGRESS_MATCHER, lines_to_search=6
),
) as (pool, outputs):
results = [
pool.apply_async(
run_generate_constraints,
kwds={
"shell_params": shell_params,
"output": outputs[index],
},
)
for index, shell_params in enumerate(shell_params_list)
]
check_async_run_results(
results=results,
success="All constraints are generated.",
outputs=outputs,
include_success_outputs=include_success_outputs,
skip_cleanup=skip_cleanup,
summarize_on_ci=SummarizeAfter.SUCCESS,
summary_start_regexp=".*Constraints generated in.*",
) |
Returns all providers in dist, optionally filtered by install_selected_providers.
:param package_format: package format to look for
:param install_selected_providers: list of providers to filter by | def get_all_providers_in_dist(package_format: str, install_selected_providers: str) -> list[str]:
"""
Returns all providers in dist, optionally filtered by install_selected_providers.
:param package_format: package format to look for
:param install_selected_providers: list of providers to filter by
"""
if package_format == "sdist":
all_found_providers = list(
_get_all_providers_in_dist(
filename_prefix=SDIST_FILENAME_PREFIX, filename_pattern=SDIST_FILENAME_PATTERN
)
)
elif package_format == "wheel":
all_found_providers = list(
_get_all_providers_in_dist(
filename_prefix=WHEEL_FILENAME_PREFIX, filename_pattern=WHEEL_FILENAME_PATTERN
)
)
else:
raise SystemExit(f"Unknown package format {package_format}")
if install_selected_providers:
filter_list = install_selected_providers.split(",")
return [provider for provider in all_found_providers if provider in filter_list]
return all_found_providers |
Run docs publishing in parallel | def run_publish_docs_in_parallel(
package_list: tuple[str, ...],
airflow_site_directory: str,
override_versioned: bool,
include_success_outputs: bool,
parallelism: int,
skip_cleanup: bool,
debug_resources: bool,
):
"""Run docs publishing in parallel"""
success_entries = []
skipped_entries = []
with ci_group("Publishing docs for packages"):
all_params = [f"Publishing docs {package_name}" for package_name in package_list]
with run_with_pool(
parallelism=parallelism,
all_params=all_params,
debug_resources=debug_resources,
progress_matcher=GenericRegexpProgressMatcher(
regexp=PUBLISHING_DOCS_PROGRESS_MATCHER, lines_to_search=6
),
) as (pool, outputs):
results = [
pool.apply_async(
run_docs_publishing,
kwds={
"package_name": package_name,
"airflow_site_directory": airflow_site_directory,
"override_versioned": override_versioned,
"output": outputs[index],
"verbose": get_verbose(),
},
)
for index, package_name in enumerate(package_list)
]
# Iterate over the results and collect success and skipped entries
for result in results:
return_code, message = result.get()
if return_code == 0:
success_entries.append(message)
else:
skipped_entries.append(message)
get_console().print("[blue]Summary:")
need_rule = False
if len(success_entries):
get_console().print("[success]Packages published:")
for entry in success_entries:
get_console().print(f"[success]{entry}")
need_rule = True
if need_rule:
get_console().rule()
if len(skipped_entries):
get_console().print("\n[warning]Packages skipped:")
for entry in skipped_entries:
get_console().print(f"[warning]{entry}") |
Publishes documentation to airflow-site. | def publish_docs(
airflow_site_directory: str,
debug_resources: bool,
doc_packages: tuple[str, ...],
include_success_outputs: bool,
include_not_ready_providers: bool,
include_removed_providers: bool,
override_versioned: bool,
package_filter: tuple[str, ...],
package_list: str,
parallelism: int,
run_in_parallel: bool,
skip_cleanup: bool,
):
"""Publishes documentation to airflow-site."""
if not os.path.isdir(airflow_site_directory):
get_console().print(
"\n[error]location pointed by airflow_site_dir is not valid. "
"Provide the path of cloned airflow-site repo\n"
)
sys.exit(1)
packages_list_as_tuple: tuple[str, ...] = ()
if package_list and len(package_list):
get_console().print(f"\n[info]Populating provider list from PACKAGE_LIST env as {package_list}")
# Override doc_packages with values from PACKAGE_LIST
packages_list_as_tuple = tuple(package_list.split(","))
if doc_packages and packages_list_as_tuple:
get_console().print(
f"[warning]Both package arguments and --package-list / PACKAGE_LIST passed. "
f"Overriding to {packages_list_as_tuple}"
)
doc_packages = packages_list_as_tuple or doc_packages
current_packages = find_matching_long_package_names(
short_packages=expand_all_provider_packages(
short_doc_packages=doc_packages,
include_removed=include_removed_providers,
include_not_ready=include_not_ready_providers,
),
filters=package_filter,
)
print(f"Publishing docs for {len(current_packages)} package(s)")
for pkg in current_packages:
print(f" - {pkg}")
print()
if run_in_parallel:
run_publish_docs_in_parallel(
package_list=current_packages,
parallelism=parallelism,
skip_cleanup=skip_cleanup,
debug_resources=debug_resources,
include_success_outputs=include_success_outputs,
airflow_site_directory=airflow_site_directory,
override_versioned=override_versioned,
)
else:
success_entries = []
skipped_entries = []
for package_name in current_packages:
return_code, message = run_docs_publishing(
package_name, airflow_site_directory, override_versioned, verbose=get_verbose(), output=None
)
if return_code == 0:
success_entries.append(message)
else:
skipped_entries.append(message)
get_console().print("[blue]Summary:")
need_rule = False
if len(success_entries):
get_console().print("[success]Packages published:")
for entry in success_entries:
get_console().print(f"[success]{entry}")
need_rule = True
if need_rule:
get_console().rule()
if len(skipped_entries):
get_console().print("\n[warning]Packages skipped:")
for entry in skipped_entries:
get_console().print(f"[warning]{entry}") |
Adds back references for documentation generated by build-docs and publish-docs | def add_back_references(
airflow_site_directory: str,
include_not_ready_providers: bool,
include_removed_providers: bool,
doc_packages: tuple[str, ...],
):
"""Adds back references for documentation generated by build-docs and publish-docs"""
site_path = Path(airflow_site_directory)
if not site_path.is_dir():
get_console().print(
"\n[error]location pointed by airflow_site_dir is not valid. "
"Provide the path of cloned airflow-site repo\n"
)
sys.exit(1)
if not doc_packages:
get_console().print(
"\n[error]You need to specify at least one package to generate back references for\n"
)
sys.exit(1)
start_generating_back_references(
site_path,
list(
expand_all_provider_packages(
short_doc_packages=doc_packages,
include_removed=include_removed_providers,
include_not_ready=include_not_ready_providers,
)
),
) |
Cleans up the old airflow providers artifacts in order to maintain
only one provider version in the release SVN folder | def clean_old_provider_artifacts(
directory: str,
):
"""Cleans up the old airflow providers artifacts in order to maintain
only one provider version in the release SVN folder"""
cleanup_suffixes = [
".tar.gz",
".tar.gz.sha512",
".tar.gz.asc",
"-py3-none-any.whl",
"-py3-none-any.whl.sha512",
"-py3-none-any.whl.asc",
]
for suffix in cleanup_suffixes:
get_console().print(f"[info]Running provider cleanup for suffix: {suffix}[/]")
package_types_dicts: dict[str, list[VersionedFile]] = defaultdict(list)
os.chdir(directory)
for file in glob.glob(f"*{suffix}"):
versioned_file = split_version_and_suffix(file, suffix)
package_types_dicts[versioned_file.type].append(versioned_file)
for package_types in package_types_dicts.values():
package_types.sort(key=operator.attrgetter("comparable_version"))
for package_types in package_types_dicts.values():
if len(package_types) == 1:
versioned_file = package_types[0]
get_console().print(
f"[success]Leaving the only version: "
f"{versioned_file.base + versioned_file.version + versioned_file.suffix}[/]"
)
# Leave only last version from each type
for versioned_file in package_types[:-1]:
get_console().print(
f"[warning]Removing {versioned_file.file_name} as they are older than remaining file: "
f"{package_types[-1].file_name}[/]"
)
command = ["svn", "rm", versioned_file.file_name]
run_command(command, check=False) |
Check if package has been prepared in dist folder. | def is_package_in_dist(dist_files: list[str], package: str) -> bool:
"""Check if package has been prepared in dist folder."""
return any(
file.startswith(
(
f'apache_airflow_providers_{package.replace(".", "_")}',
f'apache-airflow-providers-{package.replace(".", "-")}',
)
)
for file in dist_files
) |
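# A minimal usage sketch for is_package_in_dist, assuming the function above is
# in scope; the dist file names are made up for illustration.
dist_files = [
    "apache_airflow_providers_google-10.13.0.tar.gz",
    "apache_airflow_providers_apache_beam-5.6.0-py3-none-any.whl",
]
assert is_package_in_dist(dist_files, "google")
assert is_package_in_dist(dist_files, "apache.beam")  # dots map to "_" / "-"
assert not is_package_in_dist(dist_files, "amazon")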
Get suffix from package prepared in dist folder. | def get_suffix_from_package_in_dist(dist_files: list[str], package: str) -> str | None:
"""Get suffix from package prepared in dist folder."""
for file in dist_files:
if file.startswith(f'apache_airflow_providers_{package.replace(".", "_")}') and file.endswith(
".tar.gz"
):
file = file[: -len(".tar.gz")]
version = file.split("-")[-1]
match = VERSION_MATCH.match(version)
if match:
return match.group(4)
return None |
Creates URL to create the issue with title, body and labels.
:param title: issue title
:param body: issue body
:param labels: labels for the issue
:return: URL to use to create the issue | def create_github_issue_url(title: str, body: str, labels: Iterable[str]) -> str:
"""
Creates URL to create the issue with title, body and labels.
:param title: issue title
:param body: issue body
:param labels: labels for the issue
:return: URL to use to create the issue
"""
from urllib.parse import quote
quoted_labels = quote(",".join(labels))
quoted_title = quote(title)
quoted_body = quote(body)
return (
f"https://github.com/apache/airflow/issues/new?labels={quoted_labels}&"
f"title={quoted_title}&body={quoted_body}"
) |
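# Usage sketch for create_github_issue_url, assuming the function above is in
# scope; the title, body and labels are illustrative only. Spaces, commas and
# colons end up percent-encoded by urllib.parse.quote.
issue_url = create_github_issue_url(
    title="Status of testing provider packages",
    body="Please test the release candidate packages listed below.",
    labels=["testing status", "kind:meta"],
)
# -> ...issues/new?labels=testing%20status%2Ckind%3Ameta&title=...&body=...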
Enables autocompletion of breeze commands. | def autocomplete(force: bool):
"""
Enables autocompletion of breeze commands.
"""
# Determine if the shell is bash/zsh/fish. It helps to build the autocomplete path
detected_shell = os.environ.get("SHELL")
detected_shell = None if detected_shell is None else detected_shell.split(os.sep)[-1]
if detected_shell not in ["bash", "zsh", "fish"]:
get_console().print(f"\n[error] The shell {detected_shell} is not supported for autocomplete![/]\n")
sys.exit(1)
get_console().print(f"Installing {detected_shell} completion for local user")
autocomplete_path = (
AIRFLOW_SOURCES_ROOT / "dev" / "breeze" / "autocomplete" / f"{NAME}-complete-{detected_shell}.sh"
)
get_console().print(f"[info]Activation command script is available here: {autocomplete_path}[/]\n")
get_console().print(f"[warning]We need to add above script to your {detected_shell} profile.[/]\n")
given_answer = user_confirm(
"Should we proceed with modifying the script?", default_answer=Answer.NO, timeout=STANDARD_TIMEOUT
)
if given_answer == Answer.YES:
if detected_shell == "bash":
script_path = str(Path("~").expanduser() / ".bash_completion")
command_to_execute = f"source {autocomplete_path}"
write_to_shell(command_to_execute, script_path, force)
elif detected_shell == "zsh":
script_path = str(Path("~").expanduser() / ".zshrc")
command_to_execute = f"source {autocomplete_path}"
write_to_shell(command_to_execute, script_path, force)
elif detected_shell == "fish":
# Include steps for fish shell
script_path = str(Path("~").expanduser() / f".config/fish/completions/{NAME}.fish")
if os.path.exists(script_path) and not force:
get_console().print(
"\n[warning]Autocompletion is already setup. Skipping. "
"You can force autocomplete installation by adding --force/]\n"
)
else:
with open(autocomplete_path) as source_file, open(script_path, "w") as destination_file:
for line in source_file:
destination_file.write(line)
else:
# Include steps for powershell
subprocess.check_call(["powershell", "Set-ExecutionPolicy Unrestricted -Scope CurrentUser"])
script_path = (
subprocess.check_output(["powershell", "-NoProfile", "echo $profile"]).decode("utf-8").strip()
)
command_to_execute = f". {autocomplete_path}"
write_to_shell(command_to_execute=command_to_execute, script_path=script_path, force_setup=force)
elif given_answer == Answer.NO:
get_console().print(
"\nPlease follow the https://click.palletsprojects.com/en/8.1.x/shell-completion/ "
"to setup autocompletion for breeze manually if you want to use it.\n"
)
else:
sys.exit(0) |
Print information about version of apache-airflow-breeze. | def version():
"""Print information about version of apache-airflow-breeze."""
get_console().print(ASCIIART, style=ASCIIART_STYLE)
get_console().print(f"\n[info]Breeze version: {VERSION}[/]")
get_console().print(f"[info]Breeze installed from: {get_installation_airflow_sources()}[/]")
get_console().print(f"[info]Used Airflow sources : {get_used_airflow_sources()}[/]\n")
if get_verbose():
get_console().print(
f"[info]Installation sources config hash : "
f"{get_installation_sources_config_metadata_hash()}[/]"
)
get_console().print(
f"[info]Used sources config hash : {get_used_sources_setup_metadata_hash()}[/]"
)
get_console().print(
f"[info]Package config hash : {(get_package_setup_metadata_hash())}[/]\n"
) |
Show/update configuration (Python, Backend, Cheatsheet, ASCIIART). | def change_config(
python: str,
backend: str,
postgres_version: str,
mysql_version: str,
cheatsheet: bool,
asciiart: bool,
colour: bool,
):
"""
Show/update configuration (Python, Backend, Cheatsheet, ASCIIART).
"""
asciiart_file = "suppress_asciiart"
cheatsheet_file = "suppress_cheatsheet"
colour_file = "suppress_colour"
if asciiart is not None:
if asciiart:
delete_cache(asciiart_file)
get_console().print("[info]Enable ASCIIART![/]")
else:
touch_cache_file(asciiart_file)
get_console().print("[info]Disable ASCIIART![/]")
if cheatsheet is not None:
if cheatsheet:
delete_cache(cheatsheet_file)
get_console().print("[info]Enable Cheatsheet[/]")
elif cheatsheet is not None:
touch_cache_file(cheatsheet_file)
get_console().print("[info]Disable Cheatsheet[/]")
if colour is not None:
if colour:
delete_cache(colour_file)
get_console().print("[info]Enable Colour[/]")
elif colour is not None:
touch_cache_file(colour_file)
get_console().print("[info]Disable Colour[/]")
def get_status(file: str):
return "disabled" if check_if_cache_exists(file) else "enabled"
get_console().print()
get_console().print("[info]Current configuration:[/]")
get_console().print()
get_console().print(f"[info]* Python: {python}[/]")
get_console().print(f"[info]* Backend: {backend}[/]")
get_console().print()
get_console().print(f"[info]* Postgres version: {postgres_version}[/]")
get_console().print(f"[info]* MySQL version: {mysql_version}[/]")
get_console().print()
get_console().print(f"[info]* ASCIIART: {get_status(asciiart_file)}[/]")
get_console().print(f"[info]* Cheatsheet: {get_status(cheatsheet_file)}[/]")
get_console().print()
get_console().print()
get_console().print(f"[info]* Colour: {get_status(colour_file)}[/]")
get_console().print() |
MD5 hash of a dictionary. Keys are sorted and the dict is dumped via json so the hash does not depend on key order. | def dict_hash(dictionary: dict[str, Any]) -> str:
"""MD5 hash of a dictionary. Sorted and dumped via json to account for random sequence)"""
# noinspection InsecureHash
dhash = hashlib.md5()
try:
encoded = json.dumps(dictionary, sort_keys=True, default=vars).encode()
except TypeError:
get_console().print(dictionary)
raise
dhash.update(encoded)
return dhash.hexdigest() |
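# Self-contained illustration of the hashing approach used by dict_hash: sorting
# the keys before the json dump makes the digest independent of insertion order.
import hashlib
import json

a = {"python": "3.8", "backend": "postgres"}
b = {"backend": "postgres", "python": "3.8"}
digest_a = hashlib.md5(json.dumps(a, sort_keys=True, default=vars).encode()).hexdigest()
digest_b = hashlib.md5(json.dumps(b, sort_keys=True, default=vars).encode()).hexdigest()
assert digest_a == digest_b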
Run docker-compose tests. | def docker_compose_tests(
python: str,
image_name: str,
image_tag: str | None,
skip_docker_compose_deletion: bool,
github_repository: str,
extra_pytest_args: tuple,
):
"""Run docker-compose tests."""
perform_environment_checks()
if image_name is None:
build_params = BuildProdParams(
python=python, image_tag=image_tag, github_repository=github_repository
)
image_name = build_params.airflow_image_name_with_tag
get_console().print(f"[info]Running docker-compose with PROD image: {image_name}[/]")
return_code, info = run_docker_compose_tests(
image_name=image_name,
extra_pytest_args=extra_pytest_args,
skip_docker_compose_deletion=skip_docker_compose_deletion,
)
sys.exit(return_code) |
Set variable in env dict.
Priorities:
1. attribute comes first if not None
2. then environment variable if set
3. then not None default value if environment variable is None
4. if default is None, then the key is not set at all in dictionary | def _set_var(env: dict[str, str], variable: str, attribute: str | bool | None, default: str | None = None):
"""Set variable in env dict.
Priorities:
1. attribute comes first if not None
2. then environment variable if set
3. then not None default value if environment variable is None
4. if default is None, then the key is not set at all in dictionary
"""
if attribute is not None:
if isinstance(attribute, bool):
env[variable] = str(attribute).lower()
else:
env[variable] = str(attribute)
else:
os_variable_value = os.environ.get(variable)
if os_variable_value is not None:
env[variable] = os_variable_value
elif default is not None:
env[variable] = default |
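# Usage sketch for _set_var, assuming the helper above is in scope. MY_FLAG is
# set in the environment here to make the example deterministic; OTHER_VAR is
# explicitly removed so the "default is None" branch is exercised.
import os

env = {}
os.environ["MY_FLAG"] = "from-environment"
os.environ.pop("OTHER_VAR", None)
_set_var(env, "MY_FLAG", None, default="fallback")  # env["MY_FLAG"] == "from-environment"
_set_var(env, "MY_FLAG", True)                      # env["MY_FLAG"] == "true" (attribute wins)
_set_var(env, "OTHER_VAR", None, default=None)      # "OTHER_VAR" is not added at all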
Get git command to run for the current repo from the current folder.
The current directory should always be the package folder.
:param from_commit: if present - base commit from which to start the log from
:param to_commit: if present - final commit which should be the start of the log
:return: git command to run | def _get_git_log_command(from_commit: str | None = None, to_commit: str | None = None) -> list[str]:
"""Get git command to run for the current repo from the current folder.
The current directory should always be the package folder.
:param from_commit: if present - base commit from which to start the log from
:param to_commit: if present - final commit which should be the start of the log
:return: git command to run
"""
git_cmd = [
"git",
"log",
"--pretty=format:%H %h %cd %s",
"--date=short",
]
if from_commit and to_commit:
git_cmd.append(f"{from_commit}...{to_commit}")
elif from_commit:
git_cmd.append(from_commit)
elif to_commit:
raise ValueError("It makes no sense to specify to_commit without from_commit.")
git_cmd.extend(["--", "."])
return git_cmd |
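# Usage sketch for _get_git_log_command, assuming the function above is in
# scope; the tag name is illustrative.
_get_git_log_command("providers-amazon/8.16.0", "HEAD")
# -> ['git', 'log', '--pretty=format:%H %h %cd %s', '--date=short',
#     'providers-amazon/8.16.0...HEAD', '--', '.']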
Converts list of changes from its string form to markdown/RST table and array of change information
The changes are in the form of multiple lines where each line consists of:
FULL_COMMIT_HASH SHORT_COMMIT_HASH COMMIT_DATE COMMIT_SUBJECT
The subject can contain spaces but none of the preceding values can, so we can split
on spaces 3 times to break it up.
:param version: Version from which the changes are
:param changes: list of changes in a form of multiple-line string
:param base_url: base url for the commit URL
:param markdown: if True, Markdown format is used else rst
:return: formatted table + list of changes (starting from the latest) | def _convert_git_changes_to_table(
version: str, changes: str, base_url: str, markdown: bool = True
) -> tuple[str, list[Change]]:
"""
Converts list of changes from its string form to markdown/RST table and array of change information
The changes are in the form of multiple lines where each line consists of:
FULL_COMMIT_HASH SHORT_COMMIT_HASH COMMIT_DATE COMMIT_SUBJECT
The subject can contain spaces but none of the preceding values can, so we can split
on spaces 3 times to break it up.
:param version: Version from which the changes are
:param changes: list of changes in a form of multiple-line string
:param base_url: base url for the commit URL
:param markdown: if True, Markdown format is used else rst
:return: formatted table + list of changes (starting from the latest)
"""
from tabulate import tabulate
lines = changes.splitlines()
headers = ["Commit", "Committed", "Subject"]
table_data = []
changes_list: list[Change] = []
for line in lines:
if line == "":
continue
change = _get_change_from_line(line, version)
table_data.append(
(
f"[{change.short_hash}]({base_url}{change.full_hash})"
if markdown
else f"`{change.short_hash} <{base_url}{change.full_hash}>`_",
change.date,
f"`{change.message_without_backticks}`"
if markdown
else f"``{change.message_without_backticks}``",
)
)
changes_list.append(change)
header = ""
if not table_data:
return header, []
table = tabulate(table_data, headers=headers, tablefmt="pipe" if markdown else "rst")
if not markdown:
header += f"\n\n{version}\n" + "." * len(version) + "\n\n"
release_date = table_data[0][1]
header += f"Latest change: {release_date}\n\n"
return header + table, changes_list |
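# Each line produced by the git log format above looks like
# "FULL_HASH SHORT_HASH DATE SUBJECT". A self-contained illustration of why
# splitting at most three times on spaces keeps the subject intact (the line
# below is made up; the real parsing happens in _get_change_from_line, which is
# not shown in this listing):
line = "0f3b6f3a9f3e0f3b6f3a9f3e0f3b6f3a9f3e0f3b 0f3b6f3 2024-01-15 Fix typo in provider docs (#36000)"
full_hash, short_hash, date, subject = line.split(" ", maxsplit=3)
assert subject == "Fix typo in provider docs (#36000)"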
Retrieves all changes for the package.
:param provider_package_id: provider package id
:param base_branch: base branch to check changes in apache remote for changes
:param reapply_templates_only: whether to only reapply templates without bumping the version
:return tuple of:
bool (whether to proceed with update)
list of lists of changes for all past versions (might be empty)
the same list converted to string RST table | def _get_all_changes_for_package(
provider_package_id: str,
base_branch: str,
reapply_templates_only: bool,
) -> tuple[bool, list[list[Change]], str]:
"""Retrieves all changes for the package.
:param provider_package_id: provider package id
:param base_branch: base branch to check changes in apache remote for changes
:param reapply_templates_only: whether to only reapply templates without bumping the version
:return tuple of:
bool (whether to proceed with update)
list of lists of changes for all past versions (might be empty)
the same list converted to string RST table
"""
provider_details = get_provider_details(provider_package_id)
current_version = provider_details.versions[0]
current_tag_no_suffix = get_version_tag(current_version, provider_package_id)
if get_verbose():
get_console().print(f"[info]Checking if tag '{current_tag_no_suffix}' exist.")
result = run_command(
["git", "rev-parse", current_tag_no_suffix],
cwd=provider_details.source_provider_package_path,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=False,
)
if not reapply_templates_only and result.returncode == 0:
if get_verbose():
get_console().print(f"[info]The tag {current_tag_no_suffix} exists.")
# The tag already exists
result = run_command(
_get_git_log_command(f"{HTTPS_REMOTE}/{base_branch}", current_tag_no_suffix),
cwd=provider_details.source_provider_package_path,
capture_output=True,
text=True,
check=True,
)
changes = result.stdout.strip()
if changes:
provider_details = get_provider_details(provider_package_id)
doc_only_change_file = (
provider_details.source_provider_package_path / ".latest-doc-only-change.txt"
)
if doc_only_change_file.exists():
last_doc_only_hash = doc_only_change_file.read_text().strip()
try:
result = run_command(
_get_git_log_command(f"{HTTPS_REMOTE}/{base_branch}", last_doc_only_hash),
cwd=provider_details.source_provider_package_path,
capture_output=True,
text=True,
check=True,
)
changes_since_last_doc_only_check = result.stdout.strip()
if not changes_since_last_doc_only_check:
get_console().print(
"\n[warning]The provider has doc-only changes since the last release. Skipping[/]"
)
raise PrepareReleaseDocsChangesOnlyException()
if len(changes.splitlines()) > len(changes_since_last_doc_only_check.splitlines()):
# if doc-only was released after previous release - use it as starting point
# but if before - stay with the releases from last tag.
changes = changes_since_last_doc_only_check
except subprocess.CalledProcessError:
# ignore when the commit mentioned as last doc-only change is obsolete
pass
get_console().print(
f"[warning]The provider {provider_package_id} has {len(changes.splitlines())} "
f"changes since last release[/]"
)
get_console().print(f"\n[info]Provider: {provider_package_id}[/]\n")
changes_table, array_of_changes = _convert_git_changes_to_table(
f"NEXT VERSION AFTER + {provider_details.versions[0]}",
changes,
base_url="https://github.com/apache/airflow/commit/",
markdown=False,
)
_print_changes_table(changes_table)
return False, [array_of_changes], changes_table
else:
get_console().print(f"[info]No changes for {provider_package_id}")
return False, [], ""
if len(provider_details.versions) == 1:
get_console().print(
f"[info]The provider '{provider_package_id}' has never "
f"been released but it is ready to release!\n"
)
else:
get_console().print(
f"[info]New version of the '{provider_package_id}' package is ready to be released!\n"
)
next_version_tag = f"{HTTPS_REMOTE}/{base_branch}"
changes_table = ""
current_version = provider_details.versions[0]
list_of_list_of_changes: list[list[Change]] = []
for version in provider_details.versions[1:]:
version_tag = get_version_tag(version, provider_package_id)
result = run_command(
_get_git_log_command(next_version_tag, version_tag),
cwd=provider_details.source_provider_package_path,
capture_output=True,
text=True,
check=True,
)
changes = result.stdout.strip()
changes_table_for_version, array_of_changes_for_version = _convert_git_changes_to_table(
current_version, changes, base_url="https://github.com/apache/airflow/commit/", markdown=False
)
changes_table += changes_table_for_version
list_of_list_of_changes.append(array_of_changes_for_version)
next_version_tag = version_tag
current_version = version
result = run_command(
_get_git_log_command(next_version_tag),
cwd=provider_details.source_provider_package_path,
capture_output=True,
text=True,
check=True,
)
changes = result.stdout.strip()
changes_table_for_version, array_of_changes_for_version = _convert_git_changes_to_table(
current_version, changes, base_url="https://github.com/apache/airflow/commit/", markdown=False
)
changes_table += changes_table_for_version
return True, list_of_list_of_changes, changes_table |
Ask user to specify type of changes (case-insensitive).
:return: Type of change. | def _ask_the_user_for_the_type_of_changes(non_interactive: bool) -> TypeOfChange:
"""Ask user to specify type of changes (case-insensitive).
:return: Type of change.
"""
# have to do that while waiting for Python 3.11+ StrEnum [*TypeOfChange] :(
type_of_changes_array = [t.value for t in TypeOfChange]
if non_interactive:
# Simulate all possible non-terminal answers - this is useful for running on CI where we want to
# Test all possibilities.
return TypeOfChange(random.choice(type_of_changes_array))
display_answers = "/".join(type_of_changes_array) + "/q"
while True:
get_console().print(
"[warning]Type of change (d)ocumentation, (b)ugfix, (f)eature, (x)breaking "
f"change, (s)kip, (q)uit [{display_answers}]?[/] ",
end="",
)
try:
given_answer = input("").lower()
except KeyboardInterrupt:
raise PrepareReleaseDocsUserQuitException()
if given_answer == "q":
raise PrepareReleaseDocsUserQuitException()
if given_answer in type_of_changes_array:
return TypeOfChange(given_answer)
get_console().print(
f"[warning] Wrong answer given: '{given_answer}'. Should be one of {display_answers}"
) |
Updates provider version based on the type of change selected by the user
:param type_of_change: type of change selected
:param provider_package_id: provider package
:return: tuple of two bools: (with_breaking_change, maybe_with_new_features) | def _update_version_in_provider_yaml(
provider_package_id: str,
type_of_change: TypeOfChange,
) -> tuple[bool, bool]:
"""
Updates provider version based on the type of change selected by the user
:param type_of_change: type of change selected
:param provider_package_id: provider package
:return: tuple of two bools: (with_breaking_change, maybe_with_new_features)
"""
provider_details = get_provider_details(provider_package_id)
version = provider_details.versions[0]
v = semver.VersionInfo.parse(version)
with_breaking_changes = False
maybe_with_new_features = False
if type_of_change == TypeOfChange.BREAKING_CHANGE:
v = v.bump_major()
with_breaking_changes = True
# we do not know, but breaking changes may also contain new features
maybe_with_new_features = True
elif type_of_change == TypeOfChange.FEATURE:
v = v.bump_minor()
maybe_with_new_features = True
elif type_of_change == TypeOfChange.BUGFIX:
v = v.bump_patch()
provider_yaml_path = get_source_package_path(provider_package_id) / "provider.yaml"
original_text = provider_yaml_path.read_text()
new_text = re.sub(r"^versions:", f"versions:\n - {v}", original_text, 1, re.MULTILINE)
provider_yaml_path.write_text(new_text)
get_console().print(f"[special]Bumped version to {v}\n")
return with_breaking_changes, maybe_with_new_features |
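# Self-contained illustration of the semver bumps applied above (the "semver"
# package is already used by this code):
import semver

v = semver.VersionInfo.parse("2.7.1")
print(v.bump_major())  # 3.0.0 - breaking change
print(v.bump_minor())  # 2.8.0 - feature
print(v.bump_patch())  # 2.7.2 - bugfix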
Updates the source date epoch in provider.yaml so that it can then be used to generate reproducible packages.
:param provider_package_id: provider package | def _update_source_date_epoch_in_provider_yaml(
provider_package_id: str,
) -> None:
"""
Updates the source date epoch in provider.yaml so that it can then be used to generate reproducible packages.
:param provider_package_id: provider package
"""
provider_yaml_path = get_source_package_path(provider_package_id) / "provider.yaml"
original_text = provider_yaml_path.read_text()
source_date_epoch = int(time())
new_text = re.sub(
r"source-date-epoch: [0-9]*", f"source-date-epoch: {source_date_epoch}", original_text, 1
)
provider_yaml_path.write_text(new_text)
refresh_provider_metadata_with_provider_id(provider_package_id)
get_console().print(f"[special]Updated source-date-epoch to {source_date_epoch}\n") |
Returns additional info for the package.
:param provider_package_path: path for the package
:return: additional information for the path (empty string if missing) | def _get_additional_package_info(provider_package_path: Path) -> str:
"""Returns additional info for the package.
:param provider_package_path: path for the package
:return: additional information for the path (empty string if missing)
"""
additional_info_file_path = provider_package_path / "ADDITIONAL_INFO.md"
if additional_info_file_path.is_file():
additional_info = additional_info_file_path.read_text()
additional_info_lines = additional_info.splitlines(keepends=True)
result = ""
skip_comment = True
for line in additional_info_lines:
if line.startswith(" -->"):
skip_comment = False
elif not skip_comment:
result += line
return result
return "" |
Updates generated files.
This includes the readme, changes, and provider.yaml files.
:param provider_package_id: id of the package
:param reapply_templates_only: regenerate already released documentation only - without updating versions
:param base_branch: base branch to check changes in apache remote for changes
:param regenerate_missing_docs: whether to regenerate missing docs
:param non_interactive: run in non-interactive mode (useful for CI)
:return: tuple of two bools: (with_breaking_change, maybe_with_new_features) | def update_release_notes(
provider_package_id: str,
reapply_templates_only: bool,
base_branch: str,
regenerate_missing_docs: bool,
non_interactive: bool,
) -> tuple[bool, bool]:
"""Updates generated files.
This includes the readme, changes, and provider.yaml files.
:param provider_package_id: id of the package
:param reapply_templates_only: regenerate already released documentation only - without updating versions
:param base_branch: base branch to check changes in apache remote for changes
:param regenerate_missing_docs: whether to regenerate missing docs
:param non_interactive: run in non-interactive mode (useful for CI)
:return: tuple of two bools: (with_breaking_change, maybe_with_new_features)
"""
proceed, list_of_list_of_changes, changes_as_table = _get_all_changes_for_package(
provider_package_id=provider_package_id,
base_branch=base_branch,
reapply_templates_only=reapply_templates_only,
)
with_breaking_changes = False
maybe_with_new_features = False
if not reapply_templates_only:
if proceed:
if non_interactive:
answer = Answer.YES
else:
answer = user_confirm(f"Provider {provider_package_id} marked for release. Proceed?")
if answer == Answer.NO:
get_console().print(
f"\n[warning]Skipping provider: {provider_package_id} on user request![/]\n"
)
raise PrepareReleaseDocsUserSkippedException()
elif answer == Answer.QUIT:
raise PrepareReleaseDocsUserQuitException()
elif not list_of_list_of_changes:
get_console().print(
f"\n[warning]Provider: {provider_package_id} - "
f"skipping documentation generation. No changes![/]\n"
)
raise PrepareReleaseDocsNoChangesException()
else:
type_of_change = _ask_the_user_for_the_type_of_changes(non_interactive=non_interactive)
if type_of_change == TypeOfChange.SKIP:
raise PrepareReleaseDocsUserSkippedException()
get_console().print(
f"[info]Provider {provider_package_id} has been classified as:[/]\n\n"
f"[special]{TYPE_OF_CHANGE_DESCRIPTION[type_of_change]}"
)
get_console().print()
if type_of_change == TypeOfChange.DOCUMENTATION:
_mark_latest_changes_as_documentation_only(provider_package_id, list_of_list_of_changes)
elif type_of_change in [TypeOfChange.BUGFIX, TypeOfChange.FEATURE, TypeOfChange.BREAKING_CHANGE]:
with_breaking_changes, maybe_with_new_features = _update_version_in_provider_yaml(
provider_package_id=provider_package_id, type_of_change=type_of_change
)
_update_source_date_epoch_in_provider_yaml(provider_package_id)
proceed, list_of_list_of_changes, changes_as_table = _get_all_changes_for_package(
provider_package_id=provider_package_id,
base_branch=base_branch,
reapply_templates_only=reapply_templates_only,
)
else:
_update_source_date_epoch_in_provider_yaml(provider_package_id)
provider_details = get_provider_details(provider_package_id)
_verify_changelog_exists(provider_details.provider_id)
jinja_context = get_provider_documentation_jinja_context(
provider_id=provider_package_id,
with_breaking_changes=with_breaking_changes,
maybe_with_new_features=maybe_with_new_features,
)
jinja_context["DETAILED_CHANGES_RST"] = changes_as_table
jinja_context["DETAILED_CHANGES_PRESENT"] = bool(changes_as_table)
_update_changelog_rst(
jinja_context,
provider_package_id,
provider_details.documentation_provider_package_path,
regenerate_missing_docs,
)
_update_commits_rst(
jinja_context,
provider_package_id,
provider_details.documentation_provider_package_path,
regenerate_missing_docs,
)
return with_breaking_changes, maybe_with_new_features |
Finds insertion index for the specified version from the .rst changelog content.
:param content: changelog split into separate lines
:param version: version to look for
:return: A 2-tuple. The first item indicates the insertion index, while the
second is a boolean indicating whether to append (False) or insert (True)
to the changelog. | def _find_insertion_index_for_version(content: list[str], version: str) -> tuple[int, bool]:
"""Finds insertion index for the specified version from the .rst changelog content.
:param content: changelog split into separate lines
:param version: version to look for
:return: A 2-tuple. The first item indicates the insertion index, while the
second is a boolean indicating whether to append (False) or insert (True)
to the changelog.
"""
changelog_found = False
skip_next_line = False
index = 0
for index, line in enumerate(content):
if not changelog_found and line.strip() == version:
changelog_found = True
skip_next_line = True
elif not skip_next_line and line and all(char == "." for char in line):
return index - 2, changelog_found
else:
skip_next_line = False
return index, changelog_found |
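# Usage sketch for _find_insertion_index_for_version, assuming the helper above
# is in scope; the changelog lines are illustrative. The returned index points
# two lines above the dotted underline of the next (older) version heading.
changelog = [
    "Changelog",
    "---------",
    "",
    "1.2.0",
    ".....",
    "",
    "* Some change",
    "",
    "1.1.0",
    ".....",
]
assert _find_insertion_index_for_version(changelog, "1.2.0") == (7, True)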
Pre-classifies changes based on the commit message; for now this is largely guesswork.
The classification also includes the decision made by the release manager when classifying the release.
However, if we switch to semantic commits, it could be automated. This list
is supposed to be manually reviewed and re-classified by the release manager
anyway.
:param changes: list of changes
:return: list of changes classified semi-automatically to the fix/feature/breaking/other buckets | def _get_changes_classified(
changes: list[Change], with_breaking_changes: bool, maybe_with_new_features: bool
) -> ClassifiedChanges:
"""Pre-classifies changes based on commit message, it's wildly guessing now,
The classification also includes the decision made by the release manager when classifying the release.
However, if we switch to semantic commits, it could be automated. This list
is supposed to be manually reviewed and re-classified by release manager
anyway.
:param changes: list of changes
:return: list of changes classified semi-automatically to the fix/feature/breaking/other buckets
"""
classified_changes = ClassifiedChanges()
for change in changes:
# Special cases
if "bump minimum Airflow version in providers" in change.message.lower():
classified_changes.misc.append(change)
# General cases
elif "fix" in change.message.lower():
classified_changes.fixes.append(change)
elif "misc" in change.message.lower():
classified_changes.misc.append(change)
elif "add" in change.message.lower() and maybe_with_new_features:
classified_changes.features.append(change)
elif "breaking" in change.message.lower() and with_breaking_changes:
classified_changes.breaking_changes.append(change)
else:
classified_changes.other.append(change)
return classified_changes |
Internal update changelog method.
:param package_id: package id
:param base_branch: base branch to check changes in apache remote for changes
:param reapply_templates_only: only reapply templates, no changelog generation
:param with_breaking_changes: whether there are any breaking changes
:param maybe_with_new_features: whether there are any new features | def update_changelog(
package_id: str,
base_branch: str,
reapply_templates_only: bool,
with_breaking_changes: bool,
maybe_with_new_features: bool,
):
"""Internal update changelog method.
:param package_id: package id
:param base_branch: base branch to check changes in apache remote for changes
:param reapply_templates_only: only reapply templates, no changelog generation
:param with_breaking_changes: whether there are any breaking changes
:param maybe_with_new_features: whether there are any new features
"""
provider_details = get_provider_details(package_id)
jinja_context = get_provider_documentation_jinja_context(
provider_id=package_id,
with_breaking_changes=with_breaking_changes,
maybe_with_new_features=maybe_with_new_features,
)
proceed, changes, _ = _get_all_changes_for_package(
provider_package_id=package_id, base_branch=base_branch, reapply_templates_only=reapply_templates_only
)
if not proceed:
get_console().print(
f"[warning]The provider {package_id} is not being released. Skipping the package.[/]"
)
raise PrepareReleaseDocsNoChangesException()
if reapply_templates_only:
get_console().print("[info]Only reapply templates, no changelog update[/]")
else:
_generate_new_changelog(
package_id=package_id,
provider_details=provider_details,
changes=changes,
context=jinja_context,
with_breaking_changes=with_breaking_changes,
maybe_with_new_features=maybe_with_new_features,
)
get_console().print(f"\n[info]Update index.rst for {package_id}\n")
_update_index_rst(jinja_context, package_id, provider_details.documentation_provider_package_path) |
Updates min airflow version in provider yaml and __init__.py
:param provider_package_id: provider package id
:param with_breaking_changes: whether there are any breaking changes
:param maybe_with_new_features: whether there are any new features
:return: | def update_min_airflow_version(
provider_package_id: str, with_breaking_changes: bool, maybe_with_new_features: bool
):
"""Updates min airflow version in provider yaml and __init__.py
:param provider_package_id: provider package id
:param with_breaking_changes: whether there are any breaking changes
:param maybe_with_new_features: whether there are any new features
:return:
"""
provider_details = get_provider_details(provider_package_id)
if provider_details.removed:
return
jinja_context = get_provider_documentation_jinja_context(
provider_id=provider_package_id,
with_breaking_changes=with_breaking_changes,
maybe_with_new_features=maybe_with_new_features,
)
_generate_init_py_file_for_provider(
context=jinja_context,
target_path=provider_details.source_provider_package_path,
)
_replace_min_airflow_version_in_provider_yaml(
context=jinja_context, target_path=provider_details.source_provider_package_path
) |
Return True, version if the package should be skipped and False, good version suffix if not.
For RC and official releases we check if the "officially released" version exists
and skip the release if it was. This allows skipping packages that have not been
marked for release in this wave. For "dev" suffixes, we always build all packages. | def should_skip_the_package(provider_id: str, version_suffix: str) -> tuple[bool, str]:
"""Return True, version if the package should be skipped and False, good version suffix if not.
For RC and official releases we check if the "officially released" version exists
and skip the release if it was. This allows skipping packages that have not been
marked for release in this wave. For "dev" suffixes, we always build all packages.
"""
if version_suffix != "" and not version_suffix.startswith("rc"):
return False, version_suffix
if version_suffix == "":
current_tag = get_latest_provider_tag(provider_id, "")
if tag_exists_for_provider(provider_id, current_tag):
get_console().print(f"[warning]The 'final' tag {current_tag} exists. Skipping the package.[/]")
return True, version_suffix
return False, version_suffix
# version_suffix starts with "rc"
current_version = int(version_suffix[2:])
release_tag = get_latest_provider_tag(provider_id, "")
if tag_exists_for_provider(provider_id, release_tag):
get_console().print(f"[warning]The tag {release_tag} exists. Provider is released. Skipping it.[/]")
return True, ""
while True:
current_tag = get_latest_provider_tag(provider_id, f"rc{current_version}")
if tag_exists_for_provider(provider_id, current_tag):
current_version += 1
get_console().print(f"[warning]The tag {current_tag} exists. Checking rc{current_version}.[/]")
else:
return False, f"rc{current_version}" |
Writes value to cache. If asked, it can also check whether the value is allowed for the parameter and exit
instead of writing it in case the value is not allowed for that parameter.
:param param_name: name of the parameter
:param param_value: new value for the parameter
:param check_allowed_values: whether to fail if the parameter value is not allowed for that name. | def write_to_cache_file(param_name: str, param_value: str, check_allowed_values: bool = True) -> None:
"""
Writes value to cache. If asked, it can also check whether the value is allowed for the parameter and exit
instead of writing it in case the value is not allowed for that parameter.
:param param_name: name of the parameter
:param param_value: new value for the parameter
:param check_allowed_values: whether to fail if the parameter value is not allowed for that name.
"""
allowed = False
allowed_values = None
if check_allowed_values:
allowed, allowed_values = check_if_values_allowed(param_name, param_value)
if allowed or not check_allowed_values:
cache_path = Path(BUILD_CACHE_DIR, f".{param_name}")
cache_path.parent.mkdir(parents=True, exist_ok=True)
cache_path.write_text(param_value)
else:
get_console().print(f"[cyan]You have sent the {param_value} for {param_name}")
get_console().print(f"[cyan]Allowed value for the {param_name} are {allowed_values}")
get_console().print("[cyan]Provide one of the supported params. Write to cache dir failed")
sys.exit(1) |
Reads the value from cache and validates whether it is present and allowed according to current rules.
It could happen that the allowed values have been modified since the last time the cached value was set,
so this check is crucial to catch outdated values.
If the value is not set or in case the cached value stored is not currently allowed,
the default value is stored in the cache and returned instead.
:param param_name: name of the parameter
:param default_param_value: default value of the parameter
:return: Tuple informing whether the value was read from cache and the parameter value that is
set in the cache after this method returns. | def read_and_validate_value_from_cache(param_name: str, default_param_value: str) -> tuple[bool, str | None]:
"""
Reads the value from cache and validates whether it is present and allowed according to current rules.
It could happen that the allowed values have been modified since the last time the cached value was set,
so this check is crucial to catch outdated values.
If the value is not set or in case the cached value stored is not currently allowed,
the default value is stored in the cache and returned instead.
:param param_name: name of the parameter
:param default_param_value: default value of the parameter
:return: Tuple informing whether the value was read from cache and the parameter value that is
set in the cache after this method returns.
"""
is_from_cache = False
cached_value = read_from_cache_file(param_name)
if cached_value is None:
write_to_cache_file(param_name, default_param_value)
cached_value = default_param_value
else:
allowed, allowed_values = check_if_values_allowed(param_name, cached_value)
if allowed:
is_from_cache = True
else:
write_to_cache_file(param_name, default_param_value)
cached_value = default_param_value
return is_from_cache, cached_value |
Checks if parameter value is allowed by looking at global constants. | def check_if_values_allowed(param_name: str, param_value: str) -> tuple[bool, list[Any]]:
"""Checks if parameter value is allowed by looking at global constants."""
allowed = False
allowed_values = getattr(global_constants, f"ALLOWED_{param_name.upper()}S")
if param_value in allowed_values:
allowed = True
return allowed, allowed_values |
Deletes value from cache. Returns true if the delete operation happened (i.e. cache was present). | def delete_cache(param_name: str) -> bool:
"""Deletes value from cache. Returns true if the delete operation happened (i.e. cache was present)."""
deleted = False
if check_if_cache_exists(param_name):
(Path(BUILD_CACHE_DIR) / f".{param_name}").unlink()
deleted = True
return deleted |
Start the cdxgen server that is used to perform cdxgen scans of applications in a child process
:param run_in_parallel: run parallel servers
:param parallelism: parallelism to use
:param application_root_path: path where the application to scan is located | def start_cdxgen_server(application_root_path: Path, run_in_parallel: bool, parallelism: int) -> None:
"""
Start the cdxgen server that is used to perform cdxgen scans of applications in a child process
:param run_in_parallel: run parallel servers
:param parallelism: parallelism to use
:param application_root_path: path where the application to scan is located
"""
run_command(
[
"docker",
"pull",
"ghcr.io/cyclonedx/cdxgen",
],
check=True,
)
if not run_in_parallel:
fork_cdxgen_server(application_root_path)
else:
for i in range(parallelism):
fork_cdxgen_server(application_root_path, port=9091 + i)
time.sleep(1)
get_console().print("[info]Waiting for cdxgen server to start")
time.sleep(3) |
Map processes from pool to port numbers so that there is always the same port
used by the same process in the pool - effectively having one multiprocessing
process talking to the same cdxgen server
:param parallelism: parallelism to use
:param pool: pool to map ports for
:return: mapping of process name to port | def get_cdxgen_port_mapping(parallelism: int, pool: Pool) -> dict[str, int]:
"""
Map processes from pool to port numbers so that there is always the same port
used by the same process in the pool - effectively having one multiprocessing
process talking to the same cdxgen server
:param parallelism: parallelism to use
:param pool: pool to map ports for
:return: mapping of process name to port
"""
port_map: dict[str, int] = dict(pool.map(get_port_mapping, range(parallelism)))
return port_map |
Build an image with all airflow versions pre-installed in separate virtualenvs.
Image cache was built using stable main/ci tags so that the cache is not rebuilt on every
new commit to main. Tags used are:
main_ci_images_fixed_tags = {
"3.6": "latest",
"3.7": "latest",
"3.8": "e698dbfe25da10d09c5810938f586535633928a4",
"3.9": "e698dbfe25da10d09c5810938f586535633928a4",
"3.10": "e698dbfe25da10d09c5810938f586535633928a4",
"3.11": "e698dbfe25da10d09c5810938f586535633928a4",
"3.12": "e698dbfe25da10d09c5810938f586535633928a4",
} | def build_all_airflow_versions_base_image(
python_version: str,
output: Output | None,
) -> tuple[int, str]:
"""
Build an image with all airflow versions pre-installed in separate virtualenvs.
    Image cache was built using stable main/ci tags so that the cache is not rebuilt on every
    new commit to main. Tags used are:
main_ci_images_fixed_tags = {
"3.6": "latest",
"3.7": "latest",
"3.8": "e698dbfe25da10d09c5810938f586535633928a4",
"3.9": "e698dbfe25da10d09c5810938f586535633928a4",
"3.10": "e698dbfe25da10d09c5810938f586535633928a4",
"3.11": "e698dbfe25da10d09c5810938f586535633928a4",
"3.12": "e698dbfe25da10d09c5810938f586535633928a4",
}
"""
image_name = get_all_airflow_versions_image_name(python_version=python_version)
dockerfile = f"""
FROM {image_name}
RUN pip install --upgrade pip --no-cache-dir
# Prevent setting sources in PYTHONPATH to not interfere with virtualenvs
ENV USE_AIRFLOW_VERSION=none
ENV START_AIRFLOW=none
"""
compatible_airflow_versions = [
airflow_version
for airflow_version, python_versions in AIRFLOW_PYTHON_COMPATIBILITY_MATRIX.items()
if python_version in python_versions
]
for airflow_version in compatible_airflow_versions:
dockerfile += f"""
# Create the virtualenv and install the proper airflow version in it
RUN python -m venv /opt/airflow/airflow-{airflow_version} && \
/opt/airflow/airflow-{airflow_version}/bin/pip install --no-cache-dir --upgrade pip && \
/opt/airflow/airflow-{airflow_version}/bin/pip install apache-airflow=={airflow_version} \
--constraint https://raw.githubusercontent.com/apache/airflow/\
constraints-{airflow_version}/constraints-{python_version}.txt
"""
build_command = run_command(
["docker", "buildx", "build", "--cache-from", image_name, "--tag", image_name, "-"],
input=dockerfile,
text=True,
check=True,
output=output,
)
return build_command.returncode, f"All airflow image built for python {python_version}" |
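
A hedged sketch of invoking the builder above directly, outside any parallel-run machinery; the returned tuple follows the exit-code-plus-summary convention visible in the code (the Python version is illustrative):

return_code, summary = build_all_airflow_versions_base_image(python_version="3.8", output=None)
if return_code != 0:
    sys.exit(return_code)
print(summary)
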
Produces SBOM for application using cdxgen server.
:param job: Job to run
:param output: Output to use
:param port_map: map of process name to port - making sure that one process talks to one server
in case parallel processing is used
:return: tuple with exit code and output | def produce_sbom_for_application_via_cdxgen_server(
job: SbomApplicationJob, output: Output | None, port_map: dict[str, int] | None = None
) -> tuple[int, str]:
"""
Produces SBOM for application using cdxgen server.
:param job: Job to run
:param output: Output to use
    :param port_map: map of process name to port - making sure that one process talks to one server
in case parallel processing is used
:return: tuple with exit code and output
"""
if port_map is None:
port = 9090
else:
port = port_map[multiprocessing.current_process().name]
get_console(output=output).print(f"[info]Using port {port}")
return job.produce(output, port) |
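
A sketch of how the server startup, port mapping, and SBOM production above might be wired together; the jobs list and application_root_path are assumptions standing in for values prepared elsewhere:

parallelism = 4
start_cdxgen_server(application_root_path, run_in_parallel=True, parallelism=parallelism)
with Pool(parallelism) as pool:
    # Pin each worker process in the pool to its own cdxgen server port.
    port_map = get_cdxgen_port_mapping(parallelism, pool)
    async_results = [
        pool.apply_async(
            produce_sbom_for_application_via_cdxgen_server,
            kwds={"job": job, "output": None, "port_map": port_map},
        )
        for job in jobs
    ]
    for async_result in async_results:
        return_code, message = async_result.get()
        print(return_code, message)
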
If used in GitHub Actions, creates an expandable group in the GitHub Actions log.
Otherwise, displays a simple text group.
For more information, see:
https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines | def ci_group(title: str, message_type: MessageType | None = MessageType.INFO, output: Output | None = None):
"""
    If used in GitHub Actions, creates an expandable group in the GitHub Actions log.
    Otherwise, displays a simple text group.
For more information, see:
https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines
"""
global _in_ci_group
if _in_ci_group or skip_group_output():
yield
return
if os.environ.get("GITHUB_ACTIONS", "false") != "true":
if message_type is not None:
get_console(output=output).print(f"\n[{message_type.value}]{title}\n")
else:
get_console(output=output).print(f"\n{title}\n")
yield
return
_in_ci_group = True
if message_type is not None:
get_console().print(f"::group::[{message_type.value}]{title}[/]")
else:
get_console().print(f"::group::{title}")
try:
yield
finally:
get_console().print("::endgroup::")
_in_ci_group = False |
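
The body above follows the contextmanager-generator pattern, so it is presumably used via `with`; a brief, illustrative sketch (title and command are assumptions):

with ci_group("Verifying docker setup", message_type=MessageType.INFO):
    # Output produced here is folded into one expandable group on GitHub Actions,
    # or printed under a plain title when running locally.
    run_command(["docker", "version"], check=False)
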
Ask the user for confirmation.
:param message: message to display to the user (should end with the question mark)
:param timeout: time given user to answer
:param default_answer: default value returned on timeout. If no default is set, the timeout is ignored.
:param quit_allowed: whether quit answer is allowed | def user_confirm(
message: str,
timeout: float | None = None,
default_answer: Answer | None = Answer.NO,
quit_allowed: bool = True,
) -> Answer:
"""Ask the user for confirmation.
:param message: message to display to the user (should end with the question mark)
:param timeout: time given user to answer
    :param default_answer: default value returned on timeout. If no default is set, the timeout is ignored.
:param quit_allowed: whether quit answer is allowed
"""
from inputimeout import TimeoutOccurred, inputimeout
allowed_answers = "y/n/q" if quit_allowed else "y/n"
while True:
try:
force = get_forced_answer() or os.environ.get("ANSWER")
if force:
user_status = force
print(f"Forced answer for '{message}': {force}")
else:
if default_answer:
# Capitalise default answer
allowed_answers = allowed_answers.replace(
default_answer.value, default_answer.value.upper()
)
timeout_answer = default_answer.value
else:
timeout = None
timeout_answer = ""
message_prompt = f"\n{message} \nPress {allowed_answers}"
if default_answer and timeout:
message_prompt += (
f". Auto-select {timeout_answer} in {timeout} seconds "
f"(add `--answer {default_answer.value}` to avoid delay next time)"
)
message_prompt += ": "
user_status = inputimeout(
prompt=message_prompt,
timeout=timeout,
)
if user_status == "":
if default_answer:
return default_answer
else:
continue
if user_status.upper() in ["Y", "YES"]:
return Answer.YES
elif user_status.upper() in ["N", "NO"]:
return Answer.NO
elif user_status.upper() in ["Q", "QUIT"] and quit_allowed:
return Answer.QUIT
else:
print(f"Wrong answer given {user_status}. Should be one of {allowed_answers}. Try again.")
except TimeoutOccurred:
if default_answer:
return default_answer
            # timeout should only occur when default_answer is set, so this should never happen
except KeyboardInterrupt:
if quit_allowed:
return Answer.QUIT
sys.exit(1) |
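
A hedged example of calling the confirmation helper above; the question text and follow-up actions are illustrative only:

answer = user_confirm("Remove the old images?", timeout=10, default_answer=Answer.NO)
if answer == Answer.YES:
    print("Removing old images...")
elif answer == Answer.QUIT:
    sys.exit(0)
else:
    print("Keeping old images.")
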
Check if we have enough resources to run docker. This is done by running a script embedded in our image.
:param airflow_image_name: name of the airflow image to use | def check_docker_resources(airflow_image_name: str) -> RunCommandResult:
"""
    Check if we have enough resources to run docker. This is done by running a script embedded in our image.
:param airflow_image_name: name of the airflow image to use
"""
return run_command(
cmd=[
"docker",
"run",
"-t",
"--entrypoint",
"/bin/bash",
"-e",
"PYTHONDONTWRITEBYTECODE=true",
airflow_image_name,
"-c",
"python /opt/airflow/scripts/in_container/run_resource_check.py",
],
text=True,
) |
Checks if we have permission to write to the docker socket. By default, on Linux you need to add your user
to the docker group, and some new users do not realize that. We help those users by detecting the
permission problem and pointing them to the fix.
:return: True if permission is denied | def check_docker_permission_denied() -> bool:
"""
    Checks if we have permission to write to the docker socket. By default, on Linux you need to add your user
    to the docker group, and some new users do not realize that. We help those users by detecting the
    permission problem and pointing them to the fix.
:return: True if permission is denied
"""
permission_denied = False
docker_permission_command = ["docker", "info"]
command_result = run_command(
docker_permission_command,
no_output_dump_on_exception=True,
capture_output=True,
text=True,
check=False,
)
if command_result.returncode != 0:
permission_denied = True
if command_result.stdout and "Got permission denied while trying to connect" in command_result.stdout:
get_console().print(
"ERROR: You have `permission denied` error when trying to communicate with docker."
)
get_console().print(
"Most likely you need to add your user to `docker` group: \
https://docs.docker.com/ engine/install/linux-postinstall/ ."
)
return permission_denied |
Checks if docker is running. Suppresses docker's stdout and stderr output. | def check_docker_is_running():
"""
    Checks if docker is running. Suppresses docker's stdout and stderr output.
"""
response = run_command(
["docker", "info"],
no_output_dump_on_exception=True,
text=False,
capture_output=True,
check=False,
)
if response.returncode != 0:
get_console().print(
"[error]Docker is not running.[/]\n"
"[warning]Please make sure Docker is installed and running.[/]"
)
sys.exit(1) |
Checks if the docker version is as expected, including some specific modifications done by
some vendors such as Microsoft. They might have a modified version of docker-compose/docker in their
cloud. In case the docker version is wrong we continue but print a warning for the user. | def check_docker_version(quiet: bool = False):
"""
    Checks if the docker version is as expected, including some specific modifications done by
    some vendors such as Microsoft. They might have a modified version of docker-compose/docker in their
    cloud. In case the docker version is wrong we continue but print a warning for the user.
"""
permission_denied = check_docker_permission_denied()
if not permission_denied:
docker_version_command = ["docker", "version", "--format", "{{.Client.Version}}"]
docker_version = ""
docker_version_result = run_command(
docker_version_command,
no_output_dump_on_exception=True,
capture_output=True,
text=True,
check=False,
dry_run_override=False,
)
if docker_version_result.returncode == 0:
docker_version = docker_version_result.stdout.strip()
if docker_version == "":
get_console().print(
f"""
[warning]Your version of docker is unknown. If the scripts fail, please make sure to[/]
[warning]install docker at least: {MIN_DOCKER_VERSION} version.[/]
"""
)
sys.exit(1)
else:
good_version = compare_version(docker_version, MIN_DOCKER_VERSION)
if good_version:
if not quiet:
get_console().print(f"[success]Good version of Docker: {docker_version}.[/]")
else:
get_console().print(
f"""
[error]Your version of docker is too old: {docker_version}.\n[/]
[warning]Please upgrade to at least {MIN_DOCKER_VERSION}.\n[/]
You can find installation instructions here: https://docs.docker.com/engine/install/
"""
)
sys.exit(1) |
Checks if you have permissions to pull an empty image from ghcr.io.
Unfortunately, GitHub packages treat expired login as "no-access" even on
public repos. We need to detect that situation and suggest that the user log out or,
if they are in a CI environment, re-push their PR or close and reopen the PR. | def check_remote_ghcr_io_commands():
"""Checks if you have permissions to pull an empty image from ghcr.io.
Unfortunately, GitHub packages treat expired login as "no-access" even on
    public repos. We need to detect that situation and suggest that the user log out or,
    if they are in a CI environment, re-push their PR or close and reopen the PR.
"""
response = run_command(
["docker", "pull", "ghcr.io/apache/airflow-hello-world"],
no_output_dump_on_exception=True,
text=False,
capture_output=True,
check=False,
)
if response.returncode != 0:
if "no such host" in response.stderr.decode("utf-8"):
get_console().print(
"[error]\nYou seem to be offline. This command requires access to network.[/]\n"
)
sys.exit(2)
get_console().print("[error]Response:[/]\n")
get_console().print(response.stdout.decode("utf-8"))
get_console().print(response.stderr.decode("utf-8"))
if os.environ.get("CI"):
get_console().print(
"\n[error]We are extremely sorry but you've hit the rare case that the "
"credentials you got from GitHub Actions to run are expired, and we cannot do much.[/]"
"\n¯\\_(ツ)_/¯\n\n"
"[warning]You have the following options now:\n\n"
" * Close and reopen the Pull Request of yours\n"
" * Rebase or amend your commit and push your branch again\n"
" * Ask in the PR to re-run the failed job\n\n"
)
sys.exit(1)
else:
get_console().print(
"[error]\nYou seem to have expired permissions on ghcr.io.[/]\n"
"[warning]Please logout. Run this command:[/]\n\n"
" docker logout ghcr.io\n\n"
)
sys.exit(1) |
Checks if the docker compose version is as expected.
This includes specific modifications done by some vendors such as Microsoft.
They might have a modified version of docker-compose/docker in their cloud. In
case the docker compose version is wrong, we continue but print a
warning for the user. | def check_docker_compose_version(quiet: bool = False):
"""Checks if the docker compose version is as expected.
This includes specific modifications done by some vendors such as Microsoft.
    They might have a modified version of docker-compose/docker in their cloud. In
    case the docker compose version is wrong, we continue but print a
warning for the user.
"""
version_pattern = re.compile(r"(\d+)\.(\d+)\.(\d+)")
docker_compose_version_command = ["docker", "compose", "version"]
try:
docker_compose_version_result = run_command(
docker_compose_version_command,
no_output_dump_on_exception=True,
capture_output=True,
text=True,
dry_run_override=False,
)
except Exception:
get_console().print(
"[error]You either do not have docker-composer or have docker-compose v1 installed.[/]\n"
"[warning]Breeze does not support docker-compose v1 any more as it has been replaced by v2.[/]\n"
"Follow https://docs.docker.com/compose/migrate/ to migrate to v2"
)
sys.exit(1)
if docker_compose_version_result.returncode == 0:
docker_compose_version = docker_compose_version_result.stdout
version_extracted = version_pattern.search(docker_compose_version)
if version_extracted is not None:
docker_compose_version = ".".join(version_extracted.groups())
good_version = compare_version(docker_compose_version, MIN_DOCKER_COMPOSE_VERSION)
if good_version:
if not quiet:
get_console().print(
f"[success]Good version of docker-compose: {docker_compose_version}[/]"
)
else:
get_console().print(
f"""
[error]You have too old version of docker-compose: {docker_compose_version}!\n[/]
[warning]At least {MIN_DOCKER_COMPOSE_VERSION} needed! Please upgrade!\n[/]
See https://docs.docker.com/compose/install/ for installation instructions.\n
                Make sure the docker-compose you install comes first on your PATH.\n
"""
)
sys.exit(1)
else:
get_console().print(
f"""
[error]Unknown docker-compose version.[/]
[warning]At least {MIN_DOCKER_COMPOSE_VERSION} needed! Please upgrade!\n[/]
See https://docs.docker.com/compose/install/ for installation instructions.\n
                Make sure the docker-compose you install comes first on your PATH.\n
"""
)
sys.exit(1) |
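
The checks above are typically chained before any docker-based command runs; a minimal sketch of such a preflight sequence (the ordering is an assumption, not taken from this excerpt):

def preflight_docker_checks(quiet: bool = True) -> None:
    # Fail fast if the daemon is unreachable, then verify client and compose versions.
    check_docker_is_running()
    check_docker_version(quiet=quiet)
    check_docker_compose_version(quiet=quiet)
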
Constructs docker build_cache command based on the parameters passed.
:param image_params: parameters of the image
:return: Command to run as list of string | def prepare_docker_build_cache_command(
image_params: CommonBuildParams,
) -> list[str]:
"""
Constructs docker build_cache command based on the parameters passed.
:param image_params: parameters of the image
:return: Command to run as list of string
"""
final_command = []
final_command.extend(["docker"])
final_command.extend(
["buildx", "build", "--builder", get_and_use_docker_context(image_params.builder), "--progress=auto"]
)
final_command.extend(image_params.common_docker_build_flags)
final_command.extend(["--pull"])
final_command.extend(image_params.prepare_arguments_for_docker_build_command())
final_command.extend(["--target", "main", "."])
final_command.extend(
["-f", "Dockerfile" if isinstance(image_params, BuildProdParams) else "Dockerfile.ci"]
)
final_command.extend(["--platform", image_params.platform])
final_command.extend(
[f"--cache-to=type=registry,ref={image_params.get_cache(image_params.platform)},mode=max"]
)
return final_command |
Prepare build command for docker build. Depending on whether we have buildx plugin installed or not,
and whether we run cache preparation, there might be different results:
* if buildx plugin is installed - `docker buildx` command is returned - using regular or cache builder
depending on whether we build regular image or cache
* if no buildx plugin is installed, and we do not prepare cache, regular docker `build` command is used.
* if no buildx plugin is installed, and we prepare cache - we fail. Cache can only be done with buildx
:param image_params: parameters of the image
:return: command to use as docker build command | def prepare_base_build_command(image_params: CommonBuildParams) -> list[str]:
"""
Prepare build command for docker build. Depending on whether we have buildx plugin installed or not,
and whether we run cache preparation, there might be different results:
* if buildx plugin is installed - `docker buildx` command is returned - using regular or cache builder
depending on whether we build regular image or cache
* if no buildx plugin is installed, and we do not prepare cache, regular docker `build` command is used.
* if no buildx plugin is installed, and we prepare cache - we fail. Cache can only be done with buildx
:param image_params: parameters of the image
:return: command to use as docker build command
"""
build_command_param = []
is_buildx_available = check_if_buildx_plugin_installed()
if is_buildx_available:
build_command_param.extend(
[
"buildx",
"build",
"--push" if image_params.push else "--load",
]
)
if not image_params.docker_host:
builder = get_and_use_docker_context(image_params.builder)
build_command_param.extend(
[
"--builder",
builder,
]
)
if builder != "default":
build_command_param.append("--load")
else:
build_command_param.append("build")
return build_command_param |
Constructs docker build command based on the parameters passed.
:param image_params: parameters of the image
:return: Command to run as list of string | def prepare_docker_build_command(
image_params: CommonBuildParams,
) -> list[str]:
"""
Constructs docker build command based on the parameters passed.
:param image_params: parameters of the image
:return: Command to run as list of string
"""
build_command = prepare_base_build_command(
image_params=image_params,
)
final_command = []
final_command.extend(["docker"])
final_command.extend(build_command)
final_command.extend(image_params.common_docker_build_flags)
final_command.extend(["--pull"])
final_command.extend(image_params.prepare_arguments_for_docker_build_command())
final_command.extend(["-t", image_params.airflow_image_name_with_tag, "--target", "main", "."])
final_command.extend(
["-f", "Dockerfile" if isinstance(image_params, BuildProdParams) else "Dockerfile.ci"]
)
final_command.extend(["--platform", image_params.platform])
return final_command |
Constructs docker push command based on the parameters passed.
:param image_params: parameters of the image
:return: Command to run as list of string | def construct_docker_push_command(
image_params: CommonBuildParams,
) -> list[str]:
"""
Constructs docker push command based on the parameters passed.
:param image_params: parameters of the image
:return: Command to run as list of string
"""
return ["docker", "push", image_params.airflow_image_name_with_tag] |
Set value of name parameter to default (indexed by name) if not set.
:param env: dictionary where to set the parameter
:param name: name of parameter
:param default: default value | def set_value_to_default_if_not_set(env: dict[str, str], name: str, default: str):
"""Set value of name parameter to default (indexed by name) if not set.
:param env: dictionary where to set the parameter
:param name: name of parameter
:param default: default value
"""
if env.get(name) is None:
env[name] = os.environ.get(name, default) |
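
A small illustration of the helper above (the variable name and default are illustrative):

env: dict[str, str] = {}
set_value_to_default_if_not_set(env, "CELERY_FLOWER", "false")
# env["CELERY_FLOWER"] now holds the OS environment value if present, otherwise "false".
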
Prepare broker url for celery executor | def prepare_broker_url(params, env_variables):
"""Prepare broker url for celery executor"""
urls = env_variables["CELERY_BROKER_URLS"].split(",")
url_map = {
ALLOWED_CELERY_BROKERS[0]: urls[0],
ALLOWED_CELERY_BROKERS[1]: urls[1],
}
if getattr(params, "celery_broker", None) and params.celery_broker in params.celery_broker in url_map:
env_variables["AIRFLOW__CELERY__BROKER_URL"] = url_map[params.celery_broker] |
Checks if the user has executable permissions on the entrypoints in the checked-out airflow repository. | def check_executable_entrypoint_permissions(quiet: bool = False):
"""
    Checks if the user has executable permissions on the entrypoints in the checked-out airflow repository.
"""
for entrypoint in SCRIPTS_DOCKER_DIR.glob("entrypoint*.sh"):
if get_verbose() and not quiet:
get_console().print(f"[info]Checking executable permissions on {entrypoint.as_posix()}[/]")
if not os.access(entrypoint.as_posix(), os.X_OK):
get_console().print(
f"[error]You do not have executable permissions on {entrypoint}[/]\n"
f"You likely checked out airflow repo on a filesystem that does not support executable "
f"permissions (for example on a Windows filesystem that is mapped to Linux VM). Airflow "
f"repository should only be checked out on a filesystem that is POSIX compliant."
)
sys.exit(1)
if not quiet:
get_console().print("[success]Executable permissions on entrypoints are OK[/]") |
Removes specified docker networks. If no networks are specified, it removes all unused networks.
Errors are ignored (not even printed in the output), so you can safely call it without checking
if the networks exist.
:param networks: list of networks to remove | def remove_docker_networks(networks: list[str] | None = None) -> None:
"""
Removes specified docker networks. If no networks are specified, it removes all unused networks.
Errors are ignored (not even printed in the output), so you can safely call it without checking
if the networks exist.
:param networks: list of networks to remove
"""
if networks is None:
run_command(
["docker", "network", "prune", "-f"],
check=False,
stderr=DEVNULL,
)
else:
for network in networks:
run_command(
["docker", "network", "rm", network],
check=False,
stderr=DEVNULL,
) |
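
Usage is straightforward given the signature above; both forms below are valid (the network name is illustrative):

remove_docker_networks()  # prune all unused networks
remove_docker_networks(["docker-compose_default"])  # remove a specific network, ignoring errors
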