# File: airflow-main/airflow/www/extensions/init_appbuilder_links.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from airflow.configuration import conf
from airflow.utils.docs import get_docs_url
def init_appbuilder_links(app):
"""Add links to the navbar."""
appbuilder = app.appbuilder
appbuilder.add_link(name="DAGs", href="Airflow.index")
appbuilder.menu.menu.insert(0, appbuilder.menu.menu.pop()) # Place in the first menu slot
appbuilder.add_link(name="Cluster Activity", href="Airflow.cluster_activity")
appbuilder.menu.menu.insert(1, appbuilder.menu.menu.pop()) # Place in the second menu slot
appbuilder.add_link(name="Datasets", href="Airflow.datasets")
appbuilder.menu.menu.insert(2, appbuilder.menu.menu.pop()) # Place in the third menu slot
# Docs links
appbuilder.add_link(name="Documentation", label="Documentation", href=get_docs_url(), category="Docs")
appbuilder.add_link(
name="Documentation", label="Airflow Website", href="https://airflow.apache.org", category="Docs"
)
appbuilder.add_link(
name="Documentation", label="GitHub Repo", href="https://github.com/apache/airflow", category="Docs"
)
if conf.getboolean("webserver", "enable_swagger_ui", fallback=True):
appbuilder.add_link(
name="Documentation",
label="REST API Reference (Swagger UI)",
href="/api/v1./api/v1_swagger_ui_index",
category="Docs",
)
appbuilder.add_link(
name="Documentation", label="REST API Reference (Redoc)", href="RedocView.redoc", category="Docs"
)
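
# --- Editor's note (illustrative, not part of the Airflow source) ---
# add_link appends the new item to the end of appbuilder.menu.menu, so the
# pop()/insert() pairs above are a plain-list reordering idiom: pop the
# just-appended item off the end and re-insert it at the desired index.
# A minimal standalone equivalent, assuming a simple list:
#     menu = ["Docs", "Admin", "new_link"]
#     menu.insert(0, menu.pop())  # -> ["new_link", "Docs", "Admin"]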

# File: airflow-main/airflow/www/extensions/init_wsgi_middlewares.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING, Iterable
from urllib.parse import urlsplit
from flask import Flask
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from werkzeug.middleware.proxy_fix import ProxyFix
from airflow.configuration import conf
from airflow.exceptions import AirflowConfigException
if TYPE_CHECKING:
from _typeshed.wsgi import StartResponse, WSGIEnvironment
def _root_app(env: WSGIEnvironment, resp: StartResponse) -> Iterable[bytes]:
resp("404 Not Found", [("Content-Type", "text/plain")])
return [b"Apache Airflow is not at this location"]
def init_wsgi_middleware(flask_app: Flask) -> None:
"""Handle X-Forwarded-* headers and base_url support."""
webserver_base_url = conf.get_mandatory_value("webserver", "BASE_URL", fallback="")
if webserver_base_url.endswith("/"):
raise AirflowConfigException("webserver.base_url conf cannot have a trailing slash.")
# Apply DispatcherMiddleware
base_url = urlsplit(webserver_base_url)[2]
if not base_url or base_url == "/":
base_url = ""
if base_url:
wsgi_app = DispatcherMiddleware(_root_app, mounts={base_url: flask_app.wsgi_app})
flask_app.wsgi_app = wsgi_app # type: ignore[assignment]
# Apply ProxyFix middleware
if conf.getboolean("webserver", "ENABLE_PROXY_FIX"):
flask_app.wsgi_app = ProxyFix( # type: ignore
flask_app.wsgi_app,
x_for=conf.getint("webserver", "PROXY_FIX_X_FOR", fallback=1),
x_proto=conf.getint("webserver", "PROXY_FIX_X_PROTO", fallback=1),
x_host=conf.getint("webserver", "PROXY_FIX_X_HOST", fallback=1),
x_port=conf.getint("webserver", "PROXY_FIX_X_PORT", fallback=1),
x_prefix=conf.getint("webserver", "PROXY_FIX_X_PREFIX", fallback=1),
)
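
# --- Editor's illustrative sketch (not part of the Airflow source) ---
# A minimal, self-contained demonstration of the DispatcherMiddleware pattern
# used above, assuming a hypothetical BASE_URL of http://example.com/airflow:
# the Flask app is mounted under the /airflow prefix, and every request
# outside that prefix falls through to a bare 404 WSGI app, just as requests
# fall through to _root_app here.
from flask import Flask
from werkzeug.middleware.dispatcher import DispatcherMiddleware

demo_app = Flask(__name__)

def _demo_root(environ, start_response):
    # Answers every request that does not match the mounted prefix.
    start_response("404 Not Found", [("Content-Type", "text/plain")])
    return [b"nothing mounted here"]

# Capture the original WSGI callable in mounts, then swap in the dispatcher.
demo_app.wsgi_app = DispatcherMiddleware(_demo_root, mounts={"/airflow": demo_app.wsgi_app})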

# File: airflow-main/airflow/www/extensions/init_manifest_files.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json
import os
from flask import url_for
def configure_manifest_files(app):
"""Loads the manifest file and register the `url_for_asset_` template tag.
:param app:
"""
manifest = {}
def parse_manifest_json():
try:
manifest_file = os.path.join(os.path.dirname(__file__), os.pardir, "static/dist/manifest.json")
with open(manifest_file) as file:
manifest.update(json.load(file))
for source, target in manifest.copy().items():
manifest[source] = os.path.join("dist", target)
except Exception:
print("Please make sure to build the frontend in static/ directory and restart the server")
def get_asset_url(filename):
if app.debug:
parse_manifest_json()
return url_for("static", filename=manifest.get(filename, filename))
parse_manifest_json()
@app.context_processor
def get_url_for_asset():
"""Template tag to return the asset URL.
WebPack renders the assets after minification and modification under the
static/dist folder. This template tag reads the asset name in
``manifest.json`` and returns the appropriate file.
"""
return dict(url_for_asset=get_asset_url)
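
# --- Editor's note (illustrative, not part of the Airflow source) ---
# How the context processor above is consumed: templates call the injected
# url_for_asset() with a logical asset name, and the manifest lookup swaps in
# the hashed, cache-busted filename that webpack emitted. In a Jinja template:
#     <script src="{{ url_for_asset('main.js') }}"></script>
# renders roughly /static/dist/main.<hash>.js when manifest.json maps
# "main.js" to "main.<hash>.js" ("main.js" is a hypothetical asset name).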

# File: airflow-main/airflow/www/extensions/init_dagbag.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
from airflow.models import DagBag
from airflow.settings import DAGS_FOLDER
def init_dagbag(app):
"""
Create global DagBag for webserver and API. To access it use
``flask.current_app.dag_bag``.
"""
if os.environ.get("SKIP_DAGS_PARSING") == "True":
app.dag_bag = DagBag(os.devnull, include_examples=False)
else:
app.dag_bag = DagBag(DAGS_FOLDER, read_dags_from_db=True)
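
# --- Editor's note (illustrative, not part of the Airflow source) ---
# Consuming the DagBag created above from inside a request handler, as the
# docstring suggests ("example_dag" is a hypothetical DAG id):
#     from flask import current_app
#     dag = current_app.dag_bag.get_dag("example_dag")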

# File: airflow-main/airflow/www/extensions/__init__.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# File: airflow-main/airflow/www/extensions/init_jinja_globals.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
import pendulum
import airflow
from airflow.configuration import auth_manager, conf
from airflow.settings import IS_K8S_OR_K8SCELERY_EXECUTOR, STATE_COLORS
from airflow.utils.net import get_hostname
from airflow.utils.platform import get_airflow_git_version
def init_jinja_globals(app):
"""Add extra globals variable to Jinja context."""
server_timezone = conf.get("core", "default_timezone")
if server_timezone == "system":
server_timezone = pendulum.local_timezone().name
elif server_timezone == "utc":
server_timezone = "UTC"
default_ui_timezone = conf.get("webserver", "default_ui_timezone")
if default_ui_timezone == "system":
default_ui_timezone = pendulum.local_timezone().name
elif default_ui_timezone == "utc":
default_ui_timezone = "UTC"
if not default_ui_timezone:
default_ui_timezone = server_timezone
expose_hostname = conf.getboolean("webserver", "EXPOSE_HOSTNAME")
hostname = get_hostname() if expose_hostname else "redact"
try:
airflow_version = airflow.__version__
except Exception as e:
airflow_version = None
logging.error(e)
git_version = get_airflow_git_version()
def prepare_jinja_globals():
extra_globals = {
"server_timezone": server_timezone,
"default_ui_timezone": default_ui_timezone,
"hostname": hostname,
"navbar_color": conf.get("webserver", "NAVBAR_COLOR"),
"log_fetch_delay_sec": conf.getint("webserver", "log_fetch_delay_sec", fallback=2),
"log_auto_tailing_offset": conf.getint("webserver", "log_auto_tailing_offset", fallback=30),
"log_animation_speed": conf.getint("webserver", "log_animation_speed", fallback=1000),
"state_color_mapping": STATE_COLORS,
"airflow_version": airflow_version,
"git_version": git_version,
"k8s_or_k8scelery_executor": IS_K8S_OR_K8SCELERY_EXECUTOR,
"rest_api_enabled": False,
"auth_manager": auth_manager,
"config_test_connection": conf.get("core", "test_connection", fallback="Disabled"),
}
backends = conf.get("api", "auth_backends")
if len(backends) > 0 and backends.split(",")[0].strip() != "airflow.api.auth.backend.deny_all":
extra_globals["rest_api_enabled"] = True
if "analytics_tool" in conf.getsection("webserver"):
extra_globals.update(
{
"analytics_tool": conf.get("webserver", "ANALYTICS_TOOL"),
"analytics_id": conf.get("webserver", "ANALYTICS_ID"),
}
)
return extra_globals
app.context_processor(prepare_jinja_globals)
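
# --- Editor's note (illustrative, not part of the Airflow source) ---
# Because prepare_jinja_globals is registered as a context processor, every
# template render sees these keys as plain globals, e.g. in a Jinja template:
#     Airflow {{ airflow_version }} on {{ hostname }}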

# File: airflow-main/airflow/www/extensions/init_security.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from importlib import import_module
from flask import g, redirect, url_for
from flask_login import logout_user
from airflow.configuration import auth_manager, conf
from airflow.exceptions import AirflowConfigException, AirflowException
log = logging.getLogger(__name__)
def init_xframe_protection(app):
"""
Add the X-Frame-Options header. Use it to avoid click-jacking attacks by ensuring the site's content is not
embedded into other sites.
See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options
"""
x_frame_enabled = conf.getboolean("webserver", "X_FRAME_ENABLED", fallback=True)
if x_frame_enabled:
return
def apply_caching(response):
response.headers["X-Frame-Options"] = "DENY"
return response
app.after_request(apply_caching)
def init_api_experimental_auth(app):
"""Loads authentication backends."""
auth_backends = "airflow.api.auth.backend.default"
try:
auth_backends = conf.get("api", "auth_backends")
except AirflowConfigException:
pass
app.api_auth = []
for backend in auth_backends.split(","):
try:
auth = import_module(backend.strip())
auth.init_app(app)
app.api_auth.append(auth)
except ImportError as err:
log.critical("Cannot import %s for API authentication due to: %s", backend, err)
raise AirflowException(err)
def init_check_user_active(app):
@app.before_request
def check_user_active():
if auth_manager.is_logged_in() and not g.user.is_active:
logout_user()
return redirect(url_for(app.appbuilder.sm.auth_view.endpoint + ".login"))
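
# --- Editor's note (illustrative, not part of the Airflow source) ---
# The loader in init_api_experimental_auth only requires each module listed in
# [api] auth_backends to expose an init_app(app) callable (Airflow's built-in
# backends also provide a requires_authentication decorator). A minimal
# hypothetical backend module could therefore look like:
#     def init_app(app):
#         pass  # set up whatever state the backend needs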

# File: airflow-main/airflow/www/extensions/init_appbuilder.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# mypy: disable-error-code=var-annotated
from __future__ import annotations
import logging
from functools import reduce
from flask import Blueprint, current_app, url_for
from flask_appbuilder import BaseView, __version__
from flask_appbuilder.babel.manager import BabelManager
from flask_appbuilder.const import (
LOGMSG_ERR_FAB_ADD_PERMISSION_MENU,
LOGMSG_ERR_FAB_ADD_PERMISSION_VIEW,
LOGMSG_ERR_FAB_ADDON_IMPORT,
LOGMSG_ERR_FAB_ADDON_PROCESS,
LOGMSG_INF_FAB_ADD_VIEW,
LOGMSG_INF_FAB_ADDON_ADDED,
LOGMSG_WAR_FAB_VIEW_EXISTS,
)
from flask_appbuilder.filters import TemplateFilters
from flask_appbuilder.menu import Menu
from flask_appbuilder.security.manager import BaseSecurityManager
from flask_appbuilder.views import IndexView, UtilView
from sqlalchemy.orm import Session
from airflow import settings
from airflow.configuration import conf
# This product contains a modified portion of 'Flask App Builder' developed by Daniel Vaz Gaspar.
# (https://github.com/dpgaspar/Flask-AppBuilder).
# Copyright 2013, Daniel Vaz Gaspar
# This module contains code imported from FlaskAppbuilder, so let's use _its_ logger name
log = logging.getLogger("flask_appbuilder.base")
def dynamic_class_import(class_path):
"""
Will dynamically import a class from a string path.
:param class_path: string with class path
:return: class
"""
# Split first occurrence of path
try:
tmp = class_path.split(".")
module_path = ".".join(tmp[0:-1])
package = __import__(module_path)
return reduce(getattr, tmp[1:], package)
except Exception as e:
log.exception(e)
log.error(LOGMSG_ERR_FAB_ADDON_IMPORT.format(class_path, e))
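
# --- Editor's note (illustrative, not part of the Airflow source) ---
# Example: dynamic_class_import("flask_appbuilder.menu.Menu") imports the
# flask_appbuilder.menu module and walks the remaining attributes, returning
# the Menu class; on failure it logs the error and implicitly returns None,
# which is why callers below guard with `if addon_class:`.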
class AirflowAppBuilder:
"""
This is the base class for all the framework.
This is where you will register all your views
and create the menu structure.
Will hold your flask app object, all your views, and security classes.
Initialize your application like this for SQLAlchemy::
from flask import Flask
from flask_appbuilder import SQLA, AppBuilder
app = Flask(__name__)
app.config.from_object('config')
db = SQLA(app)
appbuilder = AppBuilder(app, db.session)
When using MongoEngine::
from flask import Flask
from flask_appbuilder import AppBuilder
from flask_appbuilder.security.mongoengine.manager import SecurityManager
from flask_mongoengine import MongoEngine
app = Flask(__name__)
app.config.from_object('config')
dbmongo = MongoEngine(app)
appbuilder = AppBuilder(app, security_manager_class=SecurityManager)
You can also create everything as an application factory.
"""
baseviews: list[BaseView | Session] = []
security_manager_class = None
# Flask app
app = None
# Database Session
session = None
# Security Manager Class
sm: BaseSecurityManager
# Babel Manager Class
bm = None
# dict with addon name as key and instantiated class as value
addon_managers: dict
# temporary list that holds the addon_managers config key
_addon_managers: list
menu = None
indexview = None
static_folder = None
static_url_path = None
template_filters = None
def __init__(
self,
app=None,
session: Session | None = None,
menu=None,
indexview=None,
base_template="airflow/main.html",
static_folder="static/appbuilder",
static_url_path="/appbuilder",
security_manager_class=None,
update_perms=conf.getboolean("webserver", "UPDATE_FAB_PERMS"),
auth_rate_limited=conf.getboolean("webserver", "AUTH_RATE_LIMITED", fallback=True),
auth_rate_limit=conf.get("webserver", "AUTH_RATE_LIMIT", fallback="5 per 40 second"),
):
"""
App-builder constructor.
:param app:
The flask app object
:param session:
The SQLAlchemy session object
:param menu:
optional, a previous constructed menu
:param indexview:
optional, your customized indexview
:param static_folder:
optional, your override for the global static folder
:param static_url_path:
optional, your override for the global static url path
:param security_manager_class:
optional, pass your own security manager class
:param update_perms:
optional, update permissions flag (Boolean); you can also use the
FAB_UPDATE_PERMS config key
:param auth_rate_limited:
optional, rate limit authentication attempts if set to True (defaults to True)
:param auth_rate_limit:
optional, rate limit authentication attempts configuration (defaults to "5 per 40 second")
"""
self.baseviews = []
self._addon_managers = []
self.addon_managers = {}
self.menu = menu
self.base_template = base_template
self.security_manager_class = security_manager_class
self.indexview = indexview
self.static_folder = static_folder
self.static_url_path = static_url_path
self.app = app
self.update_perms = update_perms
self.auth_rate_limited = auth_rate_limited
self.auth_rate_limit = auth_rate_limit
if app is not None:
self.init_app(app, session)
def init_app(self, app, session):
"""
Will initialize the Flask app, supporting the app factory pattern.
:param app:
:param session: The SQLAlchemy session
"""
app.config.setdefault("APP_NAME", "F.A.B.")
app.config.setdefault("APP_THEME", "")
app.config.setdefault("APP_ICON", "")
app.config.setdefault("LANGUAGES", {"en": {"flag": "gb", "name": "English"}})
app.config.setdefault("ADDON_MANAGERS", [])
app.config.setdefault("RATELIMIT_ENABLED", self.auth_rate_limited)
app.config.setdefault("FAB_API_MAX_PAGE_SIZE", 100)
app.config.setdefault("FAB_BASE_TEMPLATE", self.base_template)
app.config.setdefault("FAB_STATIC_FOLDER", self.static_folder)
app.config.setdefault("FAB_STATIC_URL_PATH", self.static_url_path)
app.config.setdefault("AUTH_RATE_LIMITED", self.auth_rate_limited)
app.config.setdefault("AUTH_RATE_LIMIT", self.auth_rate_limit)
self.app = app
self.base_template = app.config.get("FAB_BASE_TEMPLATE", self.base_template)
self.static_folder = app.config.get("FAB_STATIC_FOLDER", self.static_folder)
self.static_url_path = app.config.get("FAB_STATIC_URL_PATH", self.static_url_path)
_index_view = app.config.get("FAB_INDEX_VIEW", None)
if _index_view is not None:
self.indexview = dynamic_class_import(_index_view)
else:
self.indexview = self.indexview or IndexView
_menu = app.config.get("FAB_MENU", None)
if _menu is not None:
self.menu = dynamic_class_import(_menu)
else:
self.menu = self.menu or Menu()
if self.update_perms: # constructor value defaults to True; an explicit False takes precedence over the config
self.update_perms = app.config.get("FAB_UPDATE_PERMS", True)
_security_manager_class_name = app.config.get("FAB_SECURITY_MANAGER_CLASS", None)
if _security_manager_class_name is not None:
self.security_manager_class = dynamic_class_import(_security_manager_class_name)
if self.security_manager_class is None:
from flask_appbuilder.security.sqla.manager import SecurityManager
self.security_manager_class = SecurityManager
self._addon_managers = app.config["ADDON_MANAGERS"]
self.session = session
self.sm = self.security_manager_class(self)
self.bm = BabelManager(self)
self._add_global_static()
self._add_global_filters()
app.before_request(self.sm.before_request)
self._add_admin_views()
self._add_addon_views()
if self.app:
self._add_menu_permissions()
else:
self.post_init()
self._init_extension(app)
self._swap_url_filter()
def _init_extension(self, app):
app.appbuilder = self
if not hasattr(app, "extensions"):
app.extensions = {}
app.extensions["appbuilder"] = self
def _swap_url_filter(self):
"""
Use our url filtering util function so there is consistency between
FAB and Airflow routes.
"""
from flask_appbuilder.security import views as fab_sec_views
from airflow.www.views import get_safe_url
fab_sec_views.get_safe_redirect = get_safe_url
def post_init(self):
for baseview in self.baseviews:
# instantiate the views and add session
self._check_and_init(baseview)
# Register the views as blueprints
if baseview.__class__.__name__ not in self.get_app.blueprints.keys():
self.register_blueprint(baseview)
# Add missing permissions where needed
self.add_permissions()
@property
def get_app(self):
"""
Get current or configured flask app.
:return: Flask App
"""
if self.app:
return self.app
else:
return current_app
@property
def get_session(self):
"""
Get the current sqlalchemy session.
:return: SQLAlchemy Session
"""
return self.session
@property
def app_name(self):
"""
Get the App name.
:return: String with app name
"""
return self.get_app.config["APP_NAME"]
@property
def app_theme(self):
"""
Get the App theme name.
:return: String app theme name
"""
return self.get_app.config["APP_THEME"]
@property
def app_icon(self):
"""
Get the App icon location.
:return: String with relative app icon location
"""
return self.get_app.config["APP_ICON"]
@property
def languages(self):
return self.get_app.config["LANGUAGES"]
@property
def version(self):
"""
Get the current F.A.B. version.
:return: String with the current F.A.B. version
"""
return __version__
def _add_global_filters(self):
self.template_filters = TemplateFilters(self.get_app, self.sm)
def _add_global_static(self):
bp = Blueprint(
"appbuilder",
"flask_appbuilder.base",
url_prefix="/static",
template_folder="templates",
static_folder=self.static_folder,
static_url_path=self.static_url_path,
)
self.get_app.register_blueprint(bp)
def _add_admin_views(self):
"""Register indexview, utilview (back function), babel views and Security views."""
self.indexview = self._check_and_init(self.indexview)
self.add_view_no_menu(self.indexview)
self.add_view_no_menu(UtilView())
self.bm.register_views()
self.sm.register_views()
def _add_addon_views(self):
"""Register declared addons."""
for addon in self._addon_managers:
addon_class = dynamic_class_import(addon)
if addon_class:
# Instantiate manager with appbuilder (self)
addon_class = addon_class(self)
try:
addon_class.pre_process()
addon_class.register_views()
addon_class.post_process()
self.addon_managers[addon] = addon_class
log.info(LOGMSG_INF_FAB_ADDON_ADDED.format(str(addon)))
except Exception as e:
log.exception(e)
log.error(LOGMSG_ERR_FAB_ADDON_PROCESS.format(addon, e))
def _check_and_init(self, baseview):
if hasattr(baseview, "datamodel"):
baseview.datamodel.session = self.session
if hasattr(baseview, "__call__"):
baseview = baseview()
return baseview
def add_view(
self,
baseview,
name,
href="",
icon="",
label="",
category="",
category_icon="",
category_label="",
menu_cond=None,
):
"""Add your views associated with menus using this method.
:param baseview:
A BaseView type class instantiated or not.
This method will instantiate the class for you if needed.
:param name:
The string name that identifies the menu.
:param href:
Override the generated href for the menu.
You can use a URL string or an endpoint name;
if none is provided, the view's default_view will be set as href.
:param icon:
Font-Awesome icon name, optional.
:param label:
The label that will be displayed on the menu;
if absent, param name will be used
:param category:
The menu category where the menu will be included;
if none is provided, the view will be accessible as a top menu.
:param category_icon:
Font-Awesome icon name for the category, optional.
:param category_label:
The label that will be displayed for the category;
if absent, param category will be used
:param menu_cond:
If a callable, :code:`menu_cond` will be invoked when
constructing the menu items. If it returns :code:`True`,
then this link will be a part of the menu. Otherwise, it
will not be included in the menu items. Defaults to
:code:`None`, meaning the item will always be present.
Examples::
appbuilder = AppBuilder(app, db)
# Register a view, rendering a top menu without icon.
appbuilder.add_view(MyModelView(), "My View")
# or not instantiated
appbuilder.add_view(MyModelView, "My View")
# Register a view, a submenu "Other View" from "Other" with a phone icon.
appbuilder.add_view(
MyOtherModelView,
"Other View",
icon='fa-phone',
category="Others"
)
# Register a view, with category icon and translation.
appbuilder.add_view(
YetOtherModelView,
"Other View",
icon='fa-phone',
label=_('Other View'),
category="Others",
category_icon='fa-envelop',
category_label=_('Other View')
)
# Register a view whose menu item will be conditionally displayed
appbuilder.add_view(
YourFeatureView,
"Your Feature",
icon='fa-feature',
label=_('Your Feature'),
menu_cond=lambda: is_feature_enabled("your-feature"),
)
# Add a link
appbuilder.add_link("google", href="www.google.com", icon = "fa-google-plus")
"""
baseview = self._check_and_init(baseview)
log.info(LOGMSG_INF_FAB_ADD_VIEW.format(baseview.__class__.__name__, name))
if not self._view_exists(baseview):
baseview.appbuilder = self
self.baseviews.append(baseview)
self._process_inner_views()
if self.app:
self.register_blueprint(baseview)
self._add_permission(baseview)
self.add_limits(baseview)
self.add_link(
name=name,
href=href,
icon=icon,
label=label,
category=category,
category_icon=category_icon,
category_label=category_label,
baseview=baseview,
cond=menu_cond,
)
return baseview
def add_link(
self,
name,
href,
icon="",
label="",
category="",
category_icon="",
category_label="",
baseview=None,
cond=None,
):
"""Add your own links to menu using this method.
:param name:
The string name that identifies the menu.
:param href:
Override the generated href for the menu.
You can use a URL string or an endpoint name
:param icon:
Font-Awesome icon name, optional.
:param label:
The label that will be displayed on the menu;
if absent, param name will be used
:param category:
The menu category where the menu will be included;
if none is provided, the view will be accessible as a top menu.
:param category_icon:
Font-Awesome icon name for the category, optional.
:param category_label:
The label that will be displayed for the category;
if absent, param category will be used
:param baseview:
A BaseView type class instantiated.
:param cond:
If a callable, :code:`cond` will be invoked when
constructing the menu items. If it returns :code:`True`,
then this link will be a part of the menu. Otherwise, it
will not be included in the menu items. Defaults to
:code:`None`, meaning the item will always be present.
"""
self.menu.add_link(
name=name,
href=href,
icon=icon,
label=label,
category=category,
category_icon=category_icon,
category_label=category_label,
baseview=baseview,
cond=cond,
)
if self.app:
self._add_permissions_menu(name)
if category:
self._add_permissions_menu(category)
def add_separator(self, category, cond=None):
"""Add a separator to the menu, you will sequentially create the menu.
:param category:
The menu category where the separator will be included.
:param cond:
If a callable, :code:`cond` will be invoked when
constructing the menu items. If it returns :code:`True`,
then this separator will be a part of the menu. Otherwise,
it will not be included in the menu items. Defaults to
:code:`None`, meaning the separator will always be present.
"""
self.menu.add_separator(category, cond=cond)
def add_view_no_menu(self, baseview, endpoint=None, static_folder=None):
"""
Add your views without creating a menu.
:param baseview:
A BaseView type class instantiated.
"""
baseview = self._check_and_init(baseview)
log.info(LOGMSG_INF_FAB_ADD_VIEW.format(baseview.__class__.__name__, ""))
if not self._view_exists(baseview):
baseview.appbuilder = self
self.baseviews.append(baseview)
self._process_inner_views()
if self.app:
self.register_blueprint(baseview, endpoint=endpoint, static_folder=static_folder)
self._add_permission(baseview)
else:
log.warning(LOGMSG_WAR_FAB_VIEW_EXISTS.format(baseview.__class__.__name__))
return baseview
def security_cleanup(self):
"""Clean up security.
This method is useful if you have changed the name of your menus or
classes. Changing them leaves behind permissions that are not associated
with anything. You can run it routinely or only occasionally to perform a
security cleanup.
.. warning::
This deletes any permission that is no longer part of any registered
view or menu. Only invoke AFTER YOU HAVE REGISTERED ALL VIEWS.
"""
self.sm.security_cleanup(self.baseviews, self.menu)
def security_converge(self, dry=False) -> dict:
"""Migrates all permissions to the new names on all the Roles.
This method is useful when you use:
- ``class_permission_name``
- ``previous_class_permission_name``
- ``method_permission_name``
- ``previous_method_permission_name``
:param dry: If True will not change DB
:return: Dict with all computed necessary operations
"""
return self.sm.security_converge(self.baseviews, self.menu, dry)
def get_url_for_login_with(self, next_url: str | None = None) -> str:
if self.sm.auth_view is None:
return ""
return url_for(f"{self.sm.auth_view.endpoint}.{'login'}", next=next_url)
@property
def get_url_for_login(self):
return url_for(f"{self.sm.auth_view.endpoint}.login")
@property
def get_url_for_logout(self):
return url_for(f"{self.sm.auth_view.endpoint}.logout")
@property
def get_url_for_index(self):
return url_for(f"{self.indexview.endpoint}.{self.indexview.default_view}")
@property
def get_url_for_userinfo(self):
return url_for(f"{self.sm.user_view.endpoint}.userinfo")
def get_url_for_locale(self, lang):
return url_for(
f"{self.bm.locale_view.endpoint}.{self.bm.locale_view.default_view}",
locale=lang,
)
def add_limits(self, baseview) -> None:
if hasattr(baseview, "limits"):
self.sm.add_limit_view(baseview)
def add_permissions(self, update_perms=False):
if self.update_perms or update_perms:
for baseview in self.baseviews:
self._add_permission(baseview, update_perms=update_perms)
self._add_menu_permissions(update_perms=update_perms)
def _add_permission(self, baseview, update_perms=False):
if self.update_perms or update_perms:
try:
self.sm.add_permissions_view(baseview.base_permissions, baseview.class_permission_name)
except Exception as e:
log.exception(e)
log.error(LOGMSG_ERR_FAB_ADD_PERMISSION_VIEW.format(str(e)))
def _add_permissions_menu(self, name, update_perms=False):
if self.update_perms or update_perms:
try:
self.sm.add_permissions_menu(name)
except Exception as e:
log.exception(e)
log.error(LOGMSG_ERR_FAB_ADD_PERMISSION_MENU.format(str(e)))
def _add_menu_permissions(self, update_perms=False):
if self.update_perms or update_perms:
for category in self.menu.get_list():
self._add_permissions_menu(category.name, update_perms=update_perms)
for item in category.childs:
# don't add permission for menu separator
if item.name != "-":
self._add_permissions_menu(item.name, update_perms=update_perms)
def register_blueprint(self, baseview, endpoint=None, static_folder=None):
self.get_app.register_blueprint(
baseview.create_blueprint(self, endpoint=endpoint, static_folder=static_folder)
)
def _view_exists(self, view):
for baseview in self.baseviews:
if baseview.__class__ == view.__class__:
return True
return False
def _process_inner_views(self):
for view in self.baseviews:
for inner_class in view.get_uninit_inner_views():
for v in self.baseviews:
if isinstance(v, inner_class) and v not in view.get_init_inner_views():
view.get_init_inner_views().append(v)
def init_appbuilder(app) -> AirflowAppBuilder:
"""Init `Flask App Builder <https://flask-appbuilder.readthedocs.io/en/latest/>`__."""
from airflow.www.security import AirflowSecurityManager
security_manager_class = app.config.get("SECURITY_MANAGER_CLASS") or AirflowSecurityManager
if not issubclass(security_manager_class, AirflowSecurityManager):
raise Exception(
"""Your CUSTOM_SECURITY_MANAGER must now extend AirflowSecurityManager,
not FAB's security manager."""
)
return AirflowAppBuilder(
app=app,
session=settings.Session,
security_manager_class=security_manager_class,
base_template="airflow/main.html",
update_perms=conf.getboolean("webserver", "UPDATE_FAB_PERMS"),
auth_rate_limited=conf.getboolean("webserver", "AUTH_RATE_LIMITED", fallback=True),
auth_rate_limit=conf.get("webserver", "AUTH_RATE_LIMIT", fallback="5 per 40 second"),
)

# File: airflow-main/airflow/macros/__init__.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import json # noqa
import time # noqa
import uuid # noqa
from datetime import datetime, timedelta
from random import random # noqa
from typing import Any
import dateutil # noqa
from pendulum import DateTime
import airflow.utils.yaml as yaml # noqa
from airflow.utils.deprecation_tools import add_deprecated_classes
__deprecated_classes = {
"hive": {
"closest_ds_partition": "airflow.providers.apache.hive.macros.hive.closest_ds_partition",
"max_partition": "airflow.providers.apache.hive.macros.hive.max_partition",
},
}
add_deprecated_classes(__deprecated_classes, __name__)
def ds_add(ds: str, days: int) -> str:
"""
Add or subtract days from a YYYY-MM-DD.
:param ds: anchor date in ``YYYY-MM-DD`` format to add to
:param days: number of days to add to the ds, you can use negative values
>>> ds_add('2015-01-01', 5)
'2015-01-06'
>>> ds_add('2015-01-06', -5)
'2015-01-01'
"""
if not days:
return str(ds)
dt = datetime.strptime(str(ds), "%Y-%m-%d") + timedelta(days=days)
return dt.strftime("%Y-%m-%d")
def ds_format(ds: str, input_format: str, output_format: str) -> str:
"""
Output datetime string in a given format.
:param ds: input string which contains a date
:param input_format: input string format, e.g. %Y-%m-%d
:param output_format: output string format, e.g. %Y-%m-%d
>>> ds_format('2015-01-01', "%Y-%m-%d", "%m-%d-%y")
'01-01-15'
>>> ds_format('1/5/2015', "%m/%d/%Y", "%Y-%m-%d")
'2015-01-05'
"""
return datetime.strptime(str(ds), input_format).strftime(output_format)
def datetime_diff_for_humans(dt: Any, since: DateTime | None = None) -> str:
"""
Return a human-readable/approximate difference between datetimes.
When only one datetime is provided, the comparison will be based on now.
:param dt: The datetime to display the diff for
:param since: When to display the date from. If ``None`` then the diff is
between ``dt`` and now.
"""
import pendulum
return pendulum.instance(dt).diff_for_humans(since)
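
# --- Editor's note (illustrative, not part of the Airflow source) ---
# These functions are exposed to task templates under the `macros` namespace,
# so an operator argument can use them directly, e.g.:
#     bash_command="echo {{ macros.ds_add(ds, 7) }}"
# which renders the logical date plus seven days in YYYY-MM-DD form.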

# File: airflow-main/airflow/migrations/db_types.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import sqlalchemy as sa
from alembic import context
from lazy_object_proxy import Proxy
######################################
# Note about this module:
#
# It loads the specific type dynamically at runtime. For IDE/typing support
# there is an associated db_types.pyi. If you add a new type in here, add a
# simple version in there too.
######################################
def _mssql_TIMESTAMP():
from sqlalchemy.dialects import mssql
class DATETIME2(mssql.DATETIME2):
def __init__(self, *args, precision=6, **kwargs):
super().__init__(*args, precision=precision, **kwargs)
return DATETIME2
def _mysql_TIMESTAMP():
from sqlalchemy.dialects import mysql
class TIMESTAMP(mysql.TIMESTAMP):
def __init__(self, *args, fsp=6, timezone=True, **kwargs):
super().__init__(*args, fsp=fsp, timezone=timezone, **kwargs)
return TIMESTAMP
def _sa_TIMESTAMP():
class TIMESTAMP(sa.TIMESTAMP):
def __init__(self, *args, timezone=True, **kwargs):
super().__init__(*args, timezone=timezone, **kwargs)
return TIMESTAMP
def _sa_StringID():
from airflow.models.base import StringID
return StringID
def __getattr__(name):
if name in ["TIMESTAMP", "StringID"]:
def lazy_load():
dialect = context.get_bind().dialect.name
module = globals()
# Lookup the type based on the dialect specific type, or fallback to the generic type
type_ = module.get(f"_{dialect}_{name}", None) or module.get(f"_sa_{name}")
val = module[name] = type_()
return val
# Prior to v1.4 of our Helm chart we didn't correctly initialize the Migration environment, so
# `context.get_bind()` would fail if called at the top level. To make it easier on migration writers
# we make the returned objects lazy.
return Proxy(lazy_load)
raise AttributeError(f"module {__name__} has no attribute {name}")
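
# --- Editor's note (illustrative, not part of the Airflow source) ---
# Typical use inside a migration file; the lazy proxies resolve to the
# dialect-specific type the first time they are used ("some_table" and the
# column name are hypothetical):
#     from airflow.migrations.db_types import TIMESTAMP
#     op.add_column("some_table", sa.Column("created_at", TIMESTAMP(), nullable=False))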

# File: airflow-main/airflow/migrations/utils.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from collections import defaultdict
from contextlib import contextmanager
from sqlalchemy import text
def get_mssql_table_constraints(conn, table_name) -> dict[str, dict[str, list[str]]]:
"""
Return the primary and unique constraints along with their column names.
Some tables like `task_instance` are missing the primary key constraint
name and the name is auto-generated by the SQL server, so this function
helps to retrieve any primary or unique constraint name.
:param conn: sql connection object
:param table_name: table name
:return: a dict mapping constraint type -> constraint name -> list of column names
"""
query = text(
f"""SELECT tc.CONSTRAINT_NAME , tc.CONSTRAINT_TYPE, ccu.COLUMN_NAME
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc
JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS ccu ON ccu.CONSTRAINT_NAME = tc.CONSTRAINT_NAME
WHERE tc.TABLE_NAME = '{table_name}' AND
(tc.CONSTRAINT_TYPE = 'PRIMARY KEY' or UPPER(tc.CONSTRAINT_TYPE) = 'UNIQUE'
or UPPER(tc.CONSTRAINT_TYPE) = 'FOREIGN KEY')
"""
)
result = conn.execute(query).fetchall()
constraint_dict = defaultdict(lambda: defaultdict(list))
for constraint, constraint_type, col_name in result:
constraint_dict[constraint_type][constraint].append(col_name)
return constraint_dict
@contextmanager
def disable_sqlite_fkeys(op):
if op.get_bind().dialect.name == "sqlite":
op.execute("PRAGMA foreign_keys=off")
yield op
op.execute("PRAGMA foreign_keys=on")
else:
yield op
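
# --- Editor's note (illustrative, not part of the Airflow source) ---
# disable_sqlite_fkeys is meant to wrap schema changes that SQLite's foreign
# key enforcement would otherwise reject, e.g. inside a migration:
#     with disable_sqlite_fkeys(op) as op:
#         op.drop_table("some_table")  # "some_table" is hypothetical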

# File: airflow-main/airflow/migrations/__init__.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# File: airflow-main/airflow/migrations/env.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import contextlib
import sys
from logging.config import fileConfig
from alembic import context
from airflow import models, settings
from airflow.utils.db import compare_server_default, compare_type
def include_object(_, name, type_, *args):
"""Filter objects for autogenerating revisions."""
# Ignore _anything_ to do with Celery, or FlaskSession's tables
if type_ == "table" and (name.startswith("celery_") or name == "session"):
return False
else:
return True
# Make sure everything is imported so that alembic can find it all
models.import_all_models()
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name, disable_existing_loggers=False)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = models.base.Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
context.configure(
url=settings.SQL_ALCHEMY_CONN,
target_metadata=target_metadata,
literal_binds=True,
compare_type=compare_type,
compare_server_default=compare_server_default,
render_as_batch=True,
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
with contextlib.ExitStack() as stack:
connection = config.attributes.get("connection", None)
if not connection:
connection = stack.push(settings.engine.connect())
context.configure(
connection=connection,
transaction_per_migration=True,
target_metadata=target_metadata,
compare_type=compare_type,
compare_server_default=compare_server_default,
include_object=include_object,
render_as_batch=True,
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
if "airflow.www.app" in sys.modules:
# Already imported, make sure we clear out any cached app
from airflow.www.app import purge_cached_app
purge_cached_app()
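
# --- Editor's note (illustrative, not part of the Airflow source) ---
# The config.attributes.get("connection") lookup above is the standard Alembic
# hook for programmatic invocation: a caller can share an existing connection
# instead of letting env.py open its own, roughly:
#     from alembic import command
#     with settings.engine.connect() as connection:
#         config.attributes["connection"] = connection
#         command.upgrade(config, "heads")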

# File: airflow-main/airflow/migrations/versions/0002_1_5_0_create_is_encrypted.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``is_encrypted`` column in ``connection`` table
Revision ID: 1507a7289a2f
Revises: e3a246e0dc1
Create Date: 2015-08-18 18:57:51.927315
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = "1507a7289a2f"
down_revision = "e3a246e0dc1"
branch_labels = None
depends_on = None
airflow_version = "1.5.0"
connectionhelper = sa.Table(
"connection", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True), sa.Column("is_encrypted")
)
def upgrade():
# first check if the user already has this done. This should only be
# true for users who are upgrading from a previous version of Airflow
# that predates Alembic integration
conn = op.get_bind()
inspector = inspect(conn)
# this will only be true if 'connection' already exists in the db,
# but not if alembic created it in a previous migration
if "connection" in inspector.get_table_names():
col_names = [c["name"] for c in inspector.get_columns("connection")]
if "is_encrypted" in col_names:
return
op.add_column("connection", sa.Column("is_encrypted", sa.Boolean, unique=False, default=False))
conn = op.get_bind()
conn.execute(connectionhelper.update().values(is_encrypted=False))
def downgrade():
op.drop_column("connection", "is_encrypted")

# File: airflow-main/airflow/migrations/versions/0032_1_10_0_fix_mysql_not_null_constraint.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Fix MySQL not null constraint
Revision ID: f23433877c24
Revises: 05f30312d566
Create Date: 2018-06-17 10:16:31.412131
"""
from __future__ import annotations
from alembic import op
from sqlalchemy import text
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "f23433877c24"
down_revision = "05f30312d566"
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
def upgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
conn.execute(text("SET time_zone = '+00:00'"))
op.alter_column("task_fail", "execution_date", existing_type=mysql.TIMESTAMP(fsp=6), nullable=False)
op.alter_column("xcom", "execution_date", existing_type=mysql.TIMESTAMP(fsp=6), nullable=False)
op.alter_column("xcom", "timestamp", existing_type=mysql.TIMESTAMP(fsp=6), nullable=False)
def downgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
conn.execute(text("SET time_zone = '+00:00'"))
op.alter_column("xcom", "timestamp", existing_type=mysql.TIMESTAMP(fsp=6), nullable=True)
op.alter_column("xcom", "execution_date", existing_type=mysql.TIMESTAMP(fsp=6), nullable=True)
op.alter_column("task_fail", "execution_date", existing_type=mysql.TIMESTAMP(fsp=6), nullable=True)

# File: airflow-main/airflow/migrations/versions/0091_2_2_0_add_trigger_table_and_task_info.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Adds ``trigger`` table and deferrable operator columns to task instance
Revision ID: 54bebd308c5f
Revises: 30867afad44a
Create Date: 2021-04-14 12:56:40.688260
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.utils.sqlalchemy import ExtendedJSON
# revision identifiers, used by Alembic.
revision = "54bebd308c5f"
down_revision = "30867afad44a"
branch_labels = None
depends_on = None
airflow_version = "2.2.0"
def upgrade():
"""Apply Adds ``trigger`` table and deferrable operator columns to task instance"""
op.create_table(
"trigger",
sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
sa.Column("classpath", sa.String(length=1000), nullable=False),
sa.Column("kwargs", ExtendedJSON(), nullable=False),
sa.Column("created_date", sa.DateTime(), nullable=False),
sa.Column("triggerer_id", sa.Integer(), nullable=True),
)
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.add_column(sa.Column("trigger_id", sa.Integer()))
batch_op.add_column(sa.Column("trigger_timeout", sa.DateTime()))
batch_op.add_column(sa.Column("next_method", sa.String(length=1000)))
batch_op.add_column(sa.Column("next_kwargs", ExtendedJSON()))
batch_op.create_foreign_key(
"task_instance_trigger_id_fkey", "trigger", ["trigger_id"], ["id"], ondelete="CASCADE"
)
batch_op.create_index("ti_trigger_id", ["trigger_id"])
def downgrade():
"""Unapply Adds ``trigger`` table and deferrable operator columns to task instance"""
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.drop_constraint("task_instance_trigger_id_fkey", type_="foreignkey")
batch_op.drop_index("ti_trigger_id")
batch_op.drop_column("trigger_id")
batch_op.drop_column("trigger_timeout")
batch_op.drop_column("next_method")
batch_op.drop_column("next_kwargs")
op.drop_table("trigger")

# File: airflow-main/airflow/migrations/versions/0017_1_7_1_add_task_fails_journal_table.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``task_fail`` table
Revision ID: 64de9cddf6c9
Revises: 211e584da130
Create Date: 2016-08-03 14:02:59.203021
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import StringID
# revision identifiers, used by Alembic.
revision = "64de9cddf6c9"
down_revision = "211e584da130"
branch_labels = None
depends_on = None
airflow_version = "1.7.1.3"
def upgrade():
op.create_table(
"task_fail",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("task_id", StringID(), nullable=False),
sa.Column("dag_id", StringID(), nullable=False),
sa.Column("execution_date", sa.DateTime(), nullable=False),
sa.Column("start_date", sa.DateTime(), nullable=True),
sa.Column("end_date", sa.DateTime(), nullable=True),
sa.Column("duration", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
def downgrade():
op.drop_table("task_fail")

# File: airflow-main/airflow/migrations/versions/0110_2_3_2_add_cascade_to_dag_tag_foreignkey.py

#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add cascade to dag_tag foreign key
Revision ID: 3c94c427fdf6
Revises: 1de7bc13c950
Create Date: 2022-05-03 09:47:41.957710
"""
from __future__ import annotations
from alembic import op
from sqlalchemy import inspect
from airflow.migrations.utils import get_mssql_table_constraints
# revision identifiers, used by Alembic.
revision = "3c94c427fdf6"
down_revision = "1de7bc13c950"
branch_labels = None
depends_on = None
airflow_version = "2.3.2"
def upgrade():
"""Apply Add cascade to dag_tag foreignkey"""
conn = op.get_bind()
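    # SQLite and MySQL auto-generate foreign key constraint names, so the existing name must be
    # looked up through the inspector before it can be dropped.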
if conn.dialect.name in ["sqlite", "mysql"]:
inspector = inspect(conn.engine)
foreignkey = inspector.get_foreign_keys("dag_tag")
with op.batch_alter_table(
"dag_tag",
) as batch_op:
batch_op.drop_constraint(foreignkey[0]["name"], type_="foreignkey")
batch_op.create_foreign_key(
"dag_tag_dag_id_fkey", "dag", ["dag_id"], ["dag_id"], ondelete="CASCADE"
)
else:
with op.batch_alter_table("dag_tag") as batch_op:
if conn.dialect.name == "mssql":
constraints = get_mssql_table_constraints(conn, "dag_tag")
                fk_name, _ = constraints["FOREIGN KEY"].popitem()
                batch_op.drop_constraint(fk_name, type_="foreignkey")
if conn.dialect.name == "postgresql":
batch_op.drop_constraint("dag_tag_dag_id_fkey", type_="foreignkey")
batch_op.create_foreign_key(
"dag_tag_dag_id_fkey", "dag", ["dag_id"], ["dag_id"], ondelete="CASCADE"
)
def downgrade():
"""Unapply Add cascade to dag_tag foreignkey"""
conn = op.get_bind()
if conn.dialect.name == "sqlite":
with op.batch_alter_table("dag_tag") as batch_op:
batch_op.drop_constraint("dag_tag_dag_id_fkey", type_="foreignkey")
batch_op.create_foreign_key("fk_dag_tag_dag_id_dag", "dag", ["dag_id"], ["dag_id"])
else:
with op.batch_alter_table("dag_tag") as batch_op:
batch_op.drop_constraint("dag_tag_dag_id_fkey", type_="foreignkey")
batch_op.create_foreign_key(
None,
"dag",
["dag_id"],
["dag_id"],
)
| 3,049 | 36.195122 | 95 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0123_2_6_0_add_dttm_index_on_log_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add dttm index on log table
Revision ID: 6abdffdd4815
Revises: 290244fb8b83
Create Date: 2023-01-13 13:57:14.412028
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "6abdffdd4815"
down_revision = "290244fb8b83"
branch_labels = None
depends_on = None
airflow_version = "2.6.0"
def upgrade():
"""Apply add dttm index on log table"""
op.create_index("idx_log_dttm", "log", ["dttm"], unique=False)
def downgrade():
"""Unapply add dttm index on log table"""
op.drop_index("idx_log_dttm", table_name="log")
| 1,381 | 29.043478 | 66 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0051_1_10_8_add_dagtags_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``DagTags`` table
Revision ID: 7939bcff74ba
Revises: fe461863935f
Create Date: 2020-01-07 19:39:01.247442
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import StringID
# revision identifiers, used by Alembic.
revision = "7939bcff74ba"
down_revision = "fe461863935f"
branch_labels = None
depends_on = None
airflow_version = "1.10.8"
def upgrade():
"""Apply Add ``DagTags`` table"""
op.create_table(
"dag_tag",
sa.Column("name", sa.String(length=100), nullable=False),
sa.Column("dag_id", StringID(), nullable=False),
sa.ForeignKeyConstraint(
["dag_id"],
["dag.dag_id"],
),
sa.PrimaryKeyConstraint("name", "dag_id"),
)
def downgrade():
"""Unapply Add ``DagTags`` table"""
op.drop_table("dag_tag")
| 1,663 | 28.192982 | 65 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0090_2_2_0_rename_concurrency_column_in_dag_table_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Rename ``concurrency`` column in ``dag`` table to`` max_active_tasks``
Revision ID: 30867afad44a
Revises: e9304a3141f0
Create Date: 2021-06-04 22:11:19.849981
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "30867afad44a"
down_revision = "e9304a3141f0"
branch_labels = None
depends_on = None
airflow_version = "2.2.0"
def upgrade():
"""Apply Rename ``concurrency`` column in ``dag`` table to`` max_active_tasks``"""
conn = op.get_bind()
is_sqlite = bool(conn.dialect.name == "sqlite")
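    # batch_alter_table rebuilds the table on SQLite, so disable FK enforcement around the rebuild.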
if is_sqlite:
op.execute("PRAGMA foreign_keys=off")
with op.batch_alter_table("dag") as batch_op:
batch_op.alter_column(
"concurrency",
new_column_name="max_active_tasks",
type_=sa.Integer(),
nullable=False,
)
if is_sqlite:
op.execute("PRAGMA foreign_keys=on")
def downgrade():
"""Unapply Rename ``concurrency`` column in ``dag`` table to`` max_active_tasks``"""
with op.batch_alter_table("dag") as batch_op:
batch_op.alter_column(
"max_active_tasks",
new_column_name="concurrency",
type_=sa.Integer(),
nullable=False,
)
| 2,071 | 30.876923 | 88 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0077_2_0_0_change_field_in_dagcode_to_mediumtext_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Change field in ``DagCode`` to ``MEDIUMTEXT`` for MySql
Revision ID: e959f08ac86c
Revises: 64a7d6477aae
Create Date: 2020-12-07 16:31:43.982353
"""
from __future__ import annotations
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "e959f08ac86c"
down_revision = "64a7d6477aae"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
def upgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
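        # MEDIUMTEXT raises MySQL's limit from 64KB (TEXT) to 16MB for stored DAG source code.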
op.alter_column(
table_name="dag_code", column_name="source_code", type_=mysql.MEDIUMTEXT, nullable=False
)
def downgrade():
    # Do not downgrade back to TEXT: stored source code longer than TEXT's 64KB limit would be truncated or rejected
pass
| 1,497 | 29.571429 | 100 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0126_2_7_0_add_index_to_task_instance_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add index to task_instance table
Revision ID: 937cbd173ca1
Revises: c804e5c76e3e
Create Date: 2023-05-03 11:31:32.527362
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "937cbd173ca1"
down_revision = "c804e5c76e3e"
branch_labels = None
depends_on = None
airflow_version = "2.7.0"
def upgrade():
"""Apply Add index to task_instance table"""
op.create_index(
"ti_state_incl_start_date",
"task_instance",
["dag_id", "task_id", "state"],
postgresql_include=["start_date"],
)
def downgrade():
"""Unapply Add index to task_instance table"""
op.drop_index("ti_state_incl_start_date", table_name="task_instance")
| 1,522 | 28.862745 | 73 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0019_1_7_1_add_fractional_seconds_to_mysql_tables.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add fractional seconds to MySQL tables
Revision ID: 4addfa1236f1
Revises: f2ca10b85618
Create Date: 2016-09-11 13:39:18.592072
"""
from __future__ import annotations
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "4addfa1236f1"
down_revision = "f2ca10b85618"
branch_labels = None
depends_on = None
airflow_version = "1.7.1.3"
def upgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
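        # DATETIME(fsp=6) keeps microsecond precision; plain DATETIME truncates fractional seconds.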
op.alter_column(table_name="dag", column_name="last_scheduler_run", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag", column_name="last_pickled", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag", column_name="last_expired", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag_pickle", column_name="created_dttm", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag_run", column_name="execution_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag_run", column_name="start_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag_run", column_name="end_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="import_error", column_name="timestamp", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="job", column_name="start_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="job", column_name="end_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="job", column_name="latest_heartbeat", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="log", column_name="dttm", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="log", column_name="execution_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="sla_miss", column_name="execution_date", type_=mysql.DATETIME(fsp=6), nullable=False
)
op.alter_column(table_name="sla_miss", column_name="timestamp", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="task_fail", column_name="execution_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="task_fail", column_name="start_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="task_fail", column_name="end_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="task_instance",
column_name="execution_date",
type_=mysql.DATETIME(fsp=6),
nullable=False,
)
op.alter_column(table_name="task_instance", column_name="start_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="task_instance", column_name="end_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="task_instance", column_name="queued_dttm", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="xcom", column_name="timestamp", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="xcom", column_name="execution_date", type_=mysql.DATETIME(fsp=6))
def downgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
op.alter_column(table_name="dag", column_name="last_scheduler_run", type_=mysql.DATETIME())
op.alter_column(table_name="dag", column_name="last_pickled", type_=mysql.DATETIME())
op.alter_column(table_name="dag", column_name="last_expired", type_=mysql.DATETIME())
op.alter_column(table_name="dag_pickle", column_name="created_dttm", type_=mysql.DATETIME())
op.alter_column(table_name="dag_run", column_name="execution_date", type_=mysql.DATETIME())
op.alter_column(table_name="dag_run", column_name="start_date", type_=mysql.DATETIME())
op.alter_column(table_name="dag_run", column_name="end_date", type_=mysql.DATETIME())
op.alter_column(table_name="import_error", column_name="timestamp", type_=mysql.DATETIME())
op.alter_column(table_name="job", column_name="start_date", type_=mysql.DATETIME())
op.alter_column(table_name="job", column_name="end_date", type_=mysql.DATETIME())
op.alter_column(table_name="job", column_name="latest_heartbeat", type_=mysql.DATETIME())
op.alter_column(table_name="log", column_name="dttm", type_=mysql.DATETIME())
op.alter_column(table_name="log", column_name="execution_date", type_=mysql.DATETIME())
op.alter_column(
table_name="sla_miss", column_name="execution_date", type_=mysql.DATETIME(), nullable=False
)
op.alter_column(table_name="sla_miss", column_name="timestamp", type_=mysql.DATETIME())
op.alter_column(table_name="task_fail", column_name="execution_date", type_=mysql.DATETIME())
op.alter_column(table_name="task_fail", column_name="start_date", type_=mysql.DATETIME())
op.alter_column(table_name="task_fail", column_name="end_date", type_=mysql.DATETIME())
op.alter_column(
table_name="task_instance", column_name="execution_date", type_=mysql.DATETIME(), nullable=False
)
op.alter_column(table_name="task_instance", column_name="start_date", type_=mysql.DATETIME())
op.alter_column(table_name="task_instance", column_name="end_date", type_=mysql.DATETIME())
op.alter_column(table_name="task_instance", column_name="queued_dttm", type_=mysql.DATETIME())
op.alter_column(table_name="xcom", column_name="timestamp", type_=mysql.DATETIME())
op.alter_column(table_name="xcom", column_name="execution_date", type_=mysql.DATETIME())
| 6,341 | 50.560976 | 108 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0043_1_10_4_make_taskinstance_pool_not_nullable.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Make ``TaskInstance.pool`` not nullable
Revision ID: 6e96a59344a4
Revises: 939bb1e647c8
Create Date: 2019-06-13 21:51:32.878437
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base
from airflow.utils.session import create_session
from airflow.utils.sqlalchemy import UtcDateTime
# revision identifiers, used by Alembic.
revision = "6e96a59344a4"
down_revision = "939bb1e647c8"
branch_labels = None
depends_on = None
airflow_version = "1.10.4"
Base = declarative_base()
ID_LEN = 250
class TaskInstance(Base): # type: ignore
"""Minimal model definition for migrations"""
__tablename__ = "task_instance"
    task_id = Column(String(ID_LEN), primary_key=True)
    dag_id = Column(String(ID_LEN), primary_key=True)
execution_date = Column(UtcDateTime, primary_key=True)
pool = Column(String(50), nullable=False)
def upgrade():
"""Make TaskInstance.pool field not nullable."""
with create_session() as session:
session.query(TaskInstance).filter(TaskInstance.pool.is_(None)).update(
{TaskInstance.pool: "default_pool"}, synchronize_session=False
        )  # synchronize_session=False: don't re-fetch the updated rows into the session
session.commit()
conn = op.get_bind()
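    # MSSQL cannot alter a column used in an index, so ti_pool is dropped first and recreated after.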
if conn.dialect.name == "mssql":
op.drop_index(index_name="ti_pool", table_name="task_instance")
# use batch_alter_table to support SQLite workaround
with op.batch_alter_table("task_instance") as batch_op:
batch_op.alter_column(
column_name="pool",
type_=sa.String(50),
nullable=False,
)
if conn.dialect.name == "mssql":
op.create_index(
index_name="ti_pool", table_name="task_instance", columns=["pool", "state", "priority_weight"]
)
def downgrade():
"""Make TaskInstance.pool field nullable."""
conn = op.get_bind()
if conn.dialect.name == "mssql":
op.drop_index(index_name="ti_pool", table_name="task_instance")
# use batch_alter_table to support SQLite workaround
with op.batch_alter_table("task_instance") as batch_op:
batch_op.alter_column(
column_name="pool",
type_=sa.String(50),
nullable=True,
)
if conn.dialect.name == "mssql":
op.create_index(
index_name="ti_pool", table_name="task_instance", columns=["pool", "state", "priority_weight"]
)
with create_session() as session:
session.query(TaskInstance).filter(TaskInstance.pool == "default_pool").update(
{TaskInstance.pool: None}, synchronize_session=False
        )  # synchronize_session=False: don't re-fetch the updated rows into the session
session.commit()
| 3,510 | 31.813084 | 106 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0074_2_0_0_resource_based_permissions.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Resource based permissions.
Revision ID: 2c6edca13270
Revises: 849da589634d
Create Date: 2020-10-21 00:18:52.529438
"""
from __future__ import annotations
import logging
from airflow.security import permissions
from airflow.www.app import cached_app
# revision identifiers, used by Alembic.
revision = "2c6edca13270"
down_revision = "849da589634d"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
mapping = {
("Airflow", "can_blocked"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
],
("Airflow", "can_clear"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_code"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE),
],
("Airflow", "can_dag_details"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
],
("Airflow", "can_dag_stats"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
],
("Airflow", "can_dagrun_clear"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_dagrun_failed"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN),
],
("Airflow", "can_dagrun_success"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN),
],
("Airflow", permissions.ACTION_CAN_DELETE): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG),
],
("Airflow", "can_duration"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_extra_links"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_failed"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_gantt"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_get_logs_with_metadata"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
],
("Airflow", "can_graph"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
],
("Airflow", "can_index"): [(permissions.ACTION_CAN_READ, "Website")],
("Airflow", "can_landing_times"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_log"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
],
("Airflow", "can_paused"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
],
("Airflow", "can_redirect_to_external_log"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
],
("Airflow", "can_refresh"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
],
("Airflow", "can_refresh_all"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
],
("Airflow", "can_rendered"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_run"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_success"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_task"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_task_instances"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_task_stats"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_last_dagruns"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
],
("Airflow", "can_tree"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
],
("Airflow", "can_tries"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
],
("Airflow", "can_trigger"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN),
],
("Airflow", "can_xcom"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM),
],
("ConfigurationView", "can_conf"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG)],
("Config", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG)],
("DagModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG)],
("DagModelView", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG)],
("DagModelView", "can_show"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG)],
("DagModelView", "show"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG)],
("Dags", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG)],
("Dags", "can_edit"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG)],
("DagRunModelView", "clear"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE),
],
("DagRunModelView", "can_add"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN)],
("DagRunModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN)],
("DagRunModelView", "muldelete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN)],
("DagRunModelView", "set_running"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN)],
("DagRunModelView", "set_failed"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN)],
("DagRunModelView", "set_success"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG_RUN)],
("DagRun", "can_create"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN)],
("DagRun", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN)],
("DagRun", "can_delete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_DAG_RUN)],
("JobModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_JOB)],
("LogModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_AUDIT_LOG),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_AUDIT_LOG),
],
("Logs", permissions.ACTION_CAN_ACCESS_MENU): [
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_AUDIT_LOG)
],
("Log", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG)],
("SlaMissModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_SLA_MISS)],
("TaskInstanceModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE)
],
("TaskInstanceModelView", "clear"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE)],
("TaskInstanceModelView", "set_failed"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE)
],
("TaskInstanceModelView", "set_retry"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE)
],
("TaskInstanceModelView", "set_running"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE)
],
("TaskInstanceModelView", "set_success"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE)
],
("TaskRescheduleModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_RESCHEDULE)
],
("TaskInstance", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE)],
("Tasks", permissions.ACTION_CAN_CREATE): [
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_TASK_INSTANCE)
],
("Tasks", permissions.ACTION_CAN_READ): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE)
],
("Tasks", permissions.ACTION_CAN_EDIT): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE)
],
("Tasks", permissions.ACTION_CAN_DELETE): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE)
],
("ConnectionModelView", "can_add"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_CONNECTION)],
("ConnectionModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION)],
("ConnectionModelView", permissions.ACTION_CAN_EDIT): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_CONNECTION)
],
("ConnectionModelView", permissions.ACTION_CAN_DELETE): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_CONNECTION)
],
("ConnectionModelView", "muldelete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_CONNECTION)],
("Connection", "can_create"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_CONNECTION)],
("Connection", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION)],
("Connection", "can_edit"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_CONNECTION)],
("Connection", "can_delete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_CONNECTION)],
("DagCode", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN)],
("PluginView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_PLUGIN)],
("PoolModelView", "can_add"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_POOL)],
("PoolModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_POOL)],
("PoolModelView", permissions.ACTION_CAN_EDIT): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_POOL)
],
("PoolModelView", permissions.ACTION_CAN_DELETE): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_POOL)
],
("PoolModelView", "muldelete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_POOL)],
("Pool", "can_create"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_POOL)],
("Pool", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_POOL)],
("Pool", "can_edit"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_POOL)],
("Pool", "can_delete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_POOL)],
("VariableModelView", "can_add"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE)],
("VariableModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE)],
("VariableModelView", permissions.ACTION_CAN_EDIT): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_VARIABLE)
],
("VariableModelView", permissions.ACTION_CAN_DELETE): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_VARIABLE)
],
("VariableModelView", "can_varimport"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE)],
("VariableModelView", "muldelete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_VARIABLE)],
("VariableModelView", "varexport"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE)],
("Variable", "can_create"): [(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_VARIABLE)],
("Variable", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_VARIABLE)],
("Variable", "can_edit"): [(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_VARIABLE)],
("Variable", "can_delete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_VARIABLE)],
("XComModelView", "can_list"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM)],
("XComModelView", permissions.ACTION_CAN_DELETE): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_XCOM)
],
("XComModelView", "muldelete"): [(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_XCOM)],
("XCom", "can_read"): [(permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM)],
}
def remap_permissions():
"""Apply Map Airflow permissions."""
appbuilder = cached_app(config={"FAB_UPDATE_PERMS": False}).appbuilder
for old, new in mapping.items():
(old_resource_name, old_action_name) = old
old_permission = appbuilder.sm.get_permission(old_action_name, old_resource_name)
if not old_permission:
continue
for new_action_name, new_resource_name in new:
new_permission = appbuilder.sm.create_permission(new_action_name, new_resource_name)
for role in appbuilder.sm.get_all_roles():
if appbuilder.sm.permission_exists_in_one_or_more_roles(
old_resource_name, old_action_name, [role.id]
):
appbuilder.sm.add_permission_to_role(role, new_permission)
appbuilder.sm.remove_permission_from_role(role, old_permission)
appbuilder.sm.delete_permission(old_action_name, old_resource_name)
if not appbuilder.sm.get_action(old_action_name):
continue
resources = appbuilder.sm.get_all_resources()
if not any(appbuilder.sm.get_permission(old_action_name, resource.name) for resource in resources):
appbuilder.sm.delete_action(old_action_name)
def undo_remap_permissions():
"""Unapply Map Airflow permissions"""
appbuilder = cached_app(config={"FAB_UPDATE_PERMS": False}).appbuilder
for old, new in mapping.items():
        (new_action_name, new_resource_name) = new[0]  # mapping values are (action, resource) pairs
new_permission = appbuilder.sm.get_permission(new_action_name, new_resource_name)
if not new_permission:
continue
        # Each mapping key is a single (resource, action) pair.
        (old_resource_name, old_action_name) = old
        old_permission = appbuilder.sm.create_permission(old_action_name, old_resource_name)
        for role in appbuilder.sm.get_all_roles():
            if appbuilder.sm.permission_exists_in_one_or_more_roles(
                new_resource_name, new_action_name, [role.id]
            ):
                appbuilder.sm.add_permission_to_role(role, old_permission)
                appbuilder.sm.remove_permission_from_role(role, new_permission)
appbuilder.sm.delete_permission(new_action_name, new_resource_name)
if not appbuilder.sm.get_action(new_action_name):
continue
resources = appbuilder.sm.get_all_resources()
if not any(appbuilder.sm.get_permission(new_action_name, resource.name) for resource in resources):
appbuilder.sm.delete_action(new_action_name)
def upgrade():
"""Apply Resource based permissions."""
log = logging.getLogger()
handlers = log.handlers[:]
remap_permissions()
log.handlers = handlers
def downgrade():
"""Unapply Resource based permissions."""
log = logging.getLogger()
handlers = log.handlers[:]
undo_remap_permissions()
log.handlers = handlers
| 17,894 | 49.694051 | 109 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0124_2_6_0_increase_length_of_user_identifier_columns.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase length of user identifier columns in ``ab_user`` and ``ab_register_user`` tables
Revision ID: 98ae134e6fff
Revises: 6abdffdd4815
Create Date: 2023-01-18 16:21:09.420958
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.utils import get_mssql_table_constraints
# revision identifiers, used by Alembic.
revision = "98ae134e6fff"
down_revision = "6abdffdd4815"
branch_labels = None
depends_on = None
airflow_version = "2.6.0"
def upgrade():
"""Increase length of user identifier columns in ab_user and ab_register_user tables"""
with op.batch_alter_table("ab_user") as batch_op:
batch_op.alter_column("first_name", type_=sa.String(256), existing_nullable=False)
batch_op.alter_column("last_name", type_=sa.String(256), existing_nullable=False)
batch_op.alter_column(
"username",
type_=sa.String(512).with_variant(sa.String(512, collation="NOCASE"), "sqlite"),
existing_nullable=False,
)
batch_op.alter_column("email", type_=sa.String(512), existing_nullable=False)
with op.batch_alter_table("ab_register_user") as batch_op:
batch_op.alter_column("first_name", type_=sa.String(256), existing_nullable=False)
batch_op.alter_column("last_name", type_=sa.String(256), existing_nullable=False)
batch_op.alter_column(
"username",
type_=sa.String(512).with_variant(sa.String(512, collation="NOCASE"), "sqlite"),
existing_nullable=False,
)
batch_op.alter_column("email", type_=sa.String(512), existing_nullable=False)
def downgrade():
"""Revert length of user identifier columns in ab_user and ab_register_user tables"""
conn = op.get_bind()
if conn.dialect.name != "mssql":
with op.batch_alter_table("ab_user") as batch_op:
batch_op.alter_column("first_name", type_=sa.String(64), existing_nullable=False)
batch_op.alter_column("last_name", type_=sa.String(64), existing_nullable=False)
batch_op.alter_column(
"username",
type_=sa.String(256).with_variant(sa.String(256, collation="NOCASE"), "sqlite"),
existing_nullable=False,
)
batch_op.alter_column("email", type_=sa.String(256), existing_nullable=False)
with op.batch_alter_table("ab_register_user") as batch_op:
batch_op.alter_column("first_name", type_=sa.String(64), existing_nullable=False)
batch_op.alter_column("last_name", type_=sa.String(64), existing_nullable=False)
batch_op.alter_column(
"username",
type_=sa.String(256).with_variant(sa.String(256, collation="NOCASE"), "sqlite"),
existing_nullable=False,
)
batch_op.alter_column("email", type_=sa.String(256), existing_nullable=False)
else:
# MSSQL doesn't drop implicit unique constraints it created
# We need to drop the two unique constraints explicitly
with op.batch_alter_table("ab_user") as batch_op:
batch_op.alter_column("first_name", type_=sa.String(64), existing_nullable=False)
batch_op.alter_column("last_name", type_=sa.String(64), existing_nullable=False)
# Drop the unique constraint on username and email
constraints = get_mssql_table_constraints(conn, "ab_user")
for k, _ in constraints.get("UNIQUE").items():
batch_op.drop_constraint(k, type_="unique")
batch_op.alter_column("username", type_=sa.String(256), existing_nullable=False)
batch_op.create_unique_constraint(None, ["username"])
batch_op.alter_column("email", type_=sa.String(256), existing_nullable=False)
batch_op.create_unique_constraint(None, ["email"])
with op.batch_alter_table("ab_register_user") as batch_op:
batch_op.alter_column("first_name", type_=sa.String(64), existing_nullable=False)
batch_op.alter_column("last_name", type_=sa.String(64), existing_nullable=False)
batch_op.alter_column("email", type_=sa.String(256), existing_nullable=False)
# Drop the unique constraint on username
constraints = get_mssql_table_constraints(conn, "ab_register_user")
for k, _ in constraints.get("UNIQUE").items():
batch_op.drop_constraint(k, type_="unique")
batch_op.alter_column("username", type_=sa.String(256), existing_nullable=False)
batch_op.create_unique_constraint(None, ["username"])
| 5,426 | 48.336364 | 96 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0068_2_0_0_drop_kuberesourceversion_and_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Drop ``KubeResourceVersion`` and ``KubeWorkerId``
Revision ID: bef4f3d11e8b
Revises: e1a11ece99cc
Create Date: 2020-09-22 18:45:28.011654
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = "bef4f3d11e8b"
down_revision = "e1a11ece99cc"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
WORKER_UUID_TABLE = "kube_worker_uuid"
WORKER_RESOURCEVERSION_TABLE = "kube_resource_version"
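# Both were single-row bookkeeping tables, apparently used by the pre-2.0 Kubernetes executor.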
def upgrade():
"""Apply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables"""
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if WORKER_UUID_TABLE in tables:
op.drop_table(WORKER_UUID_TABLE)
if WORKER_RESOURCEVERSION_TABLE in tables:
op.drop_table(WORKER_RESOURCEVERSION_TABLE)
def downgrade():
"""Unapply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables"""
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if WORKER_UUID_TABLE not in tables:
_add_worker_uuid_table()
if WORKER_RESOURCEVERSION_TABLE not in tables:
_add_resource_table()
def _add_worker_uuid_table():
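    # Recreate the single-row table: a boolean primary key plus a check constraint caps it at one row.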
columns_and_constraints = [
sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True),
sa.Column("worker_uuid", sa.String(255)),
]
conn = op.get_bind()
    # alembic generates invalid SQL for this check constraint on the mssql and mysql dialects
if conn.dialect.name in {"mysql"}:
columns_and_constraints.append(sa.CheckConstraint("one_row_id<>0", name="kube_worker_one_row_id"))
elif conn.dialect.name not in {"mssql"}:
columns_and_constraints.append(sa.CheckConstraint("one_row_id", name="kube_worker_one_row_id"))
table = op.create_table(WORKER_UUID_TABLE, *columns_and_constraints)
op.bulk_insert(table, [{"worker_uuid": ""}])
def _add_resource_table():
columns_and_constraints = [
sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True),
sa.Column("resource_version", sa.String(255)),
]
conn = op.get_bind()
    # alembic generates invalid SQL for this check constraint on the mssql and mysql dialects
if conn.dialect.name in {"mysql"}:
columns_and_constraints.append(
sa.CheckConstraint("one_row_id<>0", name="kube_resource_version_one_row_id")
)
elif conn.dialect.name not in {"mssql"}:
columns_and_constraints.append(
sa.CheckConstraint("one_row_id", name="kube_resource_version_one_row_id")
)
table = op.create_table(WORKER_RESOURCEVERSION_TABLE, *columns_and_constraints)
op.bulk_insert(table, [{"resource_version": ""}])
| 3,556 | 32.242991 | 106 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0055_1_10_11_add_precision_to_execution_date_in_mysql.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add Precision to ``execution_date`` in ``RenderedTaskInstanceFields`` table
Revision ID: a66efa278eea
Revises: 952da73b5eff
Create Date: 2020-06-16 21:44:02.883132
"""
from __future__ import annotations
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "a66efa278eea"
down_revision = "952da73b5eff"
branch_labels = None
depends_on = None
airflow_version = "1.10.11"
TABLE_NAME = "rendered_task_instance_fields"
COLUMN_NAME = "execution_date"
def upgrade():
"""Add Precision to ``execution_date`` in ``RenderedTaskInstanceFields`` table for MySQL"""
conn = op.get_bind()
if conn.dialect.name == "mysql":
op.alter_column(
table_name=TABLE_NAME, column_name=COLUMN_NAME, type_=mysql.TIMESTAMP(fsp=6), nullable=False
)
def downgrade():
"""Unapply Add Precision to ``execution_date`` in ``RenderedTaskInstanceFields`` table"""
conn = op.get_bind()
if conn.dialect.name == "mysql":
op.alter_column(
table_name=TABLE_NAME, column_name=COLUMN_NAME, type_=mysql.TIMESTAMP(), nullable=False
)
| 1,925 | 32.789474 | 104 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0118_2_4_2_add_missing_autoinc_fab.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add missing auto-increment to columns on FAB tables
Revision ID: b0d31815b5a6
Revises: ecb43d2a1842
Create Date: 2022-10-05 13:16:45.638490
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "b0d31815b5a6"
down_revision = "ecb43d2a1842"
branch_labels = None
depends_on = None
airflow_version = "2.4.2"
def upgrade():
"""Apply migration.
If these columns are already of the right type (i.e. created by our
migration in 1.10.13 rather than FAB itself in an earlier version), this
migration will issue an alter statement to change them to what they already
    are -- i.e. it's a no-op.
These tables are small (100 to low 1k rows at most), so it's not too costly
to change them.
"""
conn = op.get_bind()
if conn.dialect.name in ["mssql", "sqlite"]:
# 1.10.12 didn't support SQL Server, so it couldn't have gotten this wrong --> nothing to correct
# SQLite autoinc was "implicit" for an INTEGER NOT NULL PRIMARY KEY
return
for table in (
"ab_permission",
"ab_view_menu",
"ab_role",
"ab_permission_view",
"ab_permission_view_role",
"ab_user",
"ab_user_role",
"ab_register_user",
):
with op.batch_alter_table(table) as batch:
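            # PostgreSQL expresses autoincrement as a sequence-backed server default;
            # MySQL flags the column itself.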
kwargs = {}
if conn.dialect.name == "postgresql":
kwargs["server_default"] = sa.Sequence(f"{table}_id_seq").next_value()
else:
kwargs["autoincrement"] = True
batch.alter_column("id", existing_type=sa.Integer(), existing_nullable=False, **kwargs)
def downgrade():
"""Unapply add_missing_autoinc_fab"""
# No downgrade needed, these _should_ have applied from 1.10.13 but didn't due to a previous bug!
| 2,636 | 32.379747 | 105 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0056_1_10_12_add_dag_hash_column_to_serialized_dag_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``dag_hash`` Column to ``serialized_dag`` table
Revision ID: da3f683c3a5a
Revises: a66efa278eea
Create Date: 2020-08-07 20:52:09.178296
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "da3f683c3a5a"
down_revision = "a66efa278eea"
branch_labels = None
depends_on = None
airflow_version = "1.10.12"
def upgrade():
"""Apply Add ``dag_hash`` Column to ``serialized_dag`` table"""
op.add_column(
"serialized_dag",
sa.Column("dag_hash", sa.String(32), nullable=False, server_default="Hash not calculated yet"),
)
def downgrade():
"""Unapply Add ``dag_hash`` Column to ``serialized_dag`` table"""
op.drop_column("serialized_dag", "dag_hash")
| 1,563 | 30.918367 | 103 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0069_2_0_0_add_scheduling_decision_to_dagrun_and_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``scheduling_decision`` to ``DagRun`` and ``DAG``
Revision ID: 98271e7606e2
Revises: bef4f3d11e8b
Create Date: 2020-10-01 12:13:32.968148
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import TIMESTAMP
# revision identifiers, used by Alembic.
revision = "98271e7606e2"
down_revision = "bef4f3d11e8b"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
def upgrade():
"""Apply Add ``scheduling_decision`` to ``DagRun`` and ``DAG``"""
conn = op.get_bind()
is_sqlite = bool(conn.dialect.name == "sqlite")
is_mssql = bool(conn.dialect.name == "mssql")
if is_sqlite:
op.execute("PRAGMA foreign_keys=off")
with op.batch_alter_table("dag_run", schema=None) as batch_op:
batch_op.add_column(sa.Column("last_scheduling_decision", TIMESTAMP, nullable=True))
batch_op.create_index("idx_last_scheduling_decision", ["last_scheduling_decision"], unique=False)
batch_op.add_column(sa.Column("dag_hash", sa.String(32), nullable=True))
with op.batch_alter_table("dag", schema=None) as batch_op:
batch_op.add_column(sa.Column("next_dagrun", TIMESTAMP, nullable=True))
batch_op.add_column(sa.Column("next_dagrun_create_after", TIMESTAMP, nullable=True))
# Create with nullable and no default, then ALTER to set values, to avoid table level lock
batch_op.add_column(sa.Column("concurrency", sa.Integer(), nullable=True))
batch_op.add_column(sa.Column("has_task_concurrency_limits", sa.Boolean(), nullable=True))
batch_op.create_index("idx_next_dagrun_create_after", ["next_dagrun_create_after"], unique=False)
try:
from airflow.configuration import conf
concurrency = conf.getint("core", "max_active_tasks_per_dag", fallback=16)
    except Exception:
        # The config may be unreadable at migration time; fall back to the default of 16.
        concurrency = 16
# Set it to true here as it makes us take the slow/more complete path, and when it's next parsed by the
    # DagParser it will get set to the correct value.
op.execute(
f"""
UPDATE dag SET
concurrency={concurrency},
has_task_concurrency_limits={1 if is_sqlite or is_mssql else sa.true()}
where concurrency IS NULL
"""
)
with op.batch_alter_table("dag", schema=None) as batch_op:
batch_op.alter_column("concurrency", type_=sa.Integer(), nullable=False)
batch_op.alter_column("has_task_concurrency_limits", type_=sa.Boolean(), nullable=False)
if is_sqlite:
op.execute("PRAGMA foreign_keys=on")
def downgrade():
"""Unapply Add ``scheduling_decision`` to ``DagRun`` and ``DAG``"""
conn = op.get_bind()
is_sqlite = bool(conn.dialect.name == "sqlite")
if is_sqlite:
op.execute("PRAGMA foreign_keys=off")
with op.batch_alter_table("dag_run", schema=None) as batch_op:
batch_op.drop_index("idx_last_scheduling_decision")
batch_op.drop_column("last_scheduling_decision")
batch_op.drop_column("dag_hash")
with op.batch_alter_table("dag", schema=None) as batch_op:
batch_op.drop_index("idx_next_dagrun_create_after")
batch_op.drop_column("next_dagrun_create_after")
batch_op.drop_column("next_dagrun")
batch_op.drop_column("concurrency")
batch_op.drop_column("has_task_concurrency_limits")
if is_sqlite:
op.execute("PRAGMA foreign_keys=on")
| 4,219 | 36.678571 | 107 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0013_1_7_0_add_a_column_to_track_the_encryption_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add a column to track the encryption state of the 'Extra' field in connection
Revision ID: bba5a7cfc896
Revises: bbc73705a13e
Create Date: 2016-01-29 15:10:32.656425
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "bba5a7cfc896"
down_revision = "bbc73705a13e"
branch_labels = None
depends_on = None
airflow_version = "1.7.0"
def upgrade():
op.add_column("connection", sa.Column("is_extra_encrypted", sa.Boolean, default=False))
def downgrade():
op.drop_column("connection", "is_extra_encrypted")
| 1,392 | 30.659091 | 91 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0034_1_10_0_index_taskfail.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Create index on ``task_fail`` table
Revision ID: 9635ae0956e7
Revises: 856955da8476
Create Date: 2018-06-17 21:40:01.963540
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "9635ae0956e7"
down_revision = "856955da8476"
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
def upgrade():
op.create_index(
"idx_task_fail_dag_task_date", "task_fail", ["dag_id", "task_id", "execution_date"], unique=False
)
def downgrade():
op.drop_index("idx_task_fail_dag_task_date", table_name="task_fail")
| 1,384 | 30.477273 | 105 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0061_2_0_0_increase_length_of_pool_name.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase length of pool name
Revision ID: b25a55525161
Revises: bbf4a7ad0465
Create Date: 2020-03-09 08:48:14.534700
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.models.base import COLLATION_ARGS
# revision identifiers, used by Alembic.
revision = "b25a55525161"
down_revision = "bbf4a7ad0465"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
def upgrade():
"""Increase column length of pool name from 50 to 256 characters"""
# use batch_alter_table to support SQLite workaround
with op.batch_alter_table("slot_pool", table_args=sa.UniqueConstraint("pool")) as batch_op:
batch_op.alter_column("pool", type_=sa.String(256, **COLLATION_ARGS))
def downgrade():
"""Revert Increased length of pool name from 256 to 50 characters"""
with op.batch_alter_table("slot_pool", table_args=sa.UniqueConstraint("pool")) as batch_op:
batch_op.alter_column("pool", type_=sa.String(50))
| 1,775 | 33.823529 | 95 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0093_2_2_0_taskinstance_keyed_to_dagrun.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Change ``TaskInstance`` and ``TaskReschedule`` tables from execution_date to run_id.
Revision ID: 7b2661a43ba3
Revises: 142555e44c17
Create Date: 2021-07-15 15:26:12.710749
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import and_, column, select, table
from airflow.migrations.db_types import TIMESTAMP, StringID
from airflow.migrations.utils import get_mssql_table_constraints
ID_LEN = 250
# revision identifiers, used by Alembic.
revision = "7b2661a43ba3"
down_revision = "142555e44c17"
branch_labels = None
depends_on = None
airflow_version = "2.2.0"
# Just Enough Table to run the conditions for update.
task_instance = table(
"task_instance",
column("task_id", sa.String),
column("dag_id", sa.String),
column("run_id", sa.String),
column("execution_date", sa.TIMESTAMP),
)
task_reschedule = table(
"task_reschedule",
column("task_id", sa.String),
column("dag_id", sa.String),
column("run_id", sa.String),
column("execution_date", sa.TIMESTAMP),
)
dag_run = table(
"dag_run",
column("dag_id", sa.String),
column("run_id", sa.String),
column("execution_date", sa.TIMESTAMP),
)
def upgrade():
"""Apply Change ``TaskInstance`` and ``TaskReschedule`` tables from execution_date to run_id."""
conn = op.get_bind()
dialect_name = conn.dialect.name
dt_type = TIMESTAMP
string_id_col_type = StringID()
if dialect_name == "sqlite":
naming_convention = {
"uq": "%(table_name)s_%(column_0_N_name)s_key",
}
# The naming_convention force the previously un-named UNIQUE constraints to have the right name
with op.batch_alter_table(
"dag_run", naming_convention=naming_convention, recreate="always"
) as batch_op:
batch_op.alter_column("dag_id", existing_type=string_id_col_type, nullable=False)
batch_op.alter_column("run_id", existing_type=string_id_col_type, nullable=False)
batch_op.alter_column("execution_date", existing_type=dt_type, nullable=False)
elif dialect_name == "mysql":
with op.batch_alter_table("dag_run") as batch_op:
batch_op.alter_column(
"dag_id", existing_type=sa.String(length=ID_LEN), type_=string_id_col_type, nullable=False
)
batch_op.alter_column(
"run_id", existing_type=sa.String(length=ID_LEN), type_=string_id_col_type, nullable=False
)
batch_op.alter_column("execution_date", existing_type=dt_type, nullable=False)
inspector = sa.inspect(conn.engine)
unique_keys = inspector.get_unique_constraints("dag_run")
for unique_key in unique_keys:
batch_op.drop_constraint(unique_key["name"], type_="unique")
batch_op.create_unique_constraint(
"dag_run_dag_id_execution_date_key", ["dag_id", "execution_date"]
)
batch_op.create_unique_constraint("dag_run_dag_id_run_id_key", ["dag_id", "run_id"])
elif dialect_name == "mssql":
with op.batch_alter_table("dag_run") as batch_op:
batch_op.drop_index("idx_not_null_dag_id_execution_date")
batch_op.drop_index("idx_not_null_dag_id_run_id")
batch_op.drop_index("dag_id_state")
batch_op.drop_index("idx_dag_run_dag_id")
batch_op.drop_index("idx_dag_run_running_dags")
batch_op.drop_index("idx_dag_run_queued_dags")
batch_op.alter_column("dag_id", existing_type=string_id_col_type, nullable=False)
batch_op.alter_column("execution_date", existing_type=dt_type, nullable=False)
batch_op.alter_column("run_id", existing_type=string_id_col_type, nullable=False)
# _Somehow_ mssql was missing these constraints entirely
batch_op.create_unique_constraint(
"dag_run_dag_id_execution_date_key", ["dag_id", "execution_date"]
)
batch_op.create_unique_constraint("dag_run_dag_id_run_id_key", ["dag_id", "run_id"])
batch_op.create_index("dag_id_state", ["dag_id", "state"], unique=False)
batch_op.create_index("idx_dag_run_dag_id", ["dag_id"])
batch_op.create_index(
"idx_dag_run_running_dags",
["state", "dag_id"],
mssql_where=sa.text("state='running'"),
)
batch_op.create_index(
"idx_dag_run_queued_dags",
["state", "dag_id"],
mssql_where=sa.text("state='queued'"),
)
else:
# Make sure DagRun PK columns are non-nullable
with op.batch_alter_table("dag_run", schema=None) as batch_op:
batch_op.alter_column("dag_id", existing_type=string_id_col_type, nullable=False)
batch_op.alter_column("execution_date", existing_type=dt_type, nullable=False)
batch_op.alter_column("run_id", existing_type=string_id_col_type, nullable=False)
# First create column nullable
op.add_column("task_instance", sa.Column("run_id", type_=string_id_col_type, nullable=True))
op.add_column("task_reschedule", sa.Column("run_id", type_=string_id_col_type, nullable=True))
#
# TaskReschedule has a FK to TaskInstance, so we have to update that before
# we can drop the TI.execution_date column
update_query = _multi_table_update(dialect_name, task_reschedule, task_reschedule.c.run_id)
op.execute(update_query)
with op.batch_alter_table("task_reschedule", schema=None) as batch_op:
batch_op.alter_column(
"run_id", existing_type=string_id_col_type, existing_nullable=True, nullable=False
)
batch_op.drop_constraint("task_reschedule_dag_task_date_fkey", type_="foreignkey")
if dialect_name == "mysql":
# Mysql creates an index and a constraint -- we have to drop both
batch_op.drop_index("task_reschedule_dag_task_date_fkey")
batch_op.alter_column(
"dag_id", existing_type=sa.String(length=ID_LEN), type_=string_id_col_type, nullable=False
)
batch_op.drop_index("idx_task_reschedule_dag_task_date")
# Then update the new column by selecting the right value from DagRun
# But first we will drop and recreate indexes to make it faster
if dialect_name == "postgresql":
# Recreate task_instance, without execution_date and with dagrun.run_id
op.execute(
"""
CREATE TABLE new_task_instance AS SELECT
ti.task_id,
ti.dag_id,
dag_run.run_id,
ti.start_date,
ti.end_date,
ti.duration,
ti.state,
ti.try_number,
ti.hostname,
ti.unixname,
ti.job_id,
ti.pool,
ti.queue,
ti.priority_weight,
ti.operator,
ti.queued_dttm,
ti.pid,
ti.max_tries,
ti.executor_config,
ti.pool_slots,
ti.queued_by_job_id,
ti.external_executor_id,
ti.trigger_id,
ti.trigger_timeout,
ti.next_method,
ti.next_kwargs
FROM task_instance ti
INNER JOIN dag_run ON dag_run.dag_id = ti.dag_id AND dag_run.execution_date = ti.execution_date;
"""
)
op.drop_table("task_instance")
op.rename_table("new_task_instance", "task_instance")
# Fix up columns after the 'create table as select'
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.alter_column(
"pool", existing_type=string_id_col_type, existing_nullable=True, nullable=False
)
batch_op.alter_column("max_tries", existing_type=sa.Integer(), server_default="-1")
batch_op.alter_column(
"pool_slots", existing_type=sa.Integer(), existing_nullable=True, nullable=False
)
else:
update_query = _multi_table_update(dialect_name, task_instance, task_instance.c.run_id)
op.execute(update_query)
with op.batch_alter_table("task_instance", schema=None) as batch_op:
if dialect_name != "postgresql":
# TODO: Is this right for non-postgres?
if dialect_name == "mssql":
constraints = get_mssql_table_constraints(conn, "task_instance")
pk, _ = constraints["PRIMARY KEY"].popitem()
batch_op.drop_constraint(pk, type_="primary")
            elif dialect_name not in ("sqlite",):
batch_op.drop_constraint("task_instance_pkey", type_="primary")
batch_op.drop_index("ti_dag_date")
batch_op.drop_index("ti_state_lkp")
batch_op.drop_column("execution_date")
# Then make it non-nullable
batch_op.alter_column(
"run_id", existing_type=string_id_col_type, existing_nullable=True, nullable=False
)
batch_op.alter_column(
"dag_id", existing_type=string_id_col_type, existing_nullable=True, nullable=False
)
batch_op.create_primary_key("task_instance_pkey", ["dag_id", "task_id", "run_id"])
batch_op.create_foreign_key(
"task_instance_dag_run_fkey",
"dag_run",
["dag_id", "run_id"],
["dag_id", "run_id"],
ondelete="CASCADE",
)
batch_op.create_index("ti_dag_run", ["dag_id", "run_id"])
batch_op.create_index("ti_state_lkp", ["dag_id", "task_id", "run_id", "state"])
if dialect_name == "postgresql":
batch_op.create_index("ti_dag_state", ["dag_id", "state"])
batch_op.create_index("ti_job_id", ["job_id"])
batch_op.create_index("ti_pool", ["pool", "state", "priority_weight"])
batch_op.create_index("ti_state", ["state"])
batch_op.create_foreign_key(
"task_instance_trigger_id_fkey", "trigger", ["trigger_id"], ["id"], ondelete="CASCADE"
)
batch_op.create_index("ti_trigger_id", ["trigger_id"])
with op.batch_alter_table("task_reschedule", schema=None) as batch_op:
batch_op.drop_column("execution_date")
batch_op.create_index(
"idx_task_reschedule_dag_task_run",
["dag_id", "task_id", "run_id"],
unique=False,
)
# _Now_ there is a unique constraint on the columns in TI we can re-create the FK from TaskReschedule
batch_op.create_foreign_key(
"task_reschedule_ti_fkey",
"task_instance",
["dag_id", "task_id", "run_id"],
["dag_id", "task_id", "run_id"],
ondelete="CASCADE",
)
# https://docs.microsoft.com/en-us/sql/relational-databases/errors-events/mssqlserver-1785-database-engine-error?view=sql-server-ver15
ondelete = "CASCADE" if dialect_name != "mssql" else "NO ACTION"
batch_op.create_foreign_key(
"task_reschedule_dr_fkey",
"dag_run",
["dag_id", "run_id"],
["dag_id", "run_id"],
ondelete=ondelete,
)
def downgrade():
"""Unapply Change ``TaskInstance`` and ``TaskReschedule`` tables from execution_date to run_id."""
dialect_name = op.get_bind().dialect.name
dt_type = TIMESTAMP
string_id_col_type = StringID()
op.add_column("task_instance", sa.Column("execution_date", dt_type, nullable=True))
op.add_column("task_reschedule", sa.Column("execution_date", dt_type, nullable=True))
update_query = _multi_table_update(dialect_name, task_instance, task_instance.c.execution_date)
op.execute(update_query)
update_query = _multi_table_update(dialect_name, task_reschedule, task_reschedule.c.execution_date)
op.execute(update_query)
with op.batch_alter_table("task_reschedule", schema=None) as batch_op:
batch_op.alter_column("execution_date", existing_type=dt_type, existing_nullable=True, nullable=False)
# Can't drop PK index while there is a FK referencing it
batch_op.drop_constraint("task_reschedule_ti_fkey", type_="foreignkey")
batch_op.drop_constraint("task_reschedule_dr_fkey", type_="foreignkey")
batch_op.drop_index("idx_task_reschedule_dag_task_run")
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.drop_constraint("task_instance_pkey", type_="primary")
batch_op.alter_column("execution_date", existing_type=dt_type, existing_nullable=True, nullable=False)
if dialect_name != "mssql":
batch_op.alter_column(
"dag_id", existing_type=string_id_col_type, existing_nullable=False, nullable=True
)
batch_op.create_primary_key("task_instance_pkey", ["dag_id", "task_id", "execution_date"])
batch_op.drop_constraint("task_instance_dag_run_fkey", type_="foreignkey")
batch_op.drop_index("ti_dag_run")
batch_op.drop_index("ti_state_lkp")
batch_op.create_index("ti_state_lkp", ["dag_id", "task_id", "execution_date", "state"])
batch_op.create_index("ti_dag_date", ["dag_id", "execution_date"], unique=False)
batch_op.drop_column("run_id")
with op.batch_alter_table("task_reschedule", schema=None) as batch_op:
batch_op.drop_column("run_id")
batch_op.create_index(
"idx_task_reschedule_dag_task_date",
["dag_id", "task_id", "execution_date"],
unique=False,
)
# Can only create FK once there is an index on these columns
batch_op.create_foreign_key(
"task_reschedule_dag_task_date_fkey",
"task_instance",
["dag_id", "task_id", "execution_date"],
["dag_id", "task_id", "execution_date"],
ondelete="CASCADE",
)
if dialect_name == "mysql":
batch_op.create_index(
"task_reschedule_dag_task_date_fkey", ["dag_id", "execution_date"], unique=False
)
if dialect_name == "mssql":
with op.batch_alter_table("dag_run", schema=None) as batch_op:
batch_op.drop_constraint("dag_run_dag_id_execution_date_key", type_="unique")
batch_op.drop_constraint("dag_run_dag_id_run_id_key", type_="unique")
batch_op.drop_index("dag_id_state")
batch_op.drop_index("idx_dag_run_running_dags")
batch_op.drop_index("idx_dag_run_queued_dags")
batch_op.drop_index("idx_dag_run_dag_id")
batch_op.alter_column("dag_id", existing_type=string_id_col_type, nullable=True)
batch_op.alter_column("execution_date", existing_type=dt_type, nullable=True)
batch_op.alter_column("run_id", existing_type=string_id_col_type, nullable=True)
batch_op.create_index("dag_id_state", ["dag_id", "state"], unique=False)
batch_op.create_index("idx_dag_run_dag_id", ["dag_id"])
batch_op.create_index(
"idx_dag_run_running_dags",
["state", "dag_id"],
mssql_where=sa.text("state='running'"),
)
batch_op.create_index(
"idx_dag_run_queued_dags",
["state", "dag_id"],
mssql_where=sa.text("state='queued'"),
)
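        # MSSQL unique constraints allow at most one NULL, so filtered unique
        # indexes are used instead to permit multiple NULL rows.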
op.execute(
"""CREATE UNIQUE NONCLUSTERED INDEX idx_not_null_dag_id_execution_date
ON dag_run(dag_id,execution_date)
WHERE dag_id IS NOT NULL and execution_date is not null"""
)
op.execute(
"""CREATE UNIQUE NONCLUSTERED INDEX idx_not_null_dag_id_run_id
ON dag_run(dag_id,run_id)
WHERE dag_id IS NOT NULL and run_id is not null"""
)
else:
with op.batch_alter_table("dag_run", schema=None) as batch_op:
batch_op.drop_index("dag_id_state")
batch_op.alter_column("run_id", existing_type=sa.VARCHAR(length=250), nullable=True)
batch_op.alter_column("execution_date", existing_type=dt_type, nullable=True)
batch_op.alter_column("dag_id", existing_type=sa.VARCHAR(length=250), nullable=True)
batch_op.create_index("dag_id_state", ["dag_id", "state"], unique=False)
def _multi_table_update(dialect_name, target, column):
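    """Build an UPDATE statement copying ``column`` from the matching ``dag_run`` row."""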
condition = dag_run.c.dag_id == target.c.dag_id
if column == target.c.run_id:
condition = and_(condition, dag_run.c.execution_date == target.c.execution_date)
else:
condition = and_(condition, dag_run.c.run_id == target.c.run_id)
if dialect_name == "sqlite":
# Most SQLite versions don't support multi table update (and SQLA doesn't know about it anyway), so we
# need to do a Correlated subquery update
sub_q = select(dag_run.c[column.name]).where(condition)
return target.update().values({column: sub_q})
else:
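        # Other dialects support multi-table UPDATE directly.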
return target.update().where(condition).values({column: dag_run.c[column.name]})
| 18,038 | 42.890511 | 142 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0044_1_10_7_add_serialized_dag_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``serialized_dag`` table
Revision ID: d38e04c12aa2
Revises: 6e96a59344a4
Create Date: 2019-08-01 14:39:35.616417
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import text
from sqlalchemy.dialects import mysql
from airflow.migrations.db_types import StringID
# revision identifiers, used by Alembic.
revision = "d38e04c12aa2"
down_revision = "6e96a59344a4"
branch_labels = None
depends_on = None
airflow_version = "1.10.7"
def upgrade():
"""Upgrade version."""
json_type = sa.JSON
conn = op.get_bind()
if conn.dialect.name != "postgresql":
# Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for
# versions, check for the function existing.
try:
conn.execute(text("SELECT JSON_VALID(1)")).fetchone()
except (sa.exc.OperationalError, sa.exc.ProgrammingError):
json_type = sa.Text
op.create_table(
"serialized_dag",
sa.Column("dag_id", StringID(), nullable=False),
sa.Column("fileloc", sa.String(length=2000), nullable=False),
sa.Column("fileloc_hash", sa.Integer(), nullable=False),
sa.Column("data", json_type(), nullable=False),
sa.Column("last_updated", sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint("dag_id"),
)
op.create_index("idx_fileloc_hash", "serialized_dag", ["fileloc_hash"])
if conn.dialect.name == "mysql":
conn.execute(text("SET time_zone = '+00:00'"))
cur = conn.execute(text("SELECT @@explicit_defaults_for_timestamp"))
res = cur.fetchall()
if res[0][0] == 0:
raise Exception("Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql")
op.alter_column(
table_name="serialized_dag",
column_name="last_updated",
type_=mysql.TIMESTAMP(fsp=6),
nullable=False,
)
else:
# sqlite and mssql datetime are fine as is. Therefore, not converting
if conn.dialect.name in ("sqlite", "mssql"):
return
# we try to be database agnostic, but not every db (e.g. sqlserver)
# supports per session time zones
if conn.dialect.name == "postgresql":
conn.execute(text("set timezone=UTC"))
op.alter_column(
table_name="serialized_dag",
column_name="last_updated",
type_=sa.TIMESTAMP(timezone=True),
)
def downgrade():
"""Downgrade version."""
op.drop_table("serialized_dag")
| 3,356 | 32.909091 | 107 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0025_1_8_2_add_ti_job_id_index.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Create index on ``job_id`` column in ``task_instance`` table
Revision ID: 947454bf1dff
Revises: bdaa763e6c56
Create Date: 2017-08-15 15:12:13.845074
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "947454bf1dff"
down_revision = "bdaa763e6c56"
branch_labels = None
depends_on = None
airflow_version = "1.8.2"
def upgrade():
op.create_index("ti_job_id", "task_instance", ["job_id"], unique=False)
def downgrade():
op.drop_index("ti_job_id", table_name="task_instance")
| 1,339 | 30.162791 | 75 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0005_1_5_2_job_id_indices.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add indices in ``job`` table
Revision ID: 52d714495f0
Revises: 338e90f54d61
Create Date: 2015-10-20 03:17:01.962542
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "52d714495f0"
down_revision = "338e90f54d61"
branch_labels = None
depends_on = None
airflow_version = "1.5.2"
def upgrade():
op.create_index("idx_job_state_heartbeat", "job", ["state", "latest_heartbeat"], unique=False)
def downgrade():
op.drop_index("idx_job_state_heartbeat", table_name="job")
| 1,332 | 30 | 98 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0108_2_3_0_default_dag_view_grid.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Update dag.default_view to grid.
Revision ID: b1b348e02d07
Revises: 75d5ed6c2b43
Create Date: 2022-04-19 17:25:00.872220
"""
from __future__ import annotations
from alembic import op
from sqlalchemy import String
from sqlalchemy.sql import column, table
# revision identifiers, used by Alembic.
revision = "b1b348e02d07"
down_revision = "75d5ed6c2b43"
branch_labels = None
depends_on = "75d5ed6c2b43"
airflow_version = "2.3.0"
dag = table("dag", column("default_view", String))
def upgrade():
op.execute(
dag.update()
.where(dag.c.default_view == op.inline_literal("tree"))
.values({"default_view": op.inline_literal("grid")})
)
def downgrade():
op.execute(
dag.update()
.where(dag.c.default_view == op.inline_literal("grid"))
.values({"default_view": op.inline_literal("tree")})
)
| 1,646 | 28.410714 | 63 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0016_1_7_1_add_ti_state_index.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add TI state index
Revision ID: 211e584da130
Revises: 2e82aab8ef20
Create Date: 2016-06-30 10:54:24.323588
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "211e584da130"
down_revision = "2e82aab8ef20"
branch_labels = None
depends_on = None
airflow_version = "1.7.1.3"
def upgrade():
op.create_index("ti_state", "task_instance", ["state"], unique=False)
def downgrade():
op.drop_index("ti_state", table_name="task_instance")
| 1,296 | 29.162791 | 73 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0099_2_3_0_add_task_log_filename_template_model.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``LogTemplate`` table to track changes to config values ``log_filename_template``
Revision ID: f9da662e7089
Revises: 786e3737b18f
Create Date: 2021-12-09 06:11:21.044940
"""
from __future__ import annotations
from alembic import op
from sqlalchemy import Column, ForeignKey, Integer, Text
from airflow.migrations.utils import disable_sqlite_fkeys
from airflow.utils.sqlalchemy import UtcDateTime
# Revision identifiers, used by Alembic.
revision = "f9da662e7089"
down_revision = "786e3737b18f"
branch_labels = None
depends_on = None
airflow_version = "2.3.0"
def upgrade():
"""Add model for task log template and establish fk on task instance."""
op.create_table(
"log_template",
Column("id", Integer, primary_key=True, autoincrement=True),
Column("filename", Text, nullable=False),
Column("elasticsearch_id", Text, nullable=False),
Column("created_at", UtcDateTime, nullable=False),
)
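    # Each DagRun records the log template in effect when it was created, so log
    # paths stay stable even if the configured template changes later.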
dag_run_log_filename_id = Column(
"log_template_id",
Integer,
ForeignKey("log_template.id", name="task_instance_log_template_id_fkey", ondelete="NO ACTION"),
)
with disable_sqlite_fkeys(op), op.batch_alter_table("dag_run") as batch_op:
batch_op.add_column(dag_run_log_filename_id)
def downgrade():
"""Remove fk on task instance and model for task log filename template."""
with op.batch_alter_table("dag_run") as batch_op:
batch_op.drop_constraint("task_instance_log_template_id_fkey", type_="foreignkey")
batch_op.drop_column("log_template_id")
op.drop_table("log_template")
| 2,389 | 35.769231 | 103 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0101_2_3_0_add_data_compressed_to_serialized_dag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add data_compressed to serialized_dag
Revision ID: a3bcd0914482
Revises: e655c0453f75
Create Date: 2022-02-03 22:40:59.841119
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "a3bcd0914482"
down_revision = "e655c0453f75"
branch_labels = None
depends_on = None
airflow_version = "2.3.0"
def upgrade():
with op.batch_alter_table("serialized_dag") as batch_op:
batch_op.alter_column("data", existing_type=sa.JSON, nullable=True)
batch_op.add_column(sa.Column("data_compressed", sa.LargeBinary, nullable=True))
def downgrade():
with op.batch_alter_table("serialized_dag") as batch_op:
batch_op.alter_column("data", existing_type=sa.JSON, nullable=False)
batch_op.drop_column("data_compressed")
| 1,617 | 32.708333 | 88 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0027_1_10_0_add_time_zone_awareness.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add time zone awareness
Revision ID: 0e2a74e0fc9f
Revises: d2ae31099d61
Create Date: 2017-11-10 22:22:31.326152
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import text
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "0e2a74e0fc9f"
down_revision = "d2ae31099d61"
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
def upgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
conn.execute(text("SET time_zone = '+00:00'"))
cur = conn.execute(text("SELECT @@explicit_defaults_for_timestamp"))
res = cur.fetchall()
if res[0][0] == 0:
raise Exception("Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql")
op.alter_column(
table_name="chart",
column_name="last_modified",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(
table_name="dag",
column_name="last_scheduler_run",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(table_name="dag", column_name="last_pickled", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(table_name="dag", column_name="last_expired", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(
table_name="dag_pickle",
column_name="created_dttm",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(
table_name="dag_run",
column_name="execution_date",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(table_name="dag_run", column_name="start_date", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(table_name="dag_run", column_name="end_date", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(
table_name="import_error",
column_name="timestamp",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(table_name="job", column_name="start_date", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(table_name="job", column_name="end_date", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(
table_name="job",
column_name="latest_heartbeat",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(table_name="log", column_name="dttm", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(table_name="log", column_name="execution_date", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(
table_name="sla_miss",
column_name="execution_date",
type_=mysql.TIMESTAMP(fsp=6),
nullable=False,
)
op.alter_column(table_name="sla_miss", column_name="timestamp", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(
table_name="task_fail",
column_name="execution_date",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(
table_name="task_fail",
column_name="start_date",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(table_name="task_fail", column_name="end_date", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(
table_name="task_instance",
column_name="execution_date",
type_=mysql.TIMESTAMP(fsp=6),
nullable=False,
)
op.alter_column(
table_name="task_instance",
column_name="start_date",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(
table_name="task_instance",
column_name="end_date",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(
table_name="task_instance",
column_name="queued_dttm",
type_=mysql.TIMESTAMP(fsp=6),
)
op.alter_column(table_name="xcom", column_name="timestamp", type_=mysql.TIMESTAMP(fsp=6))
op.alter_column(
table_name="xcom",
column_name="execution_date",
type_=mysql.TIMESTAMP(fsp=6),
)
else:
# sqlite and mssql datetime are fine as is. Therefore, not converting
if conn.dialect.name in ("sqlite", "mssql"):
return
# we try to be database agnostic, but not every db (e.g. sqlserver)
# supports per session time zones
if conn.dialect.name == "postgresql":
conn.execute(text("set timezone=UTC"))
op.alter_column(
table_name="chart",
column_name="last_modified",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="dag",
column_name="last_scheduler_run",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="dag",
column_name="last_pickled",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="dag",
column_name="last_expired",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="dag_pickle",
column_name="created_dttm",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="dag_run",
column_name="execution_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="dag_run",
column_name="start_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="dag_run",
column_name="end_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="import_error",
column_name="timestamp",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="job",
column_name="start_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(table_name="job", column_name="end_date", type_=sa.TIMESTAMP(timezone=True))
op.alter_column(
table_name="job",
column_name="latest_heartbeat",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(table_name="log", column_name="dttm", type_=sa.TIMESTAMP(timezone=True))
op.alter_column(
table_name="log",
column_name="execution_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="sla_miss",
column_name="execution_date",
type_=sa.TIMESTAMP(timezone=True),
nullable=False,
)
op.alter_column(
table_name="sla_miss",
column_name="timestamp",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="task_fail",
column_name="execution_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="task_fail",
column_name="start_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="task_fail",
column_name="end_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="task_instance",
column_name="execution_date",
type_=sa.TIMESTAMP(timezone=True),
nullable=False,
)
op.alter_column(
table_name="task_instance",
column_name="start_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="task_instance",
column_name="end_date",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="task_instance",
column_name="queued_dttm",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="xcom",
column_name="timestamp",
type_=sa.TIMESTAMP(timezone=True),
)
op.alter_column(
table_name="xcom",
column_name="execution_date",
type_=sa.TIMESTAMP(timezone=True),
)
def downgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
conn.execute(text("SET time_zone = '+00:00'"))
op.alter_column(table_name="chart", column_name="last_modified", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="dag",
column_name="last_scheduler_run",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(table_name="dag", column_name="last_pickled", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag", column_name="last_expired", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="dag_pickle",
column_name="created_dttm",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(
table_name="dag_run",
column_name="execution_date",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(table_name="dag_run", column_name="start_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="dag_run", column_name="end_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="import_error",
column_name="timestamp",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(table_name="job", column_name="start_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="job", column_name="end_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="job",
column_name="latest_heartbeat",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(table_name="log", column_name="dttm", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="log", column_name="execution_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="sla_miss",
column_name="execution_date",
type_=mysql.DATETIME(fsp=6),
nullable=False,
)
op.alter_column(table_name="sla_miss", column_name="timestamp", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="task_fail",
column_name="execution_date",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(
table_name="task_fail",
column_name="start_date",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(table_name="task_fail", column_name="end_date", type_=mysql.DATETIME(fsp=6))
op.alter_column(
table_name="task_instance",
column_name="execution_date",
type_=mysql.DATETIME(fsp=6),
nullable=False,
)
op.alter_column(
table_name="task_instance",
column_name="start_date",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(
table_name="task_instance",
column_name="end_date",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(
table_name="task_instance",
column_name="queued_dttm",
type_=mysql.DATETIME(fsp=6),
)
op.alter_column(table_name="xcom", column_name="timestamp", type_=mysql.DATETIME(fsp=6))
op.alter_column(table_name="xcom", column_name="execution_date", type_=mysql.DATETIME(fsp=6))
else:
if conn.dialect.name in ("sqlite", "mssql"):
return
# we try to be database agnostic, but not every db (e.g. sqlserver)
# supports per session time zones
if conn.dialect.name == "postgresql":
conn.execute(text("set timezone=UTC"))
op.alter_column(table_name="chart", column_name="last_modified", type_=sa.DateTime())
op.alter_column(table_name="dag", column_name="last_scheduler_run", type_=sa.DateTime())
op.alter_column(table_name="dag", column_name="last_pickled", type_=sa.DateTime())
op.alter_column(table_name="dag", column_name="last_expired", type_=sa.DateTime())
op.alter_column(table_name="dag_pickle", column_name="created_dttm", type_=sa.DateTime())
op.alter_column(table_name="dag_run", column_name="execution_date", type_=sa.DateTime())
op.alter_column(table_name="dag_run", column_name="start_date", type_=sa.DateTime())
op.alter_column(table_name="dag_run", column_name="end_date", type_=sa.DateTime())
op.alter_column(table_name="import_error", column_name="timestamp", type_=sa.DateTime())
op.alter_column(table_name="job", column_name="start_date", type_=sa.DateTime())
op.alter_column(table_name="job", column_name="end_date", type_=sa.DateTime())
op.alter_column(table_name="job", column_name="latest_heartbeat", type_=sa.DateTime())
op.alter_column(table_name="log", column_name="dttm", type_=sa.DateTime())
op.alter_column(table_name="log", column_name="execution_date", type_=sa.DateTime())
op.alter_column(
table_name="sla_miss",
column_name="execution_date",
type_=sa.DateTime(),
nullable=False,
)
op.alter_column(table_name="sla_miss", column_name="timestamp", type_=sa.DateTime())
op.alter_column(table_name="task_fail", column_name="execution_date", type_=sa.DateTime())
op.alter_column(table_name="task_fail", column_name="start_date", type_=sa.DateTime())
op.alter_column(table_name="task_fail", column_name="end_date", type_=sa.DateTime())
op.alter_column(
table_name="task_instance",
column_name="execution_date",
type_=sa.DateTime(),
nullable=False,
)
op.alter_column(table_name="task_instance", column_name="start_date", type_=sa.DateTime())
op.alter_column(table_name="task_instance", column_name="end_date", type_=sa.DateTime())
op.alter_column(table_name="task_instance", column_name="queued_dttm", type_=sa.DateTime())
op.alter_column(table_name="xcom", column_name="timestamp", type_=sa.DateTime())
op.alter_column(table_name="xcom", column_name="execution_date", type_=sa.DateTime())
| 15,269 | 34.929412 | 107 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0119_2_4_3_add_case_insensitive_unique_constraint_for_username.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add case-insensitive unique constraint for username
Revision ID: e07f49787c9d
Revises: b0d31815b5a6
Create Date: 2022-10-25 17:29:46.432326
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "e07f49787c9d"
down_revision = "b0d31815b5a6"
branch_labels = None
depends_on = None
airflow_version = "2.4.3"
def upgrade():
"""Apply Add case-insensitive unique constraint"""
conn = op.get_bind()
if conn.dialect.name == "postgresql":
op.create_index("idx_ab_user_username", "ab_user", [sa.text("LOWER(username)")], unique=True)
op.create_index(
"idx_ab_register_user_username", "ab_register_user", [sa.text("LOWER(username)")], unique=True
)
elif conn.dialect.name == "sqlite":
with op.batch_alter_table("ab_user") as batch_op:
batch_op.alter_column(
"username",
existing_type=sa.String(64),
                type_=sa.String(64, collation="NOCASE"),
unique=True,
nullable=False,
)
with op.batch_alter_table("ab_register_user") as batch_op:
batch_op.alter_column(
"username",
existing_type=sa.String(64),
                type_=sa.String(64, collation="NOCASE"),
unique=True,
nullable=False,
)
def downgrade():
"""Unapply Add case-insensitive unique constraint"""
conn = op.get_bind()
if conn.dialect.name == "postgresql":
op.drop_index("idx_ab_user_username", table_name="ab_user")
op.drop_index("idx_ab_register_user_username", table_name="ab_register_user")
elif conn.dialect.name == "sqlite":
with op.batch_alter_table("ab_user") as batch_op:
batch_op.alter_column(
"username",
existing_type=sa.String(64, collation="NOCASE"),
                type_=sa.String(64),
unique=True,
nullable=False,
)
with op.batch_alter_table("ab_register_user") as batch_op:
batch_op.alter_column(
"username",
existing_type=sa.String(64, collation="NOCASE"),
                type_=sa.String(64),
unique=True,
nullable=False,
)
| 3,155 | 34.066667 | 106 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0106_2_3_0_update_migration_for_fab_tables_to_add_missing_constraints.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Update migration for FAB tables to add missing constraints
Revision ID: 909884dea523
Revises: 48925b2719cb
Create Date: 2022-03-21 08:33:01.635688
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.utils import get_mssql_table_constraints
# revision identifiers, used by Alembic.
revision = "909884dea523"
down_revision = "48925b2719cb"
branch_labels = None
depends_on = None
airflow_version = "2.3.0"
def upgrade():
"""Apply Update migration for FAB tables to add missing constraints"""
conn = op.get_bind()
if conn.dialect.name == "sqlite":
op.execute("PRAGMA foreign_keys=OFF")
with op.batch_alter_table("ab_view_menu", schema=None) as batch_op:
batch_op.create_unique_constraint(batch_op.f("ab_view_menu_name_uq"), ["name"])
op.execute("PRAGMA foreign_keys=ON")
elif conn.dialect.name == "mysql":
with op.batch_alter_table("ab_register_user", schema=None) as batch_op:
batch_op.alter_column("username", existing_type=sa.String(256), nullable=False)
batch_op.alter_column("email", existing_type=sa.String(256), nullable=False)
with op.batch_alter_table("ab_user", schema=None) as batch_op:
batch_op.alter_column("username", existing_type=sa.String(256), nullable=False)
batch_op.alter_column("email", existing_type=sa.String(256), nullable=False)
elif conn.dialect.name == "mssql":
with op.batch_alter_table("ab_register_user") as batch_op:
# Drop the unique constraint on username and email
constraints = get_mssql_table_constraints(conn, "ab_register_user")
for k, _ in constraints.get("UNIQUE").items():
batch_op.drop_constraint(k, type_="unique")
batch_op.alter_column("username", existing_type=sa.String(256), nullable=False)
batch_op.create_unique_constraint(None, ["username"])
batch_op.alter_column("email", existing_type=sa.String(256), nullable=False)
with op.batch_alter_table("ab_user") as batch_op:
# Drop the unique constraint on username and email
constraints = get_mssql_table_constraints(conn, "ab_user")
for k, _ in constraints.get("UNIQUE").items():
batch_op.drop_constraint(k, type_="unique")
batch_op.alter_column("username", existing_type=sa.String(256), nullable=False)
batch_op.create_unique_constraint(None, ["username"])
batch_op.alter_column("email", existing_type=sa.String(256), nullable=False)
batch_op.create_unique_constraint(None, ["email"])
def downgrade():
"""Unapply Update migration for FAB tables to add missing constraints"""
conn = op.get_bind()
if conn.dialect.name == "sqlite":
op.execute("PRAGMA foreign_keys=OFF")
with op.batch_alter_table("ab_view_menu", schema=None) as batch_op:
batch_op.drop_constraint("ab_view_menu_name_uq", type_="unique")
op.execute("PRAGMA foreign_keys=ON")
elif conn.dialect.name == "mysql":
with op.batch_alter_table("ab_user", schema=None) as batch_op:
batch_op.alter_column("email", existing_type=sa.String(256), nullable=True)
batch_op.alter_column("username", existing_type=sa.String(256), nullable=True, unique=True)
with op.batch_alter_table("ab_register_user", schema=None) as batch_op:
batch_op.alter_column("email", existing_type=sa.String(256), nullable=True)
batch_op.alter_column("username", existing_type=sa.String(256), nullable=True, unique=True)
elif conn.dialect.name == "mssql":
with op.batch_alter_table("ab_register_user") as batch_op:
# Drop the unique constraint on username and email
constraints = get_mssql_table_constraints(conn, "ab_register_user")
for k, _ in constraints.get("UNIQUE").items():
batch_op.drop_constraint(k, type_="unique")
batch_op.alter_column("username", existing_type=sa.String(256), nullable=False, unique=True)
batch_op.create_unique_constraint(None, ["username"])
batch_op.alter_column("email", existing_type=sa.String(256), nullable=False, unique=True)
with op.batch_alter_table("ab_user") as batch_op:
# Drop the unique constraint on username and email
constraints = get_mssql_table_constraints(conn, "ab_user")
for k, _ in constraints.get("UNIQUE").items():
batch_op.drop_constraint(k, type_="unique")
batch_op.alter_column("username", existing_type=sa.String(256), nullable=True)
batch_op.create_unique_constraint(None, ["username"])
batch_op.alter_column("email", existing_type=sa.String(256), nullable=True, unique=True)
batch_op.create_unique_constraint(None, ["email"])
| 5,708 | 51.861111 | 104 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0115_2_4_0_remove_smart_sensors.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Remove smart sensors
Revision ID: f4ff391becb5
Revises: 0038cd0c28b4
Create Date: 2022-08-03 11:33:44.777945
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import func
from sqlalchemy.sql import column, table
from airflow.migrations.db_types import TIMESTAMP, StringID
# revision identifiers, used by Alembic.
revision = "f4ff391becb5"
down_revision = "0038cd0c28b4"
branch_labels = None
depends_on = None
airflow_version = "2.4.0"
def upgrade():
"""Apply Remove smart sensors"""
op.drop_table("sensor_instance")
"""Minimal model definition for migrations"""
task_instance = table("task_instance", column("state", sa.String))
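    # The "sensing" state no longer exists; mark any tasks still in it as failed.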
op.execute(task_instance.update().where(task_instance.c.state == "sensing").values({"state": "failed"}))
def downgrade():
"""Unapply Remove smart sensors"""
op.create_table(
"sensor_instance",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("task_id", StringID(), nullable=False),
sa.Column("dag_id", StringID(), nullable=False),
sa.Column("execution_date", TIMESTAMP, nullable=False),
sa.Column("state", sa.String(length=20), nullable=True),
sa.Column("try_number", sa.Integer(), nullable=True),
sa.Column("start_date", TIMESTAMP, nullable=True),
sa.Column("operator", sa.String(length=1000), nullable=False),
sa.Column("op_classpath", sa.String(length=1000), nullable=False),
sa.Column("hashcode", sa.BigInteger(), nullable=False),
sa.Column("shardcode", sa.Integer(), nullable=False),
sa.Column("poke_context", sa.Text(), nullable=False),
sa.Column("execution_context", sa.Text(), nullable=True),
sa.Column("created_at", TIMESTAMP, default=func.now, nullable=False),
sa.Column("updated_at", TIMESTAMP, default=func.now, nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_index("ti_primary_key", "sensor_instance", ["dag_id", "task_id", "execution_date"], unique=True)
op.create_index("si_hashcode", "sensor_instance", ["hashcode"], unique=False)
op.create_index("si_shardcode", "sensor_instance", ["shardcode"], unique=False)
op.create_index("si_state_shard", "sensor_instance", ["state", "shardcode"], unique=False)
op.create_index("si_updated_at", "sensor_instance", ["updated_at"], unique=False)
| 3,189 | 40.428571 | 110 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0038_1_10_2_add_sm_dag_index.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Merge migrations Heads.
Revision ID: 03bc53e68815
Revises: 0a2a5b66e19d, bf00311e1990
Create Date: 2018-11-24 20:21:46.605414
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "03bc53e68815"
down_revision = ("0a2a5b66e19d", "bf00311e1990")
branch_labels = None
depends_on = None
airflow_version = "1.10.2"
def upgrade():
op.create_index("sm_dag", "sla_miss", ["dag_id"], unique=False)
def downgrade():
op.drop_index("sm_dag", table_name="sla_miss")
| 1,317 | 30.380952 | 67 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0078_2_0_1_remove_can_read_permission_on_config_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Remove ``can_read`` permission on config resource for ``User`` and ``Viewer`` role
Revision ID: 82b7c48c147f
Revises: e959f08ac86c
Create Date: 2021-02-04 12:45:58.138224
"""
from __future__ import annotations
import logging
from airflow.security import permissions
from airflow.www.app import cached_app
# revision identifiers, used by Alembic.
revision = "82b7c48c147f"
down_revision = "e959f08ac86c"
branch_labels = None
depends_on = None
airflow_version = "2.0.1"
def upgrade():
"""Remove can_read action from config resource for User and Viewer role"""
log = logging.getLogger()
handlers = log.handlers[:]
appbuilder = cached_app(config={"FAB_UPDATE_PERMS": False}).appbuilder
roles_to_modify = [role for role in appbuilder.sm.get_all_roles() if role.name in ["User", "Viewer"]]
can_read_on_config_perm = appbuilder.sm.get_permission(
permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG
)
for role in roles_to_modify:
if appbuilder.sm.permission_exists_in_one_or_more_roles(
permissions.RESOURCE_CONFIG, permissions.ACTION_CAN_READ, [role.id]
):
appbuilder.sm.remove_permission_from_role(role, can_read_on_config_perm)
log.handlers = handlers
def downgrade():
"""Add can_read action on config resource for User and Viewer role"""
appbuilder = cached_app(config={"FAB_UPDATE_PERMS": False}).appbuilder
roles_to_modify = [role for role in appbuilder.sm.get_all_roles() if role.name in ["User", "Viewer"]]
can_read_on_config_perm = appbuilder.sm.get_permission(
permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG
)
for role in roles_to_modify:
if not appbuilder.sm.permission_exists_in_one_or_more_roles(
permissions.RESOURCE_CONFIG, permissions.ACTION_CAN_READ, [role.id]
):
appbuilder.sm.add_permission_to_role(role, can_read_on_config_perm)
| 2,716 | 36.219178 | 105 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0092_2_2_0_add_data_interval_start_end_to_dagmodel_and_dagrun.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add data_interval_[start|end] to DagModel and DagRun.
Revision ID: 142555e44c17
Revises: 54bebd308c5f
Create Date: 2021-06-09 08:28:02.089817
"""
from __future__ import annotations
from alembic import op
from sqlalchemy import Column
from airflow.migrations.db_types import TIMESTAMP
# Revision identifiers, used by Alembic.
revision = "142555e44c17"
down_revision = "54bebd308c5f"
branch_labels = None
depends_on = None
airflow_version = "2.2.0"
def upgrade():
"""Apply data_interval fields to DagModel and DagRun."""
with op.batch_alter_table("dag_run") as batch_op:
batch_op.add_column(Column("data_interval_start", TIMESTAMP))
batch_op.add_column(Column("data_interval_end", TIMESTAMP))
with op.batch_alter_table("dag") as batch_op:
batch_op.add_column(Column("next_dagrun_data_interval_start", TIMESTAMP))
batch_op.add_column(Column("next_dagrun_data_interval_end", TIMESTAMP))
def downgrade():
"""Unapply data_interval fields to DagModel and DagRun."""
with op.batch_alter_table("dag_run") as batch_op:
batch_op.drop_column("data_interval_start")
batch_op.drop_column("data_interval_end")
with op.batch_alter_table("dag") as batch_op:
batch_op.drop_column("next_dagrun_data_interval_start")
batch_op.drop_column("next_dagrun_data_interval_end")
| 2,138 | 35.87931 | 81 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0009_1_6_0_dagrun_config.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``conf`` column in ``dag_run`` table
Revision ID: 40e67319e3a9
Revises: 2e541a1dcfed
Create Date: 2015-10-29 08:36:31.726728
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "40e67319e3a9"
down_revision = "2e541a1dcfed"
branch_labels = None
depends_on = None
airflow_version = "1.6.0"
def upgrade():
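    # PickleType stores the run's conf dict pickled into a binary column.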
op.add_column("dag_run", sa.Column("conf", sa.PickleType(), nullable=True))
def downgrade():
op.drop_column("dag_run", "conf")
| 1,326 | 29.159091 | 79 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0112_2_4_0_add_dagwarning_model.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add DagWarning model
Revision ID: 424117c37d18
Revises: f5fcbda3e651
Create Date: 2022-04-27 15:57:36.736743
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import TIMESTAMP, StringID
# revision identifiers, used by Alembic.
revision = "424117c37d18"
down_revision = "f5fcbda3e651"
branch_labels = None
depends_on = None
airflow_version = "2.4.0"
def upgrade():
"""Apply Add DagWarning model"""
op.create_table(
"dag_warning",
sa.Column("dag_id", StringID(), primary_key=True),
sa.Column("warning_type", sa.String(length=50), primary_key=True),
sa.Column("message", sa.Text(), nullable=False),
sa.Column("timestamp", TIMESTAMP, nullable=False),
sa.ForeignKeyConstraint(
("dag_id",),
["dag.dag_id"],
name="dcw_dag_id_fkey",
ondelete="CASCADE",
),
)
def downgrade():
"""Unapply Add DagWarning model"""
op.drop_table("dag_warning")
| 1,824 | 28.918033 | 74 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0103_2_3_0_add_callback_request_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add callback request table
Revision ID: c97c2ab6aa23
Revises: c306b5b5ae4a
Create Date: 2022-01-28 21:11:11.857010
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import func
from airflow.migrations.db_types import TIMESTAMP
from airflow.utils.sqlalchemy import ExtendedJSON
# revision identifiers, used by Alembic.
revision = "c97c2ab6aa23"
down_revision = "c306b5b5ae4a"
branch_labels = None
depends_on = None
airflow_version = "2.3.0"
TABLE_NAME = "callback_request"
def upgrade():
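    # callback_data holds the serialized callback request; priority_weight is
    # used to order pending callbacks when they are picked up.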
op.create_table(
TABLE_NAME,
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("created_at", TIMESTAMP, default=func.now, nullable=False),
sa.Column("priority_weight", sa.Integer(), default=1, nullable=False),
sa.Column("callback_data", ExtendedJSON, nullable=False),
sa.Column("callback_type", sa.String(20), nullable=False),
sa.Column("dag_directory", sa.String(length=1000), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
def downgrade():
op.drop_table(TABLE_NAME)
| 1,910 | 31.389831 | 78 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0026_1_8_2_increase_text_size_for_mysql.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase text size for MySQL (not relevant for other DBs' text types)
Revision ID: d2ae31099d61
Revises: 947454bf1dff
Create Date: 2017-08-18 17:07:16.686130
"""
from __future__ import annotations
from alembic import op
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = "d2ae31099d61"
down_revision = "947454bf1dff"
branch_labels = None
depends_on = None
airflow_version = "1.8.2"
def upgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
op.alter_column(table_name="variable", column_name="val", type_=mysql.MEDIUMTEXT)
def downgrade():
conn = op.get_bind()
if conn.dialect.name == "mysql":
op.alter_column(table_name="variable", column_name="val", type_=mysql.TEXT)
| 1,549 | 31.291667 | 89 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0084_2_1_0_resource_based_permissions_for_default_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Resource based permissions for default ``Flask-AppBuilder`` views
Revision ID: a13f7613ad25
Revises: e165e7455d70
Create Date: 2021-03-20 21:23:05.793378
"""
from __future__ import annotations
import logging
from airflow.security import permissions
from airflow.www.app import cached_app
# revision identifiers, used by Alembic.
revision = "a13f7613ad25"
down_revision = "e165e7455d70"
branch_labels = None
depends_on = None
airflow_version = "2.1.0"
mapping = {
("PermissionModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_ACTION),
],
("PermissionViewModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_PERMISSION),
],
("ResetMyPasswordView", "can_this_form_get"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PASSWORD),
],
("ResetMyPasswordView", "can_this_form_post"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_MY_PASSWORD),
],
("ResetPasswordView", "can_this_form_get"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_PASSWORD),
],
("ResetPasswordView", "can_this_form_post"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_PASSWORD),
],
("RoleModelView", "can_delete"): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_ROLE),
],
("RoleModelView", "can_download"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_ROLE),
],
("RoleModelView", "can_show"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_ROLE),
],
("RoleModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_ROLE),
],
("RoleModelView", "can_edit"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_ROLE),
],
("RoleModelView", "can_add"): [
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_ROLE),
],
("RoleModelView", "can_copyrole"): [
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_ROLE),
],
("ViewMenuModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_RESOURCE),
],
("UserDBModelView", "can_add"): [
(permissions.ACTION_CAN_CREATE, permissions.RESOURCE_RESOURCE),
],
("UserDBModelView", "can_userinfo"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PROFILE),
],
("UserDBModelView", "can_download"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_RESOURCE),
],
("UserDBModelView", "can_show"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_RESOURCE),
],
("UserDBModelView", "can_list"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_RESOURCE),
],
("UserDBModelView", "can_edit"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_RESOURCE),
],
("UserDBModelView", "resetmypassword"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PASSWORD),
],
("UserDBModelView", "resetpasswords"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_PASSWORD),
],
("UserDBModelView", "userinfoedit"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_MY_PROFILE),
],
("UserDBModelView", "can_delete"): [
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_RESOURCE),
],
("UserInfoEditView", "can_this_form_get"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PROFILE),
],
("UserInfoEditView", "can_this_form_post"): [
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_MY_PROFILE),
],
("UserStatsChartView", "can_chart"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_USER_STATS_CHART),
],
("UserLDAPModelView", "can_userinfo"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PROFILE),
],
("UserOAuthModelView", "can_userinfo"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PROFILE),
],
("UserOIDModelView", "can_userinfo"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PROFILE),
],
("UserRemoteUserModelView", "can_userinfo"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_MY_PROFILE),
],
("DagRunModelView", "can_clear"): [
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_DELETE, permissions.RESOURCE_TASK_INSTANCE),
],
}
def remap_permissions():
"""Apply Map Airflow permissions."""
appbuilder = cached_app(config={"FAB_UPDATE_PERMS": False}).appbuilder
for old, new in mapping.items():
(old_resource_name, old_action_name) = old
old_permission = appbuilder.sm.get_permission(old_action_name, old_resource_name)
if not old_permission:
continue
for new_action_name, new_resource_name in new:
new_permission = appbuilder.sm.create_permission(new_action_name, new_resource_name)
for role in appbuilder.sm.get_all_roles():
if appbuilder.sm.permission_exists_in_one_or_more_roles(
old_resource_name, old_action_name, [role.id]
):
appbuilder.sm.add_permission_to_role(role, new_permission)
appbuilder.sm.remove_permission_from_role(role, old_permission)
appbuilder.sm.delete_permission(old_action_name, old_resource_name)
if not appbuilder.sm.get_action(old_action_name):
continue
resources = appbuilder.sm.get_all_resources()
if not any(appbuilder.sm.get_permission(old_action_name, resource.name) for resource in resources):
appbuilder.sm.delete_action(old_action_name)
def undo_remap_permissions():
"""Unapply Map Airflow permissions"""
appbuilder = cached_app(config={"FAB_UPDATE_PERMS": False}).appbuilder
for old, new in mapping.items():
        (new_action_name, new_resource_name) = new[0]
        new_permission = appbuilder.sm.get_permission(new_action_name, new_resource_name)
        if not new_permission:
            continue
        (old_resource_name, old_action_name) = old
        old_permission = appbuilder.sm.create_permission(old_action_name, old_resource_name)
        for role in appbuilder.sm.get_all_roles():
            if appbuilder.sm.permission_exists_in_one_or_more_roles(
                new_resource_name, new_action_name, [role.id]
            ):
                appbuilder.sm.add_permission_to_role(role, old_permission)
                appbuilder.sm.remove_permission_from_role(role, new_permission)
appbuilder.sm.delete_permission(new_action_name, new_resource_name)
if not appbuilder.sm.get_action(new_action_name):
continue
resources = appbuilder.sm.get_all_resources()
if not any(appbuilder.sm.get_permission(new_action_name, resource.name) for resource in resources):
appbuilder.sm.delete_action(new_action_name)
def upgrade():
"""Apply Resource based permissions."""
log = logging.getLogger()
handlers = log.handlers[:]
remap_permissions()
log.handlers = handlers
def downgrade():
"""Unapply Resource based permissions."""
log = logging.getLogger()
handlers = log.handlers[:]
undo_remap_permissions()
log.handlers = handlers
| 8,120 | 38.614634 | 107 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0033_1_10_0_fix_sqlite_foreign_key.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Fix Sqlite foreign key
Revision ID: 856955da8476
Revises: f23433877c24
Create Date: 2018-06-17 15:54:53.844230
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "856955da8476"
down_revision = "f23433877c24"
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
def upgrade():
"""Fix broken foreign-key constraint for existing SQLite DBs."""
conn = op.get_bind()
if conn.dialect.name == "sqlite":
# Fix broken foreign-key constraint for existing SQLite DBs.
#
# Re-define tables and use copy_from to avoid reflection
# which would fail because referenced user table doesn't exist.
#
# Use batch_alter_table to support SQLite workaround.
chart_table = sa.Table(
"chart",
sa.MetaData(),
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("label", sa.String(length=200), nullable=True),
sa.Column("conn_id", sa.String(length=250), nullable=False),
sa.Column("user_id", sa.Integer(), nullable=True),
sa.Column("chart_type", sa.String(length=100), nullable=True),
sa.Column("sql_layout", sa.String(length=50), nullable=True),
sa.Column("sql", sa.Text(), nullable=True),
sa.Column("y_log_scale", sa.Boolean(), nullable=True),
sa.Column("show_datatable", sa.Boolean(), nullable=True),
sa.Column("show_sql", sa.Boolean(), nullable=True),
sa.Column("height", sa.Integer(), nullable=True),
sa.Column("default_params", sa.String(length=5000), nullable=True),
sa.Column("x_is_date", sa.Boolean(), nullable=True),
sa.Column("iteration_no", sa.Integer(), nullable=True),
sa.Column("last_modified", sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
with op.batch_alter_table("chart", copy_from=chart_table) as batch_op:
batch_op.create_foreign_key("chart_user_id_fkey", "users", ["user_id"], ["id"])
def downgrade():
# Downgrade would fail because the broken FK constraint can't be re-created.
pass
| 3,019 | 39.810811 | 91 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0015_1_7_1_rename_user_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Rename user table
Revision ID: 2e82aab8ef20
Revises: 1968acfc09e3
Create Date: 2016-04-02 19:28:15.211915
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "2e82aab8ef20"
down_revision = "1968acfc09e3"
branch_labels = None
depends_on = None
airflow_version = "1.7.1"
def upgrade():
op.rename_table("user", "users")
def downgrade():
op.rename_table("users", "user")
| 1,235 | 27.744186 | 62 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0003_1_5_0_for_compatibility.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Maintain history for compatibility with earlier migrations
Revision ID: 13eb55f81627
Revises: 1507a7289a2f
Create Date: 2015-08-23 05:12:49.732174
"""
from __future__ import annotations
# revision identifiers, used by Alembic.
revision = "13eb55f81627"
down_revision = "1507a7289a2f"
branch_labels = None
depends_on = None
airflow_version = "1.5.0"
def upgrade():
pass
def downgrade():
pass
| 1,196 | 28.195122 | 62 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0120_2_5_0_add_updated_at_to_dagrun_and_ti.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add updated_at column to DagRun and TaskInstance
Revision ID: ee8d93fcc81e
Revises: e07f49787c9d
Create Date: 2022-09-08 19:08:37.623121
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import TIMESTAMP
# revision identifiers, used by Alembic.
revision = "ee8d93fcc81e"
down_revision = "e07f49787c9d"
branch_labels = None
depends_on = None
airflow_version = "2.5.0"
def upgrade():
"""Apply add updated_at column to DagRun and TaskInstance"""
with op.batch_alter_table("task_instance") as batch_op:
batch_op.add_column(sa.Column("updated_at", TIMESTAMP, default=sa.func.now))
with op.batch_alter_table("dag_run") as batch_op:
batch_op.add_column(sa.Column("updated_at", TIMESTAMP, default=sa.func.now))
def downgrade():
"""Unapply add updated_at column to DagRun and TaskInstance"""
with op.batch_alter_table("task_instance") as batch_op:
batch_op.drop_column("updated_at")
with op.batch_alter_table("dag_run") as batch_op:
batch_op.drop_column("updated_at")
| 1,887 | 31.551724 | 84 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0097_2_3_0_increase_length_of_email_and_username.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase length of email and username in ``ab_user`` and ``ab_register_user`` table to ``256`` characters
Revision ID: 5e3ec427fdd3
Revises: 587bdf053233
Create Date: 2021-12-01 11:49:26.390210
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.utils import get_mssql_table_constraints
# revision identifiers, used by Alembic.
revision = "5e3ec427fdd3"
down_revision = "587bdf053233"
branch_labels = None
depends_on = None
airflow_version = "2.3.0"
def upgrade():
"""Increase length of email from 64 to 256 characters"""
with op.batch_alter_table("ab_user") as batch_op:
batch_op.alter_column("username", type_=sa.String(256))
batch_op.alter_column("email", type_=sa.String(256))
with op.batch_alter_table("ab_register_user") as batch_op:
batch_op.alter_column("username", type_=sa.String(256))
batch_op.alter_column("email", type_=sa.String(256))
def downgrade():
"""Revert length of email from 256 to 64 characters"""
conn = op.get_bind()
if conn.dialect.name != "mssql":
with op.batch_alter_table("ab_user") as batch_op:
batch_op.alter_column("username", type_=sa.String(64), nullable=False)
batch_op.alter_column("email", type_=sa.String(64))
with op.batch_alter_table("ab_register_user") as batch_op:
batch_op.alter_column("username", type_=sa.String(64))
batch_op.alter_column("email", type_=sa.String(64))
else:
# MSSQL doesn't drop implicit unique constraints it created
# We need to drop the two unique constraints explicitly
with op.batch_alter_table("ab_user") as batch_op:
# Drop the unique constraint on username and email
constraints = get_mssql_table_constraints(conn, "ab_user")
unique_key, _ = constraints["UNIQUE"].popitem()
batch_op.drop_constraint(unique_key, type_="unique")
unique_key, _ = constraints["UNIQUE"].popitem()
batch_op.drop_constraint(unique_key, type_="unique")
batch_op.alter_column("username", type_=sa.String(64), nullable=False)
batch_op.create_unique_constraint(None, ["username"])
batch_op.alter_column("email", type_=sa.String(64))
batch_op.create_unique_constraint(None, ["email"])
with op.batch_alter_table("ab_register_user") as batch_op:
# Drop the unique constraint on username and email
constraints = get_mssql_table_constraints(conn, "ab_register_user")
for k, _ in constraints.get("UNIQUE").items():
batch_op.drop_constraint(k, type_="unique")
batch_op.alter_column("username", type_=sa.String(64))
batch_op.create_unique_constraint(None, ["username"])
batch_op.alter_column("email", type_=sa.String(64))
| 3,678 | 43.325301 | 108 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0086_2_1_4_add_max_active_runs_column_to_dagmodel_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``max_active_runs`` column to ``dag_model`` table
Revision ID: 092435bf5d12
Revises: 97cdd93827b8
Create Date: 2021-09-06 21:29:24.728923
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision = "092435bf5d12"
down_revision = "97cdd93827b8"
branch_labels = None
depends_on = None
airflow_version = "2.1.4"
def upgrade():
"""Apply Add ``max_active_runs`` column to ``dag_model`` table"""
op.add_column("dag", sa.Column("max_active_runs", sa.Integer(), nullable=True))
with op.batch_alter_table("dag_run", schema=None) as batch_op:
        # Add an index on dag_run.dag_id, plus a partial index on (state, dag_id) limited to running runs
batch_op.create_index("idx_dag_run_dag_id", ["dag_id"])
batch_op.create_index(
"idx_dag_run_running_dags",
["state", "dag_id"],
postgresql_where=text("state='running'"),
mssql_where=text("state='running'"),
sqlite_where=text("state='running'"),
)
def downgrade():
"""Unapply Add ``max_active_runs`` column to ``dag_model`` table"""
with op.batch_alter_table("dag") as batch_op:
batch_op.drop_column("max_active_runs")
with op.batch_alter_table("dag_run", schema=None) as batch_op:
        # Drop the dag_id index and the partial index on running runs
batch_op.drop_index("idx_dag_run_dag_id")
batch_op.drop_index("idx_dag_run_running_dags")
| 2,336 | 36.693548 | 96 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0087_2_1_4_add_index_on_state_dag_id_for_queued_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add index on state, dag_id for queued ``dagrun``
Revision ID: ccde3e26fe78
Revises: 092435bf5d12
Create Date: 2021-09-08 16:35:34.867711
"""
from __future__ import annotations
from alembic import op
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision = "ccde3e26fe78"
down_revision = "092435bf5d12"
branch_labels = None
depends_on = None
airflow_version = "2.1.4"
def upgrade():
"""Apply Add index on state, dag_id for queued ``dagrun``"""
with op.batch_alter_table("dag_run") as batch_op:
batch_op.create_index(
"idx_dag_run_queued_dags",
["state", "dag_id"],
postgresql_where=text("state='queued'"),
mssql_where=text("state='queued'"),
sqlite_where=text("state='queued'"),
)
def downgrade():
"""Unapply Add index on state, dag_id for queued ``dagrun``"""
with op.batch_alter_table("dag_run") as batch_op:
batch_op.drop_index("idx_dag_run_queued_dags")
| 1,778 | 31.944444 | 66 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0028_1_10_0_add_kubernetes_resource_checkpointing.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add Kubernetes resource check-pointing
Revision ID: 33ae817a1ff4
Revises: d2ae31099d61
Create Date: 2017-09-11 15:26:47.598494
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = "33ae817a1ff4"
down_revision = "d2ae31099d61"
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
RESOURCE_TABLE = "kube_resource_version"
def upgrade():
conn = op.get_bind()
inspector = inspect(conn)
if RESOURCE_TABLE not in inspector.get_table_names():
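        # Single-row table pattern: one_row_id is a boolean primary key that
        # defaults to true, and the check constraint (where supported) rejects
        # false, so only one row can ever exist.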
columns_and_constraints = [
sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True),
sa.Column("resource_version", sa.String(255)),
]
        # alembic generates invalid SQL for the mssql and mysql dialects
if conn.dialect.name in {"mysql"}:
columns_and_constraints.append(
sa.CheckConstraint("one_row_id<>0", name="kube_resource_version_one_row_id")
)
elif conn.dialect.name not in {"mssql"}:
columns_and_constraints.append(
sa.CheckConstraint("one_row_id", name="kube_resource_version_one_row_id")
)
table = op.create_table(RESOURCE_TABLE, *columns_and_constraints)
op.bulk_insert(table, [{"resource_version": ""}])
def downgrade():
conn = op.get_bind()
inspector = inspect(conn)
if RESOURCE_TABLE in inspector.get_table_names():
op.drop_table(RESOURCE_TABLE)
| 2,333 | 32.342857 | 92 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0018_1_7_1_add_dag_stats_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``dag_stats`` table
Revision ID: f2ca10b85618
Revises: 64de9cddf6c9
Create Date: 2016-07-20 15:08:28.247537
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import StringID
# revision identifiers, used by Alembic.
revision = "f2ca10b85618"
down_revision = "64de9cddf6c9"
branch_labels = None
depends_on = None
airflow_version = "1.7.1.3"
def upgrade():
op.create_table(
"dag_stats",
sa.Column("dag_id", StringID(), nullable=False),
sa.Column("state", sa.String(length=50), nullable=False),
sa.Column("count", sa.Integer(), nullable=False, default=0),
sa.Column("dirty", sa.Boolean(), nullable=False, default=False),
sa.PrimaryKeyConstraint("dag_id", "state"),
)
def downgrade():
op.drop_table("dag_stats")
| 1,639 | 29.943396 | 72 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0067_2_0_0_add_external_executor_id_to_ti.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add external executor ID to TI
Revision ID: e1a11ece99cc
Revises: b247b1e3d1ed
Create Date: 2020-09-12 08:23:45.698865
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "e1a11ece99cc"
down_revision = "b247b1e3d1ed"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
def upgrade():
"""Apply Add external executor ID to TI"""
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.add_column(sa.Column("external_executor_id", sa.String(length=250), nullable=True))
def downgrade():
"""Unapply Add external executor ID to TI"""
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.drop_column("external_executor_id")
| 1,594 | 32.229167 | 100 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0029_1_10_0_add_executor_config_to_task_instance.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``executor_config`` column to ``task_instance`` table
Revision ID: 27c6a30d7c24
Revises: 33ae817a1ff4
Create Date: 2017-09-11 15:26:47.598494
"""
from __future__ import annotations
import dill
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "27c6a30d7c24"
down_revision = "33ae817a1ff4"
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
TASK_INSTANCE_TABLE = "task_instance"
NEW_COLUMN = "executor_config"
def upgrade():
op.add_column(TASK_INSTANCE_TABLE, sa.Column(NEW_COLUMN, sa.PickleType(pickler=dill)))
def downgrade():
op.drop_column(TASK_INSTANCE_TABLE, NEW_COLUMN)
| 1,449 | 29.851064 | 90 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0081_2_0_2_rename_last_scheduler_run_column.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Rename ``last_scheduler_run`` column in ``DAG`` table to ``last_parsed_time``
Revision ID: 2e42bb497a22
Revises: 8646922c8a04
Create Date: 2021-03-04 19:50:38.880942
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mssql
# revision identifiers, used by Alembic.
revision = "2e42bb497a22"
down_revision = "8646922c8a04"
branch_labels = None
depends_on = None
airflow_version = "2.0.2"
def upgrade():
"""Apply Rename ``last_scheduler_run`` column in ``DAG`` table to ``last_parsed_time``"""
conn = op.get_bind()
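    # The timestamp type differs per backend (DATETIME2 on MSSQL, timezone-aware
    # TIMESTAMP elsewhere), so the rename restates the type explicitly.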
if conn.dialect.name == "mssql":
with op.batch_alter_table("dag") as batch_op:
batch_op.alter_column(
"last_scheduler_run", new_column_name="last_parsed_time", type_=mssql.DATETIME2(precision=6)
)
else:
with op.batch_alter_table("dag") as batch_op:
batch_op.alter_column(
"last_scheduler_run", new_column_name="last_parsed_time", type_=sa.TIMESTAMP(timezone=True)
)
def downgrade():
"""Unapply Rename ``last_scheduler_run`` column in ``DAG`` table to ``last_parsed_time``"""
conn = op.get_bind()
if conn.dialect.name == "mssql":
with op.batch_alter_table("dag") as batch_op:
batch_op.alter_column(
"last_parsed_time", new_column_name="last_scheduler_run", type_=mssql.DATETIME2(precision=6)
)
else:
with op.batch_alter_table("dag") as batch_op:
batch_op.alter_column(
"last_parsed_time", new_column_name="last_scheduler_run", type_=sa.TIMESTAMP(timezone=True)
)
| 2,463 | 35.776119 | 108 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0037_1_10_2_add_task_reschedule_table.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``task_reschedule`` table
Revision ID: 0a2a5b66e19d
Revises: 9635ae0956e7
Create Date: 2018-06-17 22:50:00.053620
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import TIMESTAMP, StringID
# revision identifiers, used by Alembic.
revision = "0a2a5b66e19d"
down_revision = "9635ae0956e7"
branch_labels = None
depends_on = None
airflow_version = "1.10.2"
TABLE_NAME = "task_reschedule"
INDEX_NAME = "idx_" + TABLE_NAME + "_dag_task_date"
def upgrade():
# See 0e2a74e0fc9f_add_time_zone_awareness
timestamp = TIMESTAMP
if op.get_bind().dialect.name == "mssql":
# We need to keep this as it was for this old migration on mssql
timestamp = sa.DateTime()
op.create_table(
TABLE_NAME,
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("task_id", StringID(), nullable=False),
sa.Column("dag_id", StringID(), nullable=False),
# use explicit server_default=None otherwise mysql implies defaults for first timestamp column
sa.Column("execution_date", timestamp, nullable=False, server_default=None),
sa.Column("try_number", sa.Integer(), nullable=False),
sa.Column("start_date", timestamp, nullable=False),
sa.Column("end_date", timestamp, nullable=False),
sa.Column("duration", sa.Integer(), nullable=False),
sa.Column("reschedule_date", timestamp, nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.ForeignKeyConstraint(
["task_id", "dag_id", "execution_date"],
["task_instance.task_id", "task_instance.dag_id", "task_instance.execution_date"],
name="task_reschedule_dag_task_date_fkey",
),
)
op.create_index(INDEX_NAME, TABLE_NAME, ["dag_id", "task_id", "execution_date"], unique=False)
def downgrade():
op.drop_index(INDEX_NAME, table_name=TABLE_NAME)
op.drop_table(TABLE_NAME)
| 2,748 | 36.148649 | 102 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0042_1_10_3_task_reschedule_fk_on_cascade_delete.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""task reschedule foreign key on cascade delete
Revision ID: 939bb1e647c8
Revises: dd4ecb8fbee3
Create Date: 2019-02-04 20:21:50.669751
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "939bb1e647c8"
down_revision = "dd4ecb8fbee3"
branch_labels = None
depends_on = None
airflow_version = "1.10.3"
def upgrade():
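    # Recreate the FK with ON DELETE CASCADE so task_reschedule rows are removed
    # together with their parent task_instance row.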
with op.batch_alter_table("task_reschedule") as batch_op:
batch_op.drop_constraint("task_reschedule_dag_task_date_fkey", type_="foreignkey")
batch_op.create_foreign_key(
"task_reschedule_dag_task_date_fkey",
"task_instance",
["task_id", "dag_id", "execution_date"],
["task_id", "dag_id", "execution_date"],
ondelete="CASCADE",
)
def downgrade():
with op.batch_alter_table("task_reschedule") as batch_op:
batch_op.drop_constraint("task_reschedule_dag_task_date_fkey", type_="foreignkey")
batch_op.create_foreign_key(
"task_reschedule_dag_task_date_fkey",
"task_instance",
["task_id", "dag_id", "execution_date"],
["task_id", "dag_id", "execution_date"],
)
| 1,990 | 33.929825 | 90 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0057_1_10_13_add_fab_tables.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Create FAB Tables
Revision ID: 92c57b58940d
Revises: da3f683c3a5a
Create Date: 2020-11-13 19:27:10.161814
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = "92c57b58940d"
down_revision = "da3f683c3a5a"
branch_labels = None
depends_on = None
airflow_version = "1.10.13"
def upgrade():
"""Create FAB Tables"""
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if "ab_permission" not in tables:
op.create_table(
"ab_permission",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("name", sa.String(length=100), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
if "ab_view_menu" not in tables:
op.create_table(
"ab_view_menu",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("name", sa.String(length=100), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
if "ab_role" not in tables:
op.create_table(
"ab_role",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("name", sa.String(length=64), nullable=False),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
if "ab_permission_view" not in tables:
op.create_table(
"ab_permission_view",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("permission_id", sa.Integer(), nullable=True),
sa.Column("view_menu_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(["permission_id"], ["ab_permission.id"]),
sa.ForeignKeyConstraint(["view_menu_id"], ["ab_view_menu.id"]),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("permission_id", "view_menu_id"),
)
if "ab_permission_view_role" not in tables:
op.create_table(
"ab_permission_view_role",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("permission_view_id", sa.Integer(), nullable=True),
sa.Column("role_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(["permission_view_id"], ["ab_permission_view.id"]),
sa.ForeignKeyConstraint(["role_id"], ["ab_role.id"]),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("permission_view_id", "role_id"),
)
if "ab_user" not in tables:
op.create_table(
"ab_user",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("first_name", sa.String(length=64), nullable=False),
sa.Column("last_name", sa.String(length=64), nullable=False),
sa.Column("username", sa.String(length=64), nullable=False),
sa.Column("password", sa.String(length=256), nullable=True),
sa.Column("active", sa.Boolean(), nullable=True),
sa.Column("email", sa.String(length=64), nullable=False),
sa.Column("last_login", sa.DateTime(), nullable=True),
sa.Column("login_count", sa.Integer(), nullable=True),
sa.Column("fail_login_count", sa.Integer(), nullable=True),
sa.Column("created_on", sa.DateTime(), nullable=True),
sa.Column("changed_on", sa.DateTime(), nullable=True),
sa.Column("created_by_fk", sa.Integer(), nullable=True),
sa.Column("changed_by_fk", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]),
sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("email"),
sa.UniqueConstraint("username"),
)
if "ab_user_role" not in tables:
op.create_table(
"ab_user_role",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("user_id", sa.Integer(), nullable=True),
sa.Column("role_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["role_id"],
["ab_role.id"],
),
sa.ForeignKeyConstraint(
["user_id"],
["ab_user.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("user_id", "role_id"),
)
if "ab_register_user" not in tables:
op.create_table(
"ab_register_user",
sa.Column("id", sa.Integer(), nullable=False, primary_key=True),
sa.Column("first_name", sa.String(length=64), nullable=False),
sa.Column("last_name", sa.String(length=64), nullable=False),
sa.Column("username", sa.String(length=64), nullable=False),
sa.Column("password", sa.String(length=256), nullable=True),
sa.Column("email", sa.String(length=64), nullable=False),
sa.Column("registration_date", sa.DateTime(), nullable=True),
sa.Column("registration_hash", sa.String(length=256), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("username"),
)
def downgrade():
"""Drop FAB Tables"""
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
fab_tables = [
"ab_permission",
"ab_view_menu",
"ab_role",
"ab_permission_view",
"ab_permission_view_role",
"ab_user",
"ab_user_role",
"ab_register_user",
]
for table in fab_tables:
if table in tables:
            foreign_keys = inspector.get_foreign_keys(table)
            for fk in foreign_keys:
                if conn.dialect.name != "sqlite":
                    op.drop_constraint(fk.get("name"), table, type_="foreignkey")
for table in fab_tables:
if table in tables:
if conn.dialect.name == "sqlite":
op.execute("PRAGMA foreign_keys=off")
op.drop_table(table)
op.execute("PRAGMA foreign_keys=on")
else:
op.drop_table(table)
| 7,225 | 38.271739 | 87 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0031_1_10_0_merge_heads.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Merge migrations Heads
Revision ID: 05f30312d566
Revises: 86770d1215c0, 0e2a74e0fc9f
Create Date: 2018-06-17 10:47:23.339972
"""
from __future__ import annotations
# revision identifiers, used by Alembic.
revision = "05f30312d566"
down_revision = ("86770d1215c0", "0e2a74e0fc9f")
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
def upgrade():
pass
def downgrade():
pass
| 1,191 | 28.8 | 62 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0024_1_8_2_make_xcom_value_column_a_large_binary.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Make xcom value column a large binary
Revision ID: bdaa763e6c56
Revises: cc1e65623dc7
Create Date: 2017-08-14 16:06:31.568971
"""
from __future__ import annotations
import dill
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "bdaa763e6c56"
down_revision = "cc1e65623dc7"
branch_labels = None
depends_on = None
airflow_version = "1.8.2"
def upgrade():
# There can be data truncation here as LargeBinary can be smaller than the pickle
# type.
# use batch_alter_table to support SQLite workaround
with op.batch_alter_table("xcom") as batch_op:
batch_op.alter_column("value", type_=sa.LargeBinary())
def downgrade():
# use batch_alter_table to support SQLite workaround
with op.batch_alter_table("xcom") as batch_op:
batch_op.alter_column("value", type_=sa.PickleType(pickler=dill))
| 1,668 | 31.72549 | 85 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0040_1_10_3_add_fields_to_dag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``description`` and ``default_view`` column to ``dag`` table
Revision ID: c8ffec048a3b
Revises: 41f5f12752f8
Create Date: 2018-12-23 21:55:46.463634
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "c8ffec048a3b"
down_revision = "41f5f12752f8"
branch_labels = None
depends_on = None
airflow_version = "1.10.3"
def upgrade():
op.add_column("dag", sa.Column("description", sa.Text(), nullable=True))
op.add_column("dag", sa.Column("default_view", sa.String(25), nullable=True))
def downgrade():
op.drop_column("dag", "description")
op.drop_column("dag", "default_view")
| 1,475 | 31.086957 | 81 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0117_2_4_0_add_processor_subdir_to_dagmodel_and_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add processor_subdir column to DagModel, SerializedDagModel and CallbackRequest tables.
Revision ID: ecb43d2a1842
Revises: 1486deb605b4
Create Date: 2022-08-26 11:30:11.249580
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "ecb43d2a1842"
down_revision = "1486deb605b4"
branch_labels = None
depends_on = None
airflow_version = "2.4.0"
def upgrade():
"""Apply add processor_subdir to DagModel and SerializedDagModel"""
conn = op.get_bind()
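    # MySQL counts every VARCHAR against the shared 65,535-byte row-size limit,
    # which is presumably why the 2000-character column is TEXT on MySQL and a
    # plain VARCHAR on other dialects.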
with op.batch_alter_table("dag") as batch_op:
if conn.dialect.name == "mysql":
batch_op.add_column(sa.Column("processor_subdir", sa.Text(length=2000), nullable=True))
else:
batch_op.add_column(sa.Column("processor_subdir", sa.String(length=2000), nullable=True))
with op.batch_alter_table("serialized_dag") as batch_op:
if conn.dialect.name == "mysql":
batch_op.add_column(sa.Column("processor_subdir", sa.Text(length=2000), nullable=True))
else:
batch_op.add_column(sa.Column("processor_subdir", sa.String(length=2000), nullable=True))
with op.batch_alter_table("callback_request") as batch_op:
batch_op.drop_column("dag_directory")
if conn.dialect.name == "mysql":
batch_op.add_column(sa.Column("processor_subdir", sa.Text(length=2000), nullable=True))
else:
batch_op.add_column(sa.Column("processor_subdir", sa.String(length=2000), nullable=True))
def downgrade():
"""Unapply Add processor_subdir to DagModel and SerializedDagModel"""
conn = op.get_bind()
with op.batch_alter_table("dag", schema=None) as batch_op:
batch_op.drop_column("processor_subdir")
with op.batch_alter_table("serialized_dag", schema=None) as batch_op:
batch_op.drop_column("processor_subdir")
with op.batch_alter_table("callback_request") as batch_op:
batch_op.drop_column("processor_subdir")
if conn.dialect.name == "mysql":
batch_op.add_column(sa.Column("dag_directory", sa.Text(length=1000), nullable=True))
else:
batch_op.add_column(sa.Column("dag_directory", sa.String(length=1000), nullable=True))
| 3,045 | 38.558442 | 101 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0125_2_6_2_add_onupdate_cascade_to_taskmap.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``onupdate`` cascade to ``task_map`` table
Revision ID: c804e5c76e3e
Revises: 98ae134e6fff
Create Date: 2023-05-19 23:30:57.368617
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "c804e5c76e3e"
down_revision = "98ae134e6fff"
branch_labels = None
depends_on = None
airflow_version = "2.6.2"
def upgrade():
"""Apply Add onupdate cascade to taskmap"""
with op.batch_alter_table("task_map") as batch_op:
batch_op.drop_constraint("task_map_task_instance_fkey", type_="foreignkey")
batch_op.create_foreign_key(
"task_map_task_instance_fkey",
"task_instance",
["dag_id", "task_id", "run_id", "map_index"],
["dag_id", "task_id", "run_id", "map_index"],
ondelete="CASCADE",
onupdate="CASCADE",
)
def downgrade():
"""Unapply Add onupdate cascade to taskmap"""
with op.batch_alter_table("task_map") as batch_op:
batch_op.drop_constraint("task_map_task_instance_fkey", type_="foreignkey")
batch_op.create_foreign_key(
"task_map_task_instance_fkey",
"task_instance",
["dag_id", "task_id", "run_id", "map_index"],
["dag_id", "task_id", "run_id", "map_index"],
ondelete="CASCADE",
)
| 2,133 | 32.873016 | 83 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0062_2_0_0_add_dagrun_run_type.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Add ``run_type`` column in ``dag_run`` table
Revision ID: 3c20cacc0044
Revises: b25a55525161
Create Date: 2020-04-08 13:35:25.671327
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, Integer, String, inspect
from sqlalchemy.orm import declarative_base
from airflow.utils.types import DagRunType
# revision identifiers, used by Alembic.
revision = "3c20cacc0044"
down_revision = "b25a55525161"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
Base = declarative_base()
class DagRun(Base): # type: ignore
"""Minimal model definition for migrations"""
__tablename__ = "dag_run"
id = Column(Integer, primary_key=True)
run_id = Column(String())
run_type = Column(String(50), nullable=False)
def upgrade():
"""Apply Add ``run_type`` column in ``dag_run`` table"""
run_type_col_type = sa.String(length=50)
conn = op.get_bind()
inspector = inspect(conn)
dag_run_columns = [col.get("name") for col in inspector.get_columns("dag_run")]
if "run_type" not in dag_run_columns:
# Add nullable column
with op.batch_alter_table("dag_run") as batch_op:
batch_op.add_column(sa.Column("run_type", run_type_col_type, nullable=True))
# Generate run type for existing records
sessionmaker = sa.orm.sessionmaker()
session = sessionmaker(bind=conn)
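        # run_ids have historically been prefixed with the run type, e.g.
        # "scheduled__2020-04-08T00:00:00+00:00", so the prefix alone recovers
        # the type; anything left unmatched is assumed to be a manual run.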
for run_type in DagRunType:
session.query(DagRun).filter(DagRun.run_id.like(f"{run_type.value}__%")).update(
{DagRun.run_type: run_type.value}, synchronize_session=False
)
session.query(DagRun).filter(DagRun.run_type.is_(None)).update(
{DagRun.run_type: DagRunType.MANUAL.value}, synchronize_session=False
)
session.commit()
# Make run_type not nullable
with op.batch_alter_table("dag_run") as batch_op:
batch_op.alter_column(
"run_type", existing_type=run_type_col_type, type_=run_type_col_type, nullable=False
)
def downgrade():
"""Unapply Add ``run_type`` column in ``dag_run`` table"""
op.drop_column("dag_run", "run_type")
| 2,993 | 31.193548 | 100 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0048_1_10_3_remove_dag_stat_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Remove ``dag_stat`` table
Revision ID: a56c9515abdc
Revises: c8ffec048a3b
Create Date: 2018-12-27 10:27:59.715872
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "a56c9515abdc"
down_revision = "c8ffec048a3b"
branch_labels = None
depends_on = None
airflow_version = "1.10.3"
def upgrade():
"""Drop dag_stats table"""
op.drop_table("dag_stats")
def downgrade():
"""Create dag_stats table"""
op.create_table(
"dag_stats",
sa.Column("dag_id", sa.String(length=250), nullable=False),
sa.Column("state", sa.String(length=50), nullable=False),
sa.Column("count", sa.Integer(), nullable=False, default=0),
sa.Column("dirty", sa.Boolean(), nullable=False, default=False),
sa.PrimaryKeyConstraint("dag_id", "state"),
)
| 1,665 | 30.433962 | 72 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0073_2_0_0_prefix_dag_permissions.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Prefix DAG permissions.
Revision ID: 849da589634d
Revises: 45ba3f1493b9
Create Date: 2020-10-01 17:25:10.006322
"""
from __future__ import annotations
from flask_appbuilder import SQLA
from airflow import settings
from airflow.security import permissions
from airflow.www.fab_security.sqla.models import Action, Permission, Resource
# revision identifiers, used by Alembic.
revision = "849da589634d"
down_revision = "45ba3f1493b9"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
def prefix_individual_dag_permissions(session):
dag_perms = ["can_dag_read", "can_dag_edit"]
prefix = "DAG:"
perms = (
session.query(Permission)
.join(Action)
.filter(Action.name.in_(dag_perms))
.join(Resource)
.filter(Resource.name != "all_dags")
.filter(Resource.name.notlike(prefix + "%"))
.all()
)
resource_ids = {permission.resource.id for permission in perms}
vm_query = session.query(Resource).filter(Resource.id.in_(resource_ids))
vm_query.update({Resource.name: prefix + Resource.name}, synchronize_session=False)
session.commit()
def remove_prefix_in_individual_dag_permissions(session):
dag_perms = ["can_read", "can_edit"]
prefix = "DAG:"
perms = (
session.query(Permission)
.join(Action)
.filter(Action.name.in_(dag_perms))
.join(Resource)
.filter(Resource.name.like(prefix + "%"))
.all()
)
for permission in perms:
permission.resource.name = permission.resource.name[len(prefix) :]
session.commit()
def get_or_create_dag_resource(session):
dag_resource = get_resource_query(session, permissions.RESOURCE_DAG).first()
if dag_resource:
return dag_resource
dag_resource = Resource()
dag_resource.name = permissions.RESOURCE_DAG
session.add(dag_resource)
session.commit()
return dag_resource
def get_or_create_all_dag_resource(session):
all_dag_resource = get_resource_query(session, "all_dags").first()
if all_dag_resource:
return all_dag_resource
all_dag_resource = Resource()
all_dag_resource.name = "all_dags"
session.add(all_dag_resource)
session.commit()
return all_dag_resource
def get_or_create_action(session, action_name):
action = get_action_query(session, action_name).first()
if action:
return action
action = Action()
action.name = action_name
session.add(action)
session.commit()
return action
def get_resource_query(session, resource_name):
return session.query(Resource).filter(Resource.name == resource_name)
def get_action_query(session, action_name):
return session.query(Action).filter(Action.name == action_name)
def get_permission_with_action_query(session, action):
return session.query(Permission).filter(Permission.action == action)
def get_permission_with_resource_query(session, resource):
return session.query(Permission).filter(Permission.resource_id == resource.id)
def update_permission_action(session, permission_query, action):
permission_query.update({Permission.action_id: action.id}, synchronize_session=False)
session.commit()
def get_permission(session, resource, action):
return (
session.query(Permission)
.filter(Permission.resource == resource)
.filter(Permission.action == action)
.first()
)
def update_permission_resource(session, permission_query, resource):
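    # Point each permission at the new resource; if an equivalent permission
    # already exists there, drop the duplicate instead of moving it.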
for permission in permission_query.all():
if not get_permission(session, resource, permission.action):
permission.resource = resource
else:
session.delete(permission)
session.commit()
def migrate_to_new_dag_permissions(db):
# Prefix individual dag perms with `DAG:`
prefix_individual_dag_permissions(db.session)
# Update existing permissions to use `can_read` instead of `can_dag_read`
can_dag_read_action = get_action_query(db.session, "can_dag_read").first()
old_can_dag_read_permissions = get_permission_with_action_query(db.session, can_dag_read_action)
can_read_action = get_or_create_action(db.session, "can_read")
update_permission_action(db.session, old_can_dag_read_permissions, can_read_action)
# Update existing permissions to use `can_edit` instead of `can_dag_edit`
can_dag_edit_action = get_action_query(db.session, "can_dag_edit").first()
old_can_dag_edit_permissions = get_permission_with_action_query(db.session, can_dag_edit_action)
can_edit_action = get_or_create_action(db.session, "can_edit")
update_permission_action(db.session, old_can_dag_edit_permissions, can_edit_action)
# Update existing permissions for `all_dags` resource to use `DAGs` resource.
all_dags_resource = get_resource_query(db.session, "all_dags").first()
if all_dags_resource:
old_all_dags_permission = get_permission_with_resource_query(db.session, all_dags_resource)
dag_resource = get_or_create_dag_resource(db.session)
update_permission_resource(db.session, old_all_dags_permission, dag_resource)
# Delete the `all_dags` resource
db.session.delete(all_dags_resource)
# Delete `can_dag_read` action
if can_dag_read_action:
db.session.delete(can_dag_read_action)
# Delete `can_dag_edit` action
if can_dag_edit_action:
db.session.delete(can_dag_edit_action)
db.session.commit()
def undo_migrate_to_new_dag_permissions(session):
# Remove prefix from individual dag perms
remove_prefix_in_individual_dag_permissions(session)
# Update existing permissions to use `can_dag_read` instead of `can_read`
can_read_action = get_action_query(session, "can_read").first()
new_can_read_permissions = get_permission_with_action_query(session, can_read_action)
can_dag_read_action = get_or_create_action(session, "can_dag_read")
update_permission_action(session, new_can_read_permissions, can_dag_read_action)
# Update existing permissions to use `can_dag_edit` instead of `can_edit`
can_edit_action = get_action_query(session, "can_edit").first()
new_can_edit_permissions = get_permission_with_action_query(session, can_edit_action)
can_dag_edit_action = get_or_create_action(session, "can_dag_edit")
update_permission_action(session, new_can_edit_permissions, can_dag_edit_action)
# Update existing permissions for `DAGs` resource to use `all_dags` resource.
dag_resource = get_resource_query(session, permissions.RESOURCE_DAG).first()
if dag_resource:
new_dag_permission = get_permission_with_resource_query(session, dag_resource)
old_all_dag_resource = get_or_create_all_dag_resource(session)
update_permission_resource(session, new_dag_permission, old_all_dag_resource)
# Delete the `DAG` resource
session.delete(dag_resource)
# Delete `can_read` action
if can_read_action:
session.delete(can_read_action)
# Delete `can_edit` action
if can_edit_action:
session.delete(can_edit_action)
session.commit()
def upgrade():
db = SQLA()
db.session = settings.Session
migrate_to_new_dag_permissions(db)
db.session.commit()
db.session.close()
def downgrade():
db = SQLA()
db.session = settings.Session
undo_migrate_to_new_dag_permissions(db.session)
| 8,175 | 33.066667 | 100 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0082_2_1_0_increase_pool_name_size_in_taskinstance.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase maximum length of pool name in ``task_instance`` table to ``256`` characters
Revision ID: 90d1635d7b86
Revises: 2e42bb497a22
Create Date: 2021-04-05 09:37:54.848731
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "90d1635d7b86"
down_revision = "2e42bb497a22"
branch_labels = None
depends_on = None
airflow_version = "2.1.0"
def upgrade():
"""Apply Increase maximum length of pool name in ``task_instance`` table to ``256`` characters"""
with op.batch_alter_table("task_instance") as batch_op:
batch_op.alter_column("pool", type_=sa.String(256), nullable=False)
def downgrade():
"""Unapply Increase maximum length of pool name in ``task_instance`` table to ``256`` characters"""
conn = op.get_bind()
if conn.dialect.name == "mssql":
with op.batch_alter_table("task_instance") as batch_op:
batch_op.drop_index("ti_pool")
batch_op.alter_column("pool", type_=sa.String(50), nullable=False)
batch_op.create_index("ti_pool", ["pool"])
else:
with op.batch_alter_table("task_instance") as batch_op:
batch_op.alter_column("pool", type_=sa.String(50), nullable=False)
| 2,051 | 36.309091 | 103 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0109_2_3_1_add_index_for_event_in_log.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add index for ``event`` column in ``log`` table.
Revision ID: 1de7bc13c950
Revises: b1b348e02d07
Create Date: 2022-05-10 18:18:43.484829
"""
from __future__ import annotations
from alembic import op
# revision identifiers, used by Alembic.
revision = "1de7bc13c950"
down_revision = "b1b348e02d07"
branch_labels = None
depends_on = None
airflow_version = "2.3.1"
def upgrade():
"""Apply Add index for ``event`` column in ``log`` table."""
op.create_index("idx_log_event", "log", ["event"], unique=False)
def downgrade():
"""Unapply Add index for ``event`` column in ``log`` table."""
op.drop_index("idx_log_event", table_name="log")
| 1,446 | 31.155556 | 68 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0060_2_0_0_remove_id_column_from_xcom.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Remove id column from xcom
Revision ID: bbf4a7ad0465
Revises: cf5dc11e79ad
Create Date: 2019-10-29 13:53:09.445943
"""
from __future__ import annotations
from collections import defaultdict
from alembic import op
from sqlalchemy import Column, Integer, inspect, text
# revision identifiers, used by Alembic.
revision = "bbf4a7ad0465"
down_revision = "cf5dc11e79ad"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
def get_table_constraints(conn, table_name) -> dict[tuple[str, str], list[str]]:
"""
    Return a table's primary and unique constraints along with their column
    names. Some tables, like ``task_instance``, are missing an explicit
    primary key constraint name because the name is auto-generated by the SQL
    server, so this function helps to retrieve any primary or unique
    constraint name.
    :param conn: sql connection object
    :param table_name: table name
    :return: a dictionary mapping (constraint name, constraint type) to the constrained column names
"""
query = text(
f"""SELECT tc.CONSTRAINT_NAME , tc.CONSTRAINT_TYPE, ccu.COLUMN_NAME
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc
JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS ccu ON ccu.CONSTRAINT_NAME = tc.CONSTRAINT_NAME
WHERE tc.TABLE_NAME = '{table_name}' AND
(tc.CONSTRAINT_TYPE = 'PRIMARY KEY' or UPPER(tc.CONSTRAINT_TYPE) = 'UNIQUE')
"""
)
result = conn.execute(query).fetchall()
constraint_dict = defaultdict(list)
for constraint, constraint_type, column in result:
constraint_dict[(constraint, constraint_type)].append(column)
return constraint_dict
def drop_column_constraints(operator, column_name, constraint_dict):
"""
    Drop any primary key or unique constraint that involves the given column.
    :param operator: batch_alter_table operator for the table
    :param column_name: name of the column whose constraints should be dropped
    :param constraint_dict: a dictionary mapping (constraint name, constraint type) to column names
"""
for constraint, columns in constraint_dict.items():
if column_name in columns:
if constraint[1].lower().startswith("primary"):
operator.drop_constraint(constraint[0], type_="primary")
elif constraint[1].lower().startswith("unique"):
operator.drop_constraint(constraint[0], type_="unique")
def create_constraints(operator, column_name, constraint_dict):
"""
    Create a primary key or unique constraint for the given column.
    :param operator: batch_alter_table operator for the table
    :param column_name: name of the column whose constraints should be recreated
    :param constraint_dict: a dictionary mapping (constraint name, constraint type) to column names
"""
for constraint, columns in constraint_dict.items():
if column_name in columns:
if constraint[1].lower().startswith("primary"):
operator.create_primary_key(constraint_name=constraint[0], columns=columns)
elif constraint[1].lower().startswith("unique"):
operator.create_unique_constraint(constraint_name=constraint[0], columns=columns)
def upgrade():
"""Apply Remove id column from xcom"""
conn = op.get_bind()
inspector = inspect(conn)
with op.batch_alter_table("xcom") as bop:
xcom_columns = [col.get("name") for col in inspector.get_columns("xcom")]
if "id" in xcom_columns:
if conn.dialect.name == "mssql":
constraint_dict = get_table_constraints(conn, "xcom")
drop_column_constraints(operator=bop, column_name="id", constraint_dict=constraint_dict)
bop.drop_column("id")
bop.drop_index("idx_xcom_dag_task_date")
# mssql doesn't allow primary keys with nullable columns
if conn.dialect.name != "mssql":
bop.create_primary_key("pk_xcom", ["dag_id", "task_id", "key", "execution_date"])
def downgrade():
"""Unapply Remove id column from xcom"""
conn = op.get_bind()
with op.batch_alter_table("xcom") as bop:
if conn.dialect.name != "mssql":
bop.drop_constraint("pk_xcom", type_="primary")
bop.add_column(Column("id", Integer, nullable=False))
bop.create_primary_key("id", ["id"])
bop.create_index("idx_xcom_dag_task_date", ["dag_id", "task_id", "key", "execution_date"])
| 4,980 | 39.169355 | 104 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0004_1_5_0_more_logging_into_task_isntance.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``operator`` and ``queued_dttm`` to ``task_instance`` table
Revision ID: 338e90f54d61
Revises: 13eb55f81627
Create Date: 2015-08-25 06:09:20.460147
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "338e90f54d61"
down_revision = "13eb55f81627"
branch_labels = None
depends_on = None
airflow_version = "1.5.0"
def upgrade():
op.add_column("task_instance", sa.Column("operator", sa.String(length=1000), nullable=True))
op.add_column("task_instance", sa.Column("queued_dttm", sa.DateTime(), nullable=True))
def downgrade():
op.drop_column("task_instance", "queued_dttm")
op.drop_column("task_instance", "operator")
| 1,518 | 32.021739 | 96 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0014_1_7_0_add_is_encrypted_column_to_variable_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``is_encrypted`` column to variable table
Revision ID: 1968acfc09e3
Revises: bba5a7cfc896
Create Date: 2016-02-02 17:20:55.692295
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "1968acfc09e3"
down_revision = "bba5a7cfc896"
branch_labels = None
depends_on = None
airflow_version = "1.7.0"
def upgrade():
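    # Note: ``default=False`` is a client-side SQLAlchemy default, so existing
    # rows keep NULL in is_encrypted until they are next written.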
op.add_column("variable", sa.Column("is_encrypted", sa.Boolean, default=False))
def downgrade():
op.drop_column("variable", "is_encrypted")
| 1,344 | 29.568182 | 83 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0094_2_2_3_add_has_import_errors_column_to_dagmodel.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add has_import_errors column to DagModel
Revision ID: be2bfac3da23
Revises: 7b2661a43ba3
Create Date: 2021-11-04 20:33:11.009547
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "be2bfac3da23"
down_revision = "7b2661a43ba3"
branch_labels = None
depends_on = None
airflow_version = "2.2.3"
def upgrade():
"""Apply Add has_import_errors column to DagModel"""
op.add_column("dag", sa.Column("has_import_errors", sa.Boolean(), server_default="0"))
def downgrade():
"""Unapply Add has_import_errors column to DagModel"""
with op.batch_alter_table("dag") as batch_op:
batch_op.drop_column("has_import_errors", mssql_drop_default=True)
| 1,540 | 31.787234 | 90 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0006_1_6_0_adding_extra_to_log.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Adding ``extra`` column to ``Log`` table
Revision ID: 502898887f84
Revises: 52d714495f0
Create Date: 2015-11-03 22:50:49.794097
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "502898887f84"
down_revision = "52d714495f0"
branch_labels = None
depends_on = None
airflow_version = "1.6.0"
def upgrade():
op.add_column("log", sa.Column("extra", sa.Text(), nullable=True))
def downgrade():
op.drop_column("log", "extra")
| 1,312 | 28.840909 | 70 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0050_1_10_7_increase_length_for_connection_password.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase length for connection password
Revision ID: fe461863935f
Revises: 08364691d074
Create Date: 2019-12-08 09:47:09.033009
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "fe461863935f"
down_revision = "08364691d074"
branch_labels = None
depends_on = None
airflow_version = "1.10.7"
def upgrade():
"""Apply Increase length for connection password"""
with op.batch_alter_table("connection", schema=None) as batch_op:
batch_op.alter_column(
"password",
existing_type=sa.VARCHAR(length=500),
type_=sa.String(length=5000),
existing_nullable=True,
)
def downgrade():
"""Unapply Increase length for connection password"""
with op.batch_alter_table("connection", schema=None) as batch_op:
batch_op.alter_column(
"password",
existing_type=sa.String(length=5000),
type_=sa.VARCHAR(length=500),
existing_nullable=True,
)
| 1,848 | 30.87931 | 69 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0085_2_1_3_add_queued_at_column_to_dagrun_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``queued_at`` column in ``dag_run`` table
Revision ID: 97cdd93827b8
Revises: a13f7613ad25
Create Date: 2021-06-29 21:53:48.059438
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import TIMESTAMP
# revision identifiers, used by Alembic.
revision = "97cdd93827b8"
down_revision = "a13f7613ad25"
branch_labels = None
depends_on = None
airflow_version = "2.1.3"
def upgrade():
"""Apply Add ``queued_at`` column in ``dag_run`` table"""
op.add_column("dag_run", sa.Column("queued_at", TIMESTAMP, nullable=True))
def downgrade():
"""Unapply Add ``queued_at`` column in ``dag_run`` table"""
with op.batch_alter_table("dag_run") as batch_op:
batch_op.drop_column("queued_at")
| 1,565 | 30.959184 | 78 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0111_2_3_3_add_indexes_for_cascade_deletes.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add indexes for CASCADE deletes on task_instance
Some databases don't add indexes on the FK columns so we have to add them for performance on CASCADE deletes.
Revision ID: f5fcbda3e651
Revises: 3c94c427fdf6
Create Date: 2022-06-15 18:04:54.081789
"""
from __future__ import annotations
from alembic import context, op
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision = "f5fcbda3e651"
down_revision = "3c94c427fdf6"
branch_labels = None
depends_on = None
airflow_version = "2.3.3"
def _mysql_tables_where_indexes_already_present(conn):
"""
    If the user downgraded and is upgrading again, we have to check for
    existing indexes on MySQL because we can't (and don't) drop them as part
    of the downgrade.
"""
to_check = [
("xcom", "idx_xcom_task_instance"),
("task_reschedule", "idx_task_reschedule_dag_run"),
("task_fail", "idx_task_fail_task_instance"),
]
tables = set()
for tbl, idx in to_check:
if conn.execute(text(f"show indexes from {tbl} where Key_name = '{idx}'")).first():
tables.add(tbl)
return tables
def upgrade():
"""Apply Add indexes for CASCADE deletes"""
conn = op.get_bind()
tables_to_skip = set()
    # MySQL requires an index for every FK, so adding these indexes effectively
    # renamed the auto-created ones, and we cannot remove them on downgrade.
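    # In offline (SQL-script generation) mode there is no live connection to
    # inspect, so the duplicate-index check is skipped there.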
if conn.dialect.name == "mysql" and not context.is_offline_mode():
tables_to_skip.update(_mysql_tables_where_indexes_already_present(conn))
if "task_fail" not in tables_to_skip:
with op.batch_alter_table("task_fail", schema=None) as batch_op:
batch_op.create_index("idx_task_fail_task_instance", ["dag_id", "task_id", "run_id", "map_index"])
if "task_reschedule" not in tables_to_skip:
with op.batch_alter_table("task_reschedule", schema=None) as batch_op:
batch_op.create_index("idx_task_reschedule_dag_run", ["dag_id", "run_id"])
if "xcom" not in tables_to_skip:
with op.batch_alter_table("xcom", schema=None) as batch_op:
batch_op.create_index("idx_xcom_task_instance", ["dag_id", "task_id", "run_id", "map_index"])
def downgrade():
"""Unapply Add indexes for CASCADE deletes"""
conn = op.get_bind()
    # MySQL requires an index for every FK, so adding these indexes effectively
    # renamed the auto-created ones, and we cannot remove them here.
if conn.dialect.name == "mysql":
return
with op.batch_alter_table("xcom", schema=None) as batch_op:
batch_op.drop_index("idx_xcom_task_instance")
with op.batch_alter_table("task_reschedule", schema=None) as batch_op:
batch_op.drop_index("idx_task_reschedule_dag_run")
with op.batch_alter_table("task_fail", schema=None) as batch_op:
batch_op.drop_index("idx_task_fail_task_instance")
| 3,562 | 36.114583 | 110 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0058_1_10_13_increase_length_of_fab_ab_view_menu_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column
Revision ID: 03afc6b6f902
Revises: 92c57b58940d
Create Date: 2020-11-13 22:21:41.619565
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy import inspect
from airflow.migrations.db_types import StringID
# revision identifiers, used by Alembic.
revision = "03afc6b6f902"
down_revision = "92c57b58940d"
branch_labels = None
depends_on = None
airflow_version = "1.10.13"
def upgrade():
"""Apply Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column"""
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if "ab_view_menu" in tables:
if conn.dialect.name == "sqlite":
op.execute("PRAGMA foreign_keys=off")
op.execute(
"""
CREATE TABLE IF NOT EXISTS ab_view_menu_dg_tmp
(
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR(250) NOT NULL UNIQUE
);
"""
)
op.execute("INSERT INTO ab_view_menu_dg_tmp(id, name) select id, name from ab_view_menu;")
op.execute("DROP TABLE ab_view_menu")
op.execute("ALTER TABLE ab_view_menu_dg_tmp rename to ab_view_menu;")
op.execute("PRAGMA foreign_keys=on")
else:
op.alter_column(
table_name="ab_view_menu",
column_name="name",
type_=StringID(length=250),
nullable=False,
)
def downgrade():
"""Unapply Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column"""
conn = op.get_bind()
inspector = inspect(conn)
tables = inspector.get_table_names()
if "ab_view_menu" in tables:
if conn.dialect.name == "sqlite":
op.execute("PRAGMA foreign_keys=off")
op.execute(
"""
CREATE TABLE IF NOT EXISTS ab_view_menu_dg_tmp
(
id INTEGER NOT NULL PRIMARY KEY,
name VARCHAR(100) NOT NULL UNIQUE
);
"""
)
op.execute("INSERT INTO ab_view_menu_dg_tmp(id, name) select id, name from ab_view_menu;")
op.execute("DROP TABLE ab_view_menu")
op.execute("ALTER TABLE ab_view_menu_dg_tmp rename to ab_view_menu;")
op.execute("PRAGMA foreign_keys=on")
else:
op.alter_column(
table_name="ab_view_menu", column_name="name", type_=sa.String(length=100), nullable=False
)
| 3,424 | 34.309278 | 106 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0080_2_0_2_change_default_pool_slots_to_1.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Change default ``pool_slots`` to ``1``
Revision ID: 8646922c8a04
Revises: 449b4072c2da
Create Date: 2021-02-23 23:19:22.409973
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "8646922c8a04"
down_revision = "449b4072c2da"
branch_labels = None
depends_on = None
airflow_version = "2.0.2"
def upgrade():
"""Change default ``pool_slots`` to ``1`` and make pool_slots not nullable"""
op.execute("UPDATE task_instance SET pool_slots = 1 WHERE pool_slots IS NULL")
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.alter_column("pool_slots", existing_type=sa.Integer, nullable=False, server_default="1")
def downgrade():
"""Unapply Change default ``pool_slots`` to ``1``"""
conn = op.get_bind()
if conn.dialect.name == "mssql":
inspector = sa.inspect(conn.engine)
columns = inspector.get_columns("task_instance")
for col in columns:
if col["name"] == "pool_slots" and col["default"] == "('1')":
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.alter_column(
"pool_slots", existing_type=sa.Integer, nullable=True, server_default=None
)
else:
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.alter_column("pool_slots", existing_type=sa.Integer, nullable=True, server_default=None)
| 2,318 | 37.65 | 109 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0107_2_3_0_add_map_index_to_log.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add map_index to Log.
Revision ID: 75d5ed6c2b43
Revises: 909884dea523
Create Date: 2022-03-15 16:35:54.816863
"""
from __future__ import annotations
from alembic import op
from sqlalchemy import Column, Integer
# Revision identifiers, used by Alembic.
revision = "75d5ed6c2b43"
down_revision = "909884dea523"
branch_labels = None
depends_on = None
airflow_version = "2.3.0"
def upgrade():
"""Add map_index to Log."""
op.add_column("log", Column("map_index", Integer))
def downgrade():
"""Remove map_index from Log."""
with op.batch_alter_table("log") as batch_op:
batch_op.drop_column("map_index")
| 1,419 | 29.869565 | 62 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0066_2_0_0_add_queued_by_job_id_to_ti.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add queued by Job ID to TI
Revision ID: b247b1e3d1ed
Revises: e38be357a868
Create Date: 2020-09-04 11:53:00.978882
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "b247b1e3d1ed"
down_revision = "e38be357a868"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
def upgrade():
"""Apply Add queued by Job ID to TI"""
with op.batch_alter_table("task_instance") as batch_op:
batch_op.add_column(sa.Column("queued_by_job_id", sa.Integer(), nullable=True))
def downgrade():
"""Unapply Add queued by Job ID to TI"""
with op.batch_alter_table("task_instance") as batch_op:
batch_op.drop_column("queued_by_job_id")
| 1,539 | 31.083333 | 87 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0122_2_5_0_add_is_orphaned_to_datasetmodel.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add is_orphaned to DatasetModel
Revision ID: 290244fb8b83
Revises: 1986afd32c1b
Create Date: 2022-11-22 00:12:53.432961
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "290244fb8b83"
down_revision = "1986afd32c1b"
branch_labels = None
depends_on = None
airflow_version = "2.5.0"
def upgrade():
"""Add is_orphaned to DatasetModel"""
with op.batch_alter_table("dataset") as batch_op:
batch_op.add_column(
sa.Column(
"is_orphaned",
sa.Boolean,
default=False,
nullable=False,
server_default="0",
)
)
def downgrade():
"""Remove is_orphaned from DatasetModel"""
with op.batch_alter_table("dataset") as batch_op:
batch_op.drop_column("is_orphaned", mssql_drop_default=True)
| 1,701 | 28.344828 | 68 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0070_2_0_0_fix_mssql_exec_date_rendered_task_instance.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""fix_mssql_exec_date_rendered_task_instance_fields_for_MSSQL
Revision ID: 52d53670a240
Revises: 98271e7606e2
Create Date: 2020-10-13 15:13:24.911486
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mssql
# revision identifiers, used by Alembic.
revision = "52d53670a240"
down_revision = "98271e7606e2"
branch_labels = None
depends_on = None
airflow_version = "2.0.0"
TABLE_NAME = "rendered_task_instance_fields"
def upgrade():
"""
    Recreate the RenderedTaskInstanceFields table, changing ``timestamp`` to
    ``datetime2(6)``, when using MSSQL as the backend
"""
conn = op.get_bind()
if conn.dialect.name == "mssql":
json_type = sa.Text
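        # rendered_task_instance_fields only caches rendered template values,
        # so dropping and recreating the table loses nothing that cannot be
        # regenerated.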
op.drop_table(TABLE_NAME)
op.create_table(
TABLE_NAME,
sa.Column("dag_id", sa.String(length=250), nullable=False),
sa.Column("task_id", sa.String(length=250), nullable=False),
sa.Column("execution_date", mssql.DATETIME2, nullable=False),
sa.Column("rendered_fields", json_type(), nullable=False),
sa.PrimaryKeyConstraint("dag_id", "task_id", "execution_date"),
)
def downgrade():
"""
    Recreate the RenderedTaskInstanceFields table, changing ``datetime2(6)``
    back to ``timestamp``, when using MSSQL as the backend
"""
conn = op.get_bind()
if conn.dialect.name == "mssql":
json_type = sa.Text
op.drop_table(TABLE_NAME)
op.create_table(
TABLE_NAME,
sa.Column("dag_id", sa.String(length=250), nullable=False),
sa.Column("task_id", sa.String(length=250), nullable=False),
sa.Column("execution_date", sa.TIMESTAMP, nullable=False),
sa.Column("rendered_fields", json_type(), nullable=False),
sa.PrimaryKeyConstraint("dag_id", "task_id", "execution_date"),
)
| 2,666 | 32.759494 | 100 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0041_1_10_3_add_schedule_interval_to_dag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add schedule interval to dag
Revision ID: dd4ecb8fbee3
Revises: c8ffec048a3b
Create Date: 2018-12-27 18:39:25.748032
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "dd4ecb8fbee3"
down_revision = "c8ffec048a3b"
branch_labels = None
depends_on = None
airflow_version = "1.10.3"
def upgrade():
op.add_column("dag", sa.Column("schedule_interval", sa.Text(), nullable=True))
def downgrade():
op.drop_column("dag", "schedule_interval")
| 1,327 | 29.181818 | 82 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0030_1_10_0_add_kubernetes_scheduler_uniqueness.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add kubernetes scheduler uniqueness
Revision ID: 86770d1215c0
Revises: 27c6a30d7c24
Create Date: 2018-04-03 15:31:20.814328
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "86770d1215c0"
down_revision = "27c6a30d7c24"
branch_labels = None
depends_on = None
airflow_version = "1.10.0"
RESOURCE_TABLE = "kube_worker_uuid"
def upgrade():
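    # The constant-true ``one_row_id`` primary key, combined with the check
    # constraint below, guarantees this table can never hold more than one row.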
columns_and_constraints = [
sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True),
sa.Column("worker_uuid", sa.String(255)),
]
conn = op.get_bind()
    # alembic creates invalid SQL for the mssql and mysql dialects
if conn.dialect.name in {"mysql"}:
columns_and_constraints.append(sa.CheckConstraint("one_row_id<>0", name="kube_worker_one_row_id"))
elif conn.dialect.name not in {"mssql"}:
columns_and_constraints.append(sa.CheckConstraint("one_row_id", name="kube_worker_one_row_id"))
table = op.create_table(RESOURCE_TABLE, *columns_and_constraints)
op.bulk_insert(table, [{"worker_uuid": ""}])
def downgrade():
op.drop_table(RESOURCE_TABLE)
| 1,960 | 31.147541 | 106 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0045_1_10_7_add_root_dag_id_to_dag.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``root_dag_id`` to ``DAG``
Revision ID: b3b105409875
Revises: d38e04c12aa2
Create Date: 2019-09-28 23:20:01.744775
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.migrations.db_types import StringID
# revision identifiers, used by Alembic.
revision = "b3b105409875"
down_revision = "d38e04c12aa2"
branch_labels = None
depends_on = None
airflow_version = "1.10.7"
def upgrade():
"""Apply Add ``root_dag_id`` to ``DAG``"""
op.add_column("dag", sa.Column("root_dag_id", StringID(), nullable=True))
op.create_index("idx_root_dag_id", "dag", ["root_dag_id"], unique=False)
def downgrade():
"""Unapply Add ``root_dag_id`` to ``DAG``"""
op.drop_index("idx_root_dag_id", table_name="dag")
op.drop_column("dag", "root_dag_id")
| 1,596 | 30.94 | 77 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0054_1_10_10_add_dag_code_table.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``dag_code`` table
Revision ID: 952da73b5eff
Revises: 852ae6c715af
Create Date: 2020-03-12 12:39:01.797462
"""
from __future__ import annotations
import sqlalchemy as sa
from alembic import op
from airflow.models.dagcode import DagCode
# revision identifiers, used by Alembic.
revision = "952da73b5eff"
down_revision = "852ae6c715af"
branch_labels = None
depends_on = None
airflow_version = "1.10.10"
def upgrade():
"""Create DagCode Table."""
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class SerializedDagModel(Base):
__tablename__ = "serialized_dag"
# There are other columns here, but these are the only ones we need for the SELECT/UPDATE we are doing
dag_id = sa.Column(sa.String(250), primary_key=True)
fileloc = sa.Column(sa.String(2000), nullable=False)
fileloc_hash = sa.Column(sa.BigInteger, nullable=False)
"""Apply add source code table"""
op.create_table(
"dag_code",
sa.Column("fileloc_hash", sa.BigInteger(), nullable=False, primary_key=True, autoincrement=False),
sa.Column("fileloc", sa.String(length=2000), nullable=False),
sa.Column("source_code", sa.UnicodeText(), nullable=False),
sa.Column("last_updated", sa.TIMESTAMP(timezone=True), nullable=False),
)
conn = op.get_bind()
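    # SQL Server cannot ALTER a column that participates in an index, so on
    # mssql the ``idx_fileloc_hash`` index is dropped before widening
    # ``fileloc_hash`` to BIGINT and recreated afterwards.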
if conn.dialect.name != "sqlite":
if conn.dialect.name == "mssql":
op.drop_index(index_name="idx_fileloc_hash", table_name="serialized_dag")
op.alter_column(
table_name="serialized_dag", column_name="fileloc_hash", type_=sa.BigInteger(), nullable=False
)
if conn.dialect.name == "mssql":
op.create_index(
index_name="idx_fileloc_hash", table_name="serialized_dag", columns=["fileloc_hash"]
)
sessionmaker = sa.orm.sessionmaker()
session = sessionmaker(bind=conn)
serialized_dags = session.query(SerializedDagModel).all()
for dag in serialized_dags:
dag.fileloc_hash = DagCode.dag_fileloc_hash(dag.fileloc)
session.merge(dag)
session.commit()
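    # For reference, a sketch of the hashing scheme used in the backfill above
    # (hedged: the canonical implementation lives in
    # airflow.models.dagcode.DagCode and may differ in detail). Only part of
    # the SHA-1 digest is kept so the value fits in a signed 64-bit integer
    # column on all supported databases:
    #
    #   import hashlib
    #   import struct
    #
    #   def dag_fileloc_hash(full_filepath: str) -> int:
    #       digest = hashlib.sha1(full_filepath.encode("utf-8")).digest()
    #       return struct.unpack(">Q", digest[-8:])[0] >> 8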
def downgrade():
"""Unapply add source code table"""
op.drop_table("dag_code")
| 3,022 | 33.352273 | 110 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0102_2_3_0_switch_xcom_table_to_use_run_id.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Switch XCom table to use ``run_id`` and add ``map_index``.
Revision ID: c306b5b5ae4a
Revises: a3bcd0914482
Create Date: 2022-01-19 03:20:35.329037
"""
from __future__ import annotations
from typing import Sequence
from alembic import op
from sqlalchemy import Column, Integer, LargeBinary, MetaData, Table, and_, literal_column, select
from airflow.migrations.db_types import TIMESTAMP, StringID
from airflow.migrations.utils import get_mssql_table_constraints
# Revision identifiers, used by Alembic.
revision = "c306b5b5ae4a"
down_revision = "a3bcd0914482"
branch_labels = None
depends_on = None
airflow_version = "2.3.0"
metadata = MetaData()
def _get_new_xcom_columns() -> Sequence[Column]:
return [
Column("dag_run_id", Integer(), nullable=False),
Column("task_id", StringID(), nullable=False),
Column("key", StringID(length=512), nullable=False),
Column("value", LargeBinary),
Column("timestamp", TIMESTAMP, nullable=False),
Column("dag_id", StringID(), nullable=False),
Column("run_id", StringID(), nullable=False),
Column("map_index", Integer, nullable=False, server_default="-1"),
]
def _get_old_xcom_columns() -> Sequence[Column]:
return [
Column("key", StringID(length=512), nullable=False, primary_key=True),
Column("value", LargeBinary),
Column("timestamp", TIMESTAMP, nullable=False),
Column("task_id", StringID(length=250), nullable=False, primary_key=True),
Column("dag_id", StringID(length=250), nullable=False, primary_key=True),
Column("execution_date", TIMESTAMP, nullable=False, primary_key=True),
]
def _get_dagrun_table() -> Table:
return Table(
"dag_run",
metadata,
Column("id", Integer, primary_key=True),
Column("dag_id", StringID(), nullable=False),
Column("run_id", StringID(), nullable=False),
Column("execution_date", TIMESTAMP, nullable=False),
)
def upgrade():
"""Switch XCom table to use run_id.
For performance reasons, this is done by creating a new table with needed
data pre-populated, adding back constraints we need, and renaming it to
replace the existing XCom table.
"""
conn = op.get_bind()
is_sqlite = conn.dialect.name == "sqlite"
op.create_table("__airflow_tmp_xcom", *_get_new_xcom_columns())
xcom = Table("xcom", metadata, *_get_old_xcom_columns())
dagrun = _get_dagrun_table()
query = select(
[
dagrun.c.id,
xcom.c.task_id,
xcom.c.key,
xcom.c.value,
xcom.c.timestamp,
xcom.c.dag_id,
dagrun.c.run_id,
literal_column("-1"),
],
).select_from(
xcom.join(
right=dagrun,
onclause=and_(
xcom.c.dag_id == dagrun.c.dag_id,
xcom.c.execution_date == dagrun.c.execution_date,
),
),
)
op.execute(f"INSERT INTO __airflow_tmp_xcom {query.selectable.compile(op.get_bind())}")
if is_sqlite:
op.execute("PRAGMA foreign_keys=off")
op.drop_table("xcom")
if is_sqlite:
op.execute("PRAGMA foreign_keys=on")
op.rename_table("__airflow_tmp_xcom", "xcom")
with op.batch_alter_table("xcom") as batch_op:
batch_op.create_primary_key("xcom_pkey", ["dag_run_id", "task_id", "map_index", "key"])
batch_op.create_index("idx_xcom_key", ["key"])
batch_op.create_foreign_key(
"xcom_task_instance_fkey",
"task_instance",
["dag_id", "task_id", "run_id", "map_index"],
["dag_id", "task_id", "run_id", "map_index"],
ondelete="CASCADE",
)
def downgrade():
"""Switch XCom table back to use execution_date.
    This is the inverse of ``upgrade``: a new table with the old schema is populated from the current one and swapped in.
"""
conn = op.get_bind()
op.create_table("__airflow_tmp_xcom", *_get_old_xcom_columns())
xcom = Table("xcom", metadata, *_get_new_xcom_columns())
    # Remove XCom entries from mapped task instances; the old schema has no
    # map_index column, so those rows cannot be represented after the downgrade.
op.execute(xcom.delete().where(xcom.c.map_index != -1))
dagrun = _get_dagrun_table()
query = select(
[
xcom.c.key,
xcom.c.value,
xcom.c.timestamp,
xcom.c.task_id,
xcom.c.dag_id,
dagrun.c.execution_date,
],
).select_from(
xcom.join(
right=dagrun,
onclause=and_(
xcom.c.dag_id == dagrun.c.dag_id,
xcom.c.run_id == dagrun.c.run_id,
),
),
)
op.execute(f"INSERT INTO __airflow_tmp_xcom {query.selectable.compile(op.get_bind())}")
op.drop_table("xcom")
op.rename_table("__airflow_tmp_xcom", "xcom")
if conn.dialect.name == "mssql":
constraints = get_mssql_table_constraints(conn, "xcom")
pk, _ = constraints["PRIMARY KEY"].popitem()
op.drop_constraint(pk, "xcom", type_="primary")
op.create_primary_key(
constraint_name="pk_xcom",
table_name="xcom",
columns=["dag_id", "task_id", "execution_date", "key"],
)
| 5,925 | 31.922222 | 98 |
py
|
airflow
|
airflow-main/airflow/migrations/versions/0088_2_2_0_improve_mssql_compatibility.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Improve MSSQL compatibility
Revision ID: 83f031fd9f1c
Revises: ccde3e26fe78
Create Date: 2021-04-06 12:22:02.197726
"""
from __future__ import annotations
from collections import defaultdict
import sqlalchemy as sa
from alembic import op
from sqlalchemy import text
from sqlalchemy.dialects import mssql
from airflow.migrations.db_types import TIMESTAMP
# revision identifiers, used by Alembic.
revision = "83f031fd9f1c"
down_revision = "ccde3e26fe78"
branch_labels = None
depends_on = None
airflow_version = "2.2.0"
def is_table_empty(conn, table_name):
"""
    Check whether the given MSSQL table is empty.
    :param conn: SQL connection object
    :param table_name: table name
    :return: Boolean indicating whether the table contains no rows
"""
return conn.execute(text(f"select TOP 1 * from {table_name}")).first() is None
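# Note: ``SELECT TOP 1`` is SQL Server syntax; this helper is only reached on
# the mssql dialect (``upgrade`` and ``downgrade`` below return early on every
# other backend), so the non-portable query is safe here.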
def get_table_constraints(conn, table_name) -> dict[tuple[str, str], list[str]]:
"""
    Return the primary key and unique constraints of a table, along with
    their column names. Some tables (e.g. task_instance) are missing an
    explicit primary key constraint name, in which case SQL Server
    auto-generates one; this function retrieves the constraint names in
    either case.
    :param conn: SQL connection object
    :param table_name: table name
    :return: a dictionary mapping (constraint name, constraint type) to the list of column names
"""
query = text(
f"""SELECT tc.CONSTRAINT_NAME , tc.CONSTRAINT_TYPE, ccu.COLUMN_NAME
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc
JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS ccu ON ccu.CONSTRAINT_NAME = tc.CONSTRAINT_NAME
WHERE tc.TABLE_NAME = '{table_name}' AND
(tc.CONSTRAINT_TYPE = 'PRIMARY KEY' or UPPER(tc.CONSTRAINT_TYPE) = 'UNIQUE')
"""
)
result = conn.execute(query).fetchall()
constraint_dict = defaultdict(list)
for constraint, constraint_type, column in result:
constraint_dict[(constraint, constraint_type)].append(column)
return constraint_dict
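# For illustration (hypothetical names): against a table whose primary key was
# auto-named by SQL Server, the returned mapping might look like
#   {("PK__task_ins__1234", "PRIMARY KEY"): ["task_id", "dag_id", "execution_date"]}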
def drop_column_constraints(operator, column_name, constraint_dict):
"""
    Drop the primary key or unique constraints that involve the given column.
    :param operator: batch_alter_table operator for the table
    :param column_name: name of the column whose constraints should be dropped
    :param constraint_dict: a dictionary mapping (constraint name, constraint type) to column names
"""
for constraint, columns in constraint_dict.items():
if column_name in columns:
if constraint[1].lower().startswith("primary"):
operator.drop_constraint(constraint[0], type_="primary")
elif constraint[1].lower().startswith("unique"):
operator.drop_constraint(constraint[0], type_="unique")
def create_constraints(operator, column_name, constraint_dict):
"""
    Recreate the primary key or unique constraints that involve the given column.
    :param operator: batch_alter_table operator for the table
    :param column_name: name of the column whose constraints should be recreated
    :param constraint_dict: a dictionary mapping (constraint name, constraint type) to column names
"""
for constraint, columns in constraint_dict.items():
if column_name in columns:
if constraint[1].lower().startswith("primary"):
operator.create_primary_key(constraint_name=constraint[0], columns=columns)
elif constraint[1].lower().startswith("unique"):
operator.create_unique_constraint(constraint_name=constraint[0], columns=columns)
def _is_timestamp(conn, table_name, column_name):
query = text(
f"""SELECT
TYPE_NAME(C.USER_TYPE_ID) AS DATA_TYPE
FROM SYS.COLUMNS C
JOIN SYS.TYPES T
ON C.USER_TYPE_ID=T.USER_TYPE_ID
WHERE C.OBJECT_ID=OBJECT_ID('{table_name}') and C.NAME='{column_name}';
"""
)
column_type = conn.execute(query).fetchone()[0]
return column_type == "timestamp"
def recreate_mssql_ts_column(conn, op, table_name, column_name):
"""
    Drop the legacy MSSQL ``timestamp`` column and recreate it as datetime or
    datetime2(6). In SQL Server, ``timestamp`` is a synonym for ``rowversion``
    (an auto-updated binary counter, not a point-in-time value), so the column
    cannot simply be altered in place.
"""
if _is_timestamp(conn, table_name, column_name) and is_table_empty(conn, table_name):
with op.batch_alter_table(table_name) as batch_op:
constraint_dict = get_table_constraints(conn, table_name)
drop_column_constraints(batch_op, column_name, constraint_dict)
batch_op.drop_column(column_name=column_name)
batch_op.add_column(sa.Column(column_name, TIMESTAMP, nullable=False))
create_constraints(batch_op, column_name, constraint_dict)
def alter_mssql_datetime_column(conn, op, table_name, column_name, nullable):
"""Update the datetime column to datetime2(6)"""
op.alter_column(
table_name=table_name,
column_name=column_name,
type_=mssql.DATETIME2(precision=6),
nullable=nullable,
)
def upgrade():
"""Improve compatibility with MSSQL backend"""
conn = op.get_bind()
if conn.dialect.name != "mssql":
return
recreate_mssql_ts_column(conn, op, "dag_code", "last_updated")
recreate_mssql_ts_column(conn, op, "rendered_task_instance_fields", "execution_date")
alter_mssql_datetime_column(conn, op, "serialized_dag", "last_updated", False)
op.alter_column(table_name="xcom", column_name="timestamp", type_=TIMESTAMP, nullable=False)
with op.batch_alter_table("task_reschedule") as task_reschedule_batch_op:
task_reschedule_batch_op.alter_column(column_name="end_date", type_=TIMESTAMP, nullable=False)
task_reschedule_batch_op.alter_column(column_name="reschedule_date", type_=TIMESTAMP, nullable=False)
task_reschedule_batch_op.alter_column(column_name="start_date", type_=TIMESTAMP, nullable=False)
with op.batch_alter_table("task_fail") as task_fail_batch_op:
task_fail_batch_op.drop_index("idx_task_fail_dag_task_date")
task_fail_batch_op.alter_column(column_name="execution_date", type_=TIMESTAMP, nullable=False)
task_fail_batch_op.create_index(
"idx_task_fail_dag_task_date", ["dag_id", "task_id", "execution_date"], unique=False
)
with op.batch_alter_table("task_instance") as task_instance_batch_op:
task_instance_batch_op.drop_index("ti_state_lkp")
task_instance_batch_op.create_index(
"ti_state_lkp", ["dag_id", "task_id", "execution_date", "state"], unique=False
)
constraint_dict = get_table_constraints(conn, "dag_run")
for constraint, columns in constraint_dict.items():
if "dag_id" in columns:
if constraint[1].lower().startswith("unique"):
op.drop_constraint(constraint[0], "dag_run", type_="unique")
    # Create filtered unique indexes instead of plain unique constraints:
    # SQL Server treats NULLs as duplicates in a unique constraint, so the
    # WHERE clauses below exclude NULL rows.
conn.execute(
text(
"""CREATE UNIQUE NONCLUSTERED INDEX idx_not_null_dag_id_execution_date
ON dag_run(dag_id,execution_date)
WHERE dag_id IS NOT NULL and execution_date is not null"""
)
)
conn.execute(
text(
"""CREATE UNIQUE NONCLUSTERED INDEX idx_not_null_dag_id_run_id
ON dag_run(dag_id,run_id)
WHERE dag_id IS NOT NULL and run_id is not null"""
)
)
def downgrade():
"""Reverse MSSQL backend compatibility improvements"""
conn = op.get_bind()
if conn.dialect.name != "mssql":
return
op.alter_column(table_name="xcom", column_name="timestamp", type_=TIMESTAMP, nullable=True)
with op.batch_alter_table("task_reschedule") as task_reschedule_batch_op:
task_reschedule_batch_op.alter_column(column_name="end_date", type_=TIMESTAMP, nullable=True)
task_reschedule_batch_op.alter_column(column_name="reschedule_date", type_=TIMESTAMP, nullable=True)
task_reschedule_batch_op.alter_column(column_name="start_date", type_=TIMESTAMP, nullable=True)
with op.batch_alter_table("task_fail") as task_fail_batch_op:
task_fail_batch_op.drop_index("idx_task_fail_dag_task_date")
task_fail_batch_op.alter_column(column_name="execution_date", type_=TIMESTAMP, nullable=False)
task_fail_batch_op.create_index(
"idx_task_fail_dag_task_date", ["dag_id", "task_id", "execution_date"], unique=False
)
with op.batch_alter_table("task_instance") as task_instance_batch_op:
task_instance_batch_op.drop_index("ti_state_lkp")
task_instance_batch_op.create_index(
"ti_state_lkp", ["dag_id", "task_id", "execution_date"], unique=False
)
op.create_unique_constraint("UQ__dag_run__dag_id_run_id", "dag_run", ["dag_id", "run_id"])
op.create_unique_constraint("UQ__dag_run__dag_id_execution_date", "dag_run", ["dag_id", "execution_date"])
op.drop_index("idx_not_null_dag_id_execution_date", table_name="dag_run")
op.drop_index("idx_not_null_dag_id_run_id", table_name="dag_run")
| 9,499 | 41.986425 | 110 |
py
|