Dataset columns:
code: string, lengths 26 to 870k
docstring: string, lengths 1 to 65.6k
func_name: string, lengths 1 to 194
language: string, 1 class
repo: string, lengths 8 to 68
path: string, lengths 5 to 182
url: string, lengths 46 to 251
license: string, 4 classes
from contextlib import contextmanager


@contextmanager
def listen_to(signal):
    """Context Manager that listens to signals and records emissions

    Example:

    with listen_to(user_logged_in) as listener:
        login_user(user)

        # Assert that a single emittance of the specific args was seen.
        listener.assert_heard_one(app, user=user)

        # Of course, you can always just look at the list yourself
        self.assertEqual(1, len(listener.heard))
    """

    class _SignalsCaught:
        def __init__(self):
            self.heard = []

        def add(self, *args, **kwargs):
            """The actual handler of the signal."""
            self.heard.append((args, kwargs))

        def assert_heard_one(self, *args, **kwargs):
            """The signal fired once, and with the arguments given"""
            if len(self.heard) == 0:
                raise AssertionError("No signals were fired")
            elif len(self.heard) > 1:
                msg = f"{len(self.heard)} signals were fired"
                raise AssertionError(msg)
            elif self.heard[0] != (args, kwargs):
                raise AssertionError(
                    "One signal was heard, but with incorrect"
                    f" arguments: Got ({self.heard[0]}) expected"
                    f" ({args}, {kwargs})"
                )

        def assert_heard_none(self, *args, **kwargs):
            """The signal fired no times"""
            if len(self.heard) >= 1:
                msg = f"{len(self.heard)} signals were fired"
                raise AssertionError(msg)

    results = _SignalsCaught()
    signal.connect(results.add)
    try:
        yield results
    finally:
        signal.disconnect(results.add)
Context Manager that listens to signals and records emissions Example: with listen_to(user_logged_in) as listener: login_user(user) # Assert that a single emittance of the specific args was seen. listener.assert_heard_one(app, user=user) # Of course, you can always just look at the list yourself self.assertEqual(1, len(listener.heard))
listen_to
python
maxcountryman/flask-login
tests/test_login.py
https://github.com/maxcountryman/flask-login/blob/master/tests/test_login.py
MIT
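A minimal usage sketch for listen_to, exercised against a bare blinker signal rather than one of Flask-Login's own; the signal name `ping` and the sender string are made up for illustration:

from blinker import Signal

ping = Signal()  # stand-in for user_logged_in etc.

with listen_to(ping) as listener:
    ping.send("some-sender", value=1)

listener.assert_heard_one("some-sender", value=1)
assert len(listener.heard) == 1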
def encode_cookie(payload, key=None):
    """
    This will encode a ``str`` value into a cookie, and sign that cookie
    with the app's secret key.

    :param payload: The value to encode, as `str`.
    :type payload: str

    :param key: The key to use when creating the cookie digest. If not
        specified, the SECRET_KEY value from app config will be used.
    :type key: str
    """
    return f"{payload}|{_cookie_digest(payload, key=key)}"
This will encode a ``str`` value into a cookie, and sign that cookie with the app's secret key. :param payload: The value to encode, as `str`. :type payload: str :param key: The key to use when creating the cookie digest. If not specified, the SECRET_KEY value from app config will be used. :type key: str
encode_cookie
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def decode_cookie(cookie, key=None):
    """
    This decodes a cookie given by `encode_cookie`. If verification of the
    cookie fails, ``None`` will be implicitly returned.

    :param cookie: An encoded cookie.
    :type cookie: str

    :param key: The key to use when creating the cookie digest. If not
        specified, the SECRET_KEY value from app config will be used.
    :type key: str
    """
    try:
        payload, digest = cookie.rsplit("|", 1)
        if hasattr(digest, "decode"):
            digest = digest.decode("ascii")  # pragma: no cover
    except ValueError:
        return

    if hmac.compare_digest(_cookie_digest(payload, key=key), digest):
        return payload
This decodes a cookie given by `encode_cookie`. If verification of the cookie fails, ``None`` will be implicitly returned. :param cookie: An encoded cookie. :type cookie: str :param key: The key to use when creating the cookie digest. If not specified, the SECRET_KEY value from app config will be used. :type key: str
decode_cookie
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
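A hedged round-trip sketch for the two cookie helpers above; it assumes an application context, since the default signing key comes from the app's SECRET_KEY (the secret and payload values are placeholders):

from flask import Flask
from flask_login.utils import decode_cookie, encode_cookie

app = Flask(__name__)
app.config["SECRET_KEY"] = "not-a-real-secret"

with app.app_context():
    cookie = encode_cookie("42")          # "42|<hmac digest>"
    assert decode_cookie(cookie) == "42"  # digest verifies
    assert decode_cookie("42|bad-digest") is None  # tampering fails quietly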
def make_next_param(login_url, current_url):
    """
    Reduces the scheme and host from a given URL so it can be passed to
    the given `login` URL more efficiently.

    :param login_url: The login URL being redirected to.
    :type login_url: str
    :param current_url: The URL to reduce.
    :type current_url: str
    """
    l_url = urlsplit(login_url)
    c_url = urlsplit(current_url)

    if (not l_url.scheme or l_url.scheme == c_url.scheme) and (
        not l_url.netloc or l_url.netloc == c_url.netloc
    ):
        return urlunsplit(("", "", c_url.path, c_url.query, ""))
    return current_url
Reduces the scheme and host from a given URL so it can be passed to the given `login` URL more efficiently. :param login_url: The login URL being redirected to. :type login_url: str :param current_url: The URL to reduce. :type current_url: str
make_next_param
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
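Illustrative inputs and outputs for make_next_param (the example.com values are placeholders):

# Same site: scheme and host are dropped, keeping only path + query.
make_next_param("/login", "http://example.com/profile?tab=1")
# -> "/profile?tab=1"

# Different host: the full URL is kept so nothing is lost.
make_next_param("https://sso.example.org/login", "http://example.com/profile")
# -> "http://example.com/profile"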
def expand_login_view(login_view):
    """
    Returns the url for the login view, expanding the view name to a url
    if needed.

    :param login_view: The name of the login view or a URL for the login
        view.
    :type login_view: str
    """
    if login_view.startswith(("https://", "http://", "/")):
        return login_view

    return url_for(login_view)
Returns the url for the login view, expanding the view name to a url if needed. :param login_view: The name of the login view or a URL for the login view. :type login_view: str
expand_login_view
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def login_url(login_view, next_url=None, next_field="next"):
    """
    Creates a URL for redirecting to a login page. If only `login_view` is
    provided, this will just return the URL for it. If `next_url` is
    provided, however, this will append a ``next=URL`` parameter to the
    query string so that the login view can redirect back to that URL.
    Flask-Login's default unauthorized handler uses this function when
    redirecting to your login url. To force the host name used, set
    `FORCE_HOST_FOR_REDIRECTS` to a host. This prevents redirecting to
    external sites if `SERVER_NAME` is not configured.

    :param login_view: The name of the login view. (Alternately, the actual
        URL to the login view.)
    :type login_view: str
    :param next_url: The URL to give the login view for redirection.
    :type next_url: str
    :param next_field: What field to store the next URL in. (It defaults to
        ``next``.)
    :type next_field: str
    """
    base = expand_login_view(login_view)

    if next_url is None:
        return base

    parsed_result = urlsplit(base)
    md = parse_qs(parsed_result.query, keep_blank_values=True)
    md[next_field] = make_next_param(base, next_url)
    netloc = current_app.config.get("FORCE_HOST_FOR_REDIRECTS") or parsed_result.netloc
    parsed_result = parsed_result._replace(
        netloc=netloc, query=urlencode(md, doseq=True)
    )
    return urlunsplit(parsed_result)
Creates a URL for redirecting to a login page. If only `login_view` is provided, this will just return the URL for it. If `next_url` is provided, however, this will append a ``next=URL`` parameter to the query string so that the login view can redirect back to that URL. Flask-Login's default unauthorized handler uses this function when redirecting to your login url. To force the host name used, set `FORCE_HOST_FOR_REDIRECTS` to a host. This prevents redirecting to external sites if `SERVER_NAME` is not configured. :param login_view: The name of the login view. (Alternately, the actual URL to the login view.) :type login_view: str :param next_url: The URL to give the login view for redirection. :type next_url: str :param next_field: What field to store the next URL in. (It defaults to ``next``.) :type next_field: str
login_url
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
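A sketch of the resulting URL, assuming an app context and no FORCE_HOST_FOR_REDIRECTS override; the reduced ``next`` value comes from make_next_param above:

from flask import Flask
from flask_login import login_url

app = Flask(__name__)

with app.app_context():
    url = login_url("/login", next_url="http://example.com/profile")
    assert url == "/login?next=%2Fprofile"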
def login_fresh():
    """
    This returns ``True`` if the current login is fresh.
    """
    return session.get("_fresh", False)
This returns ``True`` if the current login is fresh.
login_fresh
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def login_remembered():
    """
    This returns ``True`` if the current login is remembered across sessions.
    """
    config = current_app.config
    cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
    has_cookie = cookie_name in request.cookies and session.get("_remember") != "clear"
    if has_cookie:
        cookie = request.cookies[cookie_name]
        user_id = decode_cookie(cookie)
        return user_id is not None
    return False
This returns ``True`` if the current login is remembered across sessions.
login_remembered
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def login_user(user, remember=False, duration=None, force=False, fresh=True):
    """
    Logs a user in. You should pass the actual user object to this. If the
    user's `is_active` property is ``False``, they will not be logged in
    unless `force` is ``True``.

    This will return ``True`` if the log in attempt succeeds, and ``False`` if
    it fails (i.e. because the user is inactive).

    :param user: The user object to log in.
    :type user: object
    :param remember: Whether to remember the user after their session expires.
        Defaults to ``False``.
    :type remember: bool
    :param duration: The amount of time before the remember cookie expires. If
        ``None`` the value set in the settings is used. Defaults to ``None``.
    :type duration: :class:`datetime.timedelta`
    :param force: If the user is inactive, setting this to ``True`` will log
        them in regardless. Defaults to ``False``.
    :type force: bool
    :param fresh: setting this to ``False`` will log in the user with a session
        marked as not "fresh". Defaults to ``True``.
    :type fresh: bool
    """
    if not force and not user.is_active:
        return False

    user_id = getattr(user, current_app.login_manager.id_attribute)()
    session["_user_id"] = user_id
    session["_fresh"] = fresh
    session["_id"] = current_app.login_manager._session_identifier_generator()

    if remember:
        session["_remember"] = "set"
        if duration is not None:
            try:
                # equal to timedelta.total_seconds() but works with Python 2.6
                session["_remember_seconds"] = (
                    duration.microseconds
                    + (duration.seconds + duration.days * 24 * 3600) * 10**6
                ) / 10.0**6
            except AttributeError as e:
                raise Exception(
                    f"duration must be a datetime.timedelta, instead got: {duration}"
                ) from e

    current_app.login_manager._update_request_context_with_user(user)
    user_logged_in.send(current_app._get_current_object(), user=_get_user())
    return True
Logs a user in. You should pass the actual user object to this. If the user's `is_active` property is ``False``, they will not be logged in unless `force` is ``True``. This will return ``True`` if the log in attempt succeeds, and ``False`` if it fails (i.e. because the user is inactive). :param user: The user object to log in. :type user: object :param remember: Whether to remember the user after their session expires. Defaults to ``False``. :type remember: bool :param duration: The amount of time before the remember cookie expires. If ``None`` the value set in the settings is used. Defaults to ``None``. :type duration: :class:`datetime.timedelta` :param force: If the user is inactive, setting this to ``True`` will log them in regardless. Defaults to ``False``. :type force: bool :param fresh: setting this to ``False`` will log in the user with a session marked as not "fresh". Defaults to ``True``. :type fresh: bool
login_user
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def logout_user():
    """
    Logs a user out. (You do not need to pass the actual user.) This will
    also clean up the remember me cookie if it exists.
    """
    user = _get_user()

    if "_user_id" in session:
        session.pop("_user_id")

    if "_fresh" in session:
        session.pop("_fresh")

    if "_id" in session:
        session.pop("_id")

    cookie_name = current_app.config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
    if cookie_name in request.cookies:
        session["_remember"] = "clear"
        if "_remember_seconds" in session:
            session.pop("_remember_seconds")

    user_logged_out.send(current_app._get_current_object(), user=user)

    current_app.login_manager._update_request_context_with_user()
    return True
Logs a user out. (You do not need to pass the actual user.) This will also clean up the remember me cookie if it exists.
logout_user
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
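A hedged end-to-end sketch tying login_user and logout_user together; the User class, routes, and secret are illustrative, not part of Flask-Login itself:

from flask import Flask, redirect, url_for
from flask_login import LoginManager, UserMixin, login_user, logout_user

app = Flask(__name__)
app.secret_key = "not-a-real-secret"
login_manager = LoginManager(app)

class User(UserMixin):
    def __init__(self, user_id):
        self.id = user_id

@login_manager.user_loader
def load_user(user_id):
    return User(user_id)

@app.route("/")
def index():
    return "home"

@app.route("/login/<user_id>", methods=["POST"])
def login(user_id):
    # login_user returns False for inactive users unless force=True.
    if login_user(User(user_id), remember=True):
        return redirect(url_for("index"))
    return "inactive account", 401

@app.route("/logout")
def logout():
    logout_user()  # also schedules the remember-me cookie for clearing
    return redirect(url_for("index"))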
def confirm_login():
    """
    This sets the current session as fresh. Sessions become stale when they
    are reloaded from a cookie.
    """
    session["_fresh"] = True
    session["_id"] = current_app.login_manager._session_identifier_generator()
    user_login_confirmed.send(current_app._get_current_object())
This sets the current session as fresh. Sessions become stale when they are reloaded from a cookie.
confirm_login
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def login_required(func):
    """
    If you decorate a view with this, it will ensure that the current user is
    logged in and authenticated before calling the actual view. (If they are
    not, it calls the :attr:`LoginManager.unauthorized` callback.) For
    example::

        @app.route('/post')
        @login_required
        def post():
            pass

    If there are only certain times you need to require that your user is
    logged in, you can do so with::

        if not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()

    ...which is essentially the code that this function adds to your views.

    .. Note ::

        Per `W3 guidelines for CORS preflight requests
        <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
        HTTP ``OPTIONS`` requests are exempt from login checks.

    :param func: The view function to decorate.
    :type func: function
    """

    @wraps(func)
    def decorated_view(*args, **kwargs):
        if request.method in EXEMPT_METHODS:
            pass
        elif not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()

        # flask 1.x compatibility
        # current_app.ensure_sync is only available in Flask >= 2.0
        if callable(getattr(current_app, "ensure_sync", None)):
            return current_app.ensure_sync(func)(*args, **kwargs)
        return func(*args, **kwargs)

    return decorated_view
If you decorate a view with this, it will ensure that the current user is logged in and authenticated before calling the actual view. (If they are not, it calls the :attr:`LoginManager.unauthorized` callback.) For example:: @app.route('/post') @login_required def post(): pass If there are only certain times you need to require that your user is logged in, you can do so with:: if not current_user.is_authenticated: return current_app.login_manager.unauthorized() ...which is essentially the code that this function adds to your views. .. Note :: Per `W3 guidelines for CORS preflight requests <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_, HTTP ``OPTIONS`` requests are exempt from login checks. :param func: The view function to decorate. :type func: function
login_required
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def fresh_login_required(func):
    """
    If you decorate a view with this, it will ensure that the current user's
    login is fresh - i.e. their session was not restored from a 'remember me'
    cookie. Sensitive operations, like changing a password or e-mail, should
    be protected with this, to impede the efforts of cookie thieves.

    If the user is not authenticated, :meth:`LoginManager.unauthorized` is
    called as normal. If they are authenticated, but their session is not
    fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that
    case, you will need to provide a :attr:`LoginManager.refresh_view`.)

    Behaves identically to the :func:`login_required` decorator with respect
    to configuration variables.

    .. Note ::

        Per `W3 guidelines for CORS preflight requests
        <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
        HTTP ``OPTIONS`` requests are exempt from login checks.

    :param func: The view function to decorate.
    :type func: function
    """

    @wraps(func)
    def decorated_view(*args, **kwargs):
        if request.method in EXEMPT_METHODS:
            pass
        elif not current_user.is_authenticated:
            return current_app.login_manager.unauthorized()
        elif not login_fresh():
            return current_app.login_manager.needs_refresh()
        try:
            # current_app.ensure_sync available in Flask >= 2.0
            return current_app.ensure_sync(func)(*args, **kwargs)
        except AttributeError:  # pragma: no cover
            return func(*args, **kwargs)

    return decorated_view
If you decorate a view with this, it will ensure that the current user's login is fresh - i.e. their session was not restored from a 'remember me' cookie. Sensitive operations, like changing a password or e-mail, should be protected with this, to impede the efforts of cookie thieves. If the user is not authenticated, :meth:`LoginManager.unauthorized` is called as normal. If they are authenticated, but their session is not fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that case, you will need to provide a :attr:`LoginManager.refresh_view`.) Behaves identically to the :func:`login_required` decorator with respect to configuration variables. .. Note :: Per `W3 guidelines for CORS preflight requests <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_, HTTP ``OPTIONS`` requests are exempt from login checks. :param func: The view function to decorate. :type func: function
fresh_login_required
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
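A short sketch of guarding a sensitive endpoint with the decorator above; it assumes the `app` from the earlier sketch plus a `login_manager` with `refresh_view` configured:

from flask_login import fresh_login_required

@app.route("/change-password", methods=["POST"])
@fresh_login_required
def change_password():
    # Reached only when the session is fresh; remember-me sessions are
    # bounced to login_manager.refresh_view first.
    return "password changed"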
def set_login_view(login_view, blueprint=None):
    """
    Sets the login view for the app or blueprint. If a blueprint is passed,
    the login view is set for this blueprint on ``blueprint_login_views``.

    :param login_view: The name of the login view or a URL for it.
    :type login_view: str
    :param blueprint: The blueprint which this login view should be set on.
        Defaults to ``None``.
    :type blueprint: object
    """
    num_login_views = len(current_app.login_manager.blueprint_login_views)
    if blueprint is not None or num_login_views != 0:
        (current_app.login_manager.blueprint_login_views[blueprint.name]) = login_view

        if (
            current_app.login_manager.login_view is not None
            and None not in current_app.login_manager.blueprint_login_views
        ):
            (
                current_app.login_manager.blueprint_login_views[None]
            ) = current_app.login_manager.login_view

        current_app.login_manager.login_view = None
    else:
        current_app.login_manager.login_view = login_view
Sets the login view for the app or blueprint. If a blueprint is passed, the login view is set for this blueprint on ``blueprint_login_views``. :param login_view: The name of the login view or a URL for it. :type login_view: str :param blueprint: The blueprint which this login view should be set on. Defaults to ``None``. :type blueprint: object
set_login_view
python
maxcountryman/flask-login
src/flask_login/utils.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/utils.py
MIT
def init_app(self, app, add_context_processor=True):
    """
    Configures an application. This registers an `after_request` call, and
    attaches this `LoginManager` to it as `app.login_manager`.

    :param app: The :class:`flask.Flask` object to configure.
    :type app: :class:`flask.Flask`
    :param add_context_processor: Whether to add a context processor to
        the app that adds a `current_user` variable to the template.
        Defaults to ``True``.
    :type add_context_processor: bool
    """
    app.login_manager = self
    app.after_request(self._update_remember_cookie)

    if add_context_processor:
        app.context_processor(_user_context_processor)
Configures an application. This registers an `after_request` call, and attaches this `LoginManager` to it as `app.login_manager`. :param app: The :class:`flask.Flask` object to configure. :type app: :class:`flask.Flask` :param add_context_processor: Whether to add a context processor to the app that adds a `current_user` variable to the template. Defaults to ``True``. :type add_context_processor: bool
init_app
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def unauthorized(self):
    """
    This is called when the user is required to log in. If you register a
    callback with :meth:`LoginManager.unauthorized_handler`, then it will
    be called. Otherwise, it will take the following actions:

        - Flash :attr:`LoginManager.login_message` to the user.

        - If the app is using blueprints, find the login view for the
          current blueprint using `blueprint_login_views`. If the app is
          not using blueprints or the login view for the current blueprint
          is not specified, use the value of `login_view`.

        - Redirect the user to the login view. (The page they were
          attempting to access will be passed in the ``next`` query string
          variable, so you can redirect there if present instead of the
          homepage. Alternatively, it will be added to the session as
          ``next`` if USE_SESSION_FOR_NEXT is set.)

    If :attr:`LoginManager.login_view` is not defined, then it will simply
    raise an HTTP 401 (Unauthorized) error instead.

    This should be returned from a view or before/after_request function,
    otherwise the redirect will have no effect.
    """
    user_unauthorized.send(current_app._get_current_object())

    if self.unauthorized_callback:
        return self.unauthorized_callback()

    if request.blueprint in self.blueprint_login_views:
        login_view = self.blueprint_login_views[request.blueprint]
    else:
        login_view = self.login_view

    if not login_view:
        abort(401)

    if self.login_message:
        if self.localize_callback is not None:
            flash(
                self.localize_callback(self.login_message),
                category=self.login_message_category,
            )
        else:
            flash(self.login_message, category=self.login_message_category)

    config = current_app.config
    if config.get("USE_SESSION_FOR_NEXT", USE_SESSION_FOR_NEXT):
        login_url = expand_login_view(login_view)
        session["_id"] = self._session_identifier_generator()
        session["next"] = make_next_param(login_url, request.url)
        redirect_url = make_login_url(login_view)
    else:
        redirect_url = make_login_url(login_view, next_url=request.url)

    return redirect(redirect_url)
This is called when the user is required to log in. If you register a callback with :meth:`LoginManager.unauthorized_handler`, then it will be called. Otherwise, it will take the following actions: - Flash :attr:`LoginManager.login_message` to the user. - If the app is using blueprints, find the login view for the current blueprint using `blueprint_login_views`. If the app is not using blueprints or the login view for the current blueprint is not specified, use the value of `login_view`. - Redirect the user to the login view. (The page they were attempting to access will be passed in the ``next`` query string variable, so you can redirect there if present instead of the homepage. Alternatively, it will be added to the session as ``next`` if USE_SESSION_FOR_NEXT is set.) If :attr:`LoginManager.login_view` is not defined, then it will simply raise an HTTP 401 (Unauthorized) error instead. This should be returned from a view or before/after_request function, otherwise the redirect will have no effect.
unauthorized
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def user_loader(self, callback):
    """
    This sets the callback for reloading a user from the session.

    The function you set should take a user ID (a ``str``) and return a
    user object, or ``None`` if the user does not exist.

    :param callback: The callback for retrieving a user object.
    :type callback: callable
    """
    self._user_callback = callback
    return self.user_callback
This sets the callback for reloading a user from the session. The function you set should take a user ID (a ``str``) and return a user object, or ``None`` if the user does not exist. :param callback: The callback for retrieving a user object. :type callback: callable
user_loader
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def user_callback(self):
    """Gets the user_loader callback set by user_loader decorator."""
    return self._user_callback
Gets the user_loader callback set by user_loader decorator.
user_callback
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def request_loader(self, callback):
    """
    This sets the callback for loading a user from a Flask request.

    The function you set should take a Flask request object and return a
    user object, or `None` if the user does not exist.

    :param callback: The callback for retrieving a user object.
    :type callback: callable
    """
    self._request_callback = callback
    return self.request_callback
This sets the callback for loading a user from a Flask request. The function you set should take a Flask request object and return a user object, or `None` if the user does not exist. :param callback: The callback for retrieving a user object. :type callback: callable
request_loader
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def request_callback(self):
    """Gets the request_loader callback set by request_loader decorator."""
    return self._request_callback
Gets the request_loader callback set by request_loader decorator.
request_callback
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def unauthorized_handler(self, callback):
    """
    This will set the callback for the `unauthorized` method, which among
    other things is used by `login_required`. It takes no arguments, and
    should return a response to be sent to the user instead of their
    normal view.

    :param callback: The callback for unauthorized users.
    :type callback: callable
    """
    self.unauthorized_callback = callback
    return callback
This will set the callback for the `unauthorized` method, which among other things is used by `login_required`. It takes no arguments, and should return a response to be sent to the user instead of their normal view. :param callback: The callback for unauthorized users. :type callback: callable
unauthorized_handler
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def needs_refresh_handler(self, callback):
    """
    This will set the callback for the `needs_refresh` method, which among
    other things is used by `fresh_login_required`. It takes no arguments,
    and should return a response to be sent to the user instead of their
    normal view.

    :param callback: The callback for unauthorized users.
    :type callback: callable
    """
    self.needs_refresh_callback = callback
    return callback
This will set the callback for the `needs_refresh` method, which among other things is used by `fresh_login_required`. It takes no arguments, and should return a response to be sent to the user instead of their normal view. :param callback: The callback for unauthorized users. :type callback: callable
needs_refresh_handler
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def needs_refresh(self):
    """
    This is called when the user is logged in, but they need to be
    reauthenticated because their session is stale. If you register a
    callback with `needs_refresh_handler`, then it will be called.
    Otherwise, it will take the following actions:

        - Flash :attr:`LoginManager.needs_refresh_message` to the user.

        - Redirect the user to :attr:`LoginManager.refresh_view`. (The page
          they were attempting to access will be passed in the ``next``
          query string variable, so you can redirect there if present
          instead of the homepage.)

    If :attr:`LoginManager.refresh_view` is not defined, then it will
    simply raise an HTTP 401 (Unauthorized) error instead.

    This should be returned from a view or before/after_request function,
    otherwise the redirect will have no effect.
    """
    user_needs_refresh.send(current_app._get_current_object())

    if self.needs_refresh_callback:
        return self.needs_refresh_callback()

    if not self.refresh_view:
        abort(401)

    if self.needs_refresh_message:
        if self.localize_callback is not None:
            flash(
                self.localize_callback(self.needs_refresh_message),
                category=self.needs_refresh_message_category,
            )
        else:
            flash(
                self.needs_refresh_message,
                category=self.needs_refresh_message_category,
            )

    config = current_app.config
    if config.get("USE_SESSION_FOR_NEXT", USE_SESSION_FOR_NEXT):
        login_url = expand_login_view(self.refresh_view)
        session["_id"] = self._session_identifier_generator()
        session["next"] = make_next_param(login_url, request.url)
        redirect_url = make_login_url(self.refresh_view)
    else:
        login_url = self.refresh_view
        redirect_url = make_login_url(login_url, next_url=request.url)

    return redirect(redirect_url)
This is called when the user is logged in, but they need to be reauthenticated because their session is stale. If you register a callback with `needs_refresh_handler`, then it will be called. Otherwise, it will take the following actions: - Flash :attr:`LoginManager.needs_refresh_message` to the user. - Redirect the user to :attr:`LoginManager.refresh_view`. (The page they were attempting to access will be passed in the ``next`` query string variable, so you can redirect there if present instead of the homepage.) If :attr:`LoginManager.refresh_view` is not defined, then it will simply raise an HTTP 401 (Unauthorized) error instead. This should be returned from a view or before/after_request function, otherwise the redirect will have no effect.
needs_refresh
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def _update_request_context_with_user(self, user=None):
    """Store the given user as ctx.user."""
    if user is None:
        user = self.anonymous_user()

    g._login_user = user
Store the given user as ctx.user.
_update_request_context_with_user
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def _load_user(self):
    """Loads user from session or remember_me cookie as applicable"""

    if self._user_callback is None and self._request_callback is None:
        raise Exception(
            "Missing user_loader or request_loader. Refer to "
            "http://flask-login.readthedocs.io/#how-it-works "
            "for more info."
        )

    user_accessed.send(current_app._get_current_object())

    # Check SESSION_PROTECTION
    if self._session_protection_failed():
        return self._update_request_context_with_user()

    user = None

    # Load user from Flask Session
    user_id = session.get("_user_id")
    if user_id is not None and self._user_callback is not None:
        user = self._user_callback(user_id)

    # Load user from Remember Me Cookie or Request Loader
    if user is None:
        config = current_app.config
        cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME)
        has_cookie = (
            cookie_name in request.cookies and session.get("_remember") != "clear"
        )
        if has_cookie:
            cookie = request.cookies[cookie_name]
            user = self._load_user_from_remember_cookie(cookie)
        elif self._request_callback:
            user = self._load_user_from_request(request)

    return self._update_request_context_with_user(user)
Loads user from session or remember_me cookie as applicable
_load_user
python
maxcountryman/flask-login
src/flask_login/login_manager.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/login_manager.py
MIT
def __eq__(self, other):
    """
    Checks the equality of two `UserMixin` objects using `get_id`.
    """
    if isinstance(other, UserMixin):
        return self.get_id() == other.get_id()
    return NotImplemented
Checks the equality of two `UserMixin` objects using `get_id`.
__eq__
python
maxcountryman/flask-login
src/flask_login/mixins.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/mixins.py
MIT
def __ne__(self, other):
    """
    Checks the inequality of two `UserMixin` objects using `get_id`.
    """
    equal = self.__eq__(other)
    if equal is NotImplemented:
        return NotImplemented
    return not equal
Checks the inequality of two `UserMixin` objects using `get_id`.
__ne__
python
maxcountryman/flask-login
src/flask_login/mixins.py
https://github.com/maxcountryman/flask-login/blob/master/src/flask_login/mixins.py
MIT
def vector_shape(n_inducing: int) -> Tuple[int, int]:
    """Shape of a vector with n_inducing rows and 1 column."""
    return (n_inducing, 1)
Shape of a vector with n_inducing rows and 1 column.
vector_shape
python
JaxGaussianProcesses/GPJax
tests/test_variational_families.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/tests/test_variational_families.py
Apache-2.0
def matrix_shape(n_inducing: int) -> Tuple[int, int]:
    """Shape of a matrix with n_inducing rows and n_inducing columns."""
    return (n_inducing, n_inducing)
Shape of a matrix with n_inducing rows and n_inducing columns.
matrix_shape
python
JaxGaussianProcesses/GPJax
tests/test_variational_families.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/tests/test_variational_families.py
Apache-2.0
def vector_val(val: float) -> Callable[[int], Float[Array, "n_inducing 1"]]:
    """Vector of shape (n_inducing, 1) filled with val."""

    def vector_val_fn(n_inducing: int):
        return val * jnp.ones(vector_shape(n_inducing))

    return vector_val_fn
Vector of shape (n_inducing, 1) filled with val.
vector_val
python
JaxGaussianProcesses/GPJax
tests/test_variational_families.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/tests/test_variational_families.py
Apache-2.0
def diag_matrix_val(
    val: float,
) -> Callable[[int], Float[Array, "n_inducing n_inducing"]]:
    """Diagonal matrix of shape (n_inducing, n_inducing) filled with val."""

    def diag_matrix_fn(n_inducing: int) -> Float[Array, "n_inducing n_inducing"]:
        return jnp.eye(n_inducing) * val

    return diag_matrix_fn
Diagonal matrix of shape (n_inducing, n_inducing) filled with val.
diag_matrix_val
python
JaxGaussianProcesses/GPJax
tests/test_variational_families.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/tests/test_variational_families.py
Apache-2.0
def fun(x, y):
    """In practice, the first argument will be the latent function values"""
    return x**2 + y
In practice, the first argument will be the latent function values
test_quadrature.test_quadrature.test.fun
python
JaxGaussianProcesses/GPJax
tests/test_integrators.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/tests/test_integrators.py
Apache-2.0
def approx_equal(res: jnp.ndarray, actual: jnp.ndarray) -> bool:
    """Check if two arrays are approximately equal."""
    return jnp.linalg.norm(res - actual) < 1e-5
Check if two arrays are approximately equal.
approx_equal
python
JaxGaussianProcesses/GPJax
tests/test_gaussian_distribution.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/tests/test_gaussian_distribution.py
Apache-2.0
def test_arccosine_special_case(order: int):
    """For certain values of weight variance (1.0) and bias variance (0.0), we can
    test our calculations using the Monte Carlo expansion of the arccosine kernel,
    e.g. see Eq. (1) of https://cseweb.ucsd.edu/~saul/papers/nips09_kernel.pdf.
    """
    kernel = ArcCosine(
        weight_variance=jnp.array([1.0, 1.0]), bias_variance=1e-25, order=order
    )

    # Inputs close(ish) together
    a = jnp.array([[0.0, 0.0]])
    b = jnp.array([[2.0, 2.0]])

    # calc cross-covariance exactly
    Kab_exact = kernel.cross_covariance(a, b)

    # calc cross-covariance using samples
    weights = jax.random.normal(jr.PRNGKey(123), (10_000, 2))  # [S, d]
    weights_a = jnp.matmul(weights, a.T)  # [S, 1]
    weights_b = jnp.matmul(weights, b.T)  # [S, 1]
    H_a = jnp.heaviside(weights_a, 0.5)
    H_b = jnp.heaviside(weights_b, 0.5)
    integrands = H_a * H_b * (weights_a**order) * (weights_b**order)
    Kab_approx = 2.0 * jnp.mean(integrands)

    assert jnp.max(Kab_approx - Kab_exact) < 1e-4
For certain values of weight variance (1.0) and bias variance (0.0), we can test our calculations using the Monte Carlo expansion of the arccosine kernel, e.g. see Eq. (1) of https://cseweb.ucsd.edu/~saul/papers/nips09_kernel.pdf.
test_arccosine_special_case
python
JaxGaussianProcesses/GPJax
tests/test_kernels/test_nonstationary.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/tests/test_kernels/test_nonstationary.py
Apache-2.0
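For reference, our reading of the Monte Carlo identity this test exercises (Eq. (1) of Cho & Saul, linked above), with Θ the Heaviside step function and n the kernel order:

k_n(\mathbf{x}, \mathbf{y}) = 2\,\mathbb{E}_{\mathbf{w} \sim \mathcal{N}(0, I)}\!\left[ \Theta(\mathbf{w}\cdot\mathbf{x})\,\Theta(\mathbf{w}\cdot\mathbf{y})\,(\mathbf{w}\cdot\mathbf{x})^{n}\,(\mathbf{w}\cdot\mathbf{y})^{n} \right]

The factor 2 and the step functions correspond to `2.0 * jnp.mean(integrands)` and `jnp.heaviside(..., 0.5)` in the test body, and `bias_variance=1e-25` stands in for the exact zero in the docstring.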
def __init__(
    self,
    num_datapoints: int,
    integrator: AbstractIntegrator = GHQuadratureIntegrator(),
):
    """Initializes the likelihood.

    Args:
        num_datapoints (int): the number of data points.
        integrator (AbstractIntegrator): The integrator to be used for computing
            expected log likelihoods. Must be an instance of `AbstractIntegrator`.
    """
    self.num_datapoints = num_datapoints
    self.integrator = integrator
Initializes the likelihood. Args: num_datapoints (int): the number of data points. integrator (AbstractIntegrator): The integrator to be used for computing expected log likelihoods. Must be an instance of `AbstractIntegrator`.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/likelihoods.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/likelihoods.py
Apache-2.0
def _do_callback(_) -> int:
    """Perform the callback."""
    jax.debug.callback(func, *args)
    return _dummy_result
Perform the callback.
_callback._do_callback
python
JaxGaussianProcesses/GPJax
gpjax/scan.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/scan.py
Apache-2.0
def _not_callback(_) -> int:
    """Do nothing."""
    return _dummy_result
Do nothing.
_callback._not_callback
python
JaxGaussianProcesses/GPJax
gpjax/scan.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/scan.py
Apache-2.0
def _callback(cond: ScalarBool, func: Callable, *args: Any) -> None:
    r"""Callback a function for a given argument if a condition is true.

    Args:
        cond (bool): The condition.
        func (Callable): The function to call.
        *args (Any): The arguments to pass to the function.
    """
    # lax.cond requires a result, so we use a dummy result.
    _dummy_result = 0

    def _do_callback(_) -> int:
        """Perform the callback."""
        jax.debug.callback(func, *args)
        return _dummy_result

    def _not_callback(_) -> int:
        """Do nothing."""
        return _dummy_result

    _ = lax.cond(cond, _do_callback, _not_callback, operand=None)
Callback a function for a given argument if a condition is true. Args: cond (bool): The condition. func (Callable): The function to call. *args (Any): The arguments to pass to the function.
_callback
python
JaxGaussianProcesses/GPJax
gpjax/scan.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/scan.py
Apache-2.0
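A self-contained sketch of the same lax.cond plus jax.debug.callback pattern, printing from inside a traced scan every tenth step; the names here are illustrative, not GPJax's:

import jax
import jax.numpy as jnp
from jax import lax

def report(i):
    print(f"step {i}")  # runs host-side via the debug callback

def body(carry, i):
    dummy = 0  # lax.cond branches must return a value

    def _do(_):
        jax.debug.callback(report, i)
        return dummy

    def _skip(_):
        return dummy

    lax.cond(i % 10 == 0, _do, _skip, None)
    return carry + 1, carry

final, _ = lax.scan(body, 0, jnp.arange(30))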
def _set_running(*args: Any) -> None:
    """Set the tqdm progress bar to running."""
    _progress_bar.set_description("Running", refresh=False)
Set the tqdm progress bar to running.
_set_running
python
JaxGaussianProcesses/GPJax
gpjax/scan.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/scan.py
Apache-2.0
def _update_tqdm(*args: Any) -> None:
    """Update the tqdm progress bar with the latest objective value."""
    _value, _iter_num = args
    _progress_bar.update(_iter_num.item())
    if log_value and _value is not None:
        _progress_bar.set_postfix({"Value": f"{_value: .2f}"})
Update the tqdm progress bar with the latest objective value.
_update_tqdm
python
JaxGaussianProcesses/GPJax
gpjax/scan.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/scan.py
Apache-2.0
def _close_tqdm(*args: Any) -> None:
    """Close the tqdm progress bar."""
    _progress_bar.close()
Close the tqdm progress bar.
_close_tqdm
python
JaxGaussianProcesses/GPJax
gpjax/scan.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/scan.py
Apache-2.0
def get_batch(train_data: Dataset, batch_size: int, key: KeyArray) -> Dataset:
    """Batch the data into mini-batches. Sampling is done with replacement.

    Args:
        train_data (Dataset): The training dataset.
        batch_size (int): The batch size.
        key (KeyArray): The random key to use for the batch selection.

    Returns
    -------
        Dataset: The batched dataset.
    """
    x, y, n = train_data.X, train_data.y, train_data.n

    # Subsample mini-batch indices with replacement.
    indices = jr.choice(key, n, (batch_size,), replace=True)

    return Dataset(X=x[indices], y=y[indices])
Batch the data into mini-batches. Sampling is done with replacement. Args: train_data (Dataset): The training dataset. batch_size (int): The batch size. key (KeyArray): The random key to use for the batch selection. Returns ------- Dataset: The batched dataset.
get_batch
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
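A hedged usage sketch for get_batch, assuming GPJax's Dataset container with `X`, `y`, and an `n` property; the toy data is illustrative:

import jax.numpy as jnp
import jax.random as jr
from gpjax import Dataset

X = jnp.linspace(0.0, 1.0, 100).reshape(-1, 1)
y = jnp.sin(X)
data = Dataset(X=X, y=y)

batch = get_batch(data, batch_size=16, key=jr.PRNGKey(0))
assert batch.n == 16  # indices are drawn with replacement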
def _check_model(model: tp.Any) -> None:
    """Check that the model is a subclass of nnx.Module."""
    if not isinstance(model, nnx.Module):
        raise TypeError(
            "Expected model to be a subclass of nnx.Module. "
            f"Got {model} of type {type(model)}."
        )
Check that the model is a subclass of nnx.Module.
_check_model
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
def _check_train_data(train_data: tp.Any) -> None:
    """Check that the train_data is of type gpjax.Dataset."""
    if not isinstance(train_data, Dataset):
        raise TypeError(
            "Expected train_data to be of type gpjax.Dataset. "
            f"Got {train_data} of type {type(train_data)}."
        )
Check that the train_data is of type gpjax.Dataset.
_check_train_data
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
def _check_optim(optim: tp.Any) -> None:
    """Check that the optimiser is of type GradientTransformation."""
    if not isinstance(optim, ox.GradientTransformation):
        raise TypeError(
            "Expected optim to be of type optax.GradientTransformation. "
            f"Got {optim} of type {type(optim)}."
        )
Check that the optimiser is of type GradientTransformation.
_check_optim
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
def _check_num_iters(num_iters: tp.Any) -> None:
    """Check that the number of iterations is of type int and positive."""
    if not isinstance(num_iters, int):
        raise TypeError(
            "Expected num_iters to be of type int. "
            f"Got {num_iters} of type {type(num_iters)}."
        )

    if num_iters <= 0:
        raise ValueError(f"Expected num_iters to be positive. Got {num_iters}.")
Check that the number of iterations is of type int and positive.
_check_num_iters
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
def _check_log_rate(log_rate: tp.Any) -> None:
    """Check that the log rate is of type int and positive."""
    if not isinstance(log_rate, int):
        raise TypeError(
            "Expected log_rate to be of type int. "
            f"Got {log_rate} of type {type(log_rate)}."
        )

    if not log_rate > 0:
        raise ValueError(f"Expected log_rate to be positive. Got {log_rate}.")
Check that the log rate is of type int and positive.
_check_log_rate
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
def _check_verbose(verbose: tp.Any) -> None:
    """Check that the verbose is of type bool."""
    if not isinstance(verbose, bool):
        raise TypeError(
            "Expected verbose to be of type bool. "
            f"Got {verbose} of type {type(verbose)}."
        )
Check that the verbose is of type bool.
_check_verbose
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
def _check_batch_size(batch_size: tp.Any) -> None:
    """Check that the batch size is of type int and either positive or -1."""
    if not isinstance(batch_size, int):
        raise TypeError(
            "Expected batch_size to be of type int. "
            f"Got {batch_size} of type {type(batch_size)}."
        )

    if not batch_size == -1 and not batch_size > 0:
        raise ValueError(f"Expected batch_size to be positive or -1. Got {batch_size}.")
Check that the batch size is of type int and either positive or -1.
_check_batch_size
python
JaxGaussianProcesses/GPJax
gpjax/fit.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/fit.py
Apache-2.0
def lower_cholesky(A: LinearOperator) -> Triangular:  # noqa: F811
    """Returns the lower Cholesky factor of a linear operator.

    Args:
        A: The input linear operator.

    Returns:
        Triangular: The lower Cholesky factor of A.
    """
    if PSD not in A.annotations:
        raise ValueError(
            "Expected LinearOperator to be PSD, did you forget to use cola.PSD?"
        )

    return Triangular(jnp.linalg.cholesky(A.to_dense()), lower=True)
Returns the lower Cholesky factor of a linear operator. Args: A: The input linear operator. Returns: Triangular: The lower Cholesky factor of A.
lower_cholesky
python
JaxGaussianProcesses/GPJax
gpjax/lower_cholesky.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/lower_cholesky.py
Apache-2.0
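A sketch under the assumption that GPJax's cola dependency is available and that `cola.PSD(...)` tags the operator so the annotation check passes; the matrix values are illustrative:

import jax.numpy as jnp
import cola

# Tag a dense operator as PSD, then factor it.
A = cola.PSD(cola.ops.Dense(jnp.array([[4.0, 0.0], [0.0, 9.0]])))
L = lower_cholesky(A)
assert jnp.allclose(L.to_dense(), jnp.diag(jnp.array([2.0, 3.0])))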
def num_inducing(self) -> int:
    """The number of inducing inputs."""
    return self.inducing_inputs.value.shape[0]
The number of inducing inputs.
num_inducing
python
JaxGaussianProcesses/GPJax
gpjax/variational_families.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/variational_families.py
Apache-2.0
def __init__(
    self,
    active_dims: tp.Union[list[int], slice, None] = None,
    n_dims: tp.Union[int, None] = None,
    compute_engine: AbstractKernelComputation = DenseKernelComputation(),
):
    """Initialise the AbstractKernel class.

    Args:
        active_dims: the indices of the input dimensions that are active in the
            kernel's evaluation, represented by a list of integers or a slice
            object. Defaults to a full slice.
        n_dims: the number of input dimensions of the kernel.
        compute_engine: the computation engine that is used to compute the
            kernel's cross-covariance and gram matrices. Defaults to
            DenseKernelComputation.
    """
    active_dims = active_dims or slice(None)

    _check_active_dims(active_dims)
    _check_n_dims(n_dims)

    self.active_dims, self.n_dims = _check_dims_compat(active_dims, n_dims)
    self.compute_engine = compute_engine
Initialise the AbstractKernel class. Args: active_dims: the indices of the input dimensions that are active in the kernel's evaluation, represented by a list of integers or a slice object. Defaults to a full slice. n_dims: the number of input dimensions of the kernel. compute_engine: the computation engine that is used to compute the kernel's cross-covariance and gram matrices. Defaults to DenseKernelComputation.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/base.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/base.py
Apache-2.0
def __init__(
    self,
    active_dims: tp.Union[list[int], slice, None] = None,
    degree: int = 2,
    shift: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 0.0,
    variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    n_dims: tp.Union[int, None] = None,
    compute_engine: AbstractKernelComputation = DenseKernelComputation(),
):
    """Initializes the kernel.

    Args:
        active_dims: The indices of the input dimensions that the kernel
            operates on.
        degree: The degree of the polynomial.
        shift: The shift parameter of the kernel.
        variance: The variance of the kernel.
        n_dims: The number of input dimensions.
        compute_engine: The computation engine that the kernel uses to compute
            the covariance matrix.
    """
    super().__init__(active_dims, n_dims, compute_engine)

    self.degree = degree

    if isinstance(shift, nnx.Variable):
        self.shift = shift
    else:
        self.shift = PositiveReal(shift)
        if tp.TYPE_CHECKING:
            self.shift = tp.cast(PositiveReal[ScalarArray], self.shift)

    if isinstance(variance, nnx.Variable):
        self.variance = variance
    else:
        self.variance = PositiveReal(variance)
        if tp.TYPE_CHECKING:
            self.variance = tp.cast(PositiveReal[ScalarArray], self.variance)

    self.name = f"Polynomial (degree {self.degree})"
Initializes the kernel. Args: active_dims: The indices of the input dimensions that the kernel operates on. degree: The degree of the polynomial. shift: The shift parameter of the kernel. variance: The variance of the kernel. n_dims: The number of input dimensions. compute_engine: The computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/nonstationary/polynomial.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/nonstationary/polynomial.py
Apache-2.0
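A small usage sketch consistent with the constructor above; `cross_covariance` is the evaluation path used elsewhere in this dataset's GPJax tests, and the input values are arbitrary:

import jax.numpy as jnp
from gpjax.kernels import Polynomial

k = Polynomial(degree=2, shift=1.0, variance=0.5)
a = jnp.array([[1.0, 2.0]])
b = jnp.array([[3.0, 4.0]])
Kab = k.cross_covariance(a, b)  # shape (1, 1)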
def __init__(
    self,
    active_dims: tp.Union[list[int], slice, None] = None,
    order: tp.Literal[0, 1, 2] = 0,
    variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    weight_variance: tp.Union[
        WeightVarianceCompatible, nnx.Variable[WeightVariance]
    ] = 1.0,
    bias_variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    n_dims: tp.Union[int, None] = None,
    compute_engine: AbstractKernelComputation = DenseKernelComputation(),
):
    """Initializes the kernel.

    Args:
        active_dims: The indices of the input dimensions that the kernel
            operates on.
        order: The order of the kernel. Must be 0, 1 or 2.
        variance: The variance of the kernel σ.
        weight_variance: The weight variance of the kernel.
        bias_variance: The bias variance of the kernel.
        n_dims: The number of input dimensions. If `lengthscale` is an array,
            this argument is ignored.
        compute_engine: The computation engine that the kernel uses to compute
            the covariance matrix.
    """
    if order not in [0, 1, 2]:
        raise ValueError("ArcCosine kernel only implemented for orders 0, 1 and 2.")

    self.order = order

    if isinstance(weight_variance, nnx.Variable):
        self.weight_variance = weight_variance
    else:
        self.weight_variance = PositiveReal(weight_variance)
        if tp.TYPE_CHECKING:
            self.weight_variance = tp.cast(
                PositiveReal[WeightVariance], self.weight_variance
            )

    if isinstance(variance, nnx.Variable):
        self.variance = variance
    else:
        self.variance = PositiveReal(variance)
        if tp.TYPE_CHECKING:
            self.variance = tp.cast(PositiveReal[ScalarArray], self.variance)

    if isinstance(bias_variance, nnx.Variable):
        self.bias_variance = bias_variance
    else:
        self.bias_variance = PositiveReal(bias_variance)
        if tp.TYPE_CHECKING:
            self.bias_variance = tp.cast(
                PositiveReal[ScalarArray], self.bias_variance
            )

    self.name = f"ArcCosine (order {self.order})"

    super().__init__(active_dims, n_dims, compute_engine)
Initializes the kernel. Args: active_dims: The indices of the input dimensions that the kernel operates on. order: The order of the kernel. Must be 0, 1 or 2. variance: The variance of the kernel σ. weight_variance: The weight variance of the kernel. bias_variance: The bias variance of the kernel. n_dims: The number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: The computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/nonstationary/arccosine.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/nonstationary/arccosine.py
Apache-2.0
def __init__(
    self,
    active_dims: tp.Union[list[int], slice, None] = None,
    variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    n_dims: tp.Union[int, None] = None,
    compute_engine: AbstractKernelComputation = DenseKernelComputation(),
):
    """Initializes the kernel.

    Args:
        active_dims: The indices of the input dimensions that the kernel
            operates on.
        variance: the variance of the kernel σ.
        n_dims: The number of input dimensions.
        compute_engine: The computation engine that the kernel uses to compute
            the covariance matrix.
    """
    super().__init__(active_dims, n_dims, compute_engine)

    if isinstance(variance, nnx.Variable):
        self.variance = variance
    else:
        self.variance = PositiveReal(variance)
        if tp.TYPE_CHECKING:
            self.variance = tp.cast(PositiveReal[ScalarArray], self.variance)
Initializes the kernel. Args: active_dims: The indices of the input dimensions that the kernel operates on. variance: the variance of the kernel σ. n_dims: The number of input dimensions. compute_engine: The computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/nonstationary/linear.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/nonstationary/linear.py
Apache-2.0
def __call__(self, x: Float[Array, "D 1"], y: Float[Array, "D 1"]) -> None:
    """Superfluous for RFFs."""
    raise RuntimeError("RFFs do not have a kernel function.")
Superfluous for RFFs.
__call__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/approximations/rff.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/approximations/rff.py
Apache-2.0
def __init__(
    self,
    active_dims: tp.Union[list[int], slice, None] = None,
    lengthscale: tp.Union[LengthscaleCompatible, nnx.Variable[Lengthscale]] = 1.0,
    variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    period: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    n_dims: tp.Union[int, None] = None,
    compute_engine: AbstractKernelComputation = DenseKernelComputation(),
):
    """Initializes the kernel.

    Args:
        active_dims: the indices of the input dimensions that the kernel
            operates on.
        lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an
            array of length 1, the kernel is isotropic, meaning that the same
            lengthscale is used for all input dimensions. If an array with
            length > 1, the kernel is anisotropic, meaning that a different
            lengthscale is used for each input.
        variance: the variance of the kernel σ.
        period: the period of the kernel p.
        n_dims: the number of input dimensions. If `lengthscale` is an array,
            this argument is ignored.
        compute_engine: the computation engine that the kernel uses to compute
            the covariance matrix.
    """
    if isinstance(period, nnx.Variable):
        self.period = period
    else:
        self.period = PositiveReal(period)

    super().__init__(active_dims, lengthscale, variance, n_dims, compute_engine)
Initializes the kernel. Args: active_dims: the indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. period: the period of the kernel p. n_dims: the number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: the computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/stationary/periodic.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/stationary/periodic.py
Apache-2.0
def __init__(
    self,
    active_dims: tp.Union[list[int], slice, None] = None,
    variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    n_dims: tp.Union[int, None] = None,
    compute_engine: AbstractKernelComputation = ConstantDiagonalKernelComputation(),
):
    """Initializes the kernel.

    Args:
        active_dims: The indices of the input dimensions that the kernel
            operates on.
        variance: the variance of the kernel σ.
        n_dims: The number of input dimensions.
        compute_engine: The computation engine that the kernel uses to compute
            the covariance matrix.
    """
    super().__init__(active_dims, 1.0, variance, n_dims, compute_engine)
Initializes the kernel. Args: active_dims: The indices of the input dimensions that the kernel operates on. variance: the variance of the kernel σ. n_dims: The number of input dimensions. compute_engine: The computation engine that the kernel uses to compute the covariance matrix
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/stationary/white.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/stationary/white.py
Apache-2.0
def __init__(
    self,
    active_dims: tp.Union[list[int], slice, None] = None,
    lengthscale: tp.Union[LengthscaleCompatible, nnx.Variable[Lengthscale]] = 1.0,
    variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
    n_dims: tp.Union[int, None] = None,
    compute_engine: AbstractKernelComputation = DenseKernelComputation(),
):
    """Initializes the kernel.

    Args:
        active_dims: The indices of the input dimensions that the kernel
            operates on.
        lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an
            array of length 1, the kernel is isotropic, meaning that the same
            lengthscale is used for all input dimensions. If an array with
            length > 1, the kernel is anisotropic, meaning that a different
            lengthscale is used for each input.
        variance: the variance of the kernel σ.
        n_dims: The number of input dimensions. If `lengthscale` is an array,
            this argument is ignored.
        compute_engine: The computation engine that the kernel uses to compute
            the covariance matrix.
    """
    super().__init__(active_dims, n_dims, compute_engine)

    self.n_dims = _validate_lengthscale(lengthscale, self.n_dims)

    if isinstance(lengthscale, nnx.Variable):
        self.lengthscale = lengthscale
    else:
        self.lengthscale = PositiveReal(lengthscale)

        # static typing
        if tp.TYPE_CHECKING:
            self.lengthscale = tp.cast(PositiveReal[Lengthscale], self.lengthscale)

    if isinstance(variance, nnx.Variable):
        self.variance = variance
    else:
        self.variance = PositiveReal(variance)

        # static typing
        if tp.TYPE_CHECKING:
            self.variance = tp.cast(PositiveReal[ScalarFloat], self.variance)
Initializes the kernel. Args: active_dims: The indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. n_dims: The number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: The computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/stationary/base.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/stationary/base.py
Apache-2.0
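To make the isotropic/anisotropic distinction handled by the base initialiser above concrete, a short sketch using the `RBF` subclass (assumed export; values hypothetical):

import jax.numpy as jnp
from gpjax.kernels import RBF  # a StationaryKernel subclass (assumed)

iso = RBF(lengthscale=1.0, n_dims=3)  # one shared lengthscale for all 3 inputs
aniso = RBF(lengthscale=jnp.array([0.5, 1.0, 2.0]))  # n_dims inferred as 3 from the array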
def _check_lengthscale(lengthscale: tp.Any): """Check that the lengthscale is a valid value.""" if isinstance(lengthscale, nnx.Variable): _check_lengthscale(lengthscale.value) return if not isinstance(lengthscale, (int, float, jnp.ndarray, list, tuple)): raise TypeError( f"Expected `lengthscale` to be an array-like. Got {lengthscale}." ) if isinstance(lengthscale, (jnp.ndarray, list)): ls_shape = jnp.shape(jnp.asarray(lengthscale)) if len(ls_shape) > 1: raise ValueError( f"Expected `lengthscale` to be a scalar or 1D array. " f"Got `lengthscale` with shape {ls_shape}." )
Check that the lengthscale is a valid value.
_check_lengthscale
python
JaxGaussianProcesses/GPJax
gpjax/kernels/stationary/base.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/stationary/base.py
Apache-2.0
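For illustration, a standalone re-statement of the checks performed by `_check_lengthscale` above (not a GPJax import; the `nnx.Variable` unwrapping branch is omitted):

import jax.numpy as jnp

def check_lengthscale(ls):
    # Mirrors `_check_lengthscale`: accept scalars and 1D array-likes only.
    if not isinstance(ls, (int, float, jnp.ndarray, list, tuple)):
        raise TypeError(f"Expected `lengthscale` to be an array-like. Got {ls}.")
    if isinstance(ls, (jnp.ndarray, list)):
        if len(jnp.shape(jnp.asarray(ls))) > 1:
            raise ValueError("Expected `lengthscale` to be a scalar or 1D array.")

check_lengthscale(1.0)         # OK: scalar (isotropic)
check_lengthscale([1.0, 2.0])  # OK: 1D (anisotropic)
# check_lengthscale(jnp.ones((2, 2)))  # would raise ValueError: 2D array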
def __init__( self, active_dims: tp.Union[list[int], slice, None] = None, lengthscale: tp.Union[LengthscaleCompatible, nnx.Variable[Lengthscale]] = 1.0, variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0, alpha: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0, n_dims: tp.Union[int, None] = None, compute_engine: AbstractKernelComputation = DenseKernelComputation(), ): """Initializes the kernel. Args: active_dims: The indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. alpha: the alpha parameter of the kernel α. n_dims: The number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: The computation engine that the kernel uses to compute the covariance matrix. """ if isinstance(alpha, nnx.Variable): self.alpha = alpha else: self.alpha = PositiveReal(alpha) super().__init__(active_dims, lengthscale, variance, n_dims, compute_engine)
Initializes the kernel. Args: active_dims: The indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. alpha: the alpha parameter of the kernel α. n_dims: The number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: The computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/stationary/rational_quadratic.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/stationary/rational_quadratic.py
Apache-2.0
def __init__( self, active_dims: tp.Union[list[int], slice, None] = None, lengthscale: tp.Union[LengthscaleCompatible, nnx.Variable[Lengthscale]] = 1.0, variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0, power: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0, n_dims: tp.Union[int, None] = None, compute_engine: AbstractKernelComputation = DenseKernelComputation(), ): """Initializes the kernel. Args: active_dims: the indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. power: the power of the kernel κ. n_dims: the number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: the computation engine that the kernel uses to compute the covariance matrix. """ if isinstance(power, nnx.Variable): self.power = power else: self.power = SigmoidBounded(power) super().__init__(active_dims, lengthscale, variance, n_dims, compute_engine)
Initializes the kernel. Args: active_dims: the indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. power: the power of the kernel κ. n_dims: the number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: the computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/stationary/powered_exponential.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/stationary/powered_exponential.py
Apache-2.0
def __init__( self, laplacian: Num[Array, "N N"], active_dims: tp.Union[list[int], slice, None] = None, lengthscale: tp.Union[ScalarFloat, Float[Array, " D"], Parameter] = 1.0, variance: tp.Union[ScalarFloat, Parameter] = 1.0, smoothness: ScalarFloat = 1.0, n_dims: tp.Union[int, None] = None, compute_engine: AbstractKernelComputation = EigenKernelComputation(), ): """Initializes the kernel. Args: laplacian: the Laplacian matrix of the graph. active_dims: The indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. smoothness: the smoothness parameter of the Matérn kernel. n_dims: The number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: The computation engine that the kernel uses to compute the covariance matrix. """ if isinstance(smoothness, Parameter): self.smoothness = smoothness else: self.smoothness = PositiveReal(smoothness) self.laplacian = Static(laplacian) evals, eigenvectors = jnp.linalg.eigh(self.laplacian.value) self.eigenvectors = Static(eigenvectors) self.eigenvalues = Static(evals.reshape(-1, 1)) self.num_vertex = self.eigenvalues.value.shape[0] super().__init__(active_dims, lengthscale, variance, n_dims, compute_engine)
Initializes the kernel. Args: laplacian: the Laplacian matrix of the graph. active_dims: The indices of the input dimensions that the kernel operates on. lengthscale: the lengthscale(s) of the kernel ℓ. If a scalar or an array of length 1, the kernel is isotropic, meaning that the same lengthscale is used for all input dimensions. If an array with length > 1, the kernel is anisotropic, meaning that a different lengthscale is used for each input. variance: the variance of the kernel σ. smoothness: the smoothness parameter of the Matérn kernel. n_dims: The number of input dimensions. If `lengthscale` is an array, this argument is ignored. compute_engine: The computation engine that the kernel uses to compute the covariance matrix.
__init__
python
JaxGaussianProcesses/GPJax
gpjax/kernels/non_euclidean/graph.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/kernels/non_euclidean/graph.py
Apache-2.0
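A construction sketch for the graph kernel above, using the Laplacian of a three-node path graph. The `GraphKernel` export location is assumed and the parameter values are hypothetical.

import jax.numpy as jnp
from gpjax.kernels import GraphKernel  # assumed export location

# Laplacian L = D - A of the path graph 0 -- 1 -- 2.
laplacian = jnp.array(
    [
        [1.0, -1.0, 0.0],
        [-1.0, 2.0, -1.0],
        [0.0, -1.0, 1.0],
    ]
)

kernel = GraphKernel(laplacian=laplacian, smoothness=1.5)
print(kernel.num_vertex)  # 3, taken from the Laplacian's eigendecomposition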
def __post_init__(self): """ At initialisation we check that the posterior handlers and datasets are consistent (i.e. have the same tags), and then initialise the posteriors, optimizing them using the corresponding datasets. """ self.datasets = copy.copy( self.datasets ) # Ensure initial datasets passed in to DecisionMaker are not mutated from within if self.batch_size < 1: raise ValueError( f"Batch size must be greater than 0, got {self.batch_size}." ) # Check that posterior handlers and datasets are consistent if self.posterior_handlers.keys() != self.datasets.keys(): raise ValueError( "Posterior handlers and datasets must have the same keys. " f"Got posterior handlers keys {self.posterior_handlers.keys()} and " f"datasets keys {self.datasets.keys()}." ) # Initialize posteriors self.posteriors: Dict[str, AbstractPosterior] = {} for tag, posterior_handler in self.posterior_handlers.items(): self.posteriors[tag] = posterior_handler.get_posterior( self.datasets[tag], optimize=True, key=self.key )
At initialisation we check that the posterior handlers and datasets are consistent (i.e. have the same tags), and then initialise the posteriors, optimizing them using the corresponding datasets.
__post_init__
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/decision_maker.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/decision_maker.py
Apache-2.0
def ask(self, key: KeyArray) -> Float[Array, "B D"]: """ Get the point(s) to be queried next. Args: key (KeyArray): JAX PRNG key for controlling random state. Returns: Float[Array, "B D"]: Point(s) to be queried next """ raise NotImplementedError
Get the point(s) to be queried next. Args: key (KeyArray): JAX PRNG key for controlling random state. Returns: Float[Array, "B D"]: Point(s) to be queried next
ask
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/decision_maker.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/decision_maker.py
Apache-2.0
def tell(self, observation_datasets: Mapping[str, Dataset], key: KeyArray): """ Add newly observed data to datasets and update the corresponding posteriors. Args: observation_datasets: dictionary of datasets containing new observations. Tags are used to distinguish datasets, and correspond to tags in `posterior_handlers` and `self.datasets`. key: JAX PRNG key for controlling random state. """ if observation_datasets.keys() != self.datasets.keys(): raise ValueError( "Observation datasets and existing datasets must have the same keys. " f"Got observation datasets keys {observation_datasets.keys()} and " f"existing datasets keys {self.datasets.keys()}." ) for tag, observation_dataset in observation_datasets.items(): self.datasets[tag] += observation_dataset for tag, posterior_handler in self.posterior_handlers.items(): key, _ = jr.split(key) self.posteriors[tag] = posterior_handler.update_posterior( self.datasets[tag], self.posteriors[tag], optimize=True, key=key )
Add newly observed data to datasets and update the corresponding posteriors. Args: observation_datasets: dictionary of datasets containing new observations. Tags are used to distinguish datasets, and correspond to tags in `posterior_handlers` and `self.datasets`. key: JAX PRNG key for controlling random state.
tell
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/decision_maker.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/decision_maker.py
Apache-2.0
def run( self, n_steps: int, black_box_function_evaluator: FunctionEvaluator ) -> Mapping[str, Dataset]: """ Run the decision making loop continuously for `n_steps`. This is broken down into three main steps: 1. Call the `ask` method to get the point to be queried next. 2. Call the `black_box_function_evaluator` to evaluate the black box functions of interest at the point chosen to be queried. 3. Call the `tell` method to update the datasets and posteriors with the newly observed data. In addition to this, after the `ask` step, the functions in the `post_ask` list are executed, taking as arguments the decision maker and the point chosen to be queried next. Similarly, after the `tell` step, the functions in the `post_tell` list are executed, taking the decision maker as the sole argument. Args: n_steps (int): Number of steps to run the decision making loop for. black_box_function_evaluator (FunctionEvaluator): Function evaluator which evaluates the black box functions of interest at supplied points. Returns: Mapping[str, Dataset]: Dictionary of datasets containing the observations made throughout the decision making loop, as well as the initial data supplied when initialising the `DecisionMaker`. """ for _ in range(n_steps): query_point = self.ask(self.key) for post_ask_method in self.post_ask: post_ask_method(self, query_point) self.key, _ = jr.split(self.key) observation_datasets = black_box_function_evaluator(query_point) self.tell(observation_datasets, self.key) for post_tell_method in self.post_tell: post_tell_method(self) return self.datasets
Run the decision making loop continuously for `n_steps`. This is broken down into three main steps: 1. Call the `ask` method to get the point to be queried next. 2. Call the `black_box_function_evaluator` to evaluate the black box functions of interest at the point chosen to be queried. 3. Call the `tell` method to update the datasets and posteriors with the newly observed data. In addition to this, after the `ask` step, the functions in the `post_ask` list are executed, taking as arguments the decision maker and the point chosen to be queried next. Similarly, after the `tell` step, the functions in the `post_tell` list are executed, taking the decision maker as the sole argument. Args: n_steps (int): Number of steps to run the decision making loop for. black_box_function_evaluator (FunctionEvaluator): Function evaluator which evaluates the black box functions of interest at supplied points. Returns: Mapping[str, Dataset]: Dictionary of datasets containing the observations made throughout the decision making loop, as well as the initial data supplied when initialising the `DecisionMaker`.
run
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/decision_maker.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/decision_maker.py
Apache-2.0
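A hedged sketch of driving the loop above, wrapped in a helper so the heavy setup stays out of scope: `dm` stands for a fully configured `DecisionMaker`, and the `OBJECTIVE` tag and `build_function_evaluator` helper (documented below) are assumed to live in `gpjax.decision_making.utils`.

import jax.numpy as jnp
from gpjax.decision_making.utils import OBJECTIVE, build_function_evaluator  # assumed path

def run_optimisation(dm, n_steps=10):
    # Hypothetical black box: a 1D quadratic to be minimised.
    evaluator = build_function_evaluator(
        {OBJECTIVE: lambda x: jnp.sum((x - 0.5) ** 2, axis=-1, keepdims=True)}
    )
    # ask -> evaluate -> tell, repeated n_steps times; returns all datasets.
    return dm.run(n_steps=n_steps, black_box_function_evaluator=evaluator)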
def ask(self, key: KeyArray) -> Float[Array, "B D"]: """ Get updated utility function(s) and return the point(s) which maximises it/them. This method also stores the utility function(s) in `self.current_utility_functions` so that they can be accessed after the ask function has been called. This is useful for non-deterministic utility functions, which may differ between calls to `ask` due to the splitting of `self.key`. Note that in general `SinglePointUtilityFunction`s are only capable of generating one point to be queried at each iteration of the decision making loop (i.e. `self.batch_size` must be 1). However, Thompson sampling can be used in a batched setting by drawing a batch of different samples from the GP posterior. This is done by calling `build_utility_function` with different keys sequentially, and optimising each of these individual samples in sequence in order to obtain `self.batch_size` points to query next. Args: key (KeyArray): JAX PRNG key for controlling random state. Returns: Float[Array, "B D"]: Point(s) to be queried next. """ self.current_utility_functions = [] maximizers = [] # We currently only allow Thompson sampling to be run with batch size > 1. More # batched utility functions may be added in the future. if isinstance(self.utility_function_builder, ThompsonSampling) or ( (not isinstance(self.utility_function_builder, ThompsonSampling)) and (self.batch_size == 1) ): # Draw 'self.batch_size' Thompson samples and optimize each of them in order to # obtain 'self.batch_size' points to query next. for _ in range(self.batch_size): decision_function = ( self.utility_function_builder.build_utility_function( self.posteriors, self.datasets, key ) ) self.current_utility_functions.append(decision_function) _, key = jr.split(key) maximizer = self.utility_maximizer.maximize( decision_function, self.search_space, key ) maximizers.append(maximizer) _, key = jr.split(key) maximizers = jnp.concatenate(maximizers) return maximizers else: raise NotImplementedError( "Only Thompson sampling currently supports batch size > 1." )
Get updated utility function(s) and return the point(s) which maximises it/them. This method also stores the utility function(s) in `self.current_utility_functions` so that they can be accessed after the ask function has been called. This is useful for non-deterministic utility functions, which may differ between calls to `ask` due to the splitting of `self.key`. Note that in general `SinglePointUtilityFunction`s are only capable of generating one point to be queried at each iteration of the decision making loop (i.e. `self.batch_size` must be 1). However, Thompson sampling can be used in a batched setting by drawing a batch of different samples from the GP posterior. This is done by calling `build_utility_function` with different keys sequentially, and optimising each of these individual samples in sequence in order to obtain `self.batch_size` points to query next. Args: key (KeyArray): JAX PRNG key for controlling random state. Returns: Float[Array, "B D"]: Point(s) to be queried next.
ask
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/decision_maker.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/decision_maker.py
Apache-2.0
def build_function_evaluator( functions: Dict[str, Callable[[Float[Array, "N D"]], Float[Array, "N 1"]]], ) -> FunctionEvaluator: """ Takes a dictionary of functions and returns a `FunctionEvaluator` which can be used to evaluate each of the functions at a supplied set of points and return a dictionary of datasets storing the evaluated points. """ return lambda x: {tag: Dataset(x, f(x)) for tag, f in functions.items()}
Takes a dictionary of functions and returns a `FunctionEvaluator` which can be used to evaluate each of the functions at a supplied set of points and return a dictionary of datasets storing the evaluated points.
build_function_evaluator
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utils.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utils.py
Apache-2.0
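A quick usage sketch for `build_function_evaluator` as defined above; the tags and functions are hypothetical.

import jax.numpy as jnp

evaluator = build_function_evaluator(
    {
        "OBJECTIVE": lambda x: jnp.sin(x),
        "CONSTRAINT": lambda x: x**2,
    }
)
datasets = evaluator(jnp.array([[0.0], [1.0]]))
# datasets["OBJECTIVE"] is a Dataset with X = the query points and y = sin(X).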
def get_best_latent_observation_val( posterior: AbstractPosterior, dataset: Dataset ) -> Float[Array, ""]: """ Takes a posterior and dataset and returns the best (latent) function value in the dataset, corresponding to the minimum of the posterior mean value evaluated at locations in the dataset. In the noiseless case, this corresponds to the minimum value in the dataset. """ return jnp.min(posterior(dataset.X, dataset).mean())
Takes a posterior and dataset and returns the best (latent) function value in the dataset, corresponding to the minimum of the posterior mean value evaluated at locations in the dataset. In the noiseless case, this corresponds to the minimum value in the dataset.
get_best_latent_observation_val
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utils.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utils.py
Apache-2.0
def get_posterior( self, dataset: Dataset, optimize: bool, key: Optional[KeyArray] = None ) -> AbstractPosterior: """ Initialise (and optionally optimize) a posterior using the given dataset. Args: dataset: dataset to get posterior for. optimize: whether to optimize the posterior hyperparameters. key: a JAX PRNG key which is used for optimizing the posterior hyperparameters. Returns: Posterior for the given dataset. """ posterior = self.prior * self.likelihood_builder(dataset.n) if optimize: if key is None: raise ValueError( "A key must be provided in order to optimize the posterior." ) posterior = self._optimize_posterior(posterior, dataset, key) return posterior
Initialise (and optionally optimize) a posterior using the given dataset. Args: dataset: dataset to get posterior for. optimize: whether to optimize the posterior hyperparameters. key: a JAX PRNG key which is used for optimizing the posterior hyperparameters. Returns: Posterior for the given dataset.
get_posterior
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/posterior_handler.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/posterior_handler.py
Apache-2.0
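A usage sketch for `get_posterior`, wrapped in a helper since building a `PosteriorHandler` (prior, likelihood builder, optimizer) is out of scope here; `handler` and `data` are assumed to be configured elsewhere.

import jax.random as jr

def make_posteriors(handler, data):
    # Optimizing hyperparameters requires a PRNG key ...
    optimized = handler.get_posterior(data, optimize=True, key=jr.key(0))
    # ... while an unoptimized posterior needs none.
    cold = handler.get_posterior(data, optimize=False)
    return optimized, cold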
def update_posterior( self, dataset: Dataset, previous_posterior: AbstractPosterior, optimize: bool, key: Optional[KeyArray] = None, ) -> AbstractPosterior: """ Update the given posterior with the given dataset. This needs to be done when the number of datapoints in the (training) dataset of the posterior changes, as the `AbstractLikelihood` class requires the number of datapoints to be specified. Hyperparameters may or may not be optimized, depending on the value of the `optimize` parameter. Note that the updated posterior will be initialised with the same prior hyperparameters as the previous posterior, but the likelihood will be re-initialised with the new number of datapoints, and hyperparameters set as in the `likelihood_builder` function. Args: dataset: dataset to get posterior for. previous_posterior: posterior being updated. This is supplied as one may wish to simply increase the number of datapoints in the likelihood, without optimizing the posterior hyperparameters, in which case the previous posterior can be used to obtain the previously set prior hyperparameters. optimize: whether to optimize the posterior hyperparameters. key: A JAX PRNG key which is used for optimizing the posterior hyperparameters. """ posterior = previous_posterior.prior * self.likelihood_builder(dataset.n) if optimize: if key is None: raise ValueError( "A key must be provided in order to optimize the posterior." ) posterior = self._optimize_posterior(posterior, dataset, key) return posterior
Update the given posterior with the given dataset. This needs to be done when the number of datapoints in the (training) dataset of the posterior changes, as the `AbstractLikelihood` class requires the number of datapoints to be specified. Hyperparameters may or may not be optimized, depending on the value of the `optimize` parameter. Note that the updated posterior will be initialised with the same prior hyperparameters as the previous posterior, but the likelihood will be re-initialised with the new number of datapoints, and hyperparameters set as in the `likelihood_builder` function. Args: dataset: dataset to get posterior for. previous_posterior: posterior being updated. This is supplied as one may wish to simply increase the number of datapoints in the likelihood, without optimizing the posterior hyperparameters, in which case the previous posterior can be used to obtain the previously set prior hyperparameters. optimize: whether to optimize the posterior hyperparameters. key: A JAX PRNG key which is used for optimizing the posterior hyperparameters.
update_posterior
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/posterior_handler.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/posterior_handler.py
Apache-2.0
def _optimize_posterior( self, posterior: AbstractPosterior, dataset: Dataset, key: KeyArray ) -> AbstractPosterior: """ Takes a posterior and corresponding dataset and optimizes the posterior using the GPJax `fit` method. Args: posterior: Posterior being optimized. dataset: Dataset used for optimizing posterior. key: A JAX PRNG key for generating random numbers. Returns: Optimized posterior. """ opt_posterior, _ = gpx.fit( model=posterior, objective=self.optimization_objective, train_data=dataset, optim=self.optimizer, num_iters=self.num_optimization_iters, safe=True, key=key, verbose=False, ) return opt_posterior
Takes a posterior and corresponding dataset and optimizes the posterior using the GPJax `fit` method. Args: posterior: Posterior being optimized. dataset: Dataset used for optimizing posterior. key: A JAX PRNG key for generating random numbers. Returns: Optimized posterior.
_optimize_posterior
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/posterior_handler.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/posterior_handler.py
Apache-2.0
def sample(self, num_points: int, key: KeyArray) -> Float[Array, "N D"]: """Sample points from the search space. Args: num_points (int): Number of points to be sampled from the search space. key (KeyArray): JAX PRNG key. Returns: Float[Array, "N D"]: `num_points` points sampled from the search space. """ raise NotImplementedError
Sample points from the search space. Args: num_points (int): Number of points to be sampled from the search space. key (KeyArray): JAX PRNG key. Returns: Float[Array, "N D"]: `num_points` points sampled from the search space.
sample
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/search_space.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/search_space.py
Apache-2.0
def dimensionality(self) -> int: """Dimensionality of the search space. Returns: int: Dimensionality of the search space. """ raise NotImplementedError
Dimensionality of the search space. Returns: int: Dimensionality of the search space.
dimensionality
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/search_space.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/search_space.py
Apache-2.0
def sample(self, num_points: int, key: KeyArray) -> Float[Array, "N D"]: """Sample points from the search space using a Halton sequence. Args: num_points (int): Number of points to be sampled from the search space. key (KeyArray): JAX PRNG key. Returns: Float[Array, "N D"]: `num_points` points sampled using the Halton sequence from the search space. """ if num_points <= 0: raise ValueError("Number of points must be greater than 0.") initial_sample = tfp.mcmc.sample_halton_sequence( dim=self.dimensionality, num_results=num_points, seed=key ) return ( self.lower_bounds + (self.upper_bounds - self.lower_bounds) * initial_sample )
Sample points from the search space using a Halton sequence. Args: num_points (int): Number of points to be sampled from the search space. key (KeyArray): JAX PRNG key. Returns: Float[Array, "N D"]: `num_points` points sampled using the Halton sequence from the search space.
sample
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/search_space.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/search_space.py
Apache-2.0
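The final line of `sample` rescales unit-cube Halton draws into the box; a standalone arithmetic sketch with made-up bounds:

import jax.numpy as jnp

lower_bounds = jnp.array([0.0, -1.0])
upper_bounds = jnp.array([2.0, 1.0])
unit_sample = jnp.array([[0.5, 0.25]])  # a Halton point in [0, 1]^2

scaled = lower_bounds + (upper_bounds - lower_bounds) * unit_sample
# -> [[1.0, -0.5]]: each coordinate is mapped affinely into [lower_i, upper_i]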
def _get_discrete_maximizer( query_points: Float[Array, "N D"], utility_function: SinglePointUtilityFunction ) -> Float[Array, "1 D"]: """Get the point which maximises the utility function evaluated at a given set of points. Args: query_points: set of points at which to evaluate the utility function, as an array of shape `[n_points, n_dims]`. utility_function: the single point utility function to be evaluated at `query_points`. Returns: Array of shape `[1, n_dims]` representing the point which maximises the utility function. """ utility_function_values = utility_function(query_points) max_utility_function_value_idx = jnp.argmax( utility_function_values, axis=0, keepdims=True ) best_sample_point = jnp.take_along_axis( query_points, max_utility_function_value_idx, axis=0 ) return best_sample_point
Get the point which maximises the utility function evaluated at a given set of points. Args: query_points: set of points at which to evaluate the utility function, as an array of shape `[n_points, n_dims]`. utility_function: the single point utility function to be evaluated at `query_points`. Returns: Array of shape `[1, n_dims]` representing the point which maximises the utility function.
_get_discrete_maximizer
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utility_maximizer.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utility_maximizer.py
Apache-2.0
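The indexing in `_get_discrete_maximizer` reduces to an `argmax` followed by a row gather; a self-contained sketch with a toy utility:

import jax.numpy as jnp

query_points = jnp.array([[0.0], [0.5], [1.0]])
utility = lambda x: -((x - 0.4) ** 2)  # toy single-point utility, shape [N, 1]

values = utility(query_points)                         # shape [3, 1]
idx = jnp.argmax(values, axis=0, keepdims=True)        # [[1]]
best = jnp.take_along_axis(query_points, idx, axis=0)  # [[0.5]], the maximiser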
def maximize( self, utility_function: SinglePointUtilityFunction, search_space: AbstractSearchSpace, key: KeyArray, ) -> Float[Array, "1 D"]: """Maximize the given utility function over the search space provided. Args: utility_function: utility function to be maximized. search_space: search space over which to maximize the utility function. key: JAX PRNG key. Returns: Float[Array, "1 D"]: Point at which the utility function is maximized. """ raise NotImplementedError
Maximize the given utility function over the search space provided. Args: utility_function: utility function to be maximized. search_space: search space over which to maximize the utility function. key: JAX PRNG key. Returns: Float[Array, "1 D"]: Point at which the utility function is maximized.
maximize
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utility_maximizer.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utility_maximizer.py
Apache-2.0
def _scalar_utility_function(x: Float[Array, "1 D"]) -> ScalarFloat: """ The Jaxopt minimizer requires a function which returns a scalar. It calls the utility function with one point at a time, so the utility function returns an array of shape [1, 1], which we index to return a scalar. Note that we also return the negative of the utility function - this is because utility functions should be *maximized* but the Jaxopt minimizer minimizes functions. """ return -utility_function(x)[0][0]
The Jaxopt minimizer requires a function which returns a scalar. It calls the utility function with one point at a time, so the utility function returns an array of shape [1, 1], which we index to return a scalar. Note that we also return the negative of the utility function - this is because utility functions should be *maximized* but the Jaxopt minimizer minimizes functions.
maximize._scalar_utility_function
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utility_maximizer.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utility_maximizer.py
Apache-2.0
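A standalone sketch of the sign-flip wrapper above: the minimiser receives the negated utility, and the [1, 1] output is indexed down to a scalar.

import jax.numpy as jnp

utility_function = lambda x: -((x - 0.3) ** 2)  # returns shape [1, 1] for one point

def scalar_utility(x):
    # Negate so that minimising this function maximises the utility.
    return -utility_function(x)[0][0]

print(scalar_utility(jnp.array([[0.3]])))  # 0.0 at the utility's maximiser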
def generate_dataset( self, num_points: int, key: KeyArray, obs_stddev: float = 0.0 ) -> Dataset: """ Generate a toy dataset from the test function. Args: num_points (int): Number of points to sample. key (KeyArray): JAX PRNG key. obs_stddev (float): (Optional) standard deviation of Gaussian distributed noise added to observations. Returns: Dataset: Dataset of points sampled from the test function. """ X = self.search_space.sample(num_points=num_points, key=key) gaussian_noise = tfp.distributions.Normal( jnp.zeros(num_points), obs_stddev * jnp.ones(num_points) ) y = self.evaluate(X) + jnp.transpose( gaussian_noise.sample(sample_shape=[1], seed=key) ) return Dataset(X=X, y=y)
Generate a toy dataset from the test function. Args: num_points (int): Number of points to sample. key (KeyArray): JAX PRNG key. obs_stddev (float): (Optional) standard deviation of Gaussian distributed noise added to observations. Returns: Dataset: Dataset of points sampled from the test function.
generate_dataset
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/test_functions/continuous_functions.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/test_functions/continuous_functions.py
Apache-2.0
def generate_test_points( self, num_points: int, key: KeyArray ) -> Float[Array, "N D"]: """ Generate test points from the search space of the test function. Args: num_points (int): Number of points to sample. key (KeyArray): JAX PRNG key. Returns: Float[Array, 'N D']: Test points sampled from the search space. """ return self.search_space.sample(num_points=num_points, key=key)
Generate test points from the search space of the test function. Args: num_points (int): Number of points to sample. key (KeyArray): JAX PRNG key. Returns: Float[Array, 'N D']: Test points sampled from the search space.
generate_test_points
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/test_functions/continuous_functions.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/test_functions/continuous_functions.py
Apache-2.0
def evaluate(self, x: Float[Array, "N D"]) -> Float[Array, "N 1"]: """ Evaluate the test function at a set of points. Args: x (Float[Array, 'N D']): Points to evaluate the test function at. Returns: Float[Array, 'N 1']: Values of the test function at the points. """ raise NotImplementedError
Evaluate the test function at a set of points. Args: x (Float[Array, 'N D']): Points to evaluate the test function at. Returns: Float[Array, 'N 1']: Values of the test function at the points.
evaluate
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/test_functions/continuous_functions.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/test_functions/continuous_functions.py
Apache-2.0
def generate_dataset(self, num_points: int, key: KeyArray) -> Dataset: """ Generate a toy dataset from the test function. Args: num_points (int): Number of points to sample. key (KeyArray): JAX PRNG key. Returns: Dataset: Dataset of points sampled from the test function. """ X = self.search_space.sample(num_points=num_points, key=key) y = self.evaluate(X) return Dataset(X=X, y=y)
Generate a toy dataset from the test function. Args: num_points (int): Number of points to sample. key (KeyArray): JAX PRNG key. Returns: Dataset: Dataset of points sampled from the test function.
generate_dataset
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/test_functions/non_conjugate_functions.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/test_functions/non_conjugate_functions.py
Apache-2.0
def evaluate(self, x: Float[Array, "N 1"]) -> Int[Array, "N 1"]: """ Evaluate the test function at a set of points. Function taken from https://docs.jaxgaussianprocesses.com/_examples/poisson/#dataset. Args: x (Float[Array, 'N 1']): Points to evaluate the test function at. Returns: Int[Array, 'N 1']: Values of the test function at the points. """ key = jr.key(42) f = lambda x: 2.0 * jnp.sin(3 * x) + 0.5 * x return jr.poisson(key, jnp.exp(f(x)))
Evaluate the test function at a set of points. Function taken from https://docs.jaxgaussianprocesses.com/_examples/poisson/#dataset. Args: x (Float[Array, 'N 1']): Points to evaluate the test function at. Returns: Int[Array, 'N 1']: Values of the test function at the points.
evaluate
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/test_functions/non_conjugate_functions.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/test_functions/non_conjugate_functions.py
Apache-2.0
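A standalone reconstruction of the Poisson test function above: the latent log-rate is f(x) = 2 sin(3x) + 0.5x and counts are drawn with rate exp(f(x)).

import jax.numpy as jnp
import jax.random as jr

key = jr.key(42)
x = jnp.linspace(-2.0, 2.0, 5).reshape(-1, 1)

f = lambda x: 2.0 * jnp.sin(3 * x) + 0.5 * x
counts = jr.poisson(key, jnp.exp(f(x)))  # integer counts, shape [5, 1]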
def build_utility_function( self, posteriors: Mapping[str, ConjugatePosterior], datasets: Mapping[str, Dataset], key: KeyArray, ) -> SinglePointUtilityFunction: """ Constructs the probability of improvement utility function using the predictive posterior of the objective function. Args: posteriors (Mapping[str, AbstractPosterior]): Dictionary of posteriors to be used to form the utility function. One of the posteriors must correspond to the `OBJECTIVE` key, as we use the objective posterior's predictive distribution to form the utility function. datasets (Mapping[str, Dataset]): Dictionary of datasets which may be used to form the utility function. Keys in `datasets` should correspond to keys in `posteriors`. One of the datasets must correspond to the `OBJECTIVE` key. key (KeyArray): JAX PRNG key used for random number generation. Since the probability of improvement is computed deterministically from the predictive posterior, the key is not used. Returns: SinglePointUtilityFunction: the probability of improvement utility function. """ self.check_objective_present(posteriors, datasets) objective_posterior = posteriors[OBJECTIVE] if not isinstance(objective_posterior, ConjugatePosterior): raise ValueError( "Objective posterior must be a ConjugatePosterior to compute the Probability of Improvement using a Gaussian CDF." ) objective_dataset = datasets[OBJECTIVE] if ( objective_dataset.X is None or objective_dataset.n == 0 or objective_dataset.y is None ): raise ValueError( "Objective dataset must be non-empty to compute the " "Probability of Improvement (since we need a " "`best_y` value)." ) def probability_of_improvement(x_test: Num[Array, "N D"]): best_y = get_best_latent_observation_val( objective_posterior, objective_dataset ) predictive_dist = objective_posterior.predict(x_test, objective_dataset) normal_dist = tfp.distributions.Normal( loc=predictive_dist.mean(), scale=predictive_dist.stddev(), ) return normal_dist.cdf(best_y).reshape(-1, 1) return probability_of_improvement
Constructs the probability of improvement utility function using the predictive posterior of the objective function. Args: posteriors (Mapping[str, AbstractPosterior]): Dictionary of posteriors to be used to form the utility function. One of the posteriors must correspond to the `OBJECTIVE` key, as we use the objective posterior's predictive distribution to form the utility function. datasets (Mapping[str, Dataset]): Dictionary of datasets which may be used to form the utility function. Keys in `datasets` should correspond to keys in `posteriors`. One of the datasets must correspond to the `OBJECTIVE` key. key (KeyArray): JAX PRNG key used for random number generation. Since the probability of improvement is computed deterministically from the predictive posterior, the key is not used. Returns: SinglePointUtilityFunction: the probability of improvement utility function.
build_utility_function
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utility_functions/probability_of_improvement.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utility_functions/probability_of_improvement.py
Apache-2.0
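In the minimisation convention used above, the inner function evaluates PI(x) = Φ((best_y − μ(x)) / σ(x)), the posterior probability of landing below the incumbent. A standalone numeric sketch with hypothetical posterior moments:

import jax.numpy as jnp
from tensorflow_probability.substrates import jax as tfp

mean = jnp.array([0.0, 1.0])  # hypothetical posterior means at two test points
stddev = jnp.array([1.0, 0.5])
best_y = 0.5                  # lowest posterior mean observed so far

pi = tfp.distributions.Normal(loc=mean, scale=stddev).cdf(best_y).reshape(-1, 1)
# Larger where the posterior places more mass below best_y.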
def build_utility_function( self, posteriors: Mapping[str, ConjugatePosterior], datasets: Mapping[str, Dataset], key: KeyArray, ) -> SinglePointUtilityFunction: """ Draw an approximate sample from the posterior of the objective model and return the *negative* of this sample as a utility function, as utility functions are *maximised*. Args: posteriors (Mapping[str, ConjugatePosterior]): Dictionary of posteriors to be used to form the utility function. One of the posteriors must correspond to the `OBJECTIVE` key, as we sample from the objective posterior to form the utility function. datasets (Mapping[str, Dataset]): Dictionary of datasets which may be used to form the utility function. Keys in `datasets` should correspond to keys in `posteriors`. One of the datasets must correspond to the `OBJECTIVE` key. key (KeyArray): JAX PRNG key used for random number generation. This can be changed to draw different samples. Returns: SinglePointUtilityFunction: An approximate sample from the objective model posterior to be *maximised* in order to decide which point to query next. """ self.check_objective_present(posteriors, datasets) objective_posterior = posteriors[OBJECTIVE] if not isinstance(objective_posterior, ConjugatePosterior): raise ValueError( "Objective posterior must be a ConjugatePosterior to draw an approximate sample." ) objective_dataset = datasets[OBJECTIVE] thompson_sample = objective_posterior.sample_approx( num_samples=1, train_data=objective_dataset, key=key, num_features=self.num_features, ) return lambda x: -1.0 * thompson_sample(x) # Utility functions are *maximised*
Draw an approximate sample from the posterior of the objective model and return the *negative* of this sample as a utility function, as utility functions are *maximised*. Args: posteriors (Mapping[str, ConjugatePosterior]): Dictionary of posteriors to be used to form the utility function. One of the posteriors must correspond to the `OBJECTIVE` key, as we sample from the objective posterior to form the utility function. datasets (Mapping[str, Dataset]): Dictionary of datasets which may be used to form the utility function. Keys in `datasets` should correspond to keys in `posteriors`. One of the datasets must correspond to the `OBJECTIVE` key. key (KeyArray): JAX PRNG key used for random number generation. This can be changed to draw different samples. Returns: SinglePointUtilityFunction: An approximate sample from the objective model posterior to be *maximised* in order to decide which point to query next.
build_utility_function
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utility_functions/thompson_sampling.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utility_functions/thompson_sampling.py
Apache-2.0
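A hedged sketch of using the builder above, wrapped in a helper since fitting a posterior is out of scope; the `OBJECTIVE` constant's location is assumed.

import jax.random as jr
from gpjax.decision_making.utils import OBJECTIVE  # assumed path

def thompson_utility(builder, posterior, dataset, test_points):
    # `builder` is a ThompsonSampling instance; `posterior` is a fitted
    # ConjugatePosterior and `dataset` its training data (assumed setup).
    utility = builder.build_utility_function(
        posteriors={OBJECTIVE: posterior},
        datasets={OBJECTIVE: dataset},
        key=jr.key(0),
    )
    return utility(test_points)  # negated sample values, shape [N, 1]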
def check_objective_present( self, posteriors: Mapping[str, AbstractPosterior], datasets: Mapping[str, Dataset], ) -> None: """ Check that the objective posterior and dataset are present in the posteriors and datasets. Args: posteriors: dictionary of posteriors to be used to form the utility function. datasets: dictionary of datasets which may be used to form the utility function. Raises: ValueError: If the objective posterior or dataset are not present in the posteriors or datasets. """ if OBJECTIVE not in posteriors.keys(): raise ValueError("Objective posterior not found in posteriors") elif OBJECTIVE not in datasets.keys(): raise ValueError("Objective dataset not found in datasets")
Check that the objective posterior and dataset are present in the posteriors and datasets. Args: posteriors: dictionary of posteriors to be used to form the utility function. datasets: dictionary of datasets which may be used to form the utility function. Raises: ValueError: If the objective posterior or dataset are not present in the posteriors or datasets.
check_objective_present
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utility_functions/base.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utility_functions/base.py
Apache-2.0
def build_utility_function( self, posteriors: Mapping[str, AbstractPosterior], datasets: Mapping[str, Dataset], key: KeyArray, ) -> SinglePointUtilityFunction: """ Build a `UtilityFunction` from a set of posteriors and datasets. Args: posteriors: dictionary of posteriors to be used to form the utility function. datasets: dictionary of datasets which may be used to form the utility function. key: JAX PRNG key used for random number generation. Returns: SinglePointUtilityFunction: Utility function to be *maximised* in order to decide which point to query next. """ raise NotImplementedError
Build a `UtilityFunction` from a set of posteriors and datasets. Args: posteriors: dictionary of posteriors to be used to form the utility function. datasets: dictionary of datasets which may be used to form the utility function. key: JAX PRNG key used for random number generation. Returns: SinglePointUtilityFunction: Utility function to be *maximised* in order to decide which point to query next.
build_utility_function
python
JaxGaussianProcesses/GPJax
gpjax/decision_making/utility_functions/base.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/gpjax/decision_making/utility_functions/base.py
Apache-2.0
def process_file(file: Path, out_file: Path | None = None, execute: bool = False): """Converts a python file to markdown using jupytext and nbconvert.""" out_dir = out_file.parent command = f"cd {out_dir.as_posix()} && " out_file = out_file.relative_to(out_dir).as_posix() if execute: command += f"jupytext --to ipynb {file} --output - " command += ( f"| jupyter nbconvert --to markdown --execute --stdin --output {out_file}" ) else: command += f"jupytext --to markdown {file} --output {out_file}" subprocess.run(command, shell=True, check=False)
Converts a python file to markdown using jupytext and nbconvert.
process_file
python
JaxGaussianProcesses/GPJax
docs/scripts/gen_examples.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/docs/scripts/gen_examples.py
Apache-2.0
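For concreteness, a call sketch for `process_file` as defined above, with hypothetical paths, plus the rough shape of the shell command it assembles when `execute=True`:

from pathlib import Path

process_file(
    file=Path("examples/regression.py"),
    out_file=Path("docs/_examples/regression.md"),
    execute=True,
)
# Roughly runs: cd docs/_examples && jupytext --to ipynb examples/regression.py
#   --output - | jupyter nbconvert --to markdown --execute --stdin --output regression.md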
def is_modified(file: Path, out_file: Path): """Check if the output file is older than the input file.""" return out_file.exists() and out_file.stat().st_mtime < file.stat().st_mtime
Check if the output file is older than the input file.
is_modified
python
JaxGaussianProcesses/GPJax
docs/scripts/gen_examples.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/docs/scripts/gen_examples.py
Apache-2.0
def confidence_ellipse(x, y, ax, n_std=3.0, facecolor="none", **kwargs): """ Create a plot of the covariance confidence ellipse of *x* and *y*. Parameters ---------- x, y : array-like, shape (n, ) Input data. ax : matplotlib.axes.Axes The axes object to draw the ellipse into. n_std : float The number of standard deviations to determine the ellipse's radii. **kwargs Forwarded to `~matplotlib.patches.Ellipse` Returns ------- matplotlib.patches.Ellipse """ x = np.array(x) y = np.array(y) if x.size != y.size: raise ValueError("x and y must be the same size") cov = np.cov(x, y) pearson = cov[0, 1] / np.sqrt(cov[0, 0] * cov[1, 1]) # Using a special case to obtain the eigenvalues of this # two-dimensional dataset. ell_radius_x = np.sqrt(1 + pearson) ell_radius_y = np.sqrt(1 - pearson) ellipse = Ellipse( (0, 0), width=ell_radius_x * 2, height=ell_radius_y * 2, facecolor=facecolor, **kwargs, ) # Calculating the standard deviation of x from # the square root of the variance and multiplying # by the given number of standard deviations. scale_x = np.sqrt(cov[0, 0]) * n_std mean_x = np.mean(x) # calculating the standard deviation of y ... scale_y = np.sqrt(cov[1, 1]) * n_std mean_y = np.mean(y) transf = ( transforms.Affine2D() .rotate_deg(45) .scale(scale_x, scale_y) .translate(mean_x, mean_y) ) ellipse.set_transform(transf + ax.transData) return ax.add_patch(ellipse)
Create a plot of the covariance confidence ellipse of *x* and *y*. Parameters ---------- x, y : array-like, shape (n, ) Input data. ax : matplotlib.axes.Axes The axes object to draw the ellipse into. n_std : float The number of standard deviations to determine the ellipse's radii. **kwargs Forwarded to `~matplotlib.patches.Ellipse` Returns ------- matplotlib.patches.Ellipse
confidence_ellipse
python
JaxGaussianProcesses/GPJax
examples/utils.py
https://github.com/JaxGaussianProcesses/GPJax/blob/master/examples/utils.py
Apache-2.0
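A usage sketch for `confidence_ellipse` as defined above, with correlated Gaussian draws; everything here is standard NumPy/Matplotlib.

import matplotlib.pyplot as plt
import numpy as np

rng = np.random.default_rng(0)
cov = np.array([[1.0, 0.8], [0.8, 1.0]])
x, y = rng.multivariate_normal([0.0, 0.0], cov, size=500).T

fig, ax = plt.subplots()
ax.scatter(x, y, s=4)
confidence_ellipse(x, y, ax, n_std=2.0, edgecolor="red")
plt.show()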
def pytest_addoption(parser): """Define pytest command-line option""" group = parser.getgroup("jupyter_book") group.addoption( "--jb-tempdir", dest="jb_tempdir", default=None, help="Specify a directory in which to create tempdirs", )
Define pytest command-line option
pytest_addoption
python
jupyter-book/jupyter-book
conftest.py
https://github.com/jupyter-book/jupyter-book/blob/master/conftest.py
BSD-3-Clause
def test_myst_init(cli: CliRunner, temp_with_override): """Test adding myst metadata to text files.""" path = temp_with_override.joinpath("tmp.md").absolute() text = "TEST" with open(path, "w") as ff: ff.write(text) init_myst_file(path, kernel="python3") # Make sure it runs properly. Default kernel should be python3 new_text = path.read_text(encoding="utf8") assert "format_name: myst" in new_text assert "TEST" == new_text.strip().split("\n")[-1] assert "name: python3" in new_text # Make sure the CLI works too with warnings.catch_warnings(): warnings.simplefilter("error") result = cli.invoke(myst_init, f"{path} --kernel python3".split()) # old versions of jupytext give: UserWarning: myst-parse failed unexpectedly assert result.exit_code == 0 # Non-existent kernel with pytest.raises(Exception) as err: init_myst_file(path, kernel="blah") assert "Did not find kernel: blah" in str(err) # Missing file with pytest.raises(Exception) as err: init_myst_file(path.joinpath("MISSING"), kernel="python3") assert "Markdown file not found:" in str(err)
Test adding myst metadata to text files.
test_myst_init
python
jupyter-book/jupyter-book
tests/test_utils.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/test_utils.py
BSD-3-Clause
def test_toc_startwithlist(cli: CliRunner, temp_with_override, file_regression): """Testing a basic _toc.yml for tableofcontents directive""" path_output = temp_with_override.joinpath("mybook").absolute() # Regular TOC should work p_toc = path_books.joinpath("toc") path_toc = p_toc.joinpath("_toc_startwithlist.yml") result = cli.invoke( build, [ p_toc.as_posix(), "--path-output", path_output.as_posix(), "--toc", path_toc.as_posix(), "-W", ], ) # print(result.output) assert result.exit_code == 0 path_toc_directive = path_output.joinpath("_build", "html", "index.html") # print(path_toc_directive.read_text(encoding="utf8")) # get the tableofcontents markup soup = BeautifulSoup(path_toc_directive.read_text(encoding="utf8"), "html.parser") toc = soup.find_all("div", class_="toctree-wrapper") assert len(toc) == 1 file_regression.check(toc[0].prettify(), extension=".html", encoding="utf8")
Testing a basic _toc.yml for tableofcontents directive
test_toc_startwithlist
python
jupyter-book/jupyter-book
tests/test_tocdirective.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/test_tocdirective.py
BSD-3-Clause
def test_toc_parts(cli: CliRunner, temp_with_override, file_regression): """Testing `header` in _toc.yml""" path_input = temp_with_override.joinpath("mybook_input").absolute() path_output = temp_with_override.joinpath("mybook").absolute() # Regular TOC should work p_toc = path_books.joinpath("toc") shutil.copytree(p_toc, path_input) # setup correct files (path_input / "subfolder" / "asubpage.md").unlink() for i in range(4): (path_input / "subfolder" / f"asubpage{i+1}.md").write_text( f"# A subpage {i+1}\n", encoding="utf8" ) path_toc = path_input.joinpath("_toc_parts.yml") result = cli.invoke( build, [ path_input.as_posix(), "--path-output", path_output.as_posix(), "--toc", path_toc.as_posix(), "-W", ], ) # print(result.output) assert result.exit_code == 0 path_index = path_output.joinpath("_build", "html", "index.html") # get the tableofcontents markup soup = BeautifulSoup(path_index.read_text(encoding="utf8"), "html.parser") toc = soup.find_all("div", class_="toctree-wrapper") assert len(toc) == 2 file_regression.check( toc[0].prettify(), basename="test_toc_parts_directive", extension=f"{SPHINX_VERSION}.html", encoding="utf8", ) # check the sidebar structure is correct file_regression.check( soup.select(".bd-links")[0].prettify(), basename="test_toc_parts_sidebar", extension=f"{SPHINX_VERSION}.html", encoding="utf8", )
Testing `header` in _toc.yml
test_toc_parts
python
jupyter-book/jupyter-book
tests/test_tocdirective.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/test_tocdirective.py
BSD-3-Clause
def test_toc_urllink(cli: CliRunner, temp_with_override, file_regression): """Testing with additional `url` link key in _toc.yml""" path_output = temp_with_override.joinpath("mybook").absolute() # Regular TOC should work p_toc = path_books.joinpath("toc") path_toc = p_toc.joinpath("_toc_urllink.yml") result = cli.invoke( build, [ p_toc.as_posix(), "--path-output", path_output.as_posix(), "--toc", path_toc.as_posix(), ], ) print(result.output) assert result.exit_code == 0 path_toc_directive = path_output.joinpath("_build", "html", "index.html") # get the tableofcontents markup soup = BeautifulSoup(path_toc_directive.read_text(encoding="utf8"), "html.parser") toc = soup.find_all("div", class_="toctree-wrapper") assert len(toc) == 1 file_regression.check(toc[0].prettify(), extension=".html", encoding="utf8")
Testing with additional `url` link key in _toc.yml
test_toc_urllink
python
jupyter-book/jupyter-book
tests/test_tocdirective.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/test_tocdirective.py
BSD-3-Clause
def test_toc_latex_parts(cli: CliRunner, temp_with_override, file_regression): """Testing LaTeX output""" path_input = temp_with_override.joinpath("mybook_input").absolute() path_output = temp_with_override.joinpath("mybook").absolute() # Regular TOC should work p_toc = path_books.joinpath("toc") shutil.copytree(p_toc, path_input) # setup correct files (path_input / "subfolder" / "asubpage.md").unlink() for i in range(4): (path_input / "subfolder" / f"asubpage{i+1}.md").write_text( f"# A subpage {i+1}\n", encoding="utf8" ) path_toc = path_input.joinpath("_toc_parts.yml") result = cli.invoke( build, [ path_input.as_posix(), "--path-output", path_output.as_posix(), "--toc", path_toc.as_posix(), "--builder", "pdflatex", "-W", ], ) assert result.exit_code == 0, result.output # reading the tex file path_output_file = path_output.joinpath("_build", "latex", "toc-tests.tex") file_content = TexSoup(path_output_file.read_text()) # checking the table of contents which is a list with '\begin{itemize}' itemizes = file_content.find_all("itemize") file_regression.check( str(itemizes[0]) + "\n" + str(itemizes[1]), extension=".tex", encoding="utf8" )
Testing LaTeX output
test_toc_latex_parts
python
jupyter-book/jupyter-book
tests/test_tocdirective.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/test_tocdirective.py
BSD-3-Clause
def build_resources(temp_with_override): """Copies ./books and ./books/tocs to a temporary directory and yields the paths as `pathlib.Path` objects. """ src = Path(__file__).parent.resolve().joinpath("books").absolute() dst = temp_with_override / "books" shutil.copytree(src, dst) yield Path(dst), Path(dst) / "toc" shutil.rmtree(dst)
Copies ./books and ./books/tocs to a temporary directory and yields the paths as `pathlib.Path` objects.
build_resources
python
jupyter-book/jupyter-book
tests/conftest.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/conftest.py
BSD-3-Clause
def pages(temp_with_override): """Copies ./pages to a temporary directory and yields the path as a `pathlib.Path` object. """ src = Path(__file__).parent.joinpath("pages").absolute() dst = temp_with_override / "pages" shutil.copytree(src, dst) yield Path(dst) shutil.rmtree(dst)
Copies ./pages to a temporary directory and yields the path as a `pathlib.Path` object.
pages
python
jupyter-book/jupyter-book
tests/conftest.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/conftest.py
BSD-3-Clause
def cli(): """Provides a click.testing CliRunner object for invoking CLI commands.""" runner = CliRunner() yield runner del runner
Provides a click.testing CliRunner object for invoking CLI commands.
cli
python
jupyter-book/jupyter-book
tests/conftest.py
https://github.com/jupyter-book/jupyter-book/blob/master/tests/conftest.py
BSD-3-Clause