response (string, 1 to 33.1k chars) | instruction (string, 22 to 582k chars)
---|---|
Set child trace_id of TraceElement at the top of the stack. | def trace_set_child_id(child_key: str, child_run_id: str) -> None:
"""Set child trace_id of TraceElement at the top of the stack."""
if node := trace_stack_top(trace_stack_cv):
node.set_child_id(child_key, child_run_id) |
Set the result of TraceElement at the top of the stack. | def trace_set_result(**kwargs: Any) -> None:
"""Set the result of TraceElement at the top of the stack."""
if node := trace_stack_top(trace_stack_cv):
node.set_result(**kwargs) |
Update the result of TraceElement at the top of the stack. | def trace_update_result(**kwargs: Any) -> None:
"""Update the result of TraceElement at the top of the stack."""
if node := trace_stack_top(trace_stack_cv):
node.update_result(**kwargs) |
Set stop reason. | def script_execution_set(reason: str, response: ServiceResponse = None) -> None:
"""Set stop reason."""
if (data := script_execution_cv.get()) is None:
return
data.script_execution = reason
data.response = response |
Return the stop reason. | def script_execution_get() -> str | None:
"""Return the stop reason."""
if (data := script_execution_cv.get()) is None:
return None
return data.script_execution |
Go deeper in the config tree.
Cannot be used as a decorator on coroutine functions. | def trace_path(suffix: str | list[str]) -> Generator[None, None, None]:
"""Go deeper in the config tree.
Cannot be used as a decorator on coroutine functions.
"""
count = trace_path_push(suffix)
try:
yield
finally:
trace_path_pop(count) |
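A minimal usage sketch for the context manager above, assuming trace_path is wrapped with contextlib.contextmanager upstream (its yield-based body suggests this); the path suffixes are illustrative only.
# Hypothetical usage: nest trace paths while walking a config tree.
with trace_path("conditions"):
    with trace_path("0"):
        pass  # trace nodes created here land under the "conditions/0" path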
Go deeper in the config tree.
To be used as a decorator on coroutine functions. | def async_trace_path(
suffix: str | list[str],
) -> Callable[
[Callable[[*_Ts], Coroutine[Any, Any, None]]],
Callable[[*_Ts], Coroutine[Any, Any, None]],
]:
"""Go deeper in the config tree.
To be used as a decorator on coroutine functions.
"""
def _trace_path_decorator(
func: Callable[[*_Ts], Coroutine[Any, Any, None]],
) -> Callable[[*_Ts], Coroutine[Any, Any, None]]:
"""Decorate a coroutine function."""
@wraps(func)
async def async_wrapper(*args: *_Ts) -> None:
"""Catch and log exception."""
with trace_path(suffix):
await func(*args)
return async_wrapper
return _trace_path_decorator |
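And a hedged sketch of the decorator form above on a coroutine function; the function name and argument are invented for illustration.
# Hypothetical usage: trace everything awaited inside the coroutine under "sequence".
@async_trace_path("sequence")
async def async_run_sequence(step_index: int) -> None:
    ...  # awaited work here is recorded under the "sequence" path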
Return a flattened representation of dict data. | def recursive_flatten(
prefix: str, data: dict[str, dict[str, Any] | str]
) -> dict[str, str]:
"""Return a flattened representation of dict data."""
output: dict[str, str] = {}
for key, value in data.items():
if isinstance(value, dict):
output.update(recursive_flatten(f"{prefix}{key}.", value))
else:
output[f"{prefix}{key}"] = value
return output |
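A quick worked example of the flattening helper above; the nested dict is invented for illustration.
# Nested translation strings collapse into dotted keys.
nested = {"config": {"step": {"init": "Set up Hue"}}, "title": "Hue"}
recursive_flatten("component.hue.", nested)
# -> {"component.hue.config.step.init": "Set up Hue", "component.hue.title": "Hue"}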
Return the translation json file location for a component.
For component:
- components/hue/translations/nl.json | def component_translation_path(language: str, integration: Integration) -> pathlib.Path:
"""Return the translation json file location for a component.
For component:
- components/hue/translations/nl.json
"""
return integration.file_path / "translations" / f"{language}.json" |
Load and parse translation.json files. | def _load_translations_files_by_language(
translation_files: dict[str, dict[str, pathlib.Path]],
) -> dict[str, dict[str, Any]]:
"""Load and parse translation.json files."""
loaded: dict[str, dict[str, Any]] = {}
for language, component_translation_file in translation_files.items():
loaded_for_language: dict[str, Any] = {}
loaded[language] = loaded_for_language
for component, translation_file in component_translation_file.items():
loaded_json = load_json(translation_file)
if not isinstance(loaded_json, dict):
_LOGGER.warning(
"Translation file is unexpected type %s. Expected dict for %s",
type(loaded_json),
translation_file,
)
continue
loaded_for_language[component] = loaded_json
return loaded |
Build the resources response for the given components. | def build_resources(
translation_strings: dict[str, dict[str, dict[str, Any] | str]],
components: set[str],
category: str,
) -> dict[str, dict[str, Any] | str]:
"""Build the resources response for the given components."""
# Build response
return {
component: category_strings
for component in components
if (component_strings := translation_strings.get(component))
and (category_strings := component_strings.get(category))
} |
Return all cached backend translations.
If integration is specified, return translations for it.
Otherwise, default to all loaded integrations. | def async_get_cached_translations(
hass: HomeAssistant,
language: str,
category: str,
integration: str | None = None,
) -> dict[str, str]:
"""Return all cached backend translations.
If integration is specified, return translations for it.
Otherwise, default to all loaded integrations.
"""
components = {integration} if integration else hass.config.top_level_components
return _async_get_translations_cache(hass).get_cached(
language, category, components
) |
Return the translation cache. | def _async_get_translations_cache(hass: HomeAssistant) -> _TranslationCache:
"""Return the translation cache."""
return _TranslationCache(hass) |
Create translation cache and register listeners for translation loaders.
Listeners load translations for every loaded component and after config change. | def async_setup(hass: HomeAssistant) -> None:
"""Create translation cache and register listeners for translation loaders.
Listeners load translations for every loaded component and after config change.
"""
cache = _TranslationCache(hass)
current_language = hass.config.language
_async_get_translations_cache(hass)
@callback
def _async_load_translations_filter(event_data: Mapping[str, Any]) -> bool:
"""Filter out unwanted events."""
nonlocal current_language
if (
new_language := event_data.get("language")
) and new_language != current_language:
current_language = new_language
return True
return False
async def _async_load_translations(event: Event) -> None:
new_language = event.data["language"]
_LOGGER.debug("Loading translations for language: %s", new_language)
await cache.async_load(new_language, hass.config.components)
hass.bus.async_listen(
EVENT_CORE_CONFIG_UPDATE,
_async_load_translations,
event_filter=_async_load_translations_filter,
) |
Return if the given components are loaded for the language. | def async_translations_loaded(hass: HomeAssistant, components: set[str]) -> bool:
"""Return if the given components are loaded for the language."""
return _async_get_translations_cache(hass).async_is_loaded(
hass.config.language, components
) |
Return a translated exception message.
Defaults to English, requires translations to already be cached. | def async_get_exception_message(
translation_domain: str,
translation_key: str,
translation_placeholders: dict[str, str] | None = None,
) -> str:
"""Return a translated exception message.
Defaults to English, requires translations to already be cached.
"""
language = "en"
hass = async_get_hass()
localize_key = (
f"component.{translation_domain}.exceptions.{translation_key}.message"
)
translations = async_get_cached_translations(hass, language, "exceptions")
if localize_key in translations:
if message := translations[localize_key]:
message = message.rstrip(".")
if not translation_placeholders:
return message
with suppress(KeyError):
message = message.format(**translation_placeholders)
return message
# We return the translation key when it was not found in the cache
return translation_key |
Translate provided state using cached translations for currently selected language. | def async_translate_state(
hass: HomeAssistant,
state: str,
domain: str,
platform: str | None,
translation_key: str | None,
device_class: str | None,
) -> str:
"""Translate provided state using cached translations for currently selected language."""
if state in [STATE_UNAVAILABLE, STATE_UNKNOWN]:
return state
language = hass.config.language
if platform is not None and translation_key is not None:
localize_key = (
f"component.{platform}.entity.{domain}.{translation_key}.state.{state}"
)
translations = async_get_cached_translations(hass, language, "entity")
if localize_key in translations:
return translations[localize_key]
translations = async_get_cached_translations(hass, language, "entity_component")
if device_class is not None:
localize_key = (
f"component.{domain}.entity_component.{device_class}.state.{state}"
)
if localize_key in translations:
return translations[localize_key]
localize_key = f"component.{domain}.entity_component._.state.{state}"
if localize_key in translations:
return translations[localize_key]
return state |
Wrap trigger action with extra vars if configured.
If action is a coroutine function, a coroutine function will be returned.
If action is a callback, a callback will be returned. | def _trigger_action_wrapper(
hass: HomeAssistant, action: Callable, conf: ConfigType
) -> Callable:
"""Wrap trigger action with extra vars if configured.
If action is a coroutine function, a coroutine function will be returned.
If action is a callback, a callback will be returned.
"""
if CONF_VARIABLES not in conf:
return action
# Check for partials to properly determine if coroutine function
check_func = action
while isinstance(check_func, functools.partial):
check_func = check_func.func
wrapper_func: Callable[..., None] | Callable[..., Coroutine[Any, Any, None]]
if asyncio.iscoroutinefunction(check_func):
async_action = cast(Callable[..., Coroutine[Any, Any, None]], action)
@functools.wraps(async_action)
async def async_with_vars(
run_variables: dict[str, Any], context: Context | None = None
) -> None:
"""Wrap action with extra vars."""
trigger_variables = conf[CONF_VARIABLES]
run_variables.update(trigger_variables.async_render(hass, run_variables))
await action(run_variables, context)
wrapper_func = async_with_vars
else:
@functools.wraps(action)
async def with_vars(
run_variables: dict[str, Any], context: Context | None = None
) -> None:
"""Wrap action with extra vars."""
trigger_variables = conf[CONF_VARIABLES]
run_variables.update(trigger_variables.async_render(hass, run_variables))
action(run_variables, context)
if is_callback(check_func):
with_vars = callback(with_vars)
wrapper_func = with_vars
return wrapper_func |
Return a schema with default name. | def make_template_entity_base_schema(default_name: str) -> vol.Schema:
"""Return a schema with default name."""
return vol.Schema(
{
vol.Optional(CONF_ICON): cv.template,
vol.Optional(CONF_NAME, default=default_name): cv.template,
vol.Optional(CONF_PICTURE): cv.template,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
) |
Break a component config into different platforms.
For example, will find 'switch', 'switch 2', 'switch 3', etc.
Async friendly. | def config_per_platform(
config: ConfigType, domain: str
) -> Iterable[tuple[str | None, ConfigType]]:
"""Break a component config into different platforms.
For example, will find 'switch', 'switch 2', 'switch 3', etc.
Async friendly.
"""
# pylint: disable-next=import-outside-toplevel
from homeassistant import config as ha_config
# pylint: disable-next=import-outside-toplevel
from .deprecation import _print_deprecation_warning
_print_deprecation_warning(
config_per_platform,
"config.config_per_platform",
"function",
"called",
"2024.6",
)
return ha_config.config_per_platform(config, domain) |
Extract keys from config for given domain name.
Async friendly. | def extract_domain_configs(config: ConfigType, domain: str) -> Sequence[str]:
"""Extract keys from config for given domain name.
Async friendly.
"""
# pylint: disable-next=import-outside-toplevel
from homeassistant import config as ha_config
# pylint: disable-next=import-outside-toplevel
from .deprecation import _print_deprecation_warning
_print_deprecation_warning(
extract_domain_configs,
"config.extract_domain_configs",
"function",
"called",
"2024.6",
)
return ha_config.extract_domain_configs(config, domain) |
Handle Home Assistant auth provider script. | def run(args):
"""Handle Home Assistant auth provider script."""
parser = argparse.ArgumentParser(description="Manage Home Assistant users")
parser.add_argument("--script", choices=["auth"])
parser.add_argument(
"-c",
"--config",
default=get_default_config_dir(),
help="Directory that contains the Home Assistant configuration",
)
subparsers = parser.add_subparsers(dest="func")
subparsers.required = True
parser_list = subparsers.add_parser("list")
parser_list.set_defaults(func=list_users)
parser_add = subparsers.add_parser("add")
parser_add.add_argument("username", type=str)
parser_add.add_argument("password", type=str)
parser_add.set_defaults(func=add_user)
parser_validate_login = subparsers.add_parser("validate")
parser_validate_login.add_argument("username", type=str)
parser_validate_login.add_argument("password", type=str)
parser_validate_login.set_defaults(func=validate_login)
parser_change_pw = subparsers.add_parser("change_password")
parser_change_pw.add_argument("username", type=str)
parser_change_pw.add_argument("new_password", type=str)
parser_change_pw.set_defaults(func=change_password)
asyncio.set_event_loop_policy(runner.HassEventLoopPolicy(False))
asyncio.run(run_command(parser.parse_args(args))) |
Color helper. | def color(the_color, *args, reset=None):
"""Color helper."""
# pylint: disable-next=import-outside-toplevel
from colorlog.escape_codes import escape_codes, parse_colors
try:
if not args:
assert reset is None, "You cannot reset if nothing is being printed"
return parse_colors(the_color)
return parse_colors(the_color) + " ".join(args) + escape_codes[reset or "reset"]
except KeyError as k:
raise ValueError(f"Invalid color {k!s} in {the_color}") from k |
Handle check config commandline script. | def run(script_args: list) -> int:
"""Handle check config commandline script."""
parser = argparse.ArgumentParser(description="Check Home Assistant configuration.")
parser.add_argument("--script", choices=["check_config"])
parser.add_argument(
"-c",
"--config",
default=get_default_config_dir(),
help="Directory that contains the Home Assistant configuration",
)
parser.add_argument(
"-i",
"--info",
nargs="?",
default=None,
const="all",
help="Show a portion of the config",
)
parser.add_argument(
"-f", "--files", action="store_true", help="Show used configuration files"
)
parser.add_argument(
"-s", "--secrets", action="store_true", help="Show secret information"
)
args, unknown = parser.parse_known_args()
if unknown:
print(color("red", "Unknown arguments:", ", ".join(unknown)))
config_dir = os.path.join(os.getcwd(), args.config)
print(color("bold", "Testing configuration at", config_dir))
res = check(config_dir, args.secrets)
domain_info: list[str] = []
if args.info:
domain_info = args.info.split(",")
if args.files:
print(color(C_HEAD, "yaml files"), "(used /", color("red", "not used") + ")")
deps = os.path.join(config_dir, "deps")
yaml_files = [
f
for f in glob(os.path.join(config_dir, "**/*.yaml"), recursive=True)
if not f.startswith(deps)
]
for yfn in sorted(yaml_files):
the_color = "" if yfn in res["yaml_files"] else "red"
print(color(the_color, "-", yfn))
if res["except"]:
print(color("bold_white", "Failed config"))
for domain, config in res["except"].items():
domain_info.append(domain)
print(" ", color("bold_red", domain + ":"), color("red", "", reset="red"))
dump_dict(config, reset="red")
print(color("reset"))
if res["warn"]:
print(color("bold_white", "Incorrect config"))
for domain, config in res["warn"].items():
domain_info.append(domain)
print(
" ",
color("bold_yellow", domain + ":"),
color("yellow", "", reset="yellow"),
)
dump_dict(config, reset="yellow")
print(color("reset"))
if domain_info:
if "all" in domain_info:
print(color("bold_white", "Successful config (all)"))
for domain, config in res["components"].items():
print(" ", color(C_HEAD, domain + ":"))
dump_dict(config)
else:
print(color("bold_white", "Successful config (partial)"))
for domain in domain_info:
if domain == ERROR_STR:
continue
print(" ", color(C_HEAD, domain + ":"))
dump_dict(res["components"].get(domain))
if args.secrets:
flatsecret: dict[str, str] = {}
for sfn, sdict in res["secret_cache"].items():
sss = []
for skey in sdict:
if skey in flatsecret:
_LOGGER.error(
"Duplicated secrets in files %s and %s", flatsecret[skey], sfn
)
flatsecret[skey] = sfn
sss.append(color("green", skey) if skey in res["secrets"] else skey)
print(color(C_HEAD, "Secrets from", sfn + ":"), ", ".join(sss))
print(color(C_HEAD, "Used Secrets:"))
for skey, sval in res["secrets"].items():
if sval is None:
print(" -", skey + ":", color("red", "not found"))
continue
print(" -", skey + ":", sval)
return len(res["except"]) |
Perform a check by mocking hass load functions. | def check(config_dir, secrets=False):
"""Perform a check by mocking hass load functions."""
logging.getLogger("homeassistant.loader").setLevel(logging.CRITICAL)
res: dict[str, Any] = {
"yaml_files": OrderedDict(), # yaml_files loaded
"secrets": OrderedDict(), # secret cache and secrets loaded
"except": OrderedDict(), # critical exceptions raised (with config)
"warn": OrderedDict(), # non critical exceptions raised (with config)
# 'components' is a HomeAssistantConfig
"secret_cache": {},
}
# pylint: disable-next=possibly-unused-variable
def mock_load(filename, secrets=None):
"""Mock hass.util.load_yaml to save config file names."""
res["yaml_files"][filename] = True
return MOCKS["load"][1](filename, secrets)
# pylint: disable-next=possibly-unused-variable
def mock_secrets(ldr, node):
"""Mock _get_secrets."""
try:
val = MOCKS["secrets"][1](ldr, node)
except HomeAssistantError:
val = None
res["secrets"][node.value] = val
return val
# Patches with local mock functions
for key, val in MOCKS.items():
if not secrets and key == "secrets":
continue
# The * in the key is removed to find the mock_function (side_effect)
# This allows us to use one side_effect to patch multiple locations
mock_function = locals()[f"mock_{key.replace('*', '')}"]
PATCHES[key] = patch(val[0], side_effect=mock_function)
# Start all patches
for pat in PATCHES.values():
pat.start()
if secrets:
# Ensure !secrets point to the patched function
yaml_loader.add_constructor("!secret", yaml_loader.secret_yaml)
def secrets_proxy(*args):
secrets = Secrets(*args)
res["secret_cache"] = secrets._cache # pylint: disable=protected-access
return secrets
try:
with patch.object(yaml_loader, "Secrets", secrets_proxy):
res["components"] = asyncio.run(async_check_config(config_dir))
res["secret_cache"] = {
str(key): val for key, val in res["secret_cache"].items()
}
for err in res["components"].errors:
domain = err.domain or ERROR_STR
res["except"].setdefault(domain, []).append(err.message)
if err.config:
res["except"].setdefault(domain, []).append(err.config)
for err in res["components"].warnings:
domain = err.domain or WARNING_STR
res["warn"].setdefault(domain, []).append(err.message)
if err.config:
res["warn"].setdefault(domain, []).append(err.config)
except Exception as err: # pylint: disable=broad-except
print(color("red", "Fatal error while loading config:"), str(err))
res["except"].setdefault(ERROR_STR, []).append(str(err))
finally:
# Stop all patches
for pat in PATCHES.values():
pat.stop()
if secrets:
# Ensure !secrets point to the original function
yaml_loader.add_constructor("!secret", yaml_loader.secret_yaml)
return res |
Display line config source. | def line_info(obj, **kwargs):
"""Display line config source."""
if hasattr(obj, "__config_file__"):
return color(
"cyan", f"[source {obj.__config_file__}:{obj.__line__ or '?'}]", **kwargs
)
return "?" |
Display a dict.
A friendly version of print yaml_loader.yaml.dump(config). | def dump_dict(layer, indent_count=3, listi=False, **kwargs):
"""Display a dict.
A friendly version of print yaml_loader.yaml.dump(config).
"""
def sort_dict_key(val):
"""Return the dict key for sorting."""
key = str(val[0]).lower()
return "0" if key == "platform" else key
indent_str = indent_count * " "
if listi or isinstance(layer, list):
indent_str = indent_str[:-1] + "-"
if isinstance(layer, Mapping):
for key, value in sorted(layer.items(), key=sort_dict_key):
if isinstance(value, (dict, list)):
print(indent_str, str(key) + ":", line_info(value, **kwargs))
dump_dict(value, indent_count + 2, **kwargs)
else:
print(indent_str, str(key) + ":", value, line_info(key, **kwargs))
indent_str = indent_count * " "
if isinstance(layer, Sequence):
for i in layer:
if isinstance(i, dict):
dump_dict(i, indent_count + 2, True, **kwargs)
else:
print(" ", indent_str, i) |
Handle ensure config commandline script. | def run(args):
"""Handle ensure config commandline script."""
parser = argparse.ArgumentParser(
description="Ensure a Home Assistant config exists, creates one if necessary."
)
parser.add_argument(
"-c",
"--config",
metavar="path_to_config_dir",
default=config_util.get_default_config_dir(),
help="Directory that contains the Home Assistant configuration",
)
parser.add_argument("--script", choices=["ensure_config"])
args = parser.parse_args()
config_dir = os.path.join(os.getcwd(), args.config)
# Test if configuration directory exists
if not os.path.isdir(config_dir):
print("Creating directory", config_dir)
os.makedirs(config_dir, exist_ok=True)
config_path = asyncio.run(async_run(config_dir))
print("Configuration file:", config_path)
return 0 |
Run a script. | def run(args: list[str]) -> int:
"""Run a script."""
scripts = []
path = os.path.dirname(__file__)
for fil in os.listdir(path):
if fil == "__pycache__":
continue
if os.path.isdir(os.path.join(path, fil)):
scripts.append(fil)
elif fil != "__init__.py" and fil.endswith(".py"):
scripts.append(fil[:-3])
if not args:
print("Please specify a script to run.")
print("Available scripts:", ", ".join(scripts))
return 1
if args[0] not in scripts:
print("Invalid script specified.")
print("Available scripts:", ", ".join(scripts))
return 1
script = importlib.import_module(f"homeassistant.scripts.{args[0]}")
config_dir = extract_config_dir()
loop = asyncio.get_event_loop()
if not is_virtual_env():
loop.run_until_complete(async_mount_local_lib_path(config_dir))
_pip_kwargs = pip_kwargs(config_dir)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
for req in getattr(script, "REQUIREMENTS", []):
if is_installed(req):
continue
if not install_package(req, **_pip_kwargs):
print("Aborting script, could not install dependency", req)
return 1
asyncio.set_event_loop_policy(runner.HassEventLoopPolicy(False))
return script.run(args[1:]) |
Extract the config dir from the arguments or get the default. | def extract_config_dir(args: Sequence[str] | None = None) -> str:
"""Extract the config dir from the arguments or get the default."""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-c", "--config", default=None)
parsed_args = parser.parse_known_args(args)[0]
return (
os.path.join(os.getcwd(), parsed_args.config)
if parsed_args.config
else get_default_config_dir()
) |
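A small sketch of how the parsing above behaves; the directory name is hypothetical.
extract_config_dir(["-c", "my_config"])  # -> os.path.join(os.getcwd(), "my_config")
extract_config_dir([])                   # -> get_default_config_dir()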
Handle benchmark commandline script. | def run(args):
"""Handle benchmark commandline script."""
# Disable logging
logging.getLogger("homeassistant.core").setLevel(logging.CRITICAL)
parser = argparse.ArgumentParser(description="Run a Home Assistant benchmark.")
parser.add_argument("name", choices=BENCHMARKS)
parser.add_argument("--script", choices=["benchmark"])
args = parser.parse_args()
bench = BENCHMARKS[args.name]
print("Using event loop:", asyncio.get_event_loop_policy().loop_name)
with suppress(KeyboardInterrupt):
while True:
asyncio.run(run_benchmark(bench)) |
Decorate to mark a benchmark. | def benchmark(func: _CallableT) -> _CallableT:
"""Decorate to mark a benchmark."""
BENCHMARKS[func.__name__] = func
return func |
Create a state changed event from an old and new state. | def _create_state_changed_event_from_old_new(
entity_id, event_time_fired, old_state, new_state
):
"""Create a state changed event from a old and new state."""
attributes = {}
if new_state is not None:
attributes = new_state.get("attributes")
attributes_json = json.dumps(attributes, cls=JSONEncoder)
if attributes_json == "null":
attributes_json = "{}"
row = collections.namedtuple(
"Row",
[
"event_type"
"event_data"
"time_fired"
"context_id"
"context_user_id"
"state"
"entity_id"
"domain"
"attributes"
"state_id",
"old_state_id",
],
)
row.event_type = EVENT_STATE_CHANGED
row.event_data = "{}"
row.attributes = attributes_json
row.time_fired = event_time_fired
row.state = new_state and new_state.get("state")
row.entity_id = entity_id
row.domain = entity_id and core.split_entity_id(entity_id)[0]
row.context_id = None
row.context_user_id = None
row.old_state_id = old_state and 1
row.state_id = new_state and 1
# pylint: disable-next=import-outside-toplevel
from homeassistant.components import logbook
return logbook.LazyEventPartialState(row, {}) |
Set up to run via launchd on OS X. | def install_osx():
"""Set up to run via launchd on OS X."""
with os.popen("which hass") as inp:
hass_path = inp.read().strip()
with os.popen("whoami") as inp:
user = inp.read().strip()
template_path = os.path.join(os.path.dirname(__file__), "launchd.plist")
with open(template_path, encoding="utf-8") as tinp:
plist = tinp.read()
plist = plist.replace("$HASS_PATH$", hass_path)
plist = plist.replace("$USER$", user)
path = os.path.expanduser("~/Library/LaunchAgents/org.homeassistant.plist")
try:
with open(path, "w", encoding="utf-8") as outp:
outp.write(plist)
except OSError as err:
print(f"Unable to write to {path}", err)
return
os.popen(f"launchctl load -w -F {path}")
print("Home Assistant has been installed. Open it here: http://localhost:8123") |
Unload from launchd on OS X. | def uninstall_osx():
"""Unload from launchd on OS X."""
path = os.path.expanduser("~/Library/LaunchAgents/org.homeassistant.plist")
os.popen(f"launchctl unload {path}")
print("Home Assistant has been uninstalled.") |
Handle OSX commandline script. | def run(args):
"""Handle OSX commandline script."""
commands = "install", "uninstall", "restart"
if not args or args[0] not in commands:
print("Invalid command. Available commands:", ", ".join(commands))
return 1
if args[0] == "install":
install_osx()
return 0
if args[0] == "uninstall":
uninstall_osx()
return 0
if args[0] == "restart":
uninstall_osx()
# A small delay is needed on some systems to let the unload finish.
time.sleep(0.5)
install_osx()
return 0 |
Serialize an aiohttp response to a dictionary. | def serialize_response(response: web.Response) -> dict[str, Any]:
"""Serialize an aiohttp response to a dictionary."""
if (body := response.body) is None:
body_decoded = None
elif isinstance(body, payload.StringPayload):
# pylint: disable-next=protected-access
body_decoded = body._value.decode(body.encoding)
elif isinstance(body, bytes):
body_decoded = body.decode(response.charset or "utf-8")
else:
raise TypeError("Unknown payload encoding")
return {
"status": response.status,
"body": body_decoded,
"headers": dict(response.headers),
} |
Create a task from a coroutine and schedule it to run immediately. | def create_eager_task(
coro: Coroutine[Any, Any, _T],
*,
name: str | None = None,
loop: AbstractEventLoop | None = None,
) -> Task[_T]:
"""Create a task from a coroutine and schedule it to run immediately."""
if not loop:
try:
loop = get_running_loop()
except RuntimeError:
# If there is no running loop, create_eager_task is being called from
# the wrong thread.
# Late import to avoid circular dependencies
# pylint: disable-next=import-outside-toplevel
from homeassistant.helpers import frame
frame.report("attempted to create an asyncio task from a thread")
raise
return Task(coro, loop=loop, name=name, eager_start=True) |
Return True if task is cancelling. | def cancelling(task: Future[Any]) -> bool:
"""Return True if task is cancelling."""
return bool((cancelling_ := getattr(task, "cancelling", None)) and cancelling_()) |
Submit a callback object to a given event loop.
Return a concurrent.futures.Future to access the result. | def run_callback_threadsafe(
loop: AbstractEventLoop, callback: Callable[[*_Ts], _T], *args: *_Ts
) -> concurrent.futures.Future[_T]:
"""Submit a callback object to a given event loop.
Return a concurrent.futures.Future to access the result.
"""
if (ident := loop.__dict__.get("_thread_ident")) and ident == threading.get_ident():
raise RuntimeError("Cannot be called from within the event loop")
future: concurrent.futures.Future[_T] = concurrent.futures.Future()
def run_callback() -> None:
"""Run callback and store result."""
try:
future.set_result(callback(*args))
except Exception as exc: # pylint: disable=broad-except
if future.set_running_or_notify_cancel():
future.set_exception(exc)
else:
_LOGGER.warning("Exception on lost future: ", exc_info=True)
loop.call_soon_threadsafe(run_callback)
if hasattr(loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE):
#
# If the final `HomeAssistant.async_block_till_done` in
# `HomeAssistant.async_stop` has already been called, the callback
# will never run and, `future.result()` will block forever which
# will prevent the thread running this code from shutting down which
# will result in a deadlock when the main thread attempts to shutdown
# the executor and `.join()` the thread running this code.
#
# To prevent this deadlock we do the following on shutdown:
#
# 1. Set the _SHUTDOWN_RUN_CALLBACK_THREADSAFE attr on this function
# by calling `shutdown_run_callback_threadsafe`
# 2. Call `hass.async_block_till_done` at least once after shutdown
# to ensure all callbacks have run
# 3. Raise an exception here to ensure `future.result()` can never be
# called and hit the deadlock since once `shutdown_run_callback_threadsafe`
# we cannot promise the callback will be executed.
#
raise RuntimeError("The event loop is in the process of shutting down.")
return future |
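A minimal sketch of calling the helper above from a worker thread; the callback, thread wiring and timeout are illustrative and not part of the original code.
def _worker(loop) -> None:
    # Runs in a non-event-loop thread: schedule the callback on the loop and
    # block on the returned concurrent.futures.Future for its result.
    future = run_callback_threadsafe(loop, lambda: 21 * 2)
    print(future.result(timeout=5))  # -> 42
# e.g. threading.Thread(target=_worker, args=(loop,)).start() from code that owns the loop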
Call when run_callback_threadsafe should prevent creating new futures.
We must finish all callbacks before the executor is shutdown
or we can end up in a deadlock state where:
`executor.result()` is waiting for its `._condition`
and the executor shutdown is trying to `.join()` the
executor thread.
This function is considered irreversible and should only ever
be called when Home Assistant is going to shutdown and
python is going to exit. | def shutdown_run_callback_threadsafe(loop: AbstractEventLoop) -> None:
"""Call when run_callback_threadsafe should prevent creating new futures.
We must finish all callbacks before the executor is shutdown
or we can end up in a deadlock state where:
`executor.result()` is waiting for its `._condition`
and the executor shutdown is trying to `.join()` the
executor thread.
This function is considered irreversible and should only ever
be called when Home Assistant is going to shutdown and
python is going to exit.
"""
setattr(loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE, True) |
Convert a color name to an RGB color value. | def color_name_to_rgb(color_name: str) -> RGBColor:
"""Convert a color name to an RGB color value."""
# COLORS map has no spaces in it, so make the color_name have no
# spaces in it as well for matching purposes
hex_value = COLORS.get(color_name.replace(" ", "").lower())
if not hex_value:
raise ValueError("Unknown color")
return hex_value |
Convert from RGB color to XY color. | def color_RGB_to_xy(
iR: int, iG: int, iB: int, Gamut: GamutType | None = None
) -> tuple[float, float]:
"""Convert from RGB color to XY color."""
return color_RGB_to_xy_brightness(iR, iG, iB, Gamut)[:2] |
Convert from RGB color to XY color. | def color_RGB_to_xy_brightness(
iR: int, iG: int, iB: int, Gamut: GamutType | None = None
) -> tuple[float, float, int]:
"""Convert from RGB color to XY color."""
if iR + iG + iB == 0:
return 0.0, 0.0, 0
R = iR / 255
B = iB / 255
G = iG / 255
# Gamma correction
R = pow((R + 0.055) / (1.0 + 0.055), 2.4) if (R > 0.04045) else (R / 12.92)
G = pow((G + 0.055) / (1.0 + 0.055), 2.4) if (G > 0.04045) else (G / 12.92)
B = pow((B + 0.055) / (1.0 + 0.055), 2.4) if (B > 0.04045) else (B / 12.92)
# Wide RGB D65 conversion formula
X = R * 0.664511 + G * 0.154324 + B * 0.162028
Y = R * 0.283881 + G * 0.668433 + B * 0.047685
Z = R * 0.000088 + G * 0.072310 + B * 0.986039
# Convert XYZ to xy
x = X / (X + Y + Z)
y = Y / (X + Y + Z)
# Brightness
Y = 1 if Y > 1 else Y
brightness = round(Y * 255)
# Check if the given xy value is within the color-reach of the lamp.
if Gamut:
in_reach = check_point_in_lamps_reach((x, y), Gamut)
if not in_reach:
xy_closest = get_closest_point_to_point((x, y), Gamut)
x = xy_closest[0]
y = xy_closest[1]
return round(x, 3), round(y, 3), brightness |
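A hand-checked example of the conversion above with no Gamut clipping.
# Pure red: gamma-corrected R == 1.0, so X = 0.664511, Y = 0.283881, Z = 0.000088,
# x = X / (X + Y + Z) ~ 0.701, y ~ 0.299, brightness = round(0.283881 * 255) = 72.
color_RGB_to_xy_brightness(255, 0, 0)  # -> (0.701, 0.299, 72)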
Convert from XY to a normalized RGB. | def color_xy_to_RGB(
vX: float, vY: float, Gamut: GamutType | None = None
) -> tuple[int, int, int]:
"""Convert from XY to a normalized RGB."""
return color_xy_brightness_to_RGB(vX, vY, 255, Gamut) |
Convert from xy color and brightness to RGB. | def color_xy_brightness_to_RGB(
vX: float, vY: float, ibrightness: int, Gamut: GamutType | None = None
) -> tuple[int, int, int]:
"""Convert from XYZ to RGB."""
if Gamut and not check_point_in_lamps_reach((vX, vY), Gamut):
xy_closest = get_closest_point_to_point((vX, vY), Gamut)
vX = xy_closest[0]
vY = xy_closest[1]
brightness = ibrightness / 255.0
if brightness == 0.0:
return (0, 0, 0)
Y = brightness
if vY == 0.0:
vY += 0.00000000001
X = (Y / vY) * vX
Z = (Y / vY) * (1 - vX - vY)
# Convert to RGB using Wide RGB D65 conversion.
r = X * 1.656492 - Y * 0.354851 - Z * 0.255038
g = -X * 0.707196 + Y * 1.655397 + Z * 0.036152
b = X * 0.051713 - Y * 0.121364 + Z * 1.011530
# Apply reverse gamma correction.
r, g, b = (
12.92 * x if (x <= 0.0031308) else ((1.0 + 0.055) * pow(x, (1.0 / 2.4)) - 0.055)
for x in (r, g, b)
)
# Bring all negative components to zero.
r, g, b = (max(0, x) for x in (r, g, b))
# If one component is greater than 1, weight components by that value.
max_component = max(r, g, b)
if max_component > 1:
r, g, b = (x / max_component for x in (r, g, b))
ir, ig, ib = (int(x * 255) for x in (r, g, b))
return (ir, ig, ib) |
Convert a hsb into its rgb representation. | def color_hsb_to_RGB(fH: float, fS: float, fB: float) -> tuple[int, int, int]:
"""Convert a hsb into its rgb representation."""
if fS == 0.0:
fV = int(fB * 255)
return fV, fV, fV
r = g = b = 0
h = fH / 60
f = h - float(math.floor(h))
p = fB * (1 - fS)
q = fB * (1 - fS * f)
t = fB * (1 - (fS * (1 - f)))
if int(h) == 0:
r = int(fB * 255)
g = int(t * 255)
b = int(p * 255)
elif int(h) == 1:
r = int(q * 255)
g = int(fB * 255)
b = int(p * 255)
elif int(h) == 2:
r = int(p * 255)
g = int(fB * 255)
b = int(t * 255)
elif int(h) == 3:
r = int(p * 255)
g = int(q * 255)
b = int(fB * 255)
elif int(h) == 4:
r = int(t * 255)
g = int(p * 255)
b = int(fB * 255)
elif int(h) == 5:
r = int(fB * 255)
g = int(p * 255)
b = int(q * 255)
return (r, g, b) |
Convert an rgb color to its hsv representation.
Hue is scaled 0-360
Sat is scaled 0-100
Val is scaled 0-100 | def color_RGB_to_hsv(iR: float, iG: float, iB: float) -> tuple[float, float, float]:
"""Convert an rgb color to its hsv representation.
Hue is scaled 0-360
Sat is scaled 0-100
Val is scaled 0-100
"""
fHSV = colorsys.rgb_to_hsv(iR / 255.0, iG / 255.0, iB / 255.0)
return round(fHSV[0] * 360, 3), round(fHSV[1] * 100, 3), round(fHSV[2] * 100, 3) |
Convert an rgb color to its hs representation. | def color_RGB_to_hs(iR: float, iG: float, iB: float) -> tuple[float, float]:
"""Convert an rgb color to its hs representation."""
return color_RGB_to_hsv(iR, iG, iB)[:2] |
Convert an hsv color into its rgb representation.
Hue is scaled 0-360
Sat is scaled 0-100
Val is scaled 0-100 | def color_hsv_to_RGB(iH: float, iS: float, iV: float) -> tuple[int, int, int]:
"""Convert an hsv color into its rgb representation.
Hue is scaled 0-360
Sat is scaled 0-100
Val is scaled 0-100
"""
fRGB = colorsys.hsv_to_rgb(iH / 360, iS / 100, iV / 100)
return (int(fRGB[0] * 255), int(fRGB[1] * 255), int(fRGB[2] * 255)) |
Convert an hsv color into its rgb representation. | def color_hs_to_RGB(iH: float, iS: float) -> tuple[int, int, int]:
"""Convert an hsv color into its rgb representation."""
return color_hsv_to_RGB(iH, iS, 100) |
Convert an xy color to its hs representation. | def color_xy_to_hs(
vX: float, vY: float, Gamut: GamutType | None = None
) -> tuple[float, float]:
"""Convert an xy color to its hs representation."""
h, s, _ = color_RGB_to_hsv(*color_xy_to_RGB(vX, vY, Gamut))
return h, s |
Convert an hs color to its xy representation. | def color_hs_to_xy(
iH: float, iS: float, Gamut: GamutType | None = None
) -> tuple[float, float]:
"""Convert an hs color to its xy representation."""
return color_RGB_to_xy(*color_hs_to_RGB(iH, iS), Gamut) |
Match the maximum value of the output to the input. | def match_max_scale(
input_colors: tuple[int, ...], output_colors: tuple[float, ...]
) -> tuple[int, ...]:
"""Match the maximum value of the output to the input."""
max_in = max(input_colors)
max_out = max(output_colors)
if max_out == 0:
factor = 0.0
else:
factor = max_in / max_out
return tuple(int(round(i * factor)) for i in output_colors) |
Convert an rgb color to an rgbw representation. | def color_rgb_to_rgbw(r: int, g: int, b: int) -> tuple[int, int, int, int]:
"""Convert an rgb color to an rgbw representation."""
# Calculate the white channel as the minimum of input rgb channels.
# Subtract the white portion from the remaining rgb channels.
w = min(r, g, b)
rgbw = (r - w, g - w, b - w, w)
# Match the output maximum value to the input. This ensures the full
# channel range is used.
return match_max_scale((r, g, b), rgbw) |
Convert an rgbw color to an rgb representation. | def color_rgbw_to_rgb(r: int, g: int, b: int, w: int) -> tuple[int, int, int]:
"""Convert an rgbw color to an rgb representation."""
# Add the white channel to the rgb channels.
rgb = (r + w, g + w, b + w)
# Match the output maximum value to the input. This ensures the
# output doesn't overflow.
return match_max_scale((r, g, b, w), rgb) |
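A worked round trip through the two rgbw helpers above, traced by hand from the code.
# The white channel takes the common minimum, and match_max_scale restores the
# full 0-255 range in each direction.
color_rgb_to_rgbw(255, 128, 64)    # -> (255, 85, 0, 85)
color_rgbw_to_rgb(255, 85, 0, 85)  # -> (255, 128, 64)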
Convert an rgb color to an rgbww representation. | def color_rgb_to_rgbww(
r: int, g: int, b: int, min_kelvin: int, max_kelvin: int
) -> tuple[int, int, int, int, int]:
"""Convert an rgb color to an rgbww representation."""
# Find the color temperature when both white channels have equal brightness
max_mireds = color_temperature_kelvin_to_mired(min_kelvin)
min_mireds = color_temperature_kelvin_to_mired(max_kelvin)
mired_range = max_mireds - min_mireds
mired_midpoint = min_mireds + mired_range / 2
color_temp_kelvin = color_temperature_mired_to_kelvin(mired_midpoint)
w_r, w_g, w_b = color_temperature_to_rgb(color_temp_kelvin)
# Find the ratio of the midpoint white in the input rgb channels
white_level = min(
r / w_r if w_r else 0, g / w_g if w_g else 0, b / w_b if w_b else 0
)
# Subtract the white portion from the rgb channels.
rgb = (r - w_r * white_level, g - w_g * white_level, b - w_b * white_level)
rgbww = (*rgb, round(white_level * 255), round(white_level * 255))
# Match the output maximum value to the input. This ensures the full
# channel range is used.
return match_max_scale((r, g, b), rgbww) |
Convert an rgbww color to an rgb representation. | def color_rgbww_to_rgb(
r: int, g: int, b: int, cw: int, ww: int, min_kelvin: int, max_kelvin: int
) -> tuple[int, int, int]:
"""Convert an rgbww color to an rgb representation."""
# Calculate color temperature of the white channels
max_mireds = color_temperature_kelvin_to_mired(min_kelvin)
min_mireds = color_temperature_kelvin_to_mired(max_kelvin)
mired_range = max_mireds - min_mireds
try:
ct_ratio = ww / (cw + ww)
except ZeroDivisionError:
ct_ratio = 0.5
color_temp_mired = min_mireds + ct_ratio * mired_range
if color_temp_mired:
color_temp_kelvin = color_temperature_mired_to_kelvin(color_temp_mired)
else:
color_temp_kelvin = 0
w_r, w_g, w_b = color_temperature_to_rgb(color_temp_kelvin)
white_level = max(cw, ww) / 255
# Add the white channels to the rgb channels.
rgb = (r + w_r * white_level, g + w_g * white_level, b + w_b * white_level)
# Match the output maximum value to the input. This ensures the
# output doesn't overflow.
return match_max_scale((r, g, b, cw, ww), rgb) |
Return a hex color string from RGB channel values. | def color_rgb_to_hex(r: int, g: int, b: int) -> str:
"""Return a hex color string from RGB channel values."""
return f"{round(r):02x}{round(g):02x}{round(b):02x}" |
Return an RGB color value list from a hex color string. | def rgb_hex_to_rgb_list(hex_string: str) -> list[int]:
"""Return an RGB color value list from a hex color string."""
return [
int(hex_string[i : i + len(hex_string) // 3], 16)
for i in range(0, len(hex_string), len(hex_string) // 3)
] |
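Examples that follow directly from the two hex helpers above.
color_rgb_to_hex(255, 128, 64)   # -> "ff8040"
rgb_hex_to_rgb_list("ff8040")    # -> [255, 128, 64]
# An 8-character string yields four channels (len // 3 == 2, i.e. 2-char chunks):
rgb_hex_to_rgb_list("ff804020")  # -> [255, 128, 64, 32]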
Return an hs color from a color temperature in Kelvin. | def color_temperature_to_hs(color_temperature_kelvin: float) -> tuple[float, float]:
"""Return an hs color from a color temperature in Kelvin."""
return color_RGB_to_hs(*color_temperature_to_rgb(color_temperature_kelvin)) |
Return an RGB color from a color temperature in Kelvin.
This is a rough approximation based on the formula provided by T. Helland
http://www.tannerhelland.com/4435/convert-temperature-rgb-algorithm-code/ | def color_temperature_to_rgb(
color_temperature_kelvin: float,
) -> tuple[float, float, float]:
"""Return an RGB color from a color temperature in Kelvin.
This is a rough approximation based on the formula provided by T. Helland
http://www.tannerhelland.com/4435/convert-temperature-rgb-algorithm-code/
"""
# range check
if color_temperature_kelvin < 1000:
color_temperature_kelvin = 1000
elif color_temperature_kelvin > 40000:
color_temperature_kelvin = 40000
tmp_internal = color_temperature_kelvin / 100.0
red = _get_red(tmp_internal)
green = _get_green(tmp_internal)
blue = _get_blue(tmp_internal)
return red, green, blue |
Convert color temperature in kelvin to rgbcw.
Returns a (r, g, b, cw, ww) tuple. | def color_temperature_to_rgbww(
temperature: int, brightness: int, min_kelvin: int, max_kelvin: int
) -> tuple[int, int, int, int, int]:
"""Convert color temperature in kelvin to rgbcw.
Returns a (r, g, b, cw, ww) tuple.
"""
max_mireds = color_temperature_kelvin_to_mired(min_kelvin)
min_mireds = color_temperature_kelvin_to_mired(max_kelvin)
temperature = color_temperature_kelvin_to_mired(temperature)
mired_range = max_mireds - min_mireds
cold = ((max_mireds - temperature) / mired_range) * brightness
warm = brightness - cold
return (0, 0, 0, round(cold), round(warm)) |
Convert rgbcw to color temperature in kelvin.
Returns a tuple (color_temperature, brightness). | def rgbww_to_color_temperature(
rgbww: tuple[int, int, int, int, int], min_kelvin: int, max_kelvin: int
) -> tuple[int, int]:
"""Convert rgbcw to color temperature in kelvin.
Returns a tuple (color_temperature, brightness).
"""
_, _, _, cold, warm = rgbww
return _white_levels_to_color_temperature(cold, warm, min_kelvin, max_kelvin) |
Convert whites to color temperature in kelvin.
Returns a tuple (color_temperature, brightness). | def _white_levels_to_color_temperature(
cold: int, warm: int, min_kelvin: int, max_kelvin: int
) -> tuple[int, int]:
"""Convert whites to color temperature in kelvin.
Returns a tuple (color_temperature, brightness).
"""
max_mireds = color_temperature_kelvin_to_mired(min_kelvin)
min_mireds = color_temperature_kelvin_to_mired(max_kelvin)
brightness = warm / 255 + cold / 255
if brightness == 0:
# Return the warmest color if brightness is 0
return (min_kelvin, 0)
return round(
color_temperature_mired_to_kelvin(
((cold / 255 / brightness) * (min_mireds - max_mireds)) + max_mireds
)
), min(255, round(brightness * 255)) |
Convert an xy color to a color temperature in Kelvin.
Uses McCamy's approximation (https://doi.org/10.1002/col.5080170211),
close enough for uses between 2000 K and 10000 K. | def color_xy_to_temperature(x: float, y: float) -> int:
"""Convert an xy color to a color temperature in Kelvin.
Uses McCamy's approximation (https://doi.org/10.1002/col.5080170211),
close enough for uses between 2000 K and 10000 K.
"""
n = (x - 0.3320) / (0.1858 - y)
CCT = 437 * (n**3) + 3601 * (n**2) + 6861 * n + 5517
return int(CCT) |
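A rough sanity check of the McCamy approximation above, using the D65 white point.
# x, y = 0.3127, 0.3290 gives n ~ 0.1348 and a CCT of roughly 6508 K,
# close to D65's nominal ~6504 K correlated color temperature.
color_xy_to_temperature(0.3127, 0.3290)  # -> approximately 6508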
Clamp the given color component value between the given min and max values.
The range defined by the minimum and maximum values is inclusive, i.e. given a
color_component of 0 and a minimum of 10, the returned value is 10. | def _clamp(color_component: float, minimum: float = 0, maximum: float = 255) -> float:
"""Clamp the given color component value between the given min and max values.
The range defined by the minimum and maximum values is inclusive, i.e. given a
color_component of 0 and a minimum of 10, the returned value is 10.
"""
color_component_out = max(color_component, minimum)
return min(color_component_out, maximum) |
Get the red component of the temperature in RGB space. | def _get_red(temperature: float) -> float:
"""Get the red component of the temperature in RGB space."""
if temperature <= 66:
return 255
tmp_red = 329.698727446 * math.pow(temperature - 60, -0.1332047592)
return _clamp(tmp_red) |
Get the green component of the given color temp in RGB space. | def _get_green(temperature: float) -> float:
"""Get the green component of the given color temp in RGB space."""
if temperature <= 66:
green = 99.4708025861 * math.log(temperature) - 161.1195681661
else:
green = 288.1221695283 * math.pow(temperature - 60, -0.0755148492)
return _clamp(green) |
Get the blue component of the given color temperature in RGB space. | def _get_blue(temperature: float) -> float:
"""Get the blue component of the given color temperature in RGB space."""
if temperature >= 66:
return 255
if temperature <= 19:
return 0
blue = 138.5177312231 * math.log(temperature - 10) - 305.0447927307
return _clamp(blue) |
Convert absolute mired shift to degrees kelvin. | def color_temperature_mired_to_kelvin(mired_temperature: float) -> int:
"""Convert absolute mired shift to degrees kelvin."""
return math.floor(1000000 / mired_temperature) |
Convert degrees kelvin to mired shift. | def color_temperature_kelvin_to_mired(kelvin_temperature: float) -> int:
"""Convert degrees kelvin to mired shift."""
return math.floor(1000000 / kelvin_temperature) |
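The two mired conversions above are reciprocals with floor rounding, so round trips can drift by a few kelvin.
color_temperature_kelvin_to_mired(6500)  # -> floor(1_000_000 / 6500) = 153
color_temperature_mired_to_kelvin(500)   # -> 2000
color_temperature_mired_to_kelvin(153)   # -> 6535, not exactly 6500, because of floor()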
Calculate the cross product of two XYPoints. | def cross_product(p1: XYPoint, p2: XYPoint) -> float:
"""Calculate the cross product of two XYPoints."""
return float(p1.x * p2.y - p1.y * p2.x) |
Calculate the distance between two XYPoints. | def get_distance_between_two_points(one: XYPoint, two: XYPoint) -> float:
"""Calculate the distance between two XYPoints."""
dx = one.x - two.x
dy = one.y - two.y
return math.sqrt(dx * dx + dy * dy) |
Find the closest point from P to a line defined by A and B.
This point will be reproducible by the lamp
as it is on the edge of the gamut. | def get_closest_point_to_line(A: XYPoint, B: XYPoint, P: XYPoint) -> XYPoint:
"""Find the closest point from P to a line defined by A and B.
This point will be reproducible by the lamp
as it is on the edge of the gamut.
"""
AP = XYPoint(P.x - A.x, P.y - A.y)
AB = XYPoint(B.x - A.x, B.y - A.y)
ab2 = AB.x * AB.x + AB.y * AB.y
ap_ab = AP.x * AB.x + AP.y * AB.y
t = ap_ab / ab2
if t < 0.0:
t = 0.0
elif t > 1.0:
t = 1.0
return XYPoint(A.x + AB.x * t, A.y + AB.y * t) |
Get the closest matching color within the gamut of the light.
Should only be used if the supplied color is outside of the color gamut. | def get_closest_point_to_point(
xy_tuple: tuple[float, float], Gamut: GamutType
) -> tuple[float, float]:
"""Get the closest matching color within the gamut of the light.
Should only be used if the supplied color is outside of the color gamut.
"""
xy_point = XYPoint(xy_tuple[0], xy_tuple[1])
# find the closest point on each line in the CIE 1931 'triangle'.
pAB = get_closest_point_to_line(Gamut.red, Gamut.green, xy_point)
pAC = get_closest_point_to_line(Gamut.blue, Gamut.red, xy_point)
pBC = get_closest_point_to_line(Gamut.green, Gamut.blue, xy_point)
# Get the distances per point and see which point is closer to our Point.
dAB = get_distance_between_two_points(xy_point, pAB)
dAC = get_distance_between_two_points(xy_point, pAC)
dBC = get_distance_between_two_points(xy_point, pBC)
lowest = dAB
closest_point = pAB
if dAC < lowest:
lowest = dAC
closest_point = pAC
if dBC < lowest:
lowest = dBC
closest_point = pBC
# Change the xy value to a value which is within the reach of the lamp.
cx = closest_point.x
cy = closest_point.y
return (cx, cy) |
Check if the provided XYPoint can be recreated by a Hue lamp. | def check_point_in_lamps_reach(p: tuple[float, float], Gamut: GamutType) -> bool:
"""Check if the provided XYPoint can be recreated by a Hue lamp."""
v1 = XYPoint(Gamut.green.x - Gamut.red.x, Gamut.green.y - Gamut.red.y)
v2 = XYPoint(Gamut.blue.x - Gamut.red.x, Gamut.blue.y - Gamut.red.y)
q = XYPoint(p[0] - Gamut.red.x, p[1] - Gamut.red.y)
s = cross_product(q, v2) / cross_product(v1, v2)
t = cross_product(v1, q) / cross_product(v1, v2)
return (s >= 0.0) and (t >= 0.0) and (s + t <= 1.0) |
Check if the supplied gamut is valid. | def check_valid_gamut(Gamut: GamutType) -> bool:
"""Check if the supplied gamut is valid."""
# Check if the three points of the supplied gamut are not on the same line.
v1 = XYPoint(Gamut.green.x - Gamut.red.x, Gamut.green.y - Gamut.red.y)
v2 = XYPoint(Gamut.blue.x - Gamut.red.x, Gamut.blue.y - Gamut.red.y)
not_on_line = cross_product(v1, v2) > 0.0001
# Check if all six coordinates of the gamut lie between 0 and 1.
red_valid = (
Gamut.red.x >= 0 and Gamut.red.x <= 1 and Gamut.red.y >= 0 and Gamut.red.y <= 1
)
green_valid = (
Gamut.green.x >= 0
and Gamut.green.x <= 1
and Gamut.green.y >= 0
and Gamut.green.y <= 1
)
blue_valid = (
Gamut.blue.x >= 0
and Gamut.blue.x <= 1
and Gamut.blue.y >= 0
and Gamut.blue.y <= 1
)
return not_on_line and red_valid and green_valid and blue_valid |
Given a brightness_scale convert a brightness to a single value.
Do not include 0 if the light is off for value 0.
Given a brightness low_high_range of (1,100) this function
will return:
255: 100.0
127: ~49.8039
10: ~3.9216 | def brightness_to_value(low_high_range: tuple[float, float], brightness: int) -> float:
"""Given a brightness_scale convert a brightness to a single value.
Do not include 0 if the light is off for value 0.
Given a brightness low_high_range of (1,100) this function
will return:
255: 100.0
127: ~49.8039
10: ~3.9216
"""
return scale_to_ranged_value((1, 255), low_high_range, brightness) |
Given a brightness_scale convert a single value to a brightness.
Do not include 0 if the light is off for value 0.
Given a brightness low_high_range of (1,100) this function
will return:
100: 255
50: 128
4: 10
The value will be clamped between 1..255 to ensure valid value. | def value_to_brightness(low_high_range: tuple[float, float], value: float) -> int:
"""Given a brightness_scale convert a single value to a brightness.
Do not include 0 if the light is off for value 0.
Given a brightness low_high_range of (1,100) this function
will return:
100: 255
50: 128
4: 10
The value will be clamped between 1..255 to ensure valid value.
"""
return min(
255,
max(1, round(scale_to_ranged_value(low_high_range, (1, 255), value))),
) |
Set a default time zone to be used when none is specified.
Async friendly. | def set_default_time_zone(time_zone: dt.tzinfo) -> None:
"""Set a default time zone to be used when none is specified.
Async friendly.
"""
# pylint: disable-next=global-statement
global DEFAULT_TIME_ZONE # noqa: PLW0603
assert isinstance(time_zone, dt.tzinfo)
DEFAULT_TIME_ZONE = time_zone |
Get time zone from string. Return None if unable to determine.
Async friendly. | def get_time_zone(time_zone_str: str) -> dt.tzinfo | None:
"""Get time zone from string. Return None if unable to determine.
Async friendly.
"""
try:
return zoneinfo.ZoneInfo(time_zone_str)
except zoneinfo.ZoneInfoNotFoundError:
return None |
Get now in specified time zone. | def now(time_zone: dt.tzinfo | None = None) -> dt.datetime:
"""Get now in specified time zone."""
return dt.datetime.now(time_zone or DEFAULT_TIME_ZONE) |
Return a datetime as UTC time.
Assumes datetime without tzinfo to be in the DEFAULT_TIME_ZONE. | def as_utc(dattim: dt.datetime) -> dt.datetime:
"""Return a datetime as UTC time.
Assumes datetime without tzinfo to be in the DEFAULT_TIME_ZONE.
"""
if dattim.tzinfo == UTC:
return dattim
if dattim.tzinfo is None:
dattim = dattim.replace(tzinfo=DEFAULT_TIME_ZONE)
return dattim.astimezone(UTC) |
Convert a date/time into a unix time (seconds since 1970). | def as_timestamp(dt_value: dt.datetime | str) -> float:
"""Convert a date/time into a unix time (seconds since 1970)."""
parsed_dt: dt.datetime | None
if isinstance(dt_value, dt.datetime):
parsed_dt = dt_value
else:
parsed_dt = parse_datetime(str(dt_value))
if parsed_dt is None:
raise ValueError("not a valid date/time.")
return parsed_dt.timestamp() |
Convert a UTC datetime object to local time zone. | def as_local(dattim: dt.datetime) -> dt.datetime:
"""Convert a UTC datetime object to local time zone."""
if dattim.tzinfo == DEFAULT_TIME_ZONE:
return dattim
if dattim.tzinfo is None:
dattim = dattim.replace(tzinfo=DEFAULT_TIME_ZONE)
return dattim.astimezone(DEFAULT_TIME_ZONE) |
Fast conversion of a datetime in UTC to a timestamp. | def utc_to_timestamp(utc_dt: dt.datetime) -> float:
"""Fast conversion of a datetime in UTC to a timestamp."""
# Taken from
# https://github.com/python/cpython/blob/3.10/Lib/zoneinfo/_zoneinfo.py#L185
return (
(utc_dt.toordinal() - EPOCHORDINAL) * 86400
+ utc_dt.hour * 3600
+ utc_dt.minute * 60
+ utc_dt.second
+ (utc_dt.microsecond / 1000000)
) |
Return local datetime object of start of day from date or datetime. | def start_of_local_day(dt_or_d: dt.date | dt.datetime | None = None) -> dt.datetime:
"""Return local datetime object of start of day from date or datetime."""
if dt_or_d is None:
date: dt.date = now().date()
elif isinstance(dt_or_d, dt.datetime):
date = dt_or_d.date()
else:
date = dt_or_d
return dt.datetime.combine(date, dt.time(), tzinfo=DEFAULT_TIME_ZONE) |
Parse a string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raises ValueError if the input is well formatted but not a valid datetime.
If the input isn't well formatted, returns None if raise_on_error is False
or raises ValueError if it's True. | def parse_datetime(dt_str: str, *, raise_on_error: bool = False) -> dt.datetime | None:
"""Parse a string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raises ValueError if the input is well formatted but not a valid datetime.
If the input isn't well formatted, returns None if raise_on_error is False
or raises ValueError if it's True.
"""
# First try if the string can be parsed by the fast ciso8601 library
with suppress(ValueError, IndexError):
return ciso8601.parse_datetime(dt_str)
# ciso8601 failed to parse the string, fall back to regex
if not (match := DATETIME_RE.match(dt_str)):
if raise_on_error:
raise ValueError
return None
kws: dict[str, Any] = match.groupdict()
if kws["microsecond"]:
kws["microsecond"] = kws["microsecond"].ljust(6, "0")
tzinfo_str = kws.pop("tzinfo")
tzinfo: dt.tzinfo | None = None
if tzinfo_str == "Z":
tzinfo = UTC
elif tzinfo_str is not None:
offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
offset_hours = int(tzinfo_str[1:3])
offset = dt.timedelta(hours=offset_hours, minutes=offset_mins)
if tzinfo_str[0] == "-":
offset = -offset
tzinfo = dt.timezone(offset)
kws = {k: int(v) for k, v in kws.items() if v is not None}
kws["tzinfo"] = tzinfo
return dt.datetime(**kws) |
Convert a date string to a date object. | def parse_date(dt_str: str) -> dt.date | None:
"""Convert a date string to a date object."""
try:
return dt.datetime.strptime(dt_str, DATE_STR_FORMAT).date()
except ValueError: # If dt_str did not match our format
return None |
Parse a duration string and return a datetime.timedelta.
Also supports ISO 8601 representation and PostgreSQL's day-time interval
format. | def parse_duration(value: str) -> dt.timedelta | None:
"""Parse a duration string and return a datetime.timedelta.
Also supports ISO 8601 representation and PostgreSQL's day-time interval
format.
"""
match = (
STANDARD_DURATION_RE.match(value)
or ISO8601_DURATION_RE.match(value)
or POSTGRES_INTERVAL_RE.match(value)
)
if match:
kws = match.groupdict()
sign = -1 if kws.pop("sign", "+") == "-" else 1
if kws.get("microseconds"):
kws["microseconds"] = kws["microseconds"].ljust(6, "0")
time_delta_args: dict[str, float] = {
k: float(v.replace(",", ".")) for k, v in kws.items() if v is not None
}
days = dt.timedelta(float(time_delta_args.pop("days", 0.0) or 0.0))
if match.re == ISO8601_DURATION_RE:
days *= sign
return days + sign * dt.timedelta(**time_delta_args)
return None |
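A few hedged examples for the duration parser above; the exact accepted spellings depend on the three regexes, which are not shown here, so treat the inputs as assumptions.
parse_duration("PT1H30M")         # ISO 8601 style -> datetime.timedelta(seconds=5400), assuming ISO8601_DURATION_RE matches
parse_duration("1:30:00")         # standard style -> datetime.timedelta(hours=1, minutes=30), assuming STANDARD_DURATION_RE matches
parse_duration("not a duration")  # -> None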
Parse a time string (00:20:00) into Time object.
Return None if invalid. | def parse_time(time_str: str) -> dt.time | None:
"""Parse a time string (00:20:00) into Time object.
Return None if invalid.
"""
parts = str(time_str).split(":")
if len(parts) < 2:
return None
try:
hour = int(parts[0])
minute = int(parts[1])
second = int(parts[2]) if len(parts) > 2 else 0
return dt.time(hour, minute, second)
except ValueError:
# ValueError if value cannot be converted to an int or not in range
return None |
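A short sketch, assuming parse_time above is in scope.
import datetime as dt
assert parse_time("00:20:00") == dt.time(0, 20)
assert parse_time("7:5") == dt.time(7, 5)   # seconds default to 0
assert parse_time("25:00") is None          # out of range -> None
assert parse_time("bogus") is None          # not enough parts -> None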
Return a string representation of a time diff. | def _get_timestring(timediff: float, precision: int = 1) -> str:
"""Return a string representation of a time diff."""
def formatn(number: int, unit: str) -> str:
"""Add "unit" if it's plural."""
if number == 1:
return f"1 {unit} "
return f"{number:d} {unit}s "
if timediff == 0.0:
return "0 seconds"
units = ("year", "month", "day", "hour", "minute", "second")
factors = (365 * 24 * 60 * 60, 30 * 24 * 60 * 60, 24 * 60 * 60, 60 * 60, 60, 1)
result_string: str = ""
current_precision = 0
for i, current_factor in enumerate(factors):
selected_unit = units[i]
if timediff < current_factor:
continue
current_precision = current_precision + 1
if current_precision == precision:
return (
result_string + formatn(round(timediff / current_factor), selected_unit)
).rstrip()
curr_diff = int(timediff // current_factor)
result_string += formatn(curr_diff, selected_unit)
timediff -= (curr_diff) * current_factor
return result_string.rstrip() |
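A sketch of how precision selects the number of units, assuming the helper above is in scope.
assert _get_timestring(0) == "0 seconds"
assert _get_timestring(61) == "1 minute"
assert _get_timestring(3661, precision=2) == "1 hour 1 minute"
assert _get_timestring(3661, precision=3) == "1 hour 1 minute 1 second"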
Take a datetime and return its "age" as a string.
The age can be in second, minute, hour, day, month and year.
precision number of units will be returned, with the last unit rounded.
The date must be in the past or a ValueError will be raised. | def get_age(date: dt.datetime, precision: int = 1) -> str:
"""Take a datetime and return its "age" as a string.
The age can be in second, minute, hour, day, month and year.
precision number of units will be returned, with the last unit rounded.
The date must be in the past or a ValueError will be raised.
"""
delta = (now() - date).total_seconds()
rounded_delta = round(delta)
if rounded_delta < 0:
raise ValueError("Time value is in the future")
return _get_timestring(rounded_delta, precision) |
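A sketch, assuming get_age and the module's now() are in scope.
import datetime as dt
two_hours_ago = now() - dt.timedelta(hours=2, minutes=5)
assert get_age(two_hours_ago) == "2 hours"
assert get_age(two_hours_ago, precision=2) == "2 hours 5 minutes"
# A date in the future raises ValueError.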
Take a datetime and return the time remaining until it as a string.
The remaining time can be in second, minute, hour, day, month and year.
precision number of units will be returned, with the last unit rounded.
The date must be in the future or a ValueError will be raised. | def get_time_remaining(date: dt.datetime, precision: int = 1) -> str:
"""Take a datetime and return its "age" as a string.
The age can be in second, minute, hour, day, month and year.
depth number of units will be returned, with the last unit rounded
The date must be in the future or a ValueException will be raised.
"""
delta = (date - now()).total_seconds()
rounded_delta = round(delta)
if rounded_delta < 0:
raise ValueError("Time value is in the past")
return _get_timestring(rounded_delta, precision) |
Parse the time expression part and return a list of times to match. | def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> list[int]:
"""Parse the time expression part and return a list of times to match."""
if parameter is None or parameter == "*":
res = list(range(min_value, max_value + 1))
elif isinstance(parameter, str):
if parameter.startswith("/"):
parameter = int(parameter[1:])
res = [x for x in range(min_value, max_value + 1) if x % parameter == 0]
else:
res = [int(parameter)]
elif not hasattr(parameter, "__iter__"):
res = [int(parameter)]
else:
res = sorted(int(x) for x in parameter)
for val in res:
if val < min_value or val > max_value:
raise ValueError(
f"Time expression '{parameter}': parameter {val} out of range "
f"({min_value} to {max_value})"
)
return res |
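A sketch of the accepted forms (wildcard, step, single value, iterable), assuming the function above is in scope.
assert parse_time_expression("*", 0, 59) == list(range(60))
assert parse_time_expression("/15", 0, 59) == [0, 15, 30, 45]
assert parse_time_expression(5, 0, 59) == [5]
assert parse_time_expression([45, 15, 30], 0, 59) == [15, 30, 45]   # sorted
# Values outside min/max raise ValueError.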
Return the offset when crossing the DST barrier. | def _dst_offset_diff(dattim: dt.datetime) -> dt.timedelta:
"""Return the offset when crossing the DST barrier."""
delta = dt.timedelta(hours=24)
return (dattim + delta).utcoffset() - ( # type: ignore[operator]
dattim - delta
).utcoffset() |
Return the first value in arr greater or equal to cmp.
Return None if no such value exists. | def _lower_bound(arr: list[int], cmp: int) -> int | None:
"""Return the first value in arr greater or equal to cmp.
Return None if no such value exists.
"""
if (left := bisect.bisect_left(arr, cmp)) == len(arr):
return None
return arr[left] |
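A sketch of the lower-bound lookup the scheduler relies on, assuming the helper above is in scope.
matches = [0, 15, 30, 45]
assert _lower_bound(matches, 20) == 30    # next value >= 20
assert _lower_bound(matches, 30) == 30    # "greater or equal"
assert _lower_bound(matches, 50) is None  # nothing left -> roll over to next unit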
Find the next datetime from now for which the time expression matches.
The algorithm looks at each time unit separately and tries to find the
next one that matches for each. If any of them would roll over, all
time units below that are reset to the first matching value.
Timezones are also handled (the tzinfo of the now object is used),
including daylight saving time. | def find_next_time_expression_time(
now: dt.datetime, # pylint: disable=redefined-outer-name
seconds: list[int],
minutes: list[int],
hours: list[int],
) -> dt.datetime:
"""Find the next datetime from now for which the time expression matches.
The algorithm looks at each time unit separately and tries to find the
next one that matches for each. If any of them would roll over, all
time units below that are reset to the first matching value.
Timezones are also handled (the tzinfo of the now object is used),
including daylight saving time.
"""
if not seconds or not minutes or not hours:
raise ValueError("Cannot find a next time: Time expression never matches!")
while True:
# Reset microseconds and fold; fold (for ambiguous DST times) will be
# handled later.
result = now.replace(microsecond=0, fold=0)
# Match next second
if (next_second := _lower_bound(seconds, result.second)) is None:
# No second to match in this minute. Roll-over to next minute.
next_second = seconds[0]
result += dt.timedelta(minutes=1)
if result.second != next_second:
result = result.replace(second=next_second)
# Match next minute
next_minute = _lower_bound(minutes, result.minute)
if next_minute != result.minute:
# We're in the next minute. Seconds needs to be reset.
result = result.replace(second=seconds[0])
if next_minute is None:
# No minute to match in this hour. Roll-over to next hour.
next_minute = minutes[0]
result += dt.timedelta(hours=1)
if result.minute != next_minute:
result = result.replace(minute=next_minute)
# Match next hour
next_hour = _lower_bound(hours, result.hour)
if next_hour != result.hour:
# We're in the next hour. Seconds+minutes needs to be reset.
result = result.replace(second=seconds[0], minute=minutes[0])
if next_hour is None:
# No hour to match in this day. Roll-over to next day.
next_hour = hours[0]
result += dt.timedelta(days=1)
if result.hour != next_hour:
result = result.replace(hour=next_hour)
if result.tzinfo in (None, UTC):
# Using UTC, no DST checking needed
return result
if not _datetime_exists(result):
# When entering DST and clocks are turned forward.
# There are wall clock times that don't "exist" (an hour is skipped).
# -> trigger on the next time that 1. matches the pattern and 2. does exist
# for example:
# on 2021.03.28 02:00:00 in CET timezone clocks are turned forward an hour
# with pattern "02:30", don't run on 28 mar (such a wall time does not
# exist on this day) instead run at 02:30 the next day
# We solve this edge case by just iterating one second until the result
# exists (max. 3600 operations, which should be fine for an edge case that
# happens once a year)
now += dt.timedelta(seconds=1)
continue
if not _datetime_ambiguous(now):
return result
# When leaving DST and clocks are turned backward.
# Then there are wall clock times that are ambiguous i.e. exist with DST and
# without DST. The logic above does not take into account if a given pattern
# matches _twice_ in a day.
# Example: on 2021.10.31 02:00:00 in CET timezone clocks are turned
# backward an hour.
if _datetime_ambiguous(result):
# `now` and `result` are both ambiguous, so the next match happens
# _within_ the current fold.
# Examples:
# 1. 2021.10.31 02:00:00+02:00 with pattern 02:30
# -> 2021.10.31 02:30:00+02:00
# 2. 2021.10.31 02:00:00+01:00 with pattern 02:30
# -> 2021.10.31 02:30:00+01:00
return result.replace(fold=now.fold)
if now.fold == 0:
# `now` is in the first fold, but result is not ambiguous (meaning it no
# longer matches within the fold).
# -> Check if result matches in the next fold. If so, emit that match
# Turn back the time by the DST offset, effectively run the algorithm on
# the first fold. If it matches on the first fold, that means it will also
# match on the second one.
# Example: 2021.10.31 02:45:00+02:00 with pattern 02:30
# -> 2021.10.31 02:30:00+01:00
check_result = find_next_time_expression_time(
now + _dst_offset_diff(now), seconds, minutes, hours
)
if _datetime_ambiguous(check_result):
return check_result.replace(fold=1)
return result |
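A sketch tying the helpers together: the next wall-clock time matching "minute 30" after a given aware datetime. It assumes the functions above are in scope and uses Python's zoneinfo for the time zone.
import datetime as dt
from zoneinfo import ZoneInfo
berlin = ZoneInfo("Europe/Berlin")
start = dt.datetime(2021, 10, 30, 2, 45, tzinfo=berlin)
nxt = find_next_time_expression_time(
    start,
    seconds=parse_time_expression(0, 0, 59),
    minutes=parse_time_expression(30, 0, 59),
    hours=parse_time_expression("*", 0, 23),
)
assert nxt == dt.datetime(2021, 10, 30, 3, 30, tzinfo=berlin)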
Check if a datetime exists. | def _datetime_exists(dattim: dt.datetime) -> bool:
"""Check if a datetime exists."""
assert dattim.tzinfo is not None
original_tzinfo = dattim.tzinfo
# Check if we can round trip to UTC
return dattim == dattim.astimezone(UTC).astimezone(original_tzinfo) |
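A sketch of the spring-forward check, assuming the helper above is in scope.
import datetime as dt
from zoneinfo import ZoneInfo
berlin = ZoneInfo("Europe/Berlin")
# On 2021-03-28 clocks in CET jump from 02:00 to 03:00, so 02:30 never occurs.
assert not _datetime_exists(dt.datetime(2021, 3, 28, 2, 30, tzinfo=berlin))
assert _datetime_exists(dt.datetime(2021, 3, 28, 3, 30, tzinfo=berlin))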