Each row of the dataset pairs a Python function with its docstring, a tokenized copy of the function, its start/end position in the source file, and language-identification metadata for the docstring. The columns and their types:

| Column | Type |
|---|---|
| identifier | string (lengths 1-155) |
| parameters | string (lengths 2-6.09k) |
| docstring | string (lengths 11-63.4k) |
| docstring_summary | string (lengths 0-63.4k) |
| function | string (lengths 29-99.8k) |
| function_tokens | sequence (the function source split into individual tokens) |
| start_point | sequence |
| end_point | sequence |
| language | string (1 class) |
| docstring_language | string (lengths 2-7) |
| docstring_language_predictions | string (lengths 18-23) |
| is_langid_reliable | string (2 classes) |

The rows in this section follow, one record per function.
**`test_turn_on`**

- parameters: `(hass, light_devices)`
- docstring_summary: Test the light turns on successfully.

```python
async def test_turn_on(hass, light_devices):
    """Test the light turns on successfully."""
    # Arrange
    await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
    # Act
    await hass.services.async_call(
        "light", "turn_on", {ATTR_ENTITY_ID: "light.color_dimmer_1"}, blocking=True
    )
    # Assert
    state = hass.states.get("light.color_dimmer_1")
    assert state is not None
    assert state.state == "on"
```

- start_point: [155, 0], end_point: [166, 30]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`test_turn_on_with_brightness`**

- parameters: `(hass, light_devices)`
- docstring_summary: Test the light turns on to the specified brightness.

```python
async def test_turn_on_with_brightness(hass, light_devices):
    """Test the light turns on to the specified brightness."""
    # Arrange
    await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
    # Act
    await hass.services.async_call(
        "light",
        "turn_on",
        {
            ATTR_ENTITY_ID: "light.color_dimmer_1",
            ATTR_BRIGHTNESS: 75,
            ATTR_TRANSITION: 2,
        },
        blocking=True,
    )
    # Assert
    state = hass.states.get("light.color_dimmer_1")
    assert state is not None
    assert state.state == "on"
    # round-trip rounding error (expected)
    assert state.attributes[ATTR_BRIGHTNESS] == 74
```

- start_point: [169, 0], end_point: [189, 50]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
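The expected value of 74 in the final assertion comes from converting brightness between Home Assistant's 0-255 scale and SmartThings' 0-100 percentage and back. A minimal sketch of that round trip; the helper names and exact rounding are illustrative, not the integration's actual functions:

```python
def ha_to_st(brightness: int) -> int:
    """Convert Home Assistant brightness (0-255) to a SmartThings level (0-100)."""
    return round(brightness / 255 * 100)

def st_to_ha(level: int) -> int:
    """Convert a SmartThings level (0-100) back to Home Assistant brightness (0-255)."""
    return round(level / 100 * 255)

assert ha_to_st(75) == 29   # 75/255 is about 29.4%, rounded to 29
assert st_to_ha(29) == 74   # 29% of 255 is about 73.95, rounded to 74, hence the expected drift
```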
**`test_turn_on_with_minimal_brightness`**

- parameters: `(hass, light_devices)`
- docstring_summary: Test lights set to lowest brightness when converted scale would be zero.

```python
async def test_turn_on_with_minimal_brightness(hass, light_devices):
    """
    Test lights set to lowest brightness when converted scale would be zero.

    SmartThings light brightness is a percentage (0-100), but Home Assistant uses a
    0-255 scale. This tests that if a really low value (1-2) is passed, we don't
    set the level to zero, which turns off the lights in SmartThings.
    """
    # Arrange
    await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
    # Act
    await hass.services.async_call(
        "light",
        "turn_on",
        {ATTR_ENTITY_ID: "light.color_dimmer_1", ATTR_BRIGHTNESS: 2},
        blocking=True,
    )
    # Assert
    state = hass.states.get("light.color_dimmer_1")
    assert state is not None
    assert state.state == "on"
    # round-trip rounding error (expected)
    assert state.attributes[ATTR_BRIGHTNESS] == 3
```

- start_point: [192, 0], end_point: [214, 49]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'error', 'th'], is_langid_reliable: False
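The expected value of 3 follows the same round trip plus the clamp described in the docstring: a converted level of zero would switch the light off, so the level is kept at a minimum of 1. A sketch under that assumption; the helper name and the exact clamping logic are illustrative:

```python
def ha_to_st_min(brightness: int) -> int:
    """Convert 0-255 brightness to a 0-100 level, never letting a lit value hit zero."""
    return max(1, round(brightness / 255 * 100))

assert ha_to_st_min(1) == 1          # 1/255 is about 0.4%, rounds to 0, clamped up to 1
assert ha_to_st_min(2) == 1          # 2/255 is about 0.8%, rounds to 1
assert round(1 / 100 * 255) == 3     # reading the level back: 1% of 255 is about 2.55, rounds to 3
```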
**`test_turn_on_with_color`**

- parameters: `(hass, light_devices)`
- docstring_summary: Test the light turns on with color.

```python
async def test_turn_on_with_color(hass, light_devices):
    """Test the light turns on with color."""
    # Arrange
    await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
    # Act
    await hass.services.async_call(
        "light",
        "turn_on",
        {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_HS_COLOR: (180, 50)},
        blocking=True,
    )
    # Assert
    state = hass.states.get("light.color_dimmer_2")
    assert state is not None
    assert state.state == "on"
    assert state.attributes[ATTR_HS_COLOR] == (180, 50)
```

- start_point: [217, 0], end_point: [232, 55]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`test_turn_on_with_color_temp`**

- parameters: `(hass, light_devices)`
- docstring_summary: Test the light turns on with color temp.

```python
async def test_turn_on_with_color_temp(hass, light_devices):
    """Test the light turns on with color temp."""
    # Arrange
    await setup_platform(hass, LIGHT_DOMAIN, devices=light_devices)
    # Act
    await hass.services.async_call(
        "light",
        "turn_on",
        {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP: 300},
        blocking=True,
    )
    # Assert
    state = hass.states.get("light.color_dimmer_2")
    assert state is not None
    assert state.state == "on"
    assert state.attributes[ATTR_COLOR_TEMP] == 300
```

- start_point: [235, 0], end_point: [250, 51]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`test_update_from_signal`**

- parameters: `(hass, device_factory)`
- docstring_summary: Test the light updates when receiving a signal.

```python
async def test_update_from_signal(hass, device_factory):
    """Test the light updates when receiving a signal."""
    # Arrange
    device = device_factory(
        "Color Dimmer 2",
        capabilities=[
            Capability.switch,
            Capability.switch_level,
            Capability.color_control,
            Capability.color_temperature,
        ],
        status={
            Attribute.switch: "off",
            Attribute.level: 100,
            Attribute.hue: 76.0,
            Attribute.saturation: 55.0,
            Attribute.color_temperature: 4500,
        },
    )
    await setup_platform(hass, LIGHT_DOMAIN, devices=[device])
    await device.switch_on(True)
    # Act
    async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
    # Assert
    await hass.async_block_till_done()
    state = hass.states.get("light.color_dimmer_2")
    assert state is not None
    assert state.state == "on"
```

- start_point: [253, 0], end_point: [280, 30]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'lb', 'en'], is_langid_reliable: True
**`test_unload_config_entry`**

- parameters: `(hass, device_factory)`
- docstring_summary: Test the light is removed when the config entry is unloaded.

```python
async def test_unload_config_entry(hass, device_factory):
    """Test the light is removed when the config entry is unloaded."""
    # Arrange
    device = device_factory(
        "Color Dimmer 2",
        capabilities=[
            Capability.switch,
            Capability.switch_level,
            Capability.color_control,
            Capability.color_temperature,
        ],
        status={
            Attribute.switch: "off",
            Attribute.level: 100,
            Attribute.hue: 76.0,
            Attribute.saturation: 55.0,
            Attribute.color_temperature: 4500,
        },
    )
    config_entry = await setup_platform(hass, LIGHT_DOMAIN, devices=[device])
    # Act
    await hass.config_entries.async_forward_entry_unload(config_entry, "light")
    # Assert
    assert not hass.states.get("light.color_dimmer_2")
```

- start_point: [283, 0], end_point: [306, 54]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`check_task`**

- parameters: `(task: str)`
- docstring_summary: Checks an incoming task string, to validate it's correct and return the default Pipeline and Model classes, and default models if they exist.

```python
def check_task(task: str) -> Tuple[Dict, Any]:
    """
    Checks an incoming task string, to validate it's correct and return the default Pipeline and Model classes, and
    default models if they exist.

    Args:
        task (:obj:`str`):
            The task defining which pipeline will be returned. Currently accepted tasks are:

            - :obj:`"feature-extraction"`
            - :obj:`"sentiment-analysis"`
            - :obj:`"ner"`
            - :obj:`"question-answering"`
            - :obj:`"fill-mask"`
            - :obj:`"summarization"`
            - :obj:`"translation_xx_to_yy"`
            - :obj:`"translation"`
            - :obj:`"text-generation"`
            - :obj:`"conversational"`

    Returns:
        (task_defaults:obj:`dict`, task_options: (:obj:`tuple`, None)) The actual dictionary required to initialize the
        pipeline and some extra task options for parametrized tasks like "translation_XX_to_YY"
    """
    if task in SUPPORTED_TASKS:
        targeted_task = SUPPORTED_TASKS[task]
        return targeted_task, None

    if task.startswith("translation"):
        tokens = task.split("_")
        if len(tokens) == 4 and tokens[0] == "translation" and tokens[2] == "to":
            targeted_task = SUPPORTED_TASKS["translation"]
            return targeted_task, (tokens[1], tokens[3])
        raise KeyError("Invalid translation task {}, use 'translation_XX_to_YY' format".format(task))

    raise KeyError(
        "Unknown task {}, available tasks are {}".format(task, list(SUPPORTED_TASKS.keys()) + ["translation_XX_to_YY"])
    )
```

- start_point: [198, 0], end_point: [237, 5]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'error', 'th'], is_langid_reliable: False
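The translation branch above parses names like `"translation_XX_to_YY"` into source and target language codes. A minimal usage sketch, assuming `SUPPORTED_TASKS` defines "ner" and "translation" entries and that the exact string "translation_en_to_sv" is not itself a predefined key:

```python
# Sketch only, not part of the library's documented examples.
defaults, options = check_task("ner")
assert options is None                     # plain task names carry no extra options

defaults, options = check_task("translation_en_to_sv")
assert options == ("en", "sv")             # parsed from the "translation_XX_to_YY" pattern

check_task("translation_en_sv")            # raises KeyError: invalid translation task format
```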
**`pipeline`**

- parameters: `(task: str, model: Optional = None, config: Optional[Union[str, PretrainedConfig]] = None, tokenizer: Optional[Union[str, PreTrainedTokenizer]] = None, framework: Optional[str] = None, revision: Optional[str] = None, use_fast: bool = True, model_kwargs: Dict[str, Any] = {}, **kwargs)`
- docstring_summary: Utility factory method to build a :class:`~transformers.Pipeline`.

```python
def pipeline(
    task: str,
    model: Optional = None,
    config: Optional[Union[str, PretrainedConfig]] = None,
    tokenizer: Optional[Union[str, PreTrainedTokenizer]] = None,
    framework: Optional[str] = None,
    revision: Optional[str] = None,
    use_fast: bool = True,
    model_kwargs: Dict[str, Any] = {},
    **kwargs
) -> Pipeline:
    """
    Utility factory method to build a :class:`~transformers.Pipeline`.

    Pipelines are made of:

        - A :doc:`tokenizer <tokenizer>` in charge of mapping raw textual input to token.
        - A :doc:`model <model>` to make predictions from the inputs.
        - Some (optional) post processing for enhancing model's output.

    Args:
        task (:obj:`str`):
            The task defining which pipeline will be returned. Currently accepted tasks are:

            - :obj:`"feature-extraction"`: will return a :class:`~transformers.FeatureExtractionPipeline`.
            - :obj:`"sentiment-analysis"`: will return a :class:`~transformers.TextClassificationPipeline`.
            - :obj:`"ner"`: will return a :class:`~transformers.TokenClassificationPipeline`.
            - :obj:`"question-answering"`: will return a :class:`~transformers.QuestionAnsweringPipeline`.
            - :obj:`"fill-mask"`: will return a :class:`~transformers.FillMaskPipeline`.
            - :obj:`"summarization"`: will return a :class:`~transformers.SummarizationPipeline`.
            - :obj:`"translation_xx_to_yy"`: will return a :class:`~transformers.TranslationPipeline`.
            - :obj:`"text2text-generation"`: will return a :class:`~transformers.Text2TextGenerationPipeline`.
            - :obj:`"text-generation"`: will return a :class:`~transformers.TextGenerationPipeline`.
            - :obj:`"zero-shot-classification"`: will return a :class:`~transformers.ZeroShotClassificationPipeline`.
            - :obj:`"conversational"`: will return a :class:`~transformers.ConversationalPipeline`.
        model (:obj:`str` or :obj:`~transformers.PreTrainedModel` or :obj:`~transformers.TFPreTrainedModel`, `optional`):
            The model that will be used by the pipeline to make predictions. This can be a model identifier or an
            actual instance of a pretrained model inheriting from :class:`~transformers.PreTrainedModel` (for PyTorch)
            or :class:`~transformers.TFPreTrainedModel` (for TensorFlow).

            If not provided, the default for the :obj:`task` will be loaded.
        config (:obj:`str` or :obj:`~transformers.PretrainedConfig`, `optional`):
            The configuration that will be used by the pipeline to instantiate the model. This can be a model
            identifier or an actual pretrained model configuration inheriting from
            :class:`~transformers.PretrainedConfig`.

            If not provided, the default configuration file for the requested model will be used. That means that if
            :obj:`model` is given, its default configuration will be used. However, if :obj:`model` is not supplied,
            this :obj:`task`'s default model's config is used instead.
        tokenizer (:obj:`str` or :obj:`~transformers.PreTrainedTokenizer`, `optional`):
            The tokenizer that will be used by the pipeline to encode data for the model. This can be a model
            identifier or an actual pretrained tokenizer inheriting from :class:`~transformers.PreTrainedTokenizer`.

            If not provided, the default tokenizer for the given :obj:`model` will be loaded (if it is a string). If
            :obj:`model` is not specified or not a string, then the default tokenizer for :obj:`config` is loaded (if
            it is a string). However, if :obj:`config` is also not given or not a string, then the default tokenizer
            for the given :obj:`task` will be loaded.
        framework (:obj:`str`, `optional`):
            The framework to use, either :obj:`"pt"` for PyTorch or :obj:`"tf"` for TensorFlow. The specified framework
            must be installed.

            If no framework is specified, will default to the one currently installed. If no framework is specified and
            both frameworks are installed, will default to the framework of the :obj:`model`, or to PyTorch if no model
            is provided.
        revision (:obj:`str`, `optional`, defaults to :obj:`"main"`):
            When passing a task name or a string model identifier: The specific model version to use. It can be a
            branch name, a tag name, or a commit id, since we use a git-based system for storing models and other
            artifacts on huggingface.co, so ``revision`` can be any identifier allowed by git.
        use_fast (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether or not to use a Fast tokenizer if possible (a :class:`~transformers.PreTrainedTokenizerFast`).
        model_kwargs:
            Additional dictionary of keyword arguments passed along to the model's :obj:`from_pretrained(...,
            **model_kwargs)` function.
        kwargs:
            Additional keyword arguments passed along to the specific pipeline init (see the documentation for the
            corresponding pipeline class for possible values).

    Returns:
        :class:`~transformers.Pipeline`: A suitable pipeline for the task.

    Examples::

        >>> from transformers import pipeline, AutoModelForTokenClassification, AutoTokenizer

        >>> # Sentiment analysis pipeline
        >>> pipeline('sentiment-analysis')

        >>> # Question answering pipeline, specifying the checkpoint identifier
        >>> pipeline('question-answering', model='distilbert-base-cased-distilled-squad', tokenizer='bert-base-cased')

        >>> # Named entity recognition pipeline, passing in a specific model and tokenizer
        >>> model = AutoModelForTokenClassification.from_pretrained("dbmdz/bert-large-cased-finetuned-conll03-english")
        >>> tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")
        >>> pipeline('ner', model=model, tokenizer=tokenizer)
    """
    # Retrieve the task
    targeted_task, task_options = check_task(task)

    # Use default model/config/tokenizer for the task if no model is provided
    if model is None:
        # At that point framework might still be undetermined
        model = get_default_model(targeted_task, framework, task_options)

    # Try to infer tokenizer from model or config name (if provided as str)
    if tokenizer is None:
        if isinstance(model, str):
            tokenizer = model
        elif isinstance(config, str):
            tokenizer = config
        else:
            # Impossible to guess what is the right tokenizer here
            raise Exception(
                "Impossible to guess which tokenizer to use. "
                "Please provide a PretrainedTokenizer class or a path/identifier to a pretrained tokenizer."
            )

    modelcard = None
    # Try to infer modelcard from model or config name (if provided as str)
    if isinstance(model, str):
        modelcard = model
    elif isinstance(config, str):
        modelcard = config

    # Infer the framework from the model
    if framework is None:
        framework, model = infer_framework_from_model(model, targeted_task, revision=revision)

    task_class, model_class = targeted_task["impl"], targeted_task[framework]

    # Instantiate tokenizer if needed
    if isinstance(tokenizer, (str, tuple)):
        if isinstance(tokenizer, tuple):
            # For tuple we have (tokenizer name, {kwargs})
            use_fast = tokenizer[1].pop("use_fast", use_fast)
            tokenizer = AutoTokenizer.from_pretrained(
                tokenizer[0], use_fast=use_fast, revision=revision, **tokenizer[1]
            )
        else:
            tokenizer = AutoTokenizer.from_pretrained(tokenizer, revision=revision, use_fast=use_fast)

    # Instantiate config if needed
    if isinstance(config, str):
        config = AutoConfig.from_pretrained(config, revision=revision)

    # Instantiate modelcard if needed
    if isinstance(modelcard, str):
        modelcard = ModelCard.from_pretrained(modelcard, revision=revision)

    # Instantiate model if needed
    if isinstance(model, str):
        # Handle transparent TF/PT model conversion
        if framework == "pt" and model.endswith(".h5"):
            model_kwargs["from_tf"] = True
            logger.warning(
                "Model might be a TensorFlow model (ending with `.h5`) but TensorFlow is not available. "
                "Trying to load the model with PyTorch."
            )
        elif framework == "tf" and model.endswith(".bin"):
            model_kwargs["from_pt"] = True
            logger.warning(
                "Model might be a PyTorch model (ending with `.bin`) but PyTorch is not available. "
                "Trying to load the model with Tensorflow."
            )

        if model_class is None:
            raise ValueError(
                f"Pipeline using {framework} framework, but this framework is not supported by this pipeline."
            )

        model = model_class.from_pretrained(model, config=config, revision=revision, **model_kwargs)

    if task == "translation" and model.config.task_specific_params:
        for key in model.config.task_specific_params:
            if key.startswith("translation"):
                task = key
                warnings.warn(
                    f'"translation" task was used, instead of "translation_XX_to_YY", defaulting to "{task}"',
                    UserWarning,
                )
                break

    return task_class(model=model, tokenizer=tokenizer, modelcard=modelcard, framework=framework, task=task, **kwargs)
```

- start_point: [240, 0], end_point: [421, 118]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'error', 'th'], is_langid_reliable: False
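Beyond the docstring examples, the body above also accepts the tokenizer as a `(name, kwargs)` tuple and threads `revision` through every `from_pretrained` call. A small usage sketch; the checkpoint name comes from the docstring example, while the revision value and the overall call shape are illustrative rather than taken from official documentation:

```python
from transformers import pipeline

# Plain task: the default model for that task is resolved via get_default_model.
nlp = pipeline("sentiment-analysis")

# Tokenizer passed as a (name, kwargs) tuple; "use_fast" is popped from the kwargs dict.
ner = pipeline(
    "ner",
    model="dbmdz/bert-large-cased-finetuned-conll03-english",
    tokenizer=("bert-base-cased", {"use_fast": False}),
    revision="main",  # the documented default branch
)
```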
**`async_setup_platform`**

- parameters: `(hass, config, async_add_entities, discovery_info=None)`
- docstring_summary: Set up the eight sleep sensors.

```python
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the eight sleep sensors."""
    if discovery_info is None:
        return

    name = "Eight"
    sensors = discovery_info[CONF_SENSORS]
    eight = hass.data[DATA_EIGHT]

    if hass.config.units.is_metric:
        units = "si"
    else:
        units = "us"

    all_sensors = []

    for sensor in sensors:
        if "bed_state" in sensor:
            all_sensors.append(EightHeatSensor(name, eight, sensor))
        elif "room_temp" in sensor:
            all_sensors.append(EightRoomSensor(name, eight, sensor, units))
        else:
            all_sensors.append(EightUserSensor(name, eight, sensor, units))

    async_add_entities(all_sensors, True)
```

- start_point: [41, 0], end_point: [65, 41]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'et', 'en'], is_langid_reliable: True
**`EightHeatSensor.__init__`**

- parameters: `(self, name, eight, sensor)`
- docstring_summary: Initialize the sensor.

```python
def __init__(self, name, eight, sensor):
    """Initialize the sensor."""
    super().__init__(eight)
    self._sensor = sensor
    self._mapped_name = NAME_MAP.get(self._sensor, self._sensor)
    self._name = f"{name} {self._mapped_name}"
    self._state = None

    self._side = self._sensor.split("_")[0]
    self._userid = self._eight.fetch_userid(self._side)
    self._usrobj = self._eight.users[self._userid]

    _LOGGER.debug(
        "Heat Sensor: %s, Side: %s, User: %s",
        self._sensor,
        self._side,
        self._userid,
    )
```

- start_point: [71, 4], end_point: [89, 9]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightHeatSensor.name`**

- parameters: `(self)`
- docstring_summary: Return the name of the sensor, if any.

```python
def name(self):
    """Return the name of the sensor, if any."""
    return self._name
```

- start_point: [92, 4], end_point: [94, 25]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightHeatSensor.state`**

- parameters: `(self)`
- docstring_summary: Return the state of the sensor.

```python
def state(self):
    """Return the state of the sensor."""
    return self._state
```

- start_point: [97, 4], end_point: [99, 26]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightHeatSensor.unit_of_measurement`**

- parameters: `(self)`
- docstring_summary: Return the unit the value is expressed in.

```python
def unit_of_measurement(self):
    """Return the unit the value is expressed in."""
    return PERCENTAGE
```

- start_point: [102, 4], end_point: [104, 25]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightHeatSensor.async_update`**

- parameters: `(self)`
- docstring_summary: Retrieve latest state.

```python
async def async_update(self):
    """Retrieve latest state."""
    _LOGGER.debug("Updating Heat sensor: %s", self._sensor)
    self._state = self._usrobj.heating_level
```

- start_point: [106, 4], end_point: [109, 48]
- language: python, docstring_language: en, docstring_language_predictions: ['es', 'sk', 'en'], is_langid_reliable: False
**`EightHeatSensor.device_state_attributes`**

- parameters: `(self)`
- docstring_summary: Return device state attributes.

```python
def device_state_attributes(self):
    """Return device state attributes."""
    return {
        ATTR_TARGET_HEAT: self._usrobj.target_heating_level,
        ATTR_ACTIVE_HEAT: self._usrobj.now_heating,
        ATTR_DURATION_HEAT: self._usrobj.heating_remaining,
    }
```

- start_point: [112, 4], end_point: [118, 9]
- language: python, docstring_language: en, docstring_language_predictions: ['es', 'en', 'en'], is_langid_reliable: True
**`EightUserSensor.__init__`**

- parameters: `(self, name, eight, sensor, units)`
- docstring_summary: Initialize the sensor.

```python
def __init__(self, name, eight, sensor, units):
    """Initialize the sensor."""
    super().__init__(eight)
    self._sensor = sensor
    self._sensor_root = self._sensor.split("_", 1)[1]
    self._mapped_name = NAME_MAP.get(self._sensor, self._sensor)
    self._name = f"{name} {self._mapped_name}"
    self._state = None
    self._attr = None
    self._units = units

    self._side = self._sensor.split("_", 1)[0]
    self._userid = self._eight.fetch_userid(self._side)
    self._usrobj = self._eight.users[self._userid]

    _LOGGER.debug(
        "User Sensor: %s, Side: %s, User: %s",
        self._sensor,
        self._side,
        self._userid,
    )
```

- start_point: [124, 4], end_point: [145, 9]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightUserSensor.name`**

- parameters: `(self)`
- docstring_summary: Return the name of the sensor, if any.

```python
def name(self):
    """Return the name of the sensor, if any."""
    return self._name
```

- start_point: [148, 4], end_point: [150, 25]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightUserSensor.state`**

- parameters: `(self)`
- docstring_summary: Return the state of the sensor.

```python
def state(self):
    """Return the state of the sensor."""
    return self._state
```

- start_point: [153, 4], end_point: [155, 26]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightUserSensor.unit_of_measurement`**

- parameters: `(self)`
- docstring_summary: Return the unit the value is expressed in.

```python
def unit_of_measurement(self):
    """Return the unit the value is expressed in."""
    if (
        "current_sleep" in self._sensor
        or "last_sleep" in self._sensor
        or "current_sleep_fitness" in self._sensor
    ):
        return "Score"
    if "bed_temp" in self._sensor:
        if self._units == "si":
            return TEMP_CELSIUS
        return TEMP_FAHRENHEIT
    return None
```

- start_point: [158, 4], end_point: [170, 19]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightUserSensor.icon`**

- parameters: `(self)`
- docstring_summary: Icon to use in the frontend, if any.

```python
def icon(self):
    """Icon to use in the frontend, if any."""
    if "bed_temp" in self._sensor:
        return "mdi:thermometer"
```

- start_point: [173, 4], end_point: [176, 36]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightUserSensor.async_update`**

- parameters: `(self)`
- docstring_summary: Retrieve latest state.

```python
async def async_update(self):
    """Retrieve latest state."""
    _LOGGER.debug("Updating User sensor: %s", self._sensor)
    if "current" in self._sensor:
        if "fitness" in self._sensor:
            self._state = self._usrobj.current_sleep_fitness_score
            self._attr = self._usrobj.current_fitness_values
        else:
            self._state = self._usrobj.current_sleep_score
            self._attr = self._usrobj.current_values
    elif "last" in self._sensor:
        self._state = self._usrobj.last_sleep_score
        self._attr = self._usrobj.last_values
    elif "bed_temp" in self._sensor:
        temp = self._usrobj.current_values["bed_temp"]
        try:
            if self._units == "si":
                self._state = round(temp, 2)
            else:
                self._state = round((temp * 1.8) + 32, 2)
        except TypeError:
            self._state = None
    elif "sleep_stage" in self._sensor:
        self._state = self._usrobj.current_values["stage"]
```

- start_point: [178, 4], end_point: [201, 62]
- language: python, docstring_language: en, docstring_language_predictions: ['es', 'sk', 'en'], is_langid_reliable: False
**`EightUserSensor.device_state_attributes`**

- parameters: `(self)`
- docstring_summary: Return device state attributes.

```python
def device_state_attributes(self):
    """Return device state attributes."""
    if self._attr is None:
        # Skip attributes if sensor type doesn't support
        return None

    if "fitness" in self._sensor_root:
        state_attr = {
            ATTR_FIT_DATE: self._attr["date"],
            ATTR_FIT_DURATION_SCORE: self._attr["duration"],
            ATTR_FIT_ASLEEP_SCORE: self._attr["asleep"],
            ATTR_FIT_OUT_SCORE: self._attr["out"],
            ATTR_FIT_WAKEUP_SCORE: self._attr["wakeup"],
        }
        return state_attr

    state_attr = {ATTR_SESSION_START: self._attr["date"]}
    state_attr[ATTR_TNT] = self._attr["tnt"]
    state_attr[ATTR_PROCESSING] = self._attr["processing"]

    sleep_time = (
        sum(self._attr["breakdown"].values()) - self._attr["breakdown"]["awake"]
    )
    state_attr[ATTR_SLEEP_DUR] = sleep_time
    try:
        state_attr[ATTR_LIGHT_PERC] = round(
            (self._attr["breakdown"]["light"] / sleep_time) * 100, 2
        )
    except ZeroDivisionError:
        state_attr[ATTR_LIGHT_PERC] = 0
    try:
        state_attr[ATTR_DEEP_PERC] = round(
            (self._attr["breakdown"]["deep"] / sleep_time) * 100, 2
        )
    except ZeroDivisionError:
        state_attr[ATTR_DEEP_PERC] = 0
    try:
        state_attr[ATTR_REM_PERC] = round(
            (self._attr["breakdown"]["rem"] / sleep_time) * 100, 2
        )
    except ZeroDivisionError:
        state_attr[ATTR_REM_PERC] = 0

    try:
        if self._units == "si":
            room_temp = round(self._attr["room_temp"], 2)
        else:
            room_temp = round((self._attr["room_temp"] * 1.8) + 32, 2)
    except TypeError:
        room_temp = None

    try:
        if self._units == "si":
            bed_temp = round(self._attr["bed_temp"], 2)
        else:
            bed_temp = round((self._attr["bed_temp"] * 1.8) + 32, 2)
    except TypeError:
        bed_temp = None

    if "current" in self._sensor_root:
        try:
            state_attr[ATTR_RESP_RATE] = round(self._attr["resp_rate"], 2)
        except TypeError:
            state_attr[ATTR_RESP_RATE] = None
        try:
            state_attr[ATTR_HEART_RATE] = round(self._attr["heart_rate"], 2)
        except TypeError:
            state_attr[ATTR_HEART_RATE] = None
        state_attr[ATTR_SLEEP_STAGE] = self._attr["stage"]
        state_attr[ATTR_ROOM_TEMP] = room_temp
        state_attr[ATTR_BED_TEMP] = bed_temp
    elif "last" in self._sensor_root:
        try:
            state_attr[ATTR_AVG_RESP_RATE] = round(self._attr["resp_rate"], 2)
        except TypeError:
            state_attr[ATTR_AVG_RESP_RATE] = None
        try:
            state_attr[ATTR_AVG_HEART_RATE] = round(self._attr["heart_rate"], 2)
        except TypeError:
            state_attr[ATTR_AVG_HEART_RATE] = None
        state_attr[ATTR_AVG_ROOM_TEMP] = room_temp
        state_attr[ATTR_AVG_BED_TEMP] = bed_temp

    return state_attr
```

- start_point: [204, 4], end_point: [288, 25]
- language: python, docstring_language: en, docstring_language_predictions: ['es', 'en', 'en'], is_langid_reliable: True
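The percentage attributes above are derived from the `breakdown` durations after subtracting awake time. A small worked example of that arithmetic; the durations are made-up values purely for illustration, not real Eight Sleep API output:

```python
breakdown = {"awake": 1200, "light": 10800, "deep": 5400, "rem": 3600}  # illustrative durations

sleep_time = sum(breakdown.values()) - breakdown["awake"]           # 19800
light_perc = round(breakdown["light"] / sleep_time * 100, 2)        # 54.55
deep_perc = round(breakdown["deep"] / sleep_time * 100, 2)          # 27.27
rem_perc = round(breakdown["rem"] / sleep_time * 100, 2)            # 18.18
```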
**`EightRoomSensor.__init__`**

- parameters: `(self, name, eight, sensor, units)`
- docstring_summary: Initialize the sensor.

```python
def __init__(self, name, eight, sensor, units):
    """Initialize the sensor."""
    super().__init__(eight)
    self._sensor = sensor
    self._mapped_name = NAME_MAP.get(self._sensor, self._sensor)
    self._name = f"{name} {self._mapped_name}"
    self._state = None
    self._attr = None
    self._units = units
```

- start_point: [294, 4], end_point: [303, 27]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightRoomSensor.name`**

- parameters: `(self)`
- docstring_summary: Return the name of the sensor, if any.

```python
def name(self):
    """Return the name of the sensor, if any."""
    return self._name
```

- start_point: [306, 4], end_point: [308, 25]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightRoomSensor.state`**

- parameters: `(self)`
- docstring_summary: Return the state of the sensor.

```python
def state(self):
    """Return the state of the sensor."""
    return self._state
```

- start_point: [311, 4], end_point: [313, 26]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightRoomSensor.async_update`**

- parameters: `(self)`
- docstring_summary: Retrieve latest state.

```python
async def async_update(self):
    """Retrieve latest state."""
    _LOGGER.debug("Updating Room sensor: %s", self._sensor)
    temp = self._eight.room_temperature()
    try:
        if self._units == "si":
            self._state = round(temp, 2)
        else:
            self._state = round((temp * 1.8) + 32, 2)
    except TypeError:
        self._state = None
```

- start_point: [315, 4], end_point: [325, 30]
- language: python, docstring_language: en, docstring_language_predictions: ['es', 'sk', 'en'], is_langid_reliable: False
**`EightRoomSensor.unit_of_measurement`**

- parameters: `(self)`
- docstring_summary: Return the unit the value is expressed in.

```python
def unit_of_measurement(self):
    """Return the unit the value is expressed in."""
    if self._units == "si":
        return TEMP_CELSIUS
    return TEMP_FAHRENHEIT
```

- start_point: [328, 4], end_point: [332, 30]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`EightRoomSensor.icon`**

- parameters: `(self)`
- docstring_summary: Icon to use in the frontend, if any.

```python
def icon(self):
    """Icon to use in the frontend, if any."""
    return "mdi:thermometer"
```

- start_point: [335, 4], end_point: [337, 32]
- language: python, docstring_language: en, docstring_language_predictions: ['en', 'en', 'en'], is_langid_reliable: True
**`COCOCaptionsDataset.__init__`**

- parameters: `(self, ann_file, image_set, root_path, data_path, seq_len=64, with_precomputed_visual_feat=False, mask_raw_pixels=True, with_rel_task=True, with_mlm_task=True, with_mvrc_task=True, transform=None, test_mode=False, zip_mode=False, cache_mode=False, cache_db=False, ignore_db_cache=True, tokenizer=None, pretrained_model_name=None, add_image_as_a_box=False, aspect_grouping=False, **kwargs)`
- docstring_summary: Conceptual Captions Dataset

```python
def __init__(self, ann_file, image_set, root_path, data_path, seq_len=64,
             with_precomputed_visual_feat=False, mask_raw_pixels=True,
             with_rel_task=True, with_mlm_task=True, with_mvrc_task=True,
             transform=None, test_mode=False,
             zip_mode=False, cache_mode=False, cache_db=False, ignore_db_cache=True,
             tokenizer=None, pretrained_model_name=None,
             add_image_as_a_box=False,
             aspect_grouping=False, **kwargs):
    """
    Conceptual Captions Dataset

    :param ann_file: annotation jsonl file
    :param image_set: image folder name, e.g., 'vcr1images'
    :param root_path: root path to cache database loaded from annotation file
    :param data_path: path to vcr dataset
    :param transform: transform
    :param test_mode: test mode means no labels available
    :param zip_mode: reading images and metadata in zip archive
    :param cache_mode: cache whole dataset to RAM first, then __getitem__ read them from RAM
    :param ignore_db_cache: ignore previous cached database, reload it from annotation file
    :param tokenizer: default is BertTokenizer from pytorch_pretrained_bert
    :param add_image_as_a_box: add whole image as a box
    :param aspect_grouping: whether to group images via their aspect
    :param kwargs:
    """
    super(COCOCaptionsDataset, self).__init__()

    assert not cache_mode, 'currently not support cache mode!'
    assert not test_mode

    annot = {'train': 'annotations/captions_train2017.json',
             'val': 'annotations/captions_val2017.json'}
    annot_inst = {'train': 'annotations/instances_train2017.json',
                  'val': 'annotations/instances_val2017.json'}

    if zip_mode:
        self.root = os.path.join(data_path, '{0}2017.zip@/{0}2017'.format(image_set))
    else:
        self.root = os.path.join(data_path, '{}2017'.format(image_set))

    self.seq_len = seq_len
    self.with_rel_task = with_rel_task
    self.with_mlm_task = with_mlm_task
    self.with_mvrc_task = with_mvrc_task
    self.data_path = data_path
    self.root_path = root_path
    self.ann_file = os.path.join(data_path, annot[image_set])
    self.ann_file_inst = os.path.join(data_path, annot_inst[image_set])
    self.with_precomputed_visual_feat = with_precomputed_visual_feat
    self.mask_raw_pixels = mask_raw_pixels
    self.image_set = image_set
    self.transform = transform
    self.test_mode = test_mode
    self.zip_mode = zip_mode
    self.cache_mode = cache_mode
    self.cache_db = cache_db
    self.ignore_db_cache = ignore_db_cache
    self.aspect_grouping = aspect_grouping
    self.cache_dir = os.path.join(root_path, 'cache')
    self.add_image_as_a_box = add_image_as_a_box
    if not os.path.exists(self.cache_dir):
        makedirsExist(self.cache_dir)
    self.tokenizer = tokenizer if tokenizer is not None \
        else BertTokenizer.from_pretrained(
            'bert-base-uncased' if pretrained_model_name is None else pretrained_model_name,
            cache_dir=self.cache_dir)

    if self.zip_mode:
        self.zipreader = ZipReader()

    self.coco = COCO(self.ann_file)
    self.coco_inst = COCO(self.ann_file_inst)
```
self.ids = list(sorted(self.coco.imgs.keys()))
# filter images without detection annotations
self.ids = [
img_id
for img_id in self.ids
if len(self.coco_inst.getAnnIds(imgIds=img_id, iscrowd=None)) > 0
]
self.json_category_id_to_contiguous_id = {
v: i + 1 for i, v in enumerate(self.coco_inst.getCatIds())
}
self.contiguous_category_id_to_json_id = {
v: k for k, v in self.json_category_id_to_contiguous_id.items()
}
self.id_to_img_map = {k: v for k, v in enumerate(self.ids)}
if self.aspect_grouping:
assert False, "not support aspect grouping currently!"
# self.group_ids = self.group_aspect(self.database)
print('mask_raw_pixels: ', self.mask_raw_pixels) | [] | [19, 4] | [110, 56
] | python | en | ['en', 'error', 'th'] | False |
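The constructor record above expects a COCO-style tree: captions and instances JSON files under annotations/ plus {train,val}2017 image folders (or matching zip archives in zip_mode), and it keeps only images that have at least one instance annotation. A hedged instantiation sketch based on the parameters documented in the docstring; the paths are assumptions, and the ann_file argument is recomputed internally from data_path and image_set:

# Illustrative only; assumes the dataset class and pycocotools are importable.
dataset = COCOCaptionsDataset(
    ann_file=None,            # rebuilt internally as data_path/annotations/captions_train2017.json
    image_set="train",
    root_path="./output",     # root_path/cache is created and used as the tokenizer cache
    data_path="/data/coco",
    seq_len=64,
    transform=None,
    zip_mode=False,           # True reads train2017.zip@/train2017 instead of train2017/
    add_image_as_a_box=True,
)
print(len(dataset.ids))       # only images with instance annotations remain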
async_setup_platform | (hass, _, add_entities, discovery_info=None) | Add lights from the main Qwikswitch component. | Add lights from the main Qwikswitch component. | async def async_setup_platform(hass, _, add_entities, discovery_info=None):
"""Add lights from the main Qwikswitch component."""
if discovery_info is None:
return
qsusb = hass.data[QWIKSWITCH]
devs = [QSLight(qsid, qsusb) for qsid in discovery_info[QWIKSWITCH]]
add_entities(devs) | [] | [6, 0] | [13, 22
] | python | en | ['en', 'en', 'en'] | True |
QSLight.brightness | (self) | Return the brightness of this light (0-255). | Return the brightness of this light (0-255). | def brightness(self):
"""Return the brightness of this light (0-255)."""
return self.device.value if self.device.is_dimmer else None | [] | [20, 4] | [22, 67
] | python | en | ['en', 'sn', 'en'] | True |
QSLight.supported_features | (self) | Flag supported features. | Flag supported features. | def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS if self.device.is_dimmer else 0 | [] | [25, 4] | [27, 65
] | python | en | ['da', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the GC100 devices. | Set up the GC100 devices. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the GC100 devices."""
binary_sensors = []
ports = config.get(CONF_PORTS)
for port in ports:
for port_addr, port_name in port.items():
binary_sensors.append(
GC100BinarySensor(port_name, port_addr, hass.data[DATA_GC100])
)
add_entities(binary_sensors, True) | [] | [16, 0] | [25, 38
] | python | en | ['en', 'en', 'en'] | True |
GC100BinarySensor.__init__ | (self, name, port_addr, gc100) | Initialize the GC100 binary sensor. | Initialize the GC100 binary sensor. | def __init__(self, name, port_addr, gc100):
"""Initialize the GC100 binary sensor."""
self._name = name or DEVICE_DEFAULT_NAME
self._port_addr = port_addr
self._gc100 = gc100
self._state = None
# Subscribe to be notified about state changes (PUSH)
self._gc100.subscribe(self._port_addr, self.set_state) | [] | [31, 4] | [39, 62
] | python | en | ['en', 'zh-Latn', 'en'] | True |
GC100BinarySensor.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return self._name | [] | [42, 4] | [44, 25
] | python | en | ['en', 'mi', 'en'] | True |
GC100BinarySensor.is_on | (self) | Return the state of the entity. | Return the state of the entity. | def is_on(self):
"""Return the state of the entity."""
return self._state | [] | [47, 4] | [49, 26
] | python | en | ['en', 'en', 'en'] | True |
GC100BinarySensor.update | (self) | Update the sensor state. | Update the sensor state. | def update(self):
"""Update the sensor state."""
self._gc100.read_sensor(self._port_addr, self.set_state) | [] | [51, 4] | [53, 64
] | python | en | ['en', 'co', 'en'] | True |
GC100BinarySensor.set_state | (self, state) | Set the current state. | Set the current state. | def set_state(self, state):
"""Set the current state."""
self._state = state == 1
self.schedule_update_ha_state() | [] | [55, 4] | [58, 39
] | python | en | ['en', 'en', 'en'] | True |
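Read together, the GC100 records above describe a push-plus-poll sensor: __init__ subscribes set_state as a callback, update() asks the hub to read the port with the same callback, and set_state stores state == 1 and schedules a Home Assistant refresh. A self-contained sketch of that callback flow; FakeHub and PortSensor are stand-ins for illustration, not part of the integration:

class FakeHub:
    # Minimal stand-in for the gc100 hub: one callback per port, values pushed to it.
    def __init__(self):
        self._subscribers = {}

    def subscribe(self, port, callback):
        self._subscribers[port] = callback

    def push(self, port, value):
        self._subscribers[port](value)

class PortSensor:
    def __init__(self, port, hub):
        self._state = None
        hub.subscribe(port, self.set_state)

    def set_state(self, state):
        # 1 means "on"; a real entity would also call schedule_update_ha_state() here.
        self._state = state == 1

hub = FakeHub()
sensor = PortSensor("4A", hub)
hub.push("4A", 1)
print(sensor._state)  # True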
switchbox_fixture | () | Return a default switchBox switch entity mock. | Return a default switchBox switch entity mock. | def switchbox_fixture():
"""Return a default switchBox switch entity mock."""
feature = mock_feature(
"switches",
blebox_uniapi.switch.Switch,
unique_id="BleBox-switchBox-1afe34e750b8-0.relay",
full_name="switchBox-0.relay",
device_class="relay",
is_on=False,
)
feature.async_update = AsyncMock()
product = feature.product
type(product).name = PropertyMock(return_value="My switch box")
type(product).model = PropertyMock(return_value="switchBox")
return (feature, "switch.switchbox_0_relay") | [] | [28, 0] | [42, 48
] | python | en | ['en', 'cy', 'en'] | True |
test_switchbox_init | (switchbox, hass, config) | Test switch default state. | Test switch default state. | async def test_switchbox_init(switchbox, hass, config):
"""Test switch default state."""
feature_mock, entity_id = switchbox
feature_mock.async_update = AsyncMock()
entry = await async_setup_entity(hass, config, entity_id)
assert entry.unique_id == "BleBox-switchBox-1afe34e750b8-0.relay"
state = hass.states.get(entity_id)
assert state.name == "switchBox-0.relay"
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_SWITCH
assert state.state == STATE_OFF
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My switch box"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "switchBox"
assert device.sw_version == "1.23" | [] | [45, 0] | [68, 38
] | python | en | ['de', 'en', 'en'] | True |
test_switchbox_update_when_off | (switchbox, hass, config) | Test switch updating when off. | Test switch updating when off. | async def test_switchbox_update_when_off(switchbox, hass, config):
"""Test switch updating when off."""
feature_mock, entity_id = switchbox
def initial_update():
feature_mock.is_on = False
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
assert state.state == STATE_OFF | [] | [71, 0] | [83, 35
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_update_when_on | (switchbox, hass, config) | Test switch updating when on. | Test switch updating when on. | async def test_switchbox_update_when_on(switchbox, hass, config):
"""Test switch updating when on."""
feature_mock, entity_id = switchbox
def initial_update():
feature_mock.is_on = True
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
assert state.state == STATE_ON | [] | [86, 0] | [98, 34
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_on | (switchbox, hass, config) | Test turning switch on. | Test turning switch on. | async def test_switchbox_on(switchbox, hass, config):
"""Test turning switch on."""
feature_mock, entity_id = switchbox
def initial_update():
feature_mock.is_on = False
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
feature_mock.async_update = AsyncMock()
def turn_on():
feature_mock.is_on = True
feature_mock.async_turn_on = AsyncMock(side_effect=turn_on)
await hass.services.async_call(
"switch",
SERVICE_TURN_ON,
{"entity_id": entity_id},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.state == STATE_ON | [] | [101, 0] | [126, 34
] | python | en | ['en', 'en', 'en'] | True |
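The switch tests above lean on one mocking idiom: an AsyncMock whose side_effect mutates the mocked feature, so awaiting async_turn_on (or async_update) flips is_on before the entity state is read back. A short runnable illustration of that idiom outside Home Assistant; the Feature class here is hypothetical:

import asyncio
from unittest.mock import AsyncMock

class Feature:
    is_on = False

feature = Feature()

def turn_on():
    feature.is_on = True

# Awaiting the mock runs the side effect, just like the blocking service call in the tests.
feature.async_turn_on = AsyncMock(side_effect=turn_on)

async def main():
    await feature.async_turn_on()
    print(feature.is_on)  # True

asyncio.run(main())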
test_switchbox_off | (switchbox, hass, config) | Test turning switch off. | Test turning switch off. | async def test_switchbox_off(switchbox, hass, config):
"""Test turning switch off."""
feature_mock, entity_id = switchbox
def initial_update():
feature_mock.is_on = True
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
feature_mock.async_update = AsyncMock()
def turn_off():
feature_mock.is_on = False
feature_mock.async_turn_off = AsyncMock(side_effect=turn_off)
await hass.services.async_call(
"switch",
SERVICE_TURN_OFF,
{"entity_id": entity_id},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.state == STATE_OFF | [] | [129, 0] | [153, 35
] | python | en | ['en', 'en', 'en'] | True |
relay_mock | (relay_id=0) | Return a default switchBoxD switch entity mock. | Return a default switchBoxD switch entity mock. | def relay_mock(relay_id=0):
"""Return a default switchBoxD switch entity mock."""
return mock_only_feature(
blebox_uniapi.switch.Switch,
unique_id=f"BleBox-switchBoxD-1afe34e750b8-{relay_id}.relay",
full_name=f"switchBoxD-{relay_id}.relay",
device_class="relay",
is_on=None,
) | [] | [156, 0] | [165, 5
] | python | en | ['en', 'pl', 'en'] | True |
switchbox_d_fixture | () | Set up two mocked Switch features representing a switchBoxD. | Set up two mocked Switch features representing a switchBoxD. | def switchbox_d_fixture():
"""Set up two mocked Switch features representing a switchBoxD."""
relay1 = relay_mock(0)
relay2 = relay_mock(1)
features = [relay1, relay2]
product = setup_product_mock("switches", features)
type(product).name = PropertyMock(return_value="My relays")
type(product).model = PropertyMock(return_value="switchBoxD")
type(product).brand = PropertyMock(return_value="BleBox")
type(product).firmware_version = PropertyMock(return_value="1.23")
type(product).unique_id = PropertyMock(return_value="abcd0123ef5678")
type(relay1).product = product
type(relay2).product = product
return (features, ["switch.switchboxd_0_relay", "switch.switchboxd_1_relay"]) | [] | [169, 0] | [187, 81
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_d_init | (switchbox_d, hass, config) | Test switch default state. | Test switch default state. | async def test_switchbox_d_init(switchbox_d, hass, config):
"""Test switch default state."""
feature_mocks, entity_ids = switchbox_d
feature_mocks[0].async_update = AsyncMock()
feature_mocks[1].async_update = AsyncMock()
entries = await async_setup_entities(hass, config, entity_ids)
entry = entries[0]
assert entry.unique_id == "BleBox-switchBoxD-1afe34e750b8-0.relay"
state = hass.states.get(entity_ids[0])
assert state.name == "switchBoxD-0.relay"
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_SWITCH
assert state.state == STATE_OFF # NOTE: should instead be STATE_UNKNOWN?
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My relays"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "switchBoxD"
assert device.sw_version == "1.23"
entry = entries[1]
assert entry.unique_id == "BleBox-switchBoxD-1afe34e750b8-1.relay"
state = hass.states.get(entity_ids[1])
assert state.name == "switchBoxD-1.relay"
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_SWITCH
assert state.state == STATE_OFF # NOTE: should instead be STATE_UNKNOWN?
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My relays"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "switchBoxD"
assert device.sw_version == "1.23" | [] | [190, 0] | [231, 38
] | python | en | ['de', 'en', 'en'] | True |
test_switchbox_d_update_when_off | (switchbox_d, hass, config) | Test switch updating when off. | Test switch updating when off. | async def test_switchbox_d_update_when_off(switchbox_d, hass, config):
"""Test switch updating when off."""
feature_mocks, entity_ids = switchbox_d
def initial_update0():
feature_mocks[0].is_on = False
feature_mocks[1].is_on = False
feature_mocks[0].async_update = AsyncMock(side_effect=initial_update0)
feature_mocks[1].async_update = AsyncMock()
await async_setup_entities(hass, config, entity_ids)
assert hass.states.get(entity_ids[0]).state == STATE_OFF
assert hass.states.get(entity_ids[1]).state == STATE_OFF | [] | [234, 0] | [248, 60
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_d_update_when_second_off | (switchbox_d, hass, config) | Test switch updating when off. | Test switch updating when off. | async def test_switchbox_d_update_when_second_off(switchbox_d, hass, config):
"""Test switch updating when off."""
feature_mocks, entity_ids = switchbox_d
def initial_update0():
feature_mocks[0].is_on = True
feature_mocks[1].is_on = False
feature_mocks[0].async_update = AsyncMock(side_effect=initial_update0)
feature_mocks[1].async_update = AsyncMock()
await async_setup_entities(hass, config, entity_ids)
assert hass.states.get(entity_ids[0]).state == STATE_ON
assert hass.states.get(entity_ids[1]).state == STATE_OFF | [] | [251, 0] | [265, 60
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_d_turn_first_on | (switchbox_d, hass, config) | Test turning switch on. | Test turning switch on. | async def test_switchbox_d_turn_first_on(switchbox_d, hass, config):
"""Test turning switch on."""
feature_mocks, entity_ids = switchbox_d
def initial_update0():
feature_mocks[0].is_on = False
feature_mocks[1].is_on = False
feature_mocks[0].async_update = AsyncMock(side_effect=initial_update0)
feature_mocks[1].async_update = AsyncMock()
await async_setup_entities(hass, config, entity_ids)
feature_mocks[0].async_update = AsyncMock()
def turn_on0():
feature_mocks[0].is_on = True
feature_mocks[0].async_turn_on = AsyncMock(side_effect=turn_on0)
await hass.services.async_call(
"switch",
SERVICE_TURN_ON,
{"entity_id": entity_ids[0]},
blocking=True,
)
assert hass.states.get(entity_ids[0]).state == STATE_ON
assert hass.states.get(entity_ids[1]).state == STATE_OFF | [] | [268, 0] | [294, 60
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_d_second_on | (switchbox_d, hass, config) | Test turning switch on. | Test turning switch on. | async def test_switchbox_d_second_on(switchbox_d, hass, config):
"""Test turning switch on."""
feature_mocks, entity_ids = switchbox_d
def initial_update0():
feature_mocks[0].is_on = False
feature_mocks[1].is_on = False
feature_mocks[0].async_update = AsyncMock(side_effect=initial_update0)
feature_mocks[1].async_update = AsyncMock()
await async_setup_entities(hass, config, entity_ids)
feature_mocks[0].async_update = AsyncMock()
def turn_on1():
feature_mocks[1].is_on = True
feature_mocks[1].async_turn_on = AsyncMock(side_effect=turn_on1)
await hass.services.async_call(
"switch",
SERVICE_TURN_ON,
{"entity_id": entity_ids[1]},
blocking=True,
)
assert hass.states.get(entity_ids[0]).state == STATE_OFF
assert hass.states.get(entity_ids[1]).state == STATE_ON | [] | [297, 0] | [323, 59
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_d_first_off | (switchbox_d, hass, config) | Test turning switch on. | Test turning switch on. | async def test_switchbox_d_first_off(switchbox_d, hass, config):
"""Test turning switch on."""
feature_mocks, entity_ids = switchbox_d
def initial_update_any():
feature_mocks[0].is_on = True
feature_mocks[1].is_on = True
feature_mocks[0].async_update = AsyncMock(side_effect=initial_update_any)
feature_mocks[1].async_update = AsyncMock()
await async_setup_entities(hass, config, entity_ids)
feature_mocks[0].async_update = AsyncMock()
def turn_off0():
feature_mocks[0].is_on = False
feature_mocks[0].async_turn_off = AsyncMock(side_effect=turn_off0)
await hass.services.async_call(
"switch",
SERVICE_TURN_OFF,
{"entity_id": entity_ids[0]},
blocking=True,
)
assert hass.states.get(entity_ids[0]).state == STATE_OFF
assert hass.states.get(entity_ids[1]).state == STATE_ON | [] | [326, 0] | [352, 59
] | python | en | ['en', 'en', 'en'] | True |
test_switchbox_d_second_off | (switchbox_d, hass, config) | Test turning switch on. | Test turning switch on. | async def test_switchbox_d_second_off(switchbox_d, hass, config):
"""Test turning switch on."""
feature_mocks, entity_ids = switchbox_d
def initial_update_any():
feature_mocks[0].is_on = True
feature_mocks[1].is_on = True
feature_mocks[0].async_update = AsyncMock(side_effect=initial_update_any)
feature_mocks[1].async_update = AsyncMock()
await async_setup_entities(hass, config, entity_ids)
feature_mocks[0].async_update = AsyncMock()
def turn_off1():
feature_mocks[1].is_on = False
feature_mocks[1].async_turn_off = AsyncMock(side_effect=turn_off1)
await hass.services.async_call(
"switch",
SERVICE_TURN_OFF,
{"entity_id": entity_ids[1]},
blocking=True,
)
assert hass.states.get(entity_ids[0]).state == STATE_ON
assert hass.states.get(entity_ids[1]).state == STATE_OFF | [] | [355, 0] | [380, 60
] | python | en | ['en', 'en', 'en'] | True |
test_update_failure | (feature, hass, config, caplog) | Test that update failures are logged. | Test that update failures are logged. | async def test_update_failure(feature, hass, config, caplog):
"""Test that update failures are logged."""
caplog.set_level(logging.ERROR)
feature_mock, entity_id = feature
if isinstance(feature_mock, list):
feature_mock[0].async_update = AsyncMock()
feature_mock[1].async_update = AsyncMock()
feature_mock = feature_mock[0]
entity_id = entity_id[0]
feature_mock.async_update = AsyncMock(side_effect=blebox_uniapi.error.ClientError)
await async_setup_entity(hass, config, entity_id)
assert f"Updating '{feature_mock.full_name}' failed: " in caplog.text | [] | [387, 0] | [403, 73
] | python | en | ['en', 'en', 'en'] | True |
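The failure test above forces async_update to raise blebox_uniapi.error.ClientError and then asserts on caplog.text. The same pytest caplog pattern in isolation; the logger name, exception type, and message below are illustrative rather than taken from the integration:

import logging

def refresh(action):
    try:
        action()
    except ConnectionError as ex:
        logging.getLogger("demo").error("Updating 'demo' failed: %s", ex)

def test_refresh_logs_failure(caplog):
    caplog.set_level(logging.ERROR)

    def boom():
        raise ConnectionError("timeout")

    refresh(boom)
    assert "Updating 'demo' failed: " in caplog.text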
test_reload | (hass) | Verify we can reload trend sensors. | Verify we can reload trend sensors. | async def test_reload(hass):
"""Verify we can reload trend sensors."""
hass.states.async_set("sensor.test_state", 1234)
await setup.async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {"test_trend_sensor": {"entity_id": "sensor.test_state"}},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 2
assert hass.states.get("binary_sensor.test_trend_sensor")
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"trend/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 2
assert hass.states.get("binary_sensor.test_trend_sensor") is None
assert hass.states.get("binary_sensor.second_test_trend_sensor") | [] | [377, 0] | [414, 68
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.setup_method | (self, method) | Set up things to be run when tests are started. | Set up things to be run when tests are started. | def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant() | [] | [18, 4] | [20, 45
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.teardown_method | (self, method) | Stop everything that was started. | Stop everything that was started. | def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop() | [] | [22, 4] | [24, 24
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_up | (self) | Test up trend. | Test up trend. | def test_up(self):
"""Test up trend."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {"entity_id": "sensor.test_state"}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "1")
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "2")
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "on" | [] | [26, 4] | [47, 34
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_up_using_trendline | (self) | Test up trend using multiple samples and trendline calculation. | Test up trend using multiple samples and trendline calculation. | def test_up_using_trendline(self):
"""Test up trend using multiple samples and trendline calculation."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"sample_duration": 10000,
"min_gradient": 1,
"max_samples": 25,
}
},
}
},
)
self.hass.block_till_done()
now = dt_util.utcnow()
for val in [10, 0, 20, 30]:
with patch("homeassistant.util.dt.utcnow", return_value=now):
self.hass.states.set("sensor.test_state", val)
self.hass.block_till_done()
now += timedelta(seconds=2)
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "on"
# have to change state value, otherwise sample will lost
for val in [0, 30, 1, 0]:
with patch("homeassistant.util.dt.utcnow", return_value=now):
self.hass.states.set("sensor.test_state", val)
self.hass.block_till_done()
now += timedelta(seconds=2)
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "off" | [] | [49, 4] | [88, 35
] | python | en | ['en', 'en', 'en'] | True |
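The trendline tests above feed timestamped samples to the trend binary sensor and expect "on" only while the fitted slope clears min_gradient (with invert flipping the comparison). A sketch of the kind of least-squares gradient check involved, fitting a line to (elapsed-seconds, value) pairs with numpy.polyfit; this illustrates the idea rather than reproducing the component's exact implementation:

import numpy as np

def trending_up(samples, min_gradient=1.0):
    # samples: iterable of (seconds, value) pairs; True when the fitted slope >= min_gradient.
    times, values = zip(*samples)
    gradient, _intercept = np.polyfit(times, values, 1)
    return gradient >= min_gradient

print(trending_up([(0, 10), (2, 0), (4, 20), (6, 30)]))  # True  (slope is about 4)
print(trending_up([(0, 0), (2, 30), (4, 1), (6, 0)]))    # False (slope is negative)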
TestTrendBinarySensor.test_down_using_trendline | (self) | Test down trend using multiple samples and trendline calculation. | Test down trend using multiple samples and trendline calculation. | def test_down_using_trendline(self):
"""Test down trend using multiple samples and trendline calculation."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"sample_duration": 10000,
"min_gradient": 1,
"max_samples": 25,
"invert": "Yes",
}
},
}
},
)
self.hass.block_till_done()
now = dt_util.utcnow()
for val in [30, 20, 30, 10]:
with patch("homeassistant.util.dt.utcnow", return_value=now):
self.hass.states.set("sensor.test_state", val)
self.hass.block_till_done()
now += timedelta(seconds=2)
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "on"
for val in [30, 0, 45, 50]:
with patch("homeassistant.util.dt.utcnow", return_value=now):
self.hass.states.set("sensor.test_state", val)
self.hass.block_till_done()
now += timedelta(seconds=2)
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "off" | [] | [90, 4] | [129, 35
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_down | (self) | Test down trend. | Test down trend. | def test_down(self):
"""Test down trend."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {"entity_id": "sensor.test_state"}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "2")
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "1")
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "off" | [] | [131, 4] | [152, 35
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_invert_up | (self) | Test up trend with custom message. | Test up trend with custom message. | def test_invert_up(self):
"""Test up trend with custom message."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"invert": "Yes",
}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "1")
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "2")
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "off" | [
"def",
"test_invert_up",
"(",
"self",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
",",
"\"invert\"",
":",
"\"Yes\"",
",",
"}",
"}",
",",
"}",
"}",
",",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"1\"",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"2\"",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_trend_sensor\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
154,
4
] | [
178,
35
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_invert_down | (self) | Test down trend with custom message. | Test down trend with custom message. | def test_invert_down(self):
"""Test down trend with custom message."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"invert": "Yes",
}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "2")
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "1")
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "on" | [
"def",
"test_invert_down",
"(",
"self",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
",",
"\"invert\"",
":",
"\"Yes\"",
",",
"}",
"}",
",",
"}",
"}",
",",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"2\"",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"1\"",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_trend_sensor\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"on\""
] | [
180,
4
] | [
204,
34
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_attribute_up | (self) | Test attribute up trend. | Test attribute up trend. | def test_attribute_up(self):
"""Test attribute up trend."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"attribute": "attr",
}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "State", {"attr": "1"})
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "State", {"attr": "2"})
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "on" | [
"def",
"test_attribute_up",
"(",
"self",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
",",
"\"attribute\"",
":",
"\"attr\"",
",",
"}",
"}",
",",
"}",
"}",
",",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"State\"",
",",
"{",
"\"attr\"",
":",
"\"1\"",
"}",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"State\"",
",",
"{",
"\"attr\"",
":",
"\"2\"",
"}",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_trend_sensor\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"on\""
] | [
206,
4
] | [
229,
34
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_attribute_down | (self) | Test attribute down trend. | Test attribute down trend. | def test_attribute_down(self):
"""Test attribute down trend."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"attribute": "attr",
}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "State", {"attr": "2"})
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "State", {"attr": "1"})
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "off" | [
"def",
"test_attribute_down",
"(",
"self",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
",",
"\"attribute\"",
":",
"\"attr\"",
",",
"}",
"}",
",",
"}",
"}",
",",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"State\"",
",",
"{",
"\"attr\"",
":",
"\"2\"",
"}",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"State\"",
",",
"{",
"\"attr\"",
":",
"\"1\"",
"}",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_trend_sensor\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
231,
4
] | [
255,
35
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_max_samples | (self) | Test that sample count is limited correctly. | Test that sample count is limited correctly. | def test_max_samples(self):
"""Test that sample count is limited correctly."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"max_samples": 3,
"min_gradient": -1,
}
},
}
},
)
self.hass.block_till_done()
for val in [0, 1, 2, 3, 2, 1]:
self.hass.states.set("sensor.test_state", val)
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "on"
assert state.attributes["sample_count"] == 3 | [
"def",
"test_max_samples",
"(",
"self",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
",",
"\"max_samples\"",
":",
"3",
",",
"\"min_gradient\"",
":",
"-",
"1",
",",
"}",
"}",
",",
"}",
"}",
",",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"for",
"val",
"in",
"[",
"0",
",",
"1",
",",
"2",
",",
"3",
",",
"2",
",",
"1",
"]",
":",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"val",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_trend_sensor\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"on\"",
"assert",
"state",
".",
"attributes",
"[",
"\"sample_count\"",
"]",
"==",
"3"
] | [
257,
4
] | [
283,
52
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_non_numeric | (self) | Test up trend. | Test up trend. | def test_non_numeric(self):
"""Test up trend."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {"entity_id": "sensor.test_state"}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "Non")
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "Numeric")
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "off" | [
"def",
"test_non_numeric",
"(",
"self",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
"}",
"}",
",",
"}",
"}",
",",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"Non\"",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"Numeric\"",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_trend_sensor\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
285,
4
] | [
306,
35
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_missing_attribute | (self) | Test attribute down trend. | Test attribute down trend. | def test_missing_attribute(self):
"""Test attribute down trend."""
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "trend",
"sensors": {
"test_trend_sensor": {
"entity_id": "sensor.test_state",
"attribute": "missing",
}
},
}
},
)
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "State", {"attr": "2"})
self.hass.block_till_done()
self.hass.states.set("sensor.test_state", "State", {"attr": "1"})
self.hass.block_till_done()
state = self.hass.states.get("binary_sensor.test_trend_sensor")
assert state.state == "off" | [
"def",
"test_missing_attribute",
"(",
"self",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
",",
"\"attribute\"",
":",
"\"missing\"",
",",
"}",
"}",
",",
"}",
"}",
",",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"State\"",
",",
"{",
"\"attr\"",
":",
"\"2\"",
"}",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"self",
".",
"hass",
".",
"states",
".",
"set",
"(",
"\"sensor.test_state\"",
",",
"\"State\"",
",",
"{",
"\"attr\"",
":",
"\"1\"",
"}",
")",
"self",
".",
"hass",
".",
"block_till_done",
"(",
")",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_trend_sensor\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
308,
4
] | [
332,
35
] | python | en | ['en', 'en', 'en'] | True |
TestTrendBinarySensor.test_invalid_name_does_not_create | (self) | Test invalid name. | Test invalid name. | def test_invalid_name_does_not_create(self):
"""Test invalid name."""
with assert_setup_component(0):
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "template",
"sensors": {
"test INVALID sensor": {"entity_id": "sensor.test_state"}
},
}
},
)
assert self.hass.states.all() == [] | [
"def",
"test_invalid_name_does_not_create",
"(",
"self",
")",
":",
"with",
"assert_setup_component",
"(",
"0",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"sensors\"",
":",
"{",
"\"test INVALID sensor\"",
":",
"{",
"\"entity_id\"",
":",
"\"sensor.test_state\"",
"}",
"}",
",",
"}",
"}",
",",
")",
"assert",
"self",
".",
"hass",
".",
"states",
".",
"all",
"(",
")",
"==",
"[",
"]"
] | [
334,
4
] | [
349,
43
] | python | en | ['en', 'et', 'en'] | True |
TestTrendBinarySensor.test_invalid_sensor_does_not_create | (self) | Test invalid sensor. | Test invalid sensor. | def test_invalid_sensor_does_not_create(self):
"""Test invalid sensor."""
with assert_setup_component(0):
assert setup.setup_component(
self.hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "template",
"sensors": {
"test_trend_sensor": {"not_entity_id": "sensor.test_state"}
},
}
},
)
assert self.hass.states.all() == [] | [
"def",
"test_invalid_sensor_does_not_create",
"(",
"self",
")",
":",
"with",
"assert_setup_component",
"(",
"0",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"sensors\"",
":",
"{",
"\"test_trend_sensor\"",
":",
"{",
"\"not_entity_id\"",
":",
"\"sensor.test_state\"",
"}",
"}",
",",
"}",
"}",
",",
")",
"assert",
"self",
".",
"hass",
".",
"states",
".",
"all",
"(",
")",
"==",
"[",
"]"
] | [
351,
4
] | [
366,
43
] | python | en | ['en', 'et', 'en'] | True |
TestTrendBinarySensor.test_no_sensors_does_not_create | (self) | Test no sensors. | Test no sensors. | def test_no_sensors_does_not_create(self):
"""Test no sensors."""
with assert_setup_component(0):
assert setup.setup_component(
self.hass, "binary_sensor", {"binary_sensor": {"platform": "trend"}}
)
assert self.hass.states.all() == [] | [
"def",
"test_no_sensors_does_not_create",
"(",
"self",
")",
":",
"with",
"assert_setup_component",
"(",
"0",
")",
":",
"assert",
"setup",
".",
"setup_component",
"(",
"self",
".",
"hass",
",",
"\"binary_sensor\"",
",",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"trend\"",
"}",
"}",
")",
"assert",
"self",
".",
"hass",
".",
"states",
".",
"all",
"(",
")",
"==",
"[",
"]"
] | [
368,
4
] | [
374,
43
] | python | ca | ['es', 'ca', 'pt'] | False |
test_one_sensor_site_running | (hass, requests_mock, legacy_patchable_time) | Test the Met Office sensor platform. | Test the Met Office sensor platform. | async def test_one_sensor_site_running(hass, requests_mock, legacy_patchable_time):
"""Test the Met Office sensor platform."""
# all metoffice test data encapsulated in here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly",
text=wavertree_hourly,
)
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
running_sensor_ids = hass.states.async_entity_ids("sensor")
assert len(running_sensor_ids) > 0
for running_id in running_sensor_ids:
sensor = hass.states.get(running_id)
sensor_id = sensor.attributes.get("sensor_id")
sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
assert sensor.state == sensor_value
assert (
sensor.attributes.get("last_update").strftime(DATETIME_FORMAT)
== TEST_DATETIME_STRING
)
assert sensor.attributes.get("site_id") == "354107"
assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE
assert sensor.attributes.get("attribution") == ATTRIBUTION | [
"async",
"def",
"test_one_sensor_site_running",
"(",
"hass",
",",
"requests_mock",
",",
"legacy_patchable_time",
")",
":",
"# all metoffice test data encapsulated in here",
"mock_json",
"=",
"json",
".",
"loads",
"(",
"load_fixture",
"(",
"\"metoffice.json\"",
")",
")",
"all_sites",
"=",
"json",
".",
"dumps",
"(",
"mock_json",
"[",
"\"all_sites\"",
"]",
")",
"wavertree_hourly",
"=",
"json",
".",
"dumps",
"(",
"mock_json",
"[",
"\"wavertree_hourly\"",
"]",
")",
"requests_mock",
".",
"get",
"(",
"\"/public/data/val/wxfcs/all/json/sitelist/\"",
",",
"text",
"=",
"all_sites",
")",
"requests_mock",
".",
"get",
"(",
"\"/public/data/val/wxfcs/all/json/354107?res=3hourly\"",
",",
"text",
"=",
"wavertree_hourly",
",",
")",
"entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"DOMAIN",
",",
"data",
"=",
"METOFFICE_CONFIG_WAVERTREE",
",",
")",
"entry",
".",
"add_to_hass",
"(",
"hass",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"running_sensor_ids",
"=",
"hass",
".",
"states",
".",
"async_entity_ids",
"(",
"\"sensor\"",
")",
"assert",
"len",
"(",
"running_sensor_ids",
")",
">",
"0",
"for",
"running_id",
"in",
"running_sensor_ids",
":",
"sensor",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"running_id",
")",
"sensor_id",
"=",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"sensor_id\"",
")",
"sensor_name",
",",
"sensor_value",
"=",
"WAVERTREE_SENSOR_RESULTS",
"[",
"sensor_id",
"]",
"assert",
"sensor",
".",
"state",
"==",
"sensor_value",
"assert",
"(",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"last_update\"",
")",
".",
"strftime",
"(",
"DATETIME_FORMAT",
")",
"==",
"TEST_DATETIME_STRING",
")",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"site_id\"",
")",
"==",
"\"354107\"",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"site_name\"",
")",
"==",
"TEST_SITE_NAME_WAVERTREE",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"attribution\"",
")",
"==",
"ATTRIBUTION"
] | [
25,
0
] | [
60,
66
] | python | en | ['en', 'da', 'en'] | True |
test_two_sensor_sites_running | (hass, requests_mock, legacy_patchable_time) | Test we handle two sets of sensors running for two different sites. | Test we handle two sets of sensors running for two different sites. | async def test_two_sensor_sites_running(hass, requests_mock, legacy_patchable_time):
"""Test we handle two sets of sensors running for two different sites."""
# all metoffice test data encapsulated in here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
)
requests_mock.get(
"/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly
)
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
entry2 = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_KINGSLYNN,
)
entry2.add_to_hass(hass)
await hass.config_entries.async_setup(entry2.entry_id)
await hass.async_block_till_done()
running_sensor_ids = hass.states.async_entity_ids("sensor")
assert len(running_sensor_ids) > 0
for running_id in running_sensor_ids:
sensor = hass.states.get(running_id)
sensor_id = sensor.attributes.get("sensor_id")
if sensor.attributes.get("site_id") == "354107":
sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
assert sensor.state == sensor_value
assert (
sensor.attributes.get("last_update").strftime(DATETIME_FORMAT)
== TEST_DATETIME_STRING
)
assert sensor.attributes.get("sensor_id") == sensor_id
assert sensor.attributes.get("site_id") == "354107"
assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE
assert sensor.attributes.get("attribution") == ATTRIBUTION
else:
sensor_name, sensor_value = KINGSLYNN_SENSOR_RESULTS[sensor_id]
assert sensor.state == sensor_value
assert (
sensor.attributes.get("last_update").strftime(DATETIME_FORMAT)
== TEST_DATETIME_STRING
)
assert sensor.attributes.get("sensor_id") == sensor_id
assert sensor.attributes.get("site_id") == "322380"
assert sensor.attributes.get("site_name") == TEST_SITE_NAME_KINGSLYNN
assert sensor.attributes.get("attribution") == ATTRIBUTION | [
"async",
"def",
"test_two_sensor_sites_running",
"(",
"hass",
",",
"requests_mock",
",",
"legacy_patchable_time",
")",
":",
"# all metoffice test data encapsulated in here",
"mock_json",
"=",
"json",
".",
"loads",
"(",
"load_fixture",
"(",
"\"metoffice.json\"",
")",
")",
"all_sites",
"=",
"json",
".",
"dumps",
"(",
"mock_json",
"[",
"\"all_sites\"",
"]",
")",
"wavertree_hourly",
"=",
"json",
".",
"dumps",
"(",
"mock_json",
"[",
"\"wavertree_hourly\"",
"]",
")",
"kingslynn_hourly",
"=",
"json",
".",
"dumps",
"(",
"mock_json",
"[",
"\"kingslynn_hourly\"",
"]",
")",
"requests_mock",
".",
"get",
"(",
"\"/public/data/val/wxfcs/all/json/sitelist/\"",
",",
"text",
"=",
"all_sites",
")",
"requests_mock",
".",
"get",
"(",
"\"/public/data/val/wxfcs/all/json/354107?res=3hourly\"",
",",
"text",
"=",
"wavertree_hourly",
")",
"requests_mock",
".",
"get",
"(",
"\"/public/data/val/wxfcs/all/json/322380?res=3hourly\"",
",",
"text",
"=",
"kingslynn_hourly",
")",
"entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"DOMAIN",
",",
"data",
"=",
"METOFFICE_CONFIG_WAVERTREE",
",",
")",
"entry",
".",
"add_to_hass",
"(",
"hass",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"entry",
".",
"entry_id",
")",
"entry2",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"DOMAIN",
",",
"data",
"=",
"METOFFICE_CONFIG_KINGSLYNN",
",",
")",
"entry2",
".",
"add_to_hass",
"(",
"hass",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"entry2",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"running_sensor_ids",
"=",
"hass",
".",
"states",
".",
"async_entity_ids",
"(",
"\"sensor\"",
")",
"assert",
"len",
"(",
"running_sensor_ids",
")",
">",
"0",
"for",
"running_id",
"in",
"running_sensor_ids",
":",
"sensor",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"running_id",
")",
"sensor_id",
"=",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"sensor_id\"",
")",
"if",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"site_id\"",
")",
"==",
"\"354107\"",
":",
"sensor_name",
",",
"sensor_value",
"=",
"WAVERTREE_SENSOR_RESULTS",
"[",
"sensor_id",
"]",
"assert",
"sensor",
".",
"state",
"==",
"sensor_value",
"assert",
"(",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"last_update\"",
")",
".",
"strftime",
"(",
"DATETIME_FORMAT",
")",
"==",
"TEST_DATETIME_STRING",
")",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"sensor_id\"",
")",
"==",
"sensor_id",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"site_id\"",
")",
"==",
"\"354107\"",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"site_name\"",
")",
"==",
"TEST_SITE_NAME_WAVERTREE",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"attribution\"",
")",
"==",
"ATTRIBUTION",
"else",
":",
"sensor_name",
",",
"sensor_value",
"=",
"KINGSLYNN_SENSOR_RESULTS",
"[",
"sensor_id",
"]",
"assert",
"sensor",
".",
"state",
"==",
"sensor_value",
"assert",
"(",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"last_update\"",
")",
".",
"strftime",
"(",
"DATETIME_FORMAT",
")",
"==",
"TEST_DATETIME_STRING",
")",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"sensor_id\"",
")",
"==",
"sensor_id",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"site_id\"",
")",
"==",
"\"322380\"",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"site_name\"",
")",
"==",
"TEST_SITE_NAME_KINGSLYNN",
"assert",
"sensor",
".",
"attributes",
".",
"get",
"(",
"\"attribution\"",
")",
"==",
"ATTRIBUTION"
] | [
67,
0
] | [
125,
70
] | python | en | ['en', 'en', 'en'] | True |
has_all_unique_files | (value) | Validate that all persistence files are unique and set if any is set. | Validate that all persistence files are unique and set if any is set. | def has_all_unique_files(value):
"""Validate that all persistence files are unique and set if any is set."""
persistence_files = [gateway.get(CONF_PERSISTENCE_FILE) for gateway in value]
if None in persistence_files and any(
name is not None for name in persistence_files
):
raise vol.Invalid(
"persistence file name of all devices must be set if any is set"
)
if not all(name is None for name in persistence_files):
schema = vol.Schema(vol.Unique())
schema(persistence_files)
return value | [
"def",
"has_all_unique_files",
"(",
"value",
")",
":",
"persistence_files",
"=",
"[",
"gateway",
".",
"get",
"(",
"CONF_PERSISTENCE_FILE",
")",
"for",
"gateway",
"in",
"value",
"]",
"if",
"None",
"in",
"persistence_files",
"and",
"any",
"(",
"name",
"is",
"not",
"None",
"for",
"name",
"in",
"persistence_files",
")",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"\"persistence file name of all devices must be set if any is set\"",
")",
"if",
"not",
"all",
"(",
"name",
"is",
"None",
"for",
"name",
"in",
"persistence_files",
")",
":",
"schema",
"=",
"vol",
".",
"Schema",
"(",
"vol",
".",
"Unique",
"(",
")",
")",
"schema",
"(",
"persistence_files",
")",
"return",
"value"
] | [
39,
0
] | [
51,
16
] | python | en | ['en', 'en', 'en'] | True |
is_persistence_file | (value) | Validate that persistence file path ends in either .pickle or .json. | Validate that persistence file path ends in either .pickle or .json. | def is_persistence_file(value):
"""Validate that persistence file path ends in either .pickle or .json."""
if value.endswith((".json", ".pickle")):
return value
raise vol.Invalid(f"{value} does not end in either `.json` or `.pickle`") | [
"def",
"is_persistence_file",
"(",
"value",
")",
":",
"if",
"value",
".",
"endswith",
"(",
"(",
"\".json\"",
",",
"\".pickle\"",
")",
")",
":",
"return",
"value",
"raise",
"vol",
".",
"Invalid",
"(",
"f\"{value} does not end in either `.json` or `.pickle`\"",
")"
] | [
54,
0
] | [
58,
77
] | python | en | ['en', 'en', 'en'] | True |
deprecated | (key) | Mark key as deprecated in configuration. | Mark key as deprecated in configuration. | def deprecated(key):
"""Mark key as deprecated in configuration."""
def validator(config):
"""Check if key is in config, log warning and remove key."""
if key not in config:
return config
_LOGGER.warning(
"%s option for %s is deprecated. Please remove %s from your "
"configuration file",
key,
DOMAIN,
key,
)
config.pop(key)
return config
return validator | [
"def",
"deprecated",
"(",
"key",
")",
":",
"def",
"validator",
"(",
"config",
")",
":",
"\"\"\"Check if key is in config, log warning and remove key.\"\"\"",
"if",
"key",
"not",
"in",
"config",
":",
"return",
"config",
"_LOGGER",
".",
"warning",
"(",
"\"%s option for %s is deprecated. Please remove %s from your \"",
"\"configuration file\"",
",",
"key",
",",
"DOMAIN",
",",
"key",
",",
")",
"config",
".",
"pop",
"(",
"key",
")",
"return",
"config",
"return",
"validator"
] | [
61,
0
] | [
78,
20
] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass, config) | Set up the MySensors component. | Set up the MySensors component. | async def async_setup(hass, config):
"""Set up the MySensors component."""
gateways = await setup_gateways(hass, config)
if not gateways:
_LOGGER.error("No devices could be setup as gateways, check your configuration")
return False
hass.data[MYSENSORS_GATEWAYS] = gateways
hass.async_create_task(finish_setup(hass, config, gateways))
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
",",
"config",
")",
":",
"gateways",
"=",
"await",
"setup_gateways",
"(",
"hass",
",",
"config",
")",
"if",
"not",
"gateways",
":",
"_LOGGER",
".",
"error",
"(",
"\"No devices could be setup as gateways, check your configuration\"",
")",
"return",
"False",
"hass",
".",
"data",
"[",
"MYSENSORS_GATEWAYS",
"]",
"=",
"gateways",
"hass",
".",
"async_create_task",
"(",
"finish_setup",
"(",
"hass",
",",
"config",
",",
"gateways",
")",
")",
"return",
"True"
] | [
114,
0
] | [
126,
15
] | python | en | ['en', 'en', 'en'] | True |
_get_mysensors_name | (gateway, node_id, child_id) | Return a name for a node child. | Return a name for a node child. | def _get_mysensors_name(gateway, node_id, child_id):
"""Return a name for a node child."""
node_name = f"{gateway.sensors[node_id].sketch_name} {node_id}"
node_name = next(
(
node[CONF_NODE_NAME]
for conf_id, node in gateway.nodes_config.items()
if node.get(CONF_NODE_NAME) is not None and conf_id == node_id
),
node_name,
)
return f"{node_name} {child_id}" | [
"def",
"_get_mysensors_name",
"(",
"gateway",
",",
"node_id",
",",
"child_id",
")",
":",
"node_name",
"=",
"f\"{gateway.sensors[node_id].sketch_name} {node_id}\"",
"node_name",
"=",
"next",
"(",
"(",
"node",
"[",
"CONF_NODE_NAME",
"]",
"for",
"conf_id",
",",
"node",
"in",
"gateway",
".",
"nodes_config",
".",
"items",
"(",
")",
"if",
"node",
".",
"get",
"(",
"CONF_NODE_NAME",
")",
"is",
"not",
"None",
"and",
"conf_id",
"==",
"node_id",
")",
",",
"node_name",
",",
")",
"return",
"f\"{node_name} {child_id}\""
] | [
129,
0
] | [
140,
36
] | python | en | ['en', 'en', 'en'] | True |
setup_mysensors_platform | (
hass,
domain,
discovery_info,
device_class,
device_args=None,
async_add_entities=None,
) | Set up a MySensors platform. | Set up a MySensors platform. | def setup_mysensors_platform(
hass,
domain,
discovery_info,
device_class,
device_args=None,
async_add_entities=None,
):
"""Set up a MySensors platform."""
# Only act if called via MySensors by discovery event.
# Otherwise gateway is not set up.
if not discovery_info:
return None
if device_args is None:
device_args = ()
new_devices = []
new_dev_ids = discovery_info[ATTR_DEVICES]
for dev_id in new_dev_ids:
devices = get_mysensors_devices(hass, domain)
if dev_id in devices:
continue
gateway_id, node_id, child_id, value_type = dev_id
gateway = get_mysensors_gateway(hass, gateway_id)
if not gateway:
continue
device_class_copy = device_class
if isinstance(device_class, dict):
child = gateway.sensors[node_id].children[child_id]
s_type = gateway.const.Presentation(child.type).name
device_class_copy = device_class[s_type]
name = _get_mysensors_name(gateway, node_id, child_id)
args_copy = (*device_args, gateway, node_id, child_id, name, value_type)
devices[dev_id] = device_class_copy(*args_copy)
new_devices.append(devices[dev_id])
if new_devices:
_LOGGER.info("Adding new devices: %s", new_devices)
if async_add_entities is not None:
async_add_entities(new_devices, True)
return new_devices | [
"def",
"setup_mysensors_platform",
"(",
"hass",
",",
"domain",
",",
"discovery_info",
",",
"device_class",
",",
"device_args",
"=",
"None",
",",
"async_add_entities",
"=",
"None",
",",
")",
":",
"# Only act if called via MySensors by discovery event.",
"# Otherwise gateway is not set up.",
"if",
"not",
"discovery_info",
":",
"return",
"None",
"if",
"device_args",
"is",
"None",
":",
"device_args",
"=",
"(",
")",
"new_devices",
"=",
"[",
"]",
"new_dev_ids",
"=",
"discovery_info",
"[",
"ATTR_DEVICES",
"]",
"for",
"dev_id",
"in",
"new_dev_ids",
":",
"devices",
"=",
"get_mysensors_devices",
"(",
"hass",
",",
"domain",
")",
"if",
"dev_id",
"in",
"devices",
":",
"continue",
"gateway_id",
",",
"node_id",
",",
"child_id",
",",
"value_type",
"=",
"dev_id",
"gateway",
"=",
"get_mysensors_gateway",
"(",
"hass",
",",
"gateway_id",
")",
"if",
"not",
"gateway",
":",
"continue",
"device_class_copy",
"=",
"device_class",
"if",
"isinstance",
"(",
"device_class",
",",
"dict",
")",
":",
"child",
"=",
"gateway",
".",
"sensors",
"[",
"node_id",
"]",
".",
"children",
"[",
"child_id",
"]",
"s_type",
"=",
"gateway",
".",
"const",
".",
"Presentation",
"(",
"child",
".",
"type",
")",
".",
"name",
"device_class_copy",
"=",
"device_class",
"[",
"s_type",
"]",
"name",
"=",
"_get_mysensors_name",
"(",
"gateway",
",",
"node_id",
",",
"child_id",
")",
"args_copy",
"=",
"(",
"*",
"device_args",
",",
"gateway",
",",
"node_id",
",",
"child_id",
",",
"name",
",",
"value_type",
")",
"devices",
"[",
"dev_id",
"]",
"=",
"device_class_copy",
"(",
"*",
"args_copy",
")",
"new_devices",
".",
"append",
"(",
"devices",
"[",
"dev_id",
"]",
")",
"if",
"new_devices",
":",
"_LOGGER",
".",
"info",
"(",
"\"Adding new devices: %s\"",
",",
"new_devices",
")",
"if",
"async_add_entities",
"is",
"not",
"None",
":",
"async_add_entities",
"(",
"new_devices",
",",
"True",
")",
"return",
"new_devices"
] | [
144,
0
] | [
183,
22
] | python | en | ['en', 'da', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the Geizwatch sensor. | Set up the Geizwatch sensor. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Geizwatch sensor."""
name = config.get(CONF_NAME)
description = config.get(CONF_DESCRIPTION)
product_id = config.get(CONF_PRODUCT_ID)
domain = config.get(CONF_LOCALE)
add_entities([Geizwatch(name, description, product_id, domain)], True) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"name",
"=",
"config",
".",
"get",
"(",
"CONF_NAME",
")",
"description",
"=",
"config",
".",
"get",
"(",
"CONF_DESCRIPTION",
")",
"product_id",
"=",
"config",
".",
"get",
"(",
"CONF_PRODUCT_ID",
")",
"domain",
"=",
"config",
".",
"get",
"(",
"CONF_LOCALE",
")",
"add_entities",
"(",
"[",
"Geizwatch",
"(",
"name",
",",
"description",
",",
"product_id",
",",
"domain",
")",
"]",
",",
"True",
")"
] | [
30,
0
] | [
37,
74
] | python | en | ['en', 'pt', 'en'] | True |
Geizwatch.__init__ | (self, name, description, product_id, domain) | Initialize the sensor. | Initialize the sensor. | def __init__(self, name, description, product_id, domain):
"""Initialize the sensor."""
# internal
self._name = name
self._geizhals = Geizhals(product_id, domain)
self._device = Device()
# external
self.description = description
self.product_id = product_id | [
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"description",
",",
"product_id",
",",
"domain",
")",
":",
"# internal",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_geizhals",
"=",
"Geizhals",
"(",
"product_id",
",",
"domain",
")",
"self",
".",
"_device",
"=",
"Device",
"(",
")",
"# external",
"self",
".",
"description",
"=",
"description",
"self",
".",
"product_id",
"=",
"product_id"
] | [
43,
4
] | [
53,
36
] | python | en | ['en', 'en', 'en'] | True |
Geizwatch.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
56,
4
] | [
58,
25
] | python | en | ['en', 'mi', 'en'] | True |
Geizwatch.icon | (self) | Return the icon for the frontend. | Return the icon for the frontend. | def icon(self):
"""Return the icon for the frontend."""
return ICON | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"ICON"
] | [
61,
4
] | [
63,
19
] | python | en | ['en', 'en', 'en'] | True |
Geizwatch.state | (self) | Return the best price of the selected product. | Return the best price of the selected product. | def state(self):
"""Return the best price of the selected product."""
if not self._device.prices:
return None
return self._device.prices[0] | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_device",
".",
"prices",
":",
"return",
"None",
"return",
"self",
".",
"_device",
".",
"prices",
"[",
"0",
"]"
] | [
66,
4
] | [
71,
37
] | python | en | ['en', 'en', 'en'] | True |
Geizwatch.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
while len(self._device.prices) < 4:
self._device.prices.append("None")
attrs = {
"device_name": self._device.name,
"description": self.description,
"unit_of_measurement": self._device.price_currency,
"product_id": self.product_id,
"price1": self._device.prices[0],
"price2": self._device.prices[1],
"price3": self._device.prices[2],
"price4": self._device.prices[3],
}
return attrs | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"while",
"len",
"(",
"self",
".",
"_device",
".",
"prices",
")",
"<",
"4",
":",
"self",
".",
"_device",
".",
"prices",
".",
"append",
"(",
"\"None\"",
")",
"attrs",
"=",
"{",
"\"device_name\"",
":",
"self",
".",
"_device",
".",
"name",
",",
"\"description\"",
":",
"self",
".",
"description",
",",
"\"unit_of_measurement\"",
":",
"self",
".",
"_device",
".",
"price_currency",
",",
"\"product_id\"",
":",
"self",
".",
"product_id",
",",
"\"price1\"",
":",
"self",
".",
"_device",
".",
"prices",
"[",
"0",
"]",
",",
"\"price2\"",
":",
"self",
".",
"_device",
".",
"prices",
"[",
"1",
"]",
",",
"\"price3\"",
":",
"self",
".",
"_device",
".",
"prices",
"[",
"2",
"]",
",",
"\"price4\"",
":",
"self",
".",
"_device",
".",
"prices",
"[",
"3",
"]",
",",
"}",
"return",
"attrs"
] | [
74,
4
] | [
88,
20
] | python | en | ['en', 'en', 'en'] | True |
Geizwatch.update | (self) | Get the latest price from geizhals and updates the state. | Get the latest price from geizhals and updates the state. | def update(self):
"""Get the latest price from geizhals and updates the state."""
self._device = self._geizhals.parse() | [
"def",
"update",
"(",
"self",
")",
":",
"self",
".",
"_device",
"=",
"self",
".",
"_geizhals",
".",
"parse",
"(",
")"
] | [
91,
4
] | [
93,
45
] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass: HomeAssistantType, config) | Set up STT. | Set up STT. | async def async_setup(hass: HomeAssistantType, config):
"""Set up STT."""
providers = {}
async def async_setup_platform(p_type, p_config=None, discovery_info=None):
"""Set up a TTS platform."""
if p_config is None:
p_config = {}
platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type)
if platform is None:
return
try:
provider = await platform.async_get_engine(hass, p_config, discovery_info)
if provider is None:
_LOGGER.error("Error setting up platform %s", p_type)
return
provider.name = p_type
provider.hass = hass
providers[provider.name] = provider
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error setting up platform: %s", p_type)
return
setup_tasks = [
async_setup_platform(p_type, p_config)
for p_type, p_config in config_per_platform(config, DOMAIN)
]
if setup_tasks:
await asyncio.wait(setup_tasks)
# Add discovery support
async def async_platform_discovered(platform, info):
"""Handle for discovered platform."""
await async_setup_platform(platform, discovery_info=info)
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
hass.http.register_view(SpeechToTextView(providers))
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config",
")",
":",
"providers",
"=",
"{",
"}",
"async",
"def",
"async_setup_platform",
"(",
"p_type",
",",
"p_config",
"=",
"None",
",",
"discovery_info",
"=",
"None",
")",
":",
"\"\"\"Set up a TTS platform.\"\"\"",
"if",
"p_config",
"is",
"None",
":",
"p_config",
"=",
"{",
"}",
"platform",
"=",
"await",
"async_prepare_setup_platform",
"(",
"hass",
",",
"config",
",",
"DOMAIN",
",",
"p_type",
")",
"if",
"platform",
"is",
"None",
":",
"return",
"try",
":",
"provider",
"=",
"await",
"platform",
".",
"async_get_engine",
"(",
"hass",
",",
"p_config",
",",
"discovery_info",
")",
"if",
"provider",
"is",
"None",
":",
"_LOGGER",
".",
"error",
"(",
"\"Error setting up platform %s\"",
",",
"p_type",
")",
"return",
"provider",
".",
"name",
"=",
"p_type",
"provider",
".",
"hass",
"=",
"hass",
"providers",
"[",
"provider",
".",
"name",
"]",
"=",
"provider",
"except",
"Exception",
":",
"# pylint: disable=broad-except",
"_LOGGER",
".",
"exception",
"(",
"\"Error setting up platform: %s\"",
",",
"p_type",
")",
"return",
"setup_tasks",
"=",
"[",
"async_setup_platform",
"(",
"p_type",
",",
"p_config",
")",
"for",
"p_type",
",",
"p_config",
"in",
"config_per_platform",
"(",
"config",
",",
"DOMAIN",
")",
"]",
"if",
"setup_tasks",
":",
"await",
"asyncio",
".",
"wait",
"(",
"setup_tasks",
")",
"# Add discovery support",
"async",
"def",
"async_platform_discovered",
"(",
"platform",
",",
"info",
")",
":",
"\"\"\"Handle for discovered platform.\"\"\"",
"await",
"async_setup_platform",
"(",
"platform",
",",
"discovery_info",
"=",
"info",
")",
"discovery",
".",
"async_listen_platform",
"(",
"hass",
",",
"DOMAIN",
",",
"async_platform_discovered",
")",
"hass",
".",
"http",
".",
"register_view",
"(",
"SpeechToTextView",
"(",
"providers",
")",
")",
"return",
"True"
] | [
36,
0
] | [
79,
15
] | python | en | ['en', 'lb', 'en'] | True |
Provider.supported_languages | (self) | Return a list of supported languages. | Return a list of supported languages. | def supported_languages(self) -> List[str]:
"""Return a list of supported languages.""" | [
"def",
"supported_languages",
"(",
"self",
")",
"->",
"List",
"[",
"str",
"]",
":"
] | [
110,
4
] | [
111,
51
] | python | en | ['en', 'en', 'en'] | True |
Provider.supported_formats | (self) | Return a list of supported formats. | Return a list of supported formats. | def supported_formats(self) -> List[AudioFormats]:
"""Return a list of supported formats.""" | [
"def",
"supported_formats",
"(",
"self",
")",
"->",
"List",
"[",
"AudioFormats",
"]",
":"
] | [
115,
4
] | [
116,
49
] | python | en | ['en', 'en', 'en'] | True |
Provider.supported_codecs | (self) | Return a list of supported codecs. | Return a list of supported codecs. | def supported_codecs(self) -> List[AudioCodecs]:
"""Return a list of supported codecs.""" | [
"def",
"supported_codecs",
"(",
"self",
")",
"->",
"List",
"[",
"AudioCodecs",
"]",
":"
] | [
120,
4
] | [
121,
48
] | python | en | ['en', 'en', 'en'] | True |
Provider.supported_bit_rates | (self) | Return a list of supported bit rates. | Return a list of supported bit rates. | def supported_bit_rates(self) -> List[AudioBitRates]:
"""Return a list of supported bit rates.""" | [
"def",
"supported_bit_rates",
"(",
"self",
")",
"->",
"List",
"[",
"AudioBitRates",
"]",
":"
] | [
125,
4
] | [
126,
51
] | python | en | ['en', 'en', 'en'] | True |
Provider.supported_sample_rates | (self) | Return a list of supported sample rates. | Return a list of supported sample rates. | def supported_sample_rates(self) -> List[AudioSampleRates]:
"""Return a list of supported sample rates.""" | [
"def",
"supported_sample_rates",
"(",
"self",
")",
"->",
"List",
"[",
"AudioSampleRates",
"]",
":"
] | [
130,
4
] | [
131,
54
] | python | en | ['en', 'en', 'en'] | True |
Provider.supported_channels | (self) | Return a list of supported channels. | Return a list of supported channels. | def supported_channels(self) -> List[AudioChannels]:
"""Return a list of supported channels.""" | [
"def",
"supported_channels",
"(",
"self",
")",
"->",
"List",
"[",
"AudioChannels",
"]",
":"
] | [
135,
4
] | [
136,
50
] | python | en | ['en', 'en', 'en'] | True |
Provider.async_process_audio_stream | (
self, metadata: SpeechMetadata, stream: StreamReader
) | Process an audio stream to STT service.
Only streaming of content are allow!
| Process an audio stream to STT service. | async def async_process_audio_stream(
self, metadata: SpeechMetadata, stream: StreamReader
) -> SpeechResult:
"""Process an audio stream to STT service.
Only streaming of content are allow!
""" | [
"async",
"def",
"async_process_audio_stream",
"(",
"self",
",",
"metadata",
":",
"SpeechMetadata",
",",
"stream",
":",
"StreamReader",
")",
"->",
"SpeechResult",
":"
] | [
139,
4
] | [
145,
11
] | python | en | ['en', 'lb', 'en'] | True |
Provider.check_metadata | (self, metadata: SpeechMetadata) | Check if given metadata supported by this provider. | Check if given metadata supported by this provider. | def check_metadata(self, metadata: SpeechMetadata) -> bool:
"""Check if given metadata supported by this provider."""
if (
metadata.language not in self.supported_languages
or metadata.format not in self.supported_formats
or metadata.codec not in self.supported_codecs
or metadata.bit_rate not in self.supported_bit_rates
or metadata.sample_rate not in self.supported_sample_rates
or metadata.channel not in self.supported_channels
):
return False
return True | [
"def",
"check_metadata",
"(",
"self",
",",
"metadata",
":",
"SpeechMetadata",
")",
"->",
"bool",
":",
"if",
"(",
"metadata",
".",
"language",
"not",
"in",
"self",
".",
"supported_languages",
"or",
"metadata",
".",
"format",
"not",
"in",
"self",
".",
"supported_formats",
"or",
"metadata",
".",
"codec",
"not",
"in",
"self",
".",
"supported_codecs",
"or",
"metadata",
".",
"bit_rate",
"not",
"in",
"self",
".",
"supported_bit_rates",
"or",
"metadata",
".",
"sample_rate",
"not",
"in",
"self",
".",
"supported_sample_rates",
"or",
"metadata",
".",
"channel",
"not",
"in",
"self",
".",
"supported_channels",
")",
":",
"return",
"False",
"return",
"True"
] | [
148,
4
] | [
159,
19
] | python | en | ['en', 'en', 'en'] | True |
SpeechToTextView.__init__ | (self, providers: Dict[str, Provider]) | Initialize a tts view. | Initialize a tts view. | def __init__(self, providers: Dict[str, Provider]) -> None:
"""Initialize a tts view."""
self.providers = providers | [
"def",
"__init__",
"(",
"self",
",",
"providers",
":",
"Dict",
"[",
"str",
",",
"Provider",
"]",
")",
"->",
"None",
":",
"self",
".",
"providers",
"=",
"providers"
] | [
169,
4
] | [
171,
34
] | python | en | ['en', 'en', 'en'] | True |
SpeechToTextView._metadata_from_header | (request: web.Request) | Extract metadata from header.
X-Speech-Content: format=wav; codec=pcm; sample_rate=16000; bit_rate=16; channel=1; language=de_de
| Extract metadata from header. | def _metadata_from_header(request: web.Request) -> Optional[SpeechMetadata]:
"""Extract metadata from header.
X-Speech-Content: format=wav; codec=pcm; sample_rate=16000; bit_rate=16; channel=1; language=de_de
"""
try:
data = request.headers[istr("X-Speech-Content")].split(";")
except KeyError:
_LOGGER.warning("Missing X-Speech-Content")
return None
# Convert Header data
args = {}
for value in data:
value = value.strip()
args[value.partition("=")[0]] = value.partition("=")[2]
try:
return SpeechMetadata(**args)
except TypeError as err:
_LOGGER.warning("Wrong format of X-Speech-Content: %s", err)
return None | [
"def",
"_metadata_from_header",
"(",
"request",
":",
"web",
".",
"Request",
")",
"->",
"Optional",
"[",
"SpeechMetadata",
"]",
":",
"try",
":",
"data",
"=",
"request",
".",
"headers",
"[",
"istr",
"(",
"\"X-Speech-Content\"",
")",
"]",
".",
"split",
"(",
"\";\"",
")",
"except",
"KeyError",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Missing X-Speech-Content\"",
")",
"return",
"None",
"# Convert Header data",
"args",
"=",
"{",
"}",
"for",
"value",
"in",
"data",
":",
"value",
"=",
"value",
".",
"strip",
"(",
")",
"args",
"[",
"value",
".",
"partition",
"(",
"\"=\"",
")",
"[",
"0",
"]",
"]",
"=",
"value",
".",
"partition",
"(",
"\"=\"",
")",
"[",
"2",
"]",
"try",
":",
"return",
"SpeechMetadata",
"(",
"*",
"*",
"args",
")",
"except",
"TypeError",
"as",
"err",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Wrong format of X-Speech-Content: %s\"",
",",
"err",
")",
"return",
"None"
] | [
174,
4
] | [
195,
23
] | python | en | ['en', 'en', 'en'] | True |
SpeechToTextView.post | (self, request: web.Request, provider: str) | Convert Speech (audio) to text. | Convert Speech (audio) to text. | async def post(self, request: web.Request, provider: str) -> web.Response:
"""Convert Speech (audio) to text."""
if provider not in self.providers:
raise HTTPNotFound()
stt_provider: Provider = self.providers[provider]
# Get metadata
metadata = self._metadata_from_header(request)
if not metadata:
raise HTTPBadRequest()
# Check format
if not stt_provider.check_metadata(metadata):
raise HTTPUnsupportedMediaType()
# Process audio stream
result = await stt_provider.async_process_audio_stream(
metadata, request.content
)
# Return result
return self.json(attr.asdict(result)) | [
"async",
"def",
"post",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
",",
"provider",
":",
"str",
")",
"->",
"web",
".",
"Response",
":",
"if",
"provider",
"not",
"in",
"self",
".",
"providers",
":",
"raise",
"HTTPNotFound",
"(",
")",
"stt_provider",
":",
"Provider",
"=",
"self",
".",
"providers",
"[",
"provider",
"]",
"# Get metadata",
"metadata",
"=",
"self",
".",
"_metadata_from_header",
"(",
"request",
")",
"if",
"not",
"metadata",
":",
"raise",
"HTTPBadRequest",
"(",
")",
"# Check format",
"if",
"not",
"stt_provider",
".",
"check_metadata",
"(",
"metadata",
")",
":",
"raise",
"HTTPUnsupportedMediaType",
"(",
")",
"# Process audio stream",
"result",
"=",
"await",
"stt_provider",
".",
"async_process_audio_stream",
"(",
"metadata",
",",
"request",
".",
"content",
")",
"# Return result",
"return",
"self",
".",
"json",
"(",
"attr",
".",
"asdict",
"(",
"result",
")",
")"
] | [
197,
4
] | [
218,
45
] | python | en | ['en', 'lb', 'en'] | True |
SpeechToTextView.get | (self, request: web.Request, provider: str) | Return provider specific audio information. | Return provider specific audio information. | async def get(self, request: web.Request, provider: str) -> web.Response:
"""Return provider specific audio information."""
if provider not in self.providers:
raise HTTPNotFound()
stt_provider: Provider = self.providers[provider]
return self.json(
{
"languages": stt_provider.supported_languages,
"formats": stt_provider.supported_formats,
"codecs": stt_provider.supported_codecs,
"sample_rates": stt_provider.supported_sample_rates,
"bit_rates": stt_provider.supported_bit_rates,
"channels": stt_provider.supported_channels,
}
) | [
"async",
"def",
"get",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
",",
"provider",
":",
"str",
")",
"->",
"web",
".",
"Response",
":",
"if",
"provider",
"not",
"in",
"self",
".",
"providers",
":",
"raise",
"HTTPNotFound",
"(",
")",
"stt_provider",
":",
"Provider",
"=",
"self",
".",
"providers",
"[",
"provider",
"]",
"return",
"self",
".",
"json",
"(",
"{",
"\"languages\"",
":",
"stt_provider",
".",
"supported_languages",
",",
"\"formats\"",
":",
"stt_provider",
".",
"supported_formats",
",",
"\"codecs\"",
":",
"stt_provider",
".",
"supported_codecs",
",",
"\"sample_rates\"",
":",
"stt_provider",
".",
"supported_sample_rates",
",",
"\"bit_rates\"",
":",
"stt_provider",
".",
"supported_bit_rates",
",",
"\"channels\"",
":",
"stt_provider",
".",
"supported_channels",
",",
"}",
")"
] | [
220,
4
] | [
235,
9
] | python | de | ['it', 'de', 'en'] | False |