| Column | Type |
|---|---|
| identifier | string (length 1–155) |
| parameters | string (length 2–6.09k) |
| docstring | string (length 11–63.4k) |
| docstring_summary | string (length 0–63.4k) |
| function | string (length 29–99.8k) |
| function_tokens | sequence |
| start_point | sequence |
| end_point | sequence |
| language | string (1 class) |
| docstring_language | string (length 2–7) |
| docstring_language_predictions | string (length 18–23) |
| is_langid_reliable | string (2 classes) |

setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the Nest Sensor. No longer used. | Set up the Nest Sensor. |

def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Nest Sensor.

    No longer used.
    """

function_tokens: (tokenized function, elided) | start_point: [69, 0] | end_point: [73, 7] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'pt', 'en'] | is_langid_reliable: True

async_setup_legacy_entry | (hass, entry, async_add_entities) | Set up a Nest sensor based on a config entry. | Set up a Nest sensor based on a config entry. |

async def async_setup_legacy_entry(hass, entry, async_add_entities):
    """Set up a Nest sensor based on a config entry."""
    nest = hass.data[DATA_NEST]

    discovery_info = hass.data.get(DATA_NEST_CONFIG, {}).get(CONF_SENSORS, {})

    # Add all available sensors if no Nest sensor config is set
    if discovery_info == {}:
        conditions = _VALID_SENSOR_TYPES
    else:
        conditions = discovery_info.get(CONF_MONITORED_CONDITIONS, {})

    for variable in conditions:
        if variable in _SENSOR_TYPES_DEPRECATED:
            if variable in DEPRECATED_WEATHER_VARS:
                wstr = (
                    "Nest no longer provides weather data like %s. See "
                    "https://www.home-assistant.io/integrations/#weather "
                    "for a list of other weather integrations to use." % variable
                )
            else:
                wstr = (
                    f"{variable} is no longer a supported "
                    "monitored_conditions. See "
                    "https://www.home-assistant.io/integrations/"
                    "binary_sensor.nest/ for valid options."
                )
            _LOGGER.error(wstr)

    def get_sensors():
        """Get the Nest sensors."""
        all_sensors = []
        for structure in nest.structures():
            all_sensors += [
                NestBasicSensor(structure, None, variable)
                for variable in conditions
                if variable in STRUCTURE_SENSOR_TYPES
            ]

        for structure, device in nest.thermostats():
            all_sensors += [
                NestBasicSensor(structure, device, variable)
                for variable in conditions
                if variable in SENSOR_TYPES
            ]
            all_sensors += [
                NestTempSensor(structure, device, variable)
                for variable in conditions
                if variable in TEMP_SENSOR_TYPES
            ]

        for structure, device in nest.smoke_co_alarms():
            all_sensors += [
                NestBasicSensor(structure, device, variable)
                for variable in conditions
                if variable in PROTECT_SENSOR_TYPES
            ]

        structures_has_camera = {}
        for structure, device in nest.cameras():
            structures_has_camera[structure] = True
        for structure in structures_has_camera:
            all_sensors += [
                NestBasicSensor(structure, None, variable)
                for variable in conditions
                if variable in STRUCTURE_CAMERA_SENSOR_TYPES
            ]

        return all_sensors

    async_add_entities(await hass.async_add_executor_job(get_sensors), True)

function_tokens: (tokenized function, elided) | start_point: [76, 0] | end_point: [146, 76] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'da', 'en'] | is_langid_reliable: True

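The setup above repeats one pattern several times: intersect the user's monitored conditions with a per-device allow-list and build one entity per match. A minimal, self-contained sketch of that pattern follows; the sensor class and type sets are illustrative stand-ins, not the real Home Assistant constants.

# Illustrative stand-ins for the real constants and entity classes.
SENSOR_TYPES = {"temperature", "humidity"}

class FakeSensor:
    def __init__(self, device, variable):
        self.device, self.variable = device, variable

def build_sensors(device, conditions):
    # One entity per requested condition that the device actually supports.
    return [
        FakeSensor(device, variable)
        for variable in conditions
        if variable in SENSOR_TYPES
    ]

print(len(build_sensors("thermostat-1", ["temperature", "humidity", "co_status"])))  # 2
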
NestBasicSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. |

    def state(self):
        """Return the state of the sensor."""
        return self._state

function_tokens: (tokenized function, elided) | start_point: [153, 4] | end_point: [155, 26] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

NestBasicSensor.device_class | (self) | Return the device class of the sensor. | Return the device class of the sensor. |

    def device_class(self):
        """Return the device class of the sensor."""
        return SENSOR_DEVICE_CLASSES.get(self.variable)

function_tokens: (tokenized function, elided) | start_point: [158, 4] | end_point: [160, 55] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

NestBasicSensor.update | (self) | Retrieve latest state. | Retrieve latest state. |

    def update(self):
        """Retrieve latest state."""
        self._unit = SENSOR_UNITS.get(self.variable)

        if self.variable in VARIABLE_NAME_MAPPING:
            self._state = getattr(self.device, VARIABLE_NAME_MAPPING[self.variable])
        elif self.variable in VALUE_MAPPING:
            state = getattr(self.device, self.variable)
            self._state = VALUE_MAPPING[self.variable].get(state, state)
        elif self.variable in PROTECT_SENSOR_TYPES and self.variable != "color_status":
            # keep backward compatibility
            state = getattr(self.device, self.variable)
            self._state = state.capitalize() if state is not None else None
        else:
            self._state = getattr(self.device, self.variable)

function_tokens: (tokenized function, elided) | start_point: [162, 4] | end_point: [176, 61] | language: python | docstring_language: en | docstring_language_predictions: ['es', 'sk', 'en'] | is_langid_reliable: False

NestTempSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. |

    def state(self):
        """Return the state of the sensor."""
        return self._state

function_tokens: (tokenized function, elided) | start_point: [183, 4] | end_point: [185, 26] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

NestTempSensor.device_class | (self) | Return the device class of the sensor. | Return the device class of the sensor. |

    def device_class(self):
        """Return the device class of the sensor."""
        return DEVICE_CLASS_TEMPERATURE

function_tokens: (tokenized function, elided) | start_point: [188, 4] | end_point: [190, 39] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

NestTempSensor.update | (self) | Retrieve latest state. | Retrieve latest state. |

    def update(self):
        """Retrieve latest state."""
        if self.device.temperature_scale == "C":
            self._unit = TEMP_CELSIUS
        else:
            self._unit = TEMP_FAHRENHEIT

        temp = getattr(self.device, self.variable)
        if temp is None:
            self._state = None
            return  # bail out early; round(None, 1) below would raise TypeError

        if isinstance(temp, tuple):
            low, high = temp
            self._state = f"{int(low)}-{int(high)}"
        else:
            self._state = round(temp, 1)

function_tokens: (tokenized function, elided) | start_point: [192, 4] | end_point: [207, 40] | language: python | docstring_language: en | docstring_language_predictions: ['es', 'sk', 'en'] | is_langid_reliable: False

setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the platform in Home Assistant and Case Information. | Set up the platform in Home Assistant and Case Information. |

def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the platform in Home Assistant and Case Information."""
    uscis = UscisSensor(config["case_id"], config[CONF_NAME])
    uscis.update()
    if uscis.valid_case_id:
        add_entities([uscis])
    else:
        _LOGGER.error("Setup of the USCIS sensor failed; check that your case ID is valid")

function_tokens: (tokenized function, elided) | start_point: [25, 0] | end_point: [32, 79] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

UscisSensor.__init__ | (self, case, name) | Initialize the sensor. | Initialize the sensor. |

    def __init__(self, case, name):
        """Initialize the sensor."""
        self._state = None
        self._case_id = case
        self._attributes = None
        self.valid_case_id = None
        self._name = name

function_tokens: (tokenized function, elided) | start_point: [43, 4] | end_point: [49, 25] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

UscisSensor.name | (self) | Return the name. | Return the name. |

    def name(self):
        """Return the name."""
        return self._name

function_tokens: (tokenized function, elided) | start_point: [52, 4] | end_point: [54, 25] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'ig', 'en'] | is_langid_reliable: True

UscisSensor.state | (self) | Return the state. | Return the state. |

    def state(self):
        """Return the state."""
        return self._state

function_tokens: (tokenized function, elided) | start_point: [57, 4] | end_point: [59, 26] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

UscisSensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. |

    def device_state_attributes(self):
        """Return the state attributes."""
        return self._attributes

function_tokens: (tokenized function, elided) | start_point: [62, 4] | end_point: [64, 31] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

UscisSensor.update | (self) | Fetch data from the USCIS website and update state attributes. | Fetch data from the USCIS website and update state attributes. |

    def update(self):
        """Fetch data from the USCIS website and update state attributes."""
        try:
            status = uscisstatus.get_case_status(self._case_id)
            self._attributes = {self.CURRENT_STATUS: status["status"]}
            self._state = status["date"]
            self.valid_case_id = True
        except ValueError:
            # _LOGGER is a Logger object, not callable: log through .error()
            _LOGGER.error("Please check that you have a valid USCIS case ID")
            self.valid_case_id = False

function_tokens: (tokenized function, elided) | start_point: [67, 4] | end_point: [77, 38] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

load_tf_weights_in_albert | (model, config, tf_checkpoint_path) | Load tf checkpoints in a pytorch model. | Load tf checkpoints in a pytorch model. |

def load_tf_weights_in_albert(model, config, tf_checkpoint_path):
    """Load tf checkpoints in a pytorch model."""
    try:
        import re
        import numpy as np
        import tensorflow as tf
    except ImportError:
        logger.error(
            "Loading a TensorFlow model in PyTorch requires TensorFlow to be installed. Please see "
            "https://www.tensorflow.org/install/ for installation instructions."
        )
        raise
    tf_path = os.path.abspath(tf_checkpoint_path)
    logger.info("Converting TensorFlow checkpoint from {}".format(tf_path))
    # Load weights from TF model
    init_vars = tf.train.list_variables(tf_path)
    names = []
    arrays = []
    for name, shape in init_vars:
        logger.info("Loading TF weight {} with shape {}".format(name, shape))
        array = tf.train.load_variable(tf_path, name)
        names.append(name)
        arrays.append(array)

    for name, array in zip(names, arrays):
        print(name)

    for name, array in zip(names, arrays):
        original_name = name

        # If saved from the TF HUB module
        name = name.replace("module/", "")

        # Renaming and simplifying
        name = name.replace("ffn_1", "ffn")
        name = name.replace("bert/", "albert/")
        name = name.replace("attention_1", "attention")
        name = name.replace("transform/", "")
        name = name.replace("LayerNorm_1", "full_layer_layer_norm")
        name = name.replace("LayerNorm", "attention/LayerNorm")
        name = name.replace("transformer/", "")

        # The feed forward layer had an 'intermediate' step which has been abstracted away
        name = name.replace("intermediate/dense/", "")
        name = name.replace("ffn/intermediate/output/dense/", "ffn_output/")

        # ALBERT attention was split between self and output which have been abstracted away
        name = name.replace("/output/", "/")
        name = name.replace("/self/", "/")

        # The pooler is a linear layer
        name = name.replace("pooler/dense", "pooler")

        # The classifier was simplified to predictions from cls/predictions
        name = name.replace("cls/predictions", "predictions")
        name = name.replace("predictions/attention", "predictions")

        # Naming was changed to be more explicit
        name = name.replace("embeddings/attention", "embeddings")
        name = name.replace("inner_group_", "albert_layers/")
        name = name.replace("group_", "albert_layer_groups/")

        # Classifier
        if len(name.split("/")) == 1 and ("output_bias" in name or "output_weights" in name):
            name = "classifier/" + name

        # No ALBERT model currently handles the next sentence prediction task
        if "seq_relationship" in name:
            name = name.replace("seq_relationship/output_", "sop_classifier/classifier/")
            name = name.replace("weights", "weight")

        name = name.split("/")

        # Ignore the gradients applied by the LAMB/ADAM optimizers.
        if (
            "adam_m" in name
            or "adam_v" in name
            or "AdamWeightDecayOptimizer" in name
            or "AdamWeightDecayOptimizer_1" in name
            or "global_step" in name
        ):
            logger.info("Skipping {}".format("/".join(name)))
            continue

        pointer = model
        for m_name in name:
            if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
                scope_names = re.split(r"_(\d+)", m_name)
            else:
                scope_names = [m_name]

            if scope_names[0] == "kernel" or scope_names[0] == "gamma":
                pointer = getattr(pointer, "weight")
            elif scope_names[0] == "output_bias" or scope_names[0] == "beta":
                pointer = getattr(pointer, "bias")
            elif scope_names[0] == "output_weights":
                pointer = getattr(pointer, "weight")
            elif scope_names[0] == "squad":
                pointer = getattr(pointer, "classifier")
            else:
                try:
                    pointer = getattr(pointer, scope_names[0])
                except AttributeError:
                    logger.info("Skipping {}".format("/".join(name)))
                    continue
            if len(scope_names) >= 2:
                num = int(scope_names[1])
                pointer = pointer[num]

        if m_name[-11:] == "_embeddings":
            pointer = getattr(pointer, "weight")
        elif m_name == "kernel":
            array = np.transpose(array)
        try:
            assert (
                pointer.shape == array.shape
            ), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched"
        except AssertionError as e:
            e.args += (pointer.shape, array.shape)
            raise
        print("Initialize PyTorch weight {} from {}".format(name, original_name))
        pointer.data = torch.from_numpy(array)

    return model

function_tokens: (tokenized function, elided) | start_point: [72, 0] | end_point: [196, 16] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

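For context, this loader is normally driven by a small conversion script. A hedged sketch of that flow, mirroring the transformers checkpoint-conversion pattern (the file paths are placeholders, and the exact import surface may vary by transformers version):

# Hedged usage sketch: convert a TF ALBERT checkpoint into a PyTorch state dict.
import torch
from transformers import AlbertConfig, AlbertForPreTraining, load_tf_weights_in_albert

config = AlbertConfig.from_json_file("albert_base/albert_config.json")  # placeholder path
model = AlbertForPreTraining(config)  # fresh PyTorch model with matching parameter shapes
load_tf_weights_in_albert(model, config, "albert_base/model.ckpt-best")  # placeholder path
torch.save(model.state_dict(), "pytorch_model.bin")
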
AlbertPreTrainedModel._init_weights | (self, module) | Initialize the weights. | Initialize the weights. |

    def _init_weights(self, module):
        """Initialize the weights."""
        if isinstance(module, nn.Linear):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

function_tokens: (tokenized function, elided) | start_point: [491, 4] | end_point: [505, 41] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

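Hooks like this are applied recursively over every submodule via nn.Module.apply. A minimal self-contained sketch of that idiom; the std of 0.02 is an illustrative stand-in for config.initializer_range:

import torch.nn as nn

def init_weights(module):
    # Re-initialize Linear layers with a small normal distribution (std is illustrative).
    if isinstance(module, nn.Linear):
        module.weight.data.normal_(mean=0.0, std=0.02)
        if module.bias is not None:
            module.bias.data.zero_()

model = nn.Sequential(nn.Linear(8, 8), nn.ReLU(), nn.Linear(8, 2))
model.apply(init_weights)  # visits every submodule, then the container itself
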
HerbertTokenizerFast.build_inputs_with_special_tokens | (self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None) | Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating and adding special tokens. A HerBERT sequence, like a BERT sequence, uses ``<s> X </s>`` for a single sequence and ``<s> A </s> B </s>`` for a pair. | Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating and adding special tokens. |

    def build_inputs_with_special_tokens(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Build model inputs from a sequence or a pair of sequences for sequence classification tasks by
        concatenating and adding special tokens. A HerBERT sequence, like a BERT sequence, has the following format:

        - single sequence: ``<s> X </s>``
        - pair of sequences: ``<s> A </s> B </s>``

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of IDs to which the special tokens will be added.
            token_ids_1 (:obj:`List[int]`, `optional`):
                Optional second list of IDs for sequence pairs.

        Returns:
            :obj:`List[int]`: List of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
        """
        cls = [self.cls_token_id]
        sep = [self.sep_token_id]

        if token_ids_1 is None:
            return cls + token_ids_0 + sep
        return cls + token_ids_0 + sep + token_ids_1 + sep

function_tokens: (tokenized function, elided) | start_point: [82, 4] | end_point: [107, 58] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

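To make the format concrete, here is a small sketch of what the method computes, using made-up token IDs (0 standing in for ``<s>``/cls and 2 for ``</s>``/sep; the real IDs depend on the vocabulary):

# Stand-in special-token IDs; real values come from the tokenizer's vocab.
cls, sep = [0], [2]
ids_a = [101, 102, 103]
ids_b = [201, 202]

single = cls + ids_a + sep              # [0, 101, 102, 103, 2]            -> <s> A </s>
pair = cls + ids_a + sep + ids_b + sep  # [0, 101, 102, 103, 2, 201, 202, 2] -> <s> A </s> B </s>
print(single, pair)
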
HerbertTokenizerFast.get_special_tokens_mask | (self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False) | Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer ``prepare_for_model`` method. | Retrieve sequence ids from a token list that has no special tokens added. |

    def get_special_tokens_mask(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
    ) -> List[int]:
        """
        Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding
        special tokens using the tokenizer ``prepare_for_model`` method.

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of IDs.
            token_ids_1 (:obj:`List[int]`, `optional`):
                Optional second list of IDs for sequence pairs.
            already_has_special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`):
                Whether or not the token list is already formatted with special tokens for the model.

        Returns:
            :obj:`List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
        """
        if already_has_special_tokens:
            if token_ids_1 is not None:
                raise ValueError(
                    "You should not supply a second sequence if the provided sequence of "
                    "ids is already formatted with special tokens for the model."
                )
            return list(map(lambda x: 1 if x in [self.sep_token_id, self.cls_token_id] else 0, token_ids_0))

        if token_ids_1 is None:
            return [1] + ([0] * len(token_ids_0)) + [1]
        return [1] + ([0] * len(token_ids_0)) + [1] + ([0] * len(token_ids_1)) + [1]

function_tokens: (tokenized function, elided) | start_point: [109, 4] | end_point: [137, 84] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

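A quick illustration of the mask shapes this returns, using toy lengths; the 1s sit exactly where ``build_inputs_with_special_tokens`` places special tokens:

ids_a = [101, 102, 103]
ids_b = [201, 202]

mask_single = [1] + [0] * len(ids_a) + [1]                         # [1, 0, 0, 0, 1]
mask_pair = [1] + [0] * len(ids_a) + [1] + [0] * len(ids_b) + [1]  # [1, 0, 0, 0, 1, 0, 0, 1]
print(mask_single, mask_pair)
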
HerbertTokenizerFast.create_token_type_ids_from_sequences | (self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None) | Create a mask from the two sequences passed to be used in a sequence-pair classification task. A HerBERT sequence pair mask, like a BERT one, marks the first sequence with 0s and the second with 1s. | Create a mask from the two sequences passed to be used in a sequence-pair classification task. |

    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task. A HerBERT
        sequence pair mask, like a BERT one, has the following format:

        ::

            0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
            | first sequence    | second sequence |

        Args:
            token_ids_0 (:obj:`List[int]`):
                List of IDs.
            token_ids_1 (:obj:`List[int]`, `optional`):
                Optional second list of IDs for sequence pairs.

        Returns:
            :obj:`List[int]`: List of `token type IDs <../glossary.html#token-type-ids>`_ according to the given
            sequence(s).
        """
        sep = [self.sep_token_id]
        cls = [self.cls_token_id]

        if token_ids_1 is None:
            return len(cls + token_ids_0 + sep) * [0]
        return len(cls + token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1]

function_tokens: (tokenized function, elided) | start_point: [139, 4] | end_point: [166, 80] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

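With the same toy IDs as above, segment 0 covers ``<s> A </s>`` and segment 1 covers ``B </s>``:

ids_a = [101, 102, 103]
ids_b = [201, 202]

# 1 cls + len(A) + 1 sep zeros, then len(B) + 1 sep ones.
type_ids = [0] * (1 + len(ids_a) + 1) + [1] * (len(ids_b) + 1)
print(type_ids)  # [0, 0, 0, 0, 0, 1, 1, 1]
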
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the available PM sensors. | Set up the available PM sensors. |

def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the available PM sensors."""
    try:
        coll = pm.PMDataCollector(
            config.get(CONF_SERIAL_DEVICE), pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)]
        )
    except KeyError:
        _LOGGER.error(
            "Brand %s not supported\n supported brands: %s",
            config.get(CONF_BRAND),
            pm.SUPPORTED_SENSORS.keys(),
        )
        return
    except OSError as err:
        _LOGGER.error(
            "Could not open serial connection to %s (%s)",
            config.get(CONF_SERIAL_DEVICE),
            err,
        )
        return

    dev = []
    for pmname in coll.supported_values():
        if config.get(CONF_NAME) is not None:
            name = "{} PM{}".format(config.get(CONF_NAME), pmname)
        else:
            name = f"PM{pmname}"
        dev.append(ParticulateMatterSensor(coll, name, pmname))

    add_entities(dev)

function_tokens: (tokenized function, elided) | start_point: [25, 0] | end_point: [55, 21] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

ParticulateMatterSensor.__init__ | (self, pmDataCollector, name, pmname) | Initialize a new PM sensor. | Initialize a new PM sensor. |

    def __init__(self, pmDataCollector, name, pmname):
        """Initialize a new PM sensor."""
        self._name = name
        self._pmname = pmname
        self._state = None
        self._collector = pmDataCollector

function_tokens: (tokenized function, elided) | start_point: [61, 4] | end_point: [66, 41] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'co', 'en'] | is_langid_reliable: True

ParticulateMatterSensor.name | (self) | Return the name of the sensor. | Return the name of the sensor. |

    def name(self):
        """Return the name of the sensor."""
        return self._name

function_tokens: (tokenized function, elided) | start_point: [69, 4] | end_point: [71, 25] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'mi', 'en'] | is_langid_reliable: True

ParticulateMatterSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. |

    def state(self):
        """Return the state of the sensor."""
        return self._state

function_tokens: (tokenized function, elided) | start_point: [74, 4] | end_point: [76, 26] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

ParticulateMatterSensor.unit_of_measurement | (self) | Return the unit of measurement of this entity, if any. | Return the unit of measurement of this entity, if any. |

    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER

function_tokens: (tokenized function, elided) | start_point: [79, 4] | end_point: [81, 55] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

ParticulateMatterSensor.update | (self) | Read from sensor and update the state. | Read from sensor and update the state. |

    def update(self):
        """Read from sensor and update the state."""
        _LOGGER.debug("Reading data from PM sensor")
        try:
            self._state = self._collector.read_data()[self._pmname]
        except KeyError:
            _LOGGER.error("Could not read PM%s value", self._pmname)

function_tokens: (tokenized function, elided) | start_point: [83, 4] | end_point: [89, 68] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

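The platform above assumes a collector object exposing supported_values() and read_data(); the real implementation lives in the pmsensor library and reads from a serial port. A hedged stub of that contract, for illustration only:

class FakePMDataCollector:
    """Illustrative stand-in for pmsensor's PMDataCollector (not the real class)."""

    def supported_values(self):
        # Particle sizes this sensor reports, e.g. PM2.5 and PM10.
        return ["2.5", "10"]

    def read_data(self):
        # Mapping of particle size -> concentration in µg/m³.
        return {"2.5": 12, "10": 21}

coll = FakePMDataCollector()
# One sensor per supported size; update() then indexes read_data() by size.
print({f"PM{k}": v for k, v in coll.read_data().items()})  # {'PM2.5': 12, 'PM10': 21}
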
async_setup_platform | (hass, config, async_add_entities, discovery_info=None) | Set up a generic IP Camera. | Set up a generic IP Camera. |

async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up a generic IP Camera."""
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)

    async_add_entities([GenericCamera(hass, config)])

function_tokens: (tokenized function, elided) | start_point: [62, 0] | end_point: [67, 53] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'su', 'en'] | is_langid_reliable: True

GenericCamera.__init__ | (self, hass, device_info) | Initialize a generic camera. | Initialize a generic camera. |

    def __init__(self, hass, device_info):
        """Initialize a generic camera."""
        super().__init__()
        self.hass = hass
        self._authentication = device_info.get(CONF_AUTHENTICATION)
        self._name = device_info.get(CONF_NAME)
        self._still_image_url = device_info[CONF_STILL_IMAGE_URL]
        self._stream_source = device_info.get(CONF_STREAM_SOURCE)
        self._still_image_url.hass = hass
        if self._stream_source is not None:
            self._stream_source.hass = hass
        self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE]
        self._frame_interval = 1 / device_info[CONF_FRAMERATE]
        self._supported_features = SUPPORT_STREAM if self._stream_source else 0
        self.content_type = device_info[CONF_CONTENT_TYPE]
        self.verify_ssl = device_info[CONF_VERIFY_SSL]

        username = device_info.get(CONF_USERNAME)
        password = device_info.get(CONF_PASSWORD)
        if username and password:
            if self._authentication == HTTP_DIGEST_AUTHENTICATION:
                self._auth = HTTPDigestAuth(username, password)
            else:
                self._auth = aiohttp.BasicAuth(username, password=password)
        else:
            self._auth = None

        self._last_url = None
        self._last_image = None

function_tokens: (tokenized function, elided) | start_point: [73, 4] | end_point: [102, 31] | language: python | docstring_language: co | docstring_language_predictions: ['en', 'co', 'it'] | is_langid_reliable: False

GenericCamera.supported_features | (self) | Return supported features for this camera. | Return supported features for this camera. |

    def supported_features(self):
        """Return supported features for this camera."""
        return self._supported_features

function_tokens: (tokenized function, elided) | start_point: [105, 4] | end_point: [107, 39] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

GenericCamera.frame_interval | (self) | Return the interval between frames of the mjpeg stream. | Return the interval between frames of the mjpeg stream. |

    def frame_interval(self):
        """Return the interval between frames of the mjpeg stream."""
        return self._frame_interval

function_tokens: (tokenized function, elided) | start_point: [110, 4] | end_point: [112, 35] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

GenericCamera.camera_image | (self) | Return bytes of camera image. | Return bytes of camera image. |

    def camera_image(self):
        """Return bytes of camera image."""
        return asyncio.run_coroutine_threadsafe(
            self.async_camera_image(), self.hass.loop
        ).result()

function_tokens: (tokenized function, elided) | start_point: [114, 4] | end_point: [118, 18] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'zu', 'en'] | is_langid_reliable: True

GenericCamera.async_camera_image | (self) | Return a still image response from the camera. | Return a still image response from the camera. |

    async def async_camera_image(self):
        """Return a still image response from the camera."""
        try:
            url = self._still_image_url.async_render(parse_result=False)
        except TemplateError as err:
            _LOGGER.error("Error parsing template %s: %s", self._still_image_url, err)
            return self._last_image

        if url == self._last_url and self._limit_refetch:
            return self._last_image

        # aiohttp doesn't support DigestAuth yet
        if self._authentication == HTTP_DIGEST_AUTHENTICATION:

            def fetch():
                """Read image from a URL."""
                try:
                    response = requests.get(
                        url, timeout=10, auth=self._auth, verify=self.verify_ssl
                    )
                    return response.content
                except requests.exceptions.RequestException as error:
                    _LOGGER.error(
                        "Error getting new camera image from %s: %s", self._name, error
                    )
                    return self._last_image

            self._last_image = await self.hass.async_add_executor_job(fetch)
        # async
        else:
            try:
                websession = async_get_clientsession(
                    self.hass, verify_ssl=self.verify_ssl
                )
                with async_timeout.timeout(10):
                    response = await websession.get(url, auth=self._auth)
                    self._last_image = await response.read()
            except asyncio.TimeoutError:
                _LOGGER.error("Timeout getting camera image from %s", self._name)
                return self._last_image
            except aiohttp.ClientError as err:
                _LOGGER.error(
                    "Error getting new camera image from %s: %s", self._name, err
                )
                return self._last_image

        self._last_url = url
        return self._last_image

function_tokens: (tokenized function, elided) | start_point: [120, 4] | end_point: [167, 31] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

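The digest-auth branch above shows a common asyncio pattern: wrap a blocking HTTP call in a plain function and hand it to an executor so the event loop stays responsive. A minimal self-contained sketch of that pattern, independent of Home Assistant:

import asyncio
import time

def blocking_fetch():
    # Stand-in for a blocking call such as requests.get(url).content
    time.sleep(0.1)
    return b"image-bytes"

async def main():
    loop = asyncio.get_running_loop()
    # Run the blocking function on the default thread pool,
    # mirroring hass.async_add_executor_job(fetch) above.
    data = await loop.run_in_executor(None, blocking_fetch)
    print(len(data))

asyncio.run(main())
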
GenericCamera.name | (self) | Return the name of this device. | Return the name of this device. |

    def name(self):
        """Return the name of this device."""
        return self._name

function_tokens: (tokenized function, elided) | start_point: [170, 4] | end_point: [172, 25] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

GenericCamera.stream_source | (self) | Return the source of the stream. | Return the source of the stream. |

    async def stream_source(self):
        """Return the source of the stream."""
        if self._stream_source is None:
            return None

        try:
            return self._stream_source.async_render(parse_result=False)
        except TemplateError as err:
            _LOGGER.error("Error parsing template %s: %s", self._stream_source, err)
            return None

function_tokens: (tokenized function, elided) | start_point: [174, 4] | end_point: [183, 23] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

BaseTrainer.train | (self) | Override the method to train. | Override the method to train. |

    def train(self):
        """
        Override the method to train.
        """
        raise NotImplementedError

function_tokens: (tokenized function, elided) | start_point: [9, 4] | end_point: [13, 33] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

BaseTrainer.validate | (self) | Override the method to validate. | Override the method to validate. |

    def validate(self):
        """
        Override the method to validate.
        """
        raise NotImplementedError

function_tokens: (tokenized function, elided) | start_point: [16, 4] | end_point: [20, 33] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

BaseTrainer.export | (self, file) | Override the method to export to file. Parameters: file (str): file path to export to. | Override the method to export to file. |

    def export(self, file):
        """
        Override the method to export to file.

        Parameters
        ----------
        file : str
            File path to export to.
        """
        raise NotImplementedError

function_tokens: (tokenized function, elided) | start_point: [23, 4] | end_point: [32, 33] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

BaseTrainer.checkpoint | (self) | Override to dump a checkpoint. | Override to dump a checkpoint. |

    def checkpoint(self):
        """
        Override to dump a checkpoint.
        """
        raise NotImplementedError

function_tokens: (tokenized function, elided) | start_point: [35, 4] | end_point: [39, 33] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False

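Taken together, these four stubs define an abstract trainer contract. A hedged sketch of a concrete subclass; the base class is restated locally and the training logic is purely illustrative, not from the source:

import json

class BaseTrainer:
    # Local restatement of the abstract interface above, for a runnable example.
    def train(self):
        raise NotImplementedError

    def validate(self):
        raise NotImplementedError

    def export(self, file):
        raise NotImplementedError

    def checkpoint(self):
        raise NotImplementedError

class ToyTrainer(BaseTrainer):
    def __init__(self):
        self.best_metric = 0.0

    def train(self):
        self.best_metric = 0.9  # pretend training improved the metric

    def validate(self):
        return self.best_metric

    def export(self, file):
        with open(file, "w") as f:
            json.dump({"best_metric": self.best_metric}, f)

    def checkpoint(self):
        return {"best_metric": self.best_metric}

trainer = ToyTrainer()
trainer.train()
print(trainer.validate())  # 0.9
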
mock_credstash | () | Mock credstash so it doesn't connect to the internet. | Mock credstash so it doesn't connect to the internet. |

def mock_credstash():
    """Mock credstash so it doesn't connect to the internet."""
    with patch.object(yaml_loader, "credstash") as mock_credstash:
        mock_credstash.getSecret.return_value = None
        yield mock_credstash

function_tokens: (tokenized function, elided) | start_point: [18, 0] | end_point: [22, 28] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

test_simple_list | () | Test simple list. | Test simple list. |

def test_simple_list():
    """Test simple list."""
    conf = "config:\n - simple\n - list"
    with io.StringIO(conf) as file:
        doc = yaml_loader.yaml.safe_load(file)
    assert doc["config"] == ["simple", "list"]

function_tokens: (tokenized function, elided) | start_point: [25, 0] | end_point: [30, 46] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'et', 'en'] | is_langid_reliable: True

test_simple_dict | () | Test simple dict. | Test simple dict. |

def test_simple_dict():
    """Test simple dict."""
    conf = "key: value"
    with io.StringIO(conf) as file:
        doc = yaml_loader.yaml.safe_load(file)
    assert doc["key"] == "value"

function_tokens: (tokenized function, elided) | start_point: [33, 0] | end_point: [38, 32] | language: python | docstring_language: en | docstring_language_predictions: ['it', 'la', 'en'] | is_langid_reliable: False

test_unhashable_key | () | Test an unhashable key. | Test an unhashable key. |

def test_unhashable_key():
    """Test an unhashable key."""
    files = {YAML_CONFIG_FILE: "message:\n {{ states.state }}"}
    with pytest.raises(HomeAssistantError), patch_yaml_files(files):
        load_yaml_config_file(YAML_CONFIG_FILE)

function_tokens: (tokenized function, elided) | start_point: [41, 0] | end_point: [45, 47] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'ga', 'es'] | is_langid_reliable: False

test_no_key | () | Test item without a key. | Test item without a key. |

def test_no_key():
    """Test item without a key."""
    files = {YAML_CONFIG_FILE: "a: a\nnokeyhere"}
    with pytest.raises(HomeAssistantError), patch_yaml_files(files):
        yaml.load_yaml(YAML_CONFIG_FILE)

function_tokens: (tokenized function, elided) | start_point: [48, 0] | end_point: [52, 40] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

test_environment_variable | () | Test config file with environment variable. | Test config file with environment variable. |

def test_environment_variable():
    """Test config file with environment variable."""
    os.environ["PASSWORD"] = "secret_password"
    conf = "password: !env_var PASSWORD"
    with io.StringIO(conf) as file:
        doc = yaml_loader.yaml.safe_load(file)
    assert doc["password"] == "secret_password"
    del os.environ["PASSWORD"]

function_tokens: (tokenized function, elided) | start_point: [55, 0] | end_point: [62, 30] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

test_environment_variable_default | () | Test config file with default value for environment variable. | Test config file with default value for environment variable. |

def test_environment_variable_default():
    """Test config file with default value for environment variable."""
    conf = "password: !env_var PASSWORD secret_password"
    with io.StringIO(conf) as file:
        doc = yaml_loader.yaml.safe_load(file)
    assert doc["password"] == "secret_password"

function_tokens: (tokenized function, elided) | start_point: [65, 0] | end_point: [70, 47] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

test_invalid_environment_variable | () | Test config file with no environment variable set. | Test config file with no environment variable set. |

def test_invalid_environment_variable():
    """Test config file with no environment variable set."""
    conf = "password: !env_var PASSWORD"
    with pytest.raises(HomeAssistantError):
        with io.StringIO(conf) as file:
            yaml_loader.yaml.safe_load(file)

function_tokens: (tokenized function, elided) | start_point: [73, 0] | end_point: [78, 44] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

test_include_yaml | () | Test include yaml. | Test include yaml. | def test_include_yaml():
"""Test include yaml."""
with patch_yaml_files({"test.yaml": "value"}):
conf = "key: !include test.yaml"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == "value"
with patch_yaml_files({"test.yaml": None}):
conf = "key: !include test.yaml"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == {} | [
"def",
"test_include_yaml",
"(",
")",
":",
"with",
"patch_yaml_files",
"(",
"{",
"\"test.yaml\"",
":",
"\"value\"",
"}",
")",
":",
"conf",
"=",
"\"key: !include test.yaml\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"doc",
"[",
"\"key\"",
"]",
"==",
"\"value\"",
"with",
"patch_yaml_files",
"(",
"{",
"\"test.yaml\"",
":",
"None",
"}",
")",
":",
"conf",
"=",
"\"key: !include test.yaml\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"doc",
"[",
"\"key\"",
"]",
"==",
"{",
"}"
] | [
81,
0
] | [
93,
35
] | python | en | ['tr', 'en', 'en'] | True |
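
Editorial sketch (not part of the dataset): the `!include` behavior shown above, reusing the `patch_yaml_files` helper the tests rely on (its `tests.common` location is an assumption).

import io

import homeassistant.util.yaml.loader as yaml_loader  # assumed import path
from tests.common import patch_yaml_files  # assumed helper location

with patch_yaml_files({"test.yaml": "value"}):
    with io.StringIO("key: !include test.yaml") as stream:
        doc = yaml_loader.yaml.safe_load(stream)
assert doc["key"] == "value"  # an empty included file yields {} instead
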
test_include_dir_list | (mock_walk) | Test include dir list yaml. | Test include dir list yaml. | def test_include_dir_list(mock_walk):
"""Test include dir list yaml."""
mock_walk.return_value = [["/test", [], ["two.yaml", "one.yaml"]]]
with patch_yaml_files({"/test/one.yaml": "one", "/test/two.yaml": "two"}):
conf = "key: !include_dir_list /test"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == sorted(["one", "two"]) | [
"def",
"test_include_dir_list",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"]",
",",
"[",
"\"two.yaml\"",
",",
"\"one.yaml\"",
"]",
"]",
"]",
"with",
"patch_yaml_files",
"(",
"{",
"\"/test/one.yaml\"",
":",
"\"one\"",
",",
"\"/test/two.yaml\"",
":",
"\"two\"",
"}",
")",
":",
"conf",
"=",
"\"key: !include_dir_list /test\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"doc",
"[",
"\"key\"",
"]",
"==",
"sorted",
"(",
"[",
"\"one\"",
",",
"\"two\"",
"]",
")"
] | [
97,
0
] | [
105,
55
] | python | az | ['oc', 'az', 'sw'] | False |
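
Editorial sketch (not part of the dataset): `!include_dir_list` turns every `*.yaml` file directly under the directory into one list element. `os.walk` is patched because `patch_yaml_files` only fakes file contents; the exact patch target inside the loader module is an assumption.

import io
from unittest.mock import patch

import homeassistant.util.yaml.loader as yaml_loader  # assumed import path
from tests.common import patch_yaml_files  # assumed helper location

with patch("homeassistant.util.yaml.loader.os.walk") as mock_walk:  # assumed target
    mock_walk.return_value = [["/test", [], ["one.yaml", "two.yaml"]]]
    with patch_yaml_files({"/test/one.yaml": "one", "/test/two.yaml": "two"}):
        with io.StringIO("key: !include_dir_list /test") as stream:
            doc = yaml_loader.yaml.safe_load(stream)
assert sorted(doc["key"]) == ["one", "two"]
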
test_include_dir_list_recursive | (mock_walk) | Test include dir recursive list yaml. | Test include dir recursive list yaml. | def test_include_dir_list_recursive(mock_walk):
"""Test include dir recursive list yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["zero.yaml"]],
["/test/tmp2", [], ["one.yaml", "two.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/zero.yaml": "zero",
"/test/tmp2/one.yaml": "one",
"/test/tmp2/two.yaml": "two",
}
):
conf = "key: !include_dir_list /test"
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert sorted(doc["key"]) == sorted(["zero", "one", "two"]) | [
"def",
"test_include_dir_list_recursive",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"\"tmp2\"",
",",
"\".ignore\"",
",",
"\"ignore\"",
"]",
",",
"[",
"\"zero.yaml\"",
"]",
"]",
",",
"[",
"\"/test/tmp2\"",
",",
"[",
"]",
",",
"[",
"\"one.yaml\"",
",",
"\"two.yaml\"",
"]",
"]",
",",
"[",
"\"/test/ignore\"",
",",
"[",
"]",
",",
"[",
"\".ignore.yaml\"",
"]",
"]",
",",
"]",
"with",
"patch_yaml_files",
"(",
"{",
"\"/test/zero.yaml\"",
":",
"\"zero\"",
",",
"\"/test/tmp2/one.yaml\"",
":",
"\"one\"",
",",
"\"/test/tmp2/two.yaml\"",
":",
"\"two\"",
",",
"}",
")",
":",
"conf",
"=",
"\"key: !include_dir_list /test\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"assert",
"(",
"\".ignore\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
")",
",",
"\"Expecting .ignore in here\"",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"\"tmp2\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"\".ignore\"",
"not",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"sorted",
"(",
"doc",
"[",
"\"key\"",
"]",
")",
"==",
"sorted",
"(",
"[",
"\"zero\"",
",",
"\"one\"",
",",
"\"two\"",
"]",
")"
] | [
109,
0
] | [
132,
71
] | python | ro | ['tr', 'ro', 'sw'] | False |
test_include_dir_named | (mock_walk) | Test include dir named yaml. | Test include dir named yaml. | def test_include_dir_named(mock_walk):
"""Test include dir named yaml."""
mock_walk.return_value = [
["/test", [], ["first.yaml", "second.yaml", "secrets.yaml"]]
]
with patch_yaml_files({"/test/first.yaml": "one", "/test/second.yaml": "two"}):
conf = "key: !include_dir_named /test"
correct = {"first": "one", "second": "two"}
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == correct | [
"def",
"test_include_dir_named",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"]",
",",
"[",
"\"first.yaml\"",
",",
"\"second.yaml\"",
",",
"\"secrets.yaml\"",
"]",
"]",
"]",
"with",
"patch_yaml_files",
"(",
"{",
"\"/test/first.yaml\"",
":",
"\"one\"",
",",
"\"/test/second.yaml\"",
":",
"\"two\"",
"}",
")",
":",
"conf",
"=",
"\"key: !include_dir_named /test\"",
"correct",
"=",
"{",
"\"first\"",
":",
"\"one\"",
",",
"\"second\"",
":",
"\"two\"",
"}",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"doc",
"[",
"\"key\"",
"]",
"==",
"correct"
] | [
136,
0
] | [
147,
40
] | python | en | ['tr', 'en', 'sw'] | False |
test_include_dir_named_recursive | (mock_walk) | Test include dir named yaml. | Test include dir named yaml. | def test_include_dir_named_recursive(mock_walk):
"""Test include dir named yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
["/test/tmp2", [], ["second.yaml", "third.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/first.yaml": "one",
"/test/tmp2/second.yaml": "two",
"/test/tmp2/third.yaml": "three",
}
):
conf = "key: !include_dir_named /test"
correct = {"first": "one", "second": "two", "third": "three"}
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert doc["key"] == correct | [
"def",
"test_include_dir_named_recursive",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"\"tmp2\"",
",",
"\".ignore\"",
",",
"\"ignore\"",
"]",
",",
"[",
"\"first.yaml\"",
"]",
"]",
",",
"[",
"\"/test/tmp2\"",
",",
"[",
"]",
",",
"[",
"\"second.yaml\"",
",",
"\"third.yaml\"",
"]",
"]",
",",
"[",
"\"/test/ignore\"",
",",
"[",
"]",
",",
"[",
"\".ignore.yaml\"",
"]",
"]",
",",
"]",
"with",
"patch_yaml_files",
"(",
"{",
"\"/test/first.yaml\"",
":",
"\"one\"",
",",
"\"/test/tmp2/second.yaml\"",
":",
"\"two\"",
",",
"\"/test/tmp2/third.yaml\"",
":",
"\"three\"",
",",
"}",
")",
":",
"conf",
"=",
"\"key: !include_dir_named /test\"",
"correct",
"=",
"{",
"\"first\"",
":",
"\"one\"",
",",
"\"second\"",
":",
"\"two\"",
",",
"\"third\"",
":",
"\"three\"",
"}",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"assert",
"(",
"\".ignore\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
")",
",",
"\"Expecting .ignore in here\"",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"\"tmp2\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"\".ignore\"",
"not",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"doc",
"[",
"\"key\"",
"]",
"==",
"correct"
] | [
151,
0
] | [
175,
40
] | python | en | ['tr', 'en', 'sw'] | False |
test_include_dir_merge_list | (mock_walk) | Test include dir merge list yaml. | Test include dir merge list yaml. | def test_include_dir_merge_list(mock_walk):
"""Test include dir merge list yaml."""
mock_walk.return_value = [["/test", [], ["first.yaml", "second.yaml"]]]
with patch_yaml_files(
{"/test/first.yaml": "- one", "/test/second.yaml": "- two\n- three"}
):
conf = "key: !include_dir_merge_list /test"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert sorted(doc["key"]) == sorted(["one", "two", "three"]) | [
"def",
"test_include_dir_merge_list",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"]",
",",
"[",
"\"first.yaml\"",
",",
"\"second.yaml\"",
"]",
"]",
"]",
"with",
"patch_yaml_files",
"(",
"{",
"\"/test/first.yaml\"",
":",
"\"- one\"",
",",
"\"/test/second.yaml\"",
":",
"\"- two\\n- three\"",
"}",
")",
":",
"conf",
"=",
"\"key: !include_dir_merge_list /test\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"sorted",
"(",
"doc",
"[",
"\"key\"",
"]",
")",
"==",
"sorted",
"(",
"[",
"\"one\"",
",",
"\"two\"",
",",
"\"three\"",
"]",
")"
] | [
179,
0
] | [
189,
72
] | python | tr | ['tr', 'cy', 'tr'] | True |
test_include_dir_merge_list_recursive | (mock_walk) | Test include dir merge list yaml. | Test include dir merge list yaml. | def test_include_dir_merge_list_recursive(mock_walk):
"""Test include dir merge list yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
["/test/tmp2", [], ["second.yaml", "third.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/first.yaml": "- one",
"/test/tmp2/second.yaml": "- two",
"/test/tmp2/third.yaml": "- three\n- four",
}
):
conf = "key: !include_dir_merge_list /test"
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert sorted(doc["key"]) == sorted(["one", "two", "three", "four"]) | [
"def",
"test_include_dir_merge_list_recursive",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"\"tmp2\"",
",",
"\".ignore\"",
",",
"\"ignore\"",
"]",
",",
"[",
"\"first.yaml\"",
"]",
"]",
",",
"[",
"\"/test/tmp2\"",
",",
"[",
"]",
",",
"[",
"\"second.yaml\"",
",",
"\"third.yaml\"",
"]",
"]",
",",
"[",
"\"/test/ignore\"",
",",
"[",
"]",
",",
"[",
"\".ignore.yaml\"",
"]",
"]",
",",
"]",
"with",
"patch_yaml_files",
"(",
"{",
"\"/test/first.yaml\"",
":",
"\"- one\"",
",",
"\"/test/tmp2/second.yaml\"",
":",
"\"- two\"",
",",
"\"/test/tmp2/third.yaml\"",
":",
"\"- three\\n- four\"",
",",
"}",
")",
":",
"conf",
"=",
"\"key: !include_dir_merge_list /test\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"assert",
"(",
"\".ignore\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
")",
",",
"\"Expecting .ignore in here\"",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"\"tmp2\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"\".ignore\"",
"not",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"sorted",
"(",
"doc",
"[",
"\"key\"",
"]",
")",
"==",
"sorted",
"(",
"[",
"\"one\"",
",",
"\"two\"",
",",
"\"three\"",
",",
"\"four\"",
"]",
")"
] | [
193,
0
] | [
216,
80
] | python | tr | ['tr', 'cy', 'tr'] | True |
test_include_dir_merge_named | (mock_walk) | Test include dir merge named yaml. | Test include dir merge named yaml. | def test_include_dir_merge_named(mock_walk):
"""Test include dir merge named yaml."""
mock_walk.return_value = [["/test", [], ["first.yaml", "second.yaml"]]]
files = {
"/test/first.yaml": "key1: one",
"/test/second.yaml": "key2: two\nkey3: three",
}
with patch_yaml_files(files):
conf = "key: !include_dir_merge_named /test"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == {"key1": "one", "key2": "two", "key3": "three"} | [
"def",
"test_include_dir_merge_named",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"]",
",",
"[",
"\"first.yaml\"",
",",
"\"second.yaml\"",
"]",
"]",
"]",
"files",
"=",
"{",
"\"/test/first.yaml\"",
":",
"\"key1: one\"",
",",
"\"/test/second.yaml\"",
":",
"\"key2: two\\nkey3: three\"",
",",
"}",
"with",
"patch_yaml_files",
"(",
"files",
")",
":",
"conf",
"=",
"\"key: !include_dir_merge_named /test\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"doc",
"[",
"\"key\"",
"]",
"==",
"{",
"\"key1\"",
":",
"\"one\"",
",",
"\"key2\"",
":",
"\"two\"",
",",
"\"key3\"",
":",
"\"three\"",
"}"
] | [
220,
0
] | [
233,
80
] | python | en | ['tr', 'en', 'sw'] | False |
test_include_dir_merge_named_recursive | (mock_walk) | Test include dir merge named yaml. | Test include dir merge named yaml. | def test_include_dir_merge_named_recursive(mock_walk):
"""Test include dir merge named yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
["/test/tmp2", [], ["second.yaml", "third.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/first.yaml": "key1: one",
"/test/tmp2/second.yaml": "key2: two",
"/test/tmp2/third.yaml": "key3: three\nkey4: four",
}
):
conf = "key: !include_dir_merge_named /test"
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert doc["key"] == {
"key1": "one",
"key2": "two",
"key3": "three",
"key4": "four",
} | [
"def",
"test_include_dir_merge_named_recursive",
"(",
"mock_walk",
")",
":",
"mock_walk",
".",
"return_value",
"=",
"[",
"[",
"\"/test\"",
",",
"[",
"\"tmp2\"",
",",
"\".ignore\"",
",",
"\"ignore\"",
"]",
",",
"[",
"\"first.yaml\"",
"]",
"]",
",",
"[",
"\"/test/tmp2\"",
",",
"[",
"]",
",",
"[",
"\"second.yaml\"",
",",
"\"third.yaml\"",
"]",
"]",
",",
"[",
"\"/test/ignore\"",
",",
"[",
"]",
",",
"[",
"\".ignore.yaml\"",
"]",
"]",
",",
"]",
"with",
"patch_yaml_files",
"(",
"{",
"\"/test/first.yaml\"",
":",
"\"key1: one\"",
",",
"\"/test/tmp2/second.yaml\"",
":",
"\"key2: two\"",
",",
"\"/test/tmp2/third.yaml\"",
":",
"\"key3: three\\nkey4: four\"",
",",
"}",
")",
":",
"conf",
"=",
"\"key: !include_dir_merge_named /test\"",
"with",
"io",
".",
"StringIO",
"(",
"conf",
")",
"as",
"file",
":",
"assert",
"(",
"\".ignore\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
")",
",",
"\"Expecting .ignore in here\"",
"doc",
"=",
"yaml_loader",
".",
"yaml",
".",
"safe_load",
"(",
"file",
")",
"assert",
"\"tmp2\"",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"\".ignore\"",
"not",
"in",
"mock_walk",
".",
"return_value",
"[",
"0",
"]",
"[",
"1",
"]",
"assert",
"doc",
"[",
"\"key\"",
"]",
"==",
"{",
"\"key1\"",
":",
"\"one\"",
",",
"\"key2\"",
":",
"\"two\"",
",",
"\"key3\"",
":",
"\"three\"",
",",
"\"key4\"",
":",
"\"four\"",
",",
"}"
] | [
237,
0
] | [
265,
13
] | python | en | ['tr', 'en', 'sw'] | False |
test_load_yaml_encoding_error | (mock_open) | Test raising a UnicodeDecodeError. | Test raising a UnicodeDecodeError. | def test_load_yaml_encoding_error(mock_open):
"""Test raising a UnicodeDecodeError."""
mock_open.side_effect = UnicodeDecodeError("", b"", 1, 0, "")
with pytest.raises(HomeAssistantError):
yaml_loader.load_yaml("test") | [
"def",
"test_load_yaml_encoding_error",
"(",
"mock_open",
")",
":",
"mock_open",
".",
"side_effect",
"=",
"UnicodeDecodeError",
"(",
"\"\"",
",",
"b\"\"",
",",
"1",
",",
"0",
",",
"\"\"",
")",
"with",
"pytest",
".",
"raises",
"(",
"HomeAssistantError",
")",
":",
"yaml_loader",
".",
"load_yaml",
"(",
"\"test\"",
")"
] | [
269,
0
] | [
273,
37
] | python | de | ['en', 'de', 'pt'] | False |
test_dump | () | Test that the dump method returns empty None values. | Test that the dump method returns empty None values. | def test_dump():
"""Test that the dump method returns empty None values."""
assert yaml.dump({"a": None, "b": "b"}) == "a:\nb: b\n" | [
"def",
"test_dump",
"(",
")",
":",
"assert",
"yaml",
".",
"dump",
"(",
"{",
"\"a\"",
":",
"None",
",",
"\"b\"",
":",
"\"b\"",
"}",
")",
"==",
"\"a:\\nb: b\\n\""
] | [
276,
0
] | [
278,
59
] | python | en | ['en', 'en', 'en'] | True |
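
Editorial sketch (not part of the dataset): the dump round-trip these records rely on. `homeassistant.util.yaml` is assumed to expose `dump` and `parse_yaml` exactly as the tests use them.

from homeassistant.util import yaml  # assumed to expose dump/parse_yaml

# None values render as empty scalars, and unicode passes through unescaped.
assert yaml.dump({"a": None, "b": "b"}) == "a:\nb: b\n"
assert yaml.parse_yaml(yaml.dump({"hello": "world"})) == {"hello": "world"}
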
test_dump_unicode | () | Test that the dump method returns empty None values. | Test that the dump method returns empty None values. | def test_dump_unicode():
"""Test that the dump method returns empty None values."""
assert yaml.dump({"a": None, "b": "привет"}) == "a:\nb: привет\n" | [
"def",
"test_dump_unicode",
"(",
")",
":",
"assert",
"yaml",
".",
"dump",
"(",
"{",
"\"a\"",
":",
"None",
",",
"\"b\"",
":",
"\"привет\"}) == ",
"\"",
"a",
"\\n",
": привет\\n\""
] | [
281,
0
] | [
283,
81
] | python | en | ['en', 'en', 'en'] | True |
load_yaml | (fname, string) | Write a string to file and return the parsed yaml. | Write a string to file and return the parsed yaml. | def load_yaml(fname, string):
"""Write a string to file and return the parsed yaml."""
FILES[fname] = string
with patch_yaml_files(FILES):
return load_yaml_config_file(fname) | [
"def",
"load_yaml",
"(",
"fname",
",",
"string",
")",
":",
"FILES",
"[",
"fname",
"]",
"=",
"string",
"with",
"patch_yaml_files",
"(",
"FILES",
")",
":",
"return",
"load_yaml_config_file",
"(",
"fname",
")"
] | [
289,
0
] | [
293,
43
] | python | en | ['en', 'en', 'en'] | True |
test_representing_yaml_loaded_data | () | Test we can represent YAML loaded data. | Test we can represent YAML loaded data. | def test_representing_yaml_loaded_data():
"""Test we can represent YAML loaded data."""
files = {YAML_CONFIG_FILE: 'key: [1, "2", 3]'}
with patch_yaml_files(files):
data = load_yaml_config_file(YAML_CONFIG_FILE)
assert yaml.dump(data) == "key:\n- 1\n- '2'\n- 3\n" | [
"def",
"test_representing_yaml_loaded_data",
"(",
")",
":",
"files",
"=",
"{",
"YAML_CONFIG_FILE",
":",
"'key: [1, \"2\", 3]'",
"}",
"with",
"patch_yaml_files",
"(",
"files",
")",
":",
"data",
"=",
"load_yaml_config_file",
"(",
"YAML_CONFIG_FILE",
")",
"assert",
"yaml",
".",
"dump",
"(",
"data",
")",
"==",
"\"key:\\n- 1\\n- '2'\\n- 3\\n\""
] | [
449,
0
] | [
454,
55
] | python | en | ['en', 'en', 'en'] | True |
test_duplicate_key | (caplog) | Test duplicate dict keys. | Test duplicate dict keys. | def test_duplicate_key(caplog):
"""Test duplicate dict keys."""
files = {YAML_CONFIG_FILE: "key: thing1\nkey: thing2"}
with patch_yaml_files(files):
load_yaml_config_file(YAML_CONFIG_FILE)
assert "contains duplicate key" in caplog.text | [
"def",
"test_duplicate_key",
"(",
"caplog",
")",
":",
"files",
"=",
"{",
"YAML_CONFIG_FILE",
":",
"\"key: thing1\\nkey: thing2\"",
"}",
"with",
"patch_yaml_files",
"(",
"files",
")",
":",
"load_yaml_config_file",
"(",
"YAML_CONFIG_FILE",
")",
"assert",
"\"contains duplicate key\"",
"in",
"caplog",
".",
"text"
] | [
457,
0
] | [
462,
50
] | python | en | ['ro', 'id', 'en'] | False |
test_placeholder_class | () | Test placeholder class. | Test placeholder class. | def test_placeholder_class():
"""Test placeholder class."""
placeholder = yaml_loader.Placeholder("hello")
placeholder2 = yaml_loader.Placeholder("hello")
assert placeholder.name == "hello"
assert placeholder == placeholder2
assert len({placeholder, placeholder2}) == 1 | [
"def",
"test_placeholder_class",
"(",
")",
":",
"placeholder",
"=",
"yaml_loader",
".",
"Placeholder",
"(",
"\"hello\"",
")",
"placeholder2",
"=",
"yaml_loader",
".",
"Placeholder",
"(",
"\"hello\"",
")",
"assert",
"placeholder",
".",
"name",
"==",
"\"hello\"",
"assert",
"placeholder",
"==",
"placeholder2",
"assert",
"len",
"(",
"{",
"placeholder",
",",
"placeholder2",
"}",
")",
"==",
"1"
] | [
465,
0
] | [
473,
48
] | python | en | ['en', 'en', 'en'] | True |
test_placeholder | () | Test loading placeholders. | Test loading placeholders. | def test_placeholder():
"""Test loading placeholders."""
data = {"hello": yaml.Placeholder("test_name")}
assert yaml.parse_yaml(yaml.dump(data)) == data | [
"def",
"test_placeholder",
"(",
")",
":",
"data",
"=",
"{",
"\"hello\"",
":",
"yaml",
".",
"Placeholder",
"(",
"\"test_name\"",
")",
"}",
"assert",
"yaml",
".",
"parse_yaml",
"(",
"yaml",
".",
"dump",
"(",
"data",
")",
")",
"==",
"data"
] | [
476,
0
] | [
479,
51
] | python | en | ['en', 'en', 'en'] | True |
FakeKeyring.__init__ | (self, secrets_dict) | Store keyring dictionary. | Store keyring dictionary. | def __init__(self, secrets_dict):
"""Store keyring dictionary."""
self._secrets = secrets_dict | [
"def",
"__init__",
"(",
"self",
",",
"secrets_dict",
")",
":",
"self",
".",
"_secrets",
"=",
"secrets_dict"
] | [
299,
4
] | [
301,
36
] | python | en | ['id', 'en', 'en'] | True |
FakeKeyring.get_password | (self, domain, name) | Retrieve password. | Retrieve password. | def get_password(self, domain, name):
"""Retrieve password."""
assert domain == yaml._SECRET_NAMESPACE
return self._secrets.get(name) | [
"def",
"get_password",
"(",
"self",
",",
"domain",
",",
"name",
")",
":",
"assert",
"domain",
"==",
"yaml",
".",
"_SECRET_NAMESPACE",
"return",
"self",
".",
"_secrets",
".",
"get",
"(",
"name",
")"
] | [
304,
4
] | [
307,
38
] | python | en | ['fr', 'sr', 'en'] | False |
TestSecrets.setUp | (self) | Create & load secrets file. | Create & load secrets file. | def setUp(self):
"""Create & load secrets file."""
config_dir = get_test_config_dir()
yaml.clear_secret_cache()
self._yaml_path = os.path.join(config_dir, YAML_CONFIG_FILE)
self._secret_path = os.path.join(config_dir, yaml.SECRET_YAML)
self._sub_folder_path = os.path.join(config_dir, "subFolder")
self._unrelated_path = os.path.join(config_dir, "unrelated")
load_yaml(
self._secret_path,
"http_pw: pwhttp\n"
"comp1_un: un1\n"
"comp1_pw: pw1\n"
"stale_pw: not_used\n"
"logger: debug\n",
)
self._yaml = load_yaml(
self._yaml_path,
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
) | [
"def",
"setUp",
"(",
"self",
")",
":",
"config_dir",
"=",
"get_test_config_dir",
"(",
")",
"yaml",
".",
"clear_secret_cache",
"(",
")",
"self",
".",
"_yaml_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"YAML_CONFIG_FILE",
")",
"self",
".",
"_secret_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"yaml",
".",
"SECRET_YAML",
")",
"self",
".",
"_sub_folder_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"\"subFolder\"",
")",
"self",
".",
"_unrelated_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config_dir",
",",
"\"unrelated\"",
")",
"load_yaml",
"(",
"self",
".",
"_secret_path",
",",
"\"http_pw: pwhttp\\n\"",
"\"comp1_un: un1\\n\"",
"\"comp1_pw: pw1\\n\"",
"\"stale_pw: not_used\\n\"",
"\"logger: debug\\n\"",
",",
")",
"self",
".",
"_yaml",
"=",
"load_yaml",
"(",
"self",
".",
"_yaml_path",
",",
"\"http:\\n\"",
"\" api_password: !secret http_pw\\n\"",
"\"component:\\n\"",
"\" username: !secret comp1_un\\n\"",
"\" password: !secret comp1_pw\\n\"",
"\"\"",
",",
")"
] | [
315,
4
] | [
340,
9
] | python | en | ['en', 'sm', 'en'] | True |
TestSecrets.tearDown | (self) | Clean up secrets. | Clean up secrets. | def tearDown(self):
"""Clean up secrets."""
yaml.clear_secret_cache()
FILES.clear() | [
"def",
"tearDown",
"(",
"self",
")",
":",
"yaml",
".",
"clear_secret_cache",
"(",
")",
"FILES",
".",
"clear",
"(",
")"
] | [
342,
4
] | [
345,
21
] | python | en | ['en', 'fr', 'en'] | True |
TestSecrets.test_secrets_from_yaml | (self) | Did secrets load ok. | Did secrets load ok. | def test_secrets_from_yaml(self):
"""Did secrets load ok."""
expected = {"api_password": "pwhttp"}
assert expected == self._yaml["http"]
expected = {"username": "un1", "password": "pw1"}
assert expected == self._yaml["component"] | [
"def",
"test_secrets_from_yaml",
"(",
"self",
")",
":",
"expected",
"=",
"{",
"\"api_password\"",
":",
"\"pwhttp\"",
"}",
"assert",
"expected",
"==",
"self",
".",
"_yaml",
"[",
"\"http\"",
"]",
"expected",
"=",
"{",
"\"username\"",
":",
"\"un1\"",
",",
"\"password\"",
":",
"\"pw1\"",
"}",
"assert",
"expected",
"==",
"self",
".",
"_yaml",
"[",
"\"component\"",
"]"
] | [
347,
4
] | [
353,
50
] | python | en | ['en', 'bg', 'en'] | True |
TestSecrets.test_secrets_from_parent_folder | (self) | Test loading secrets from parent folder. | Test loading secrets from parent folder. | def test_secrets_from_parent_folder(self):
"""Test loading secrets from parent folder."""
expected = {"api_password": "pwhttp"}
self._yaml = load_yaml(
os.path.join(self._sub_folder_path, "sub.yaml"),
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
)
assert expected == self._yaml["http"] | [
"def",
"test_secrets_from_parent_folder",
"(",
"self",
")",
":",
"expected",
"=",
"{",
"\"api_password\"",
":",
"\"pwhttp\"",
"}",
"self",
".",
"_yaml",
"=",
"load_yaml",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_sub_folder_path",
",",
"\"sub.yaml\"",
")",
",",
"\"http:\\n\"",
"\" api_password: !secret http_pw\\n\"",
"\"component:\\n\"",
"\" username: !secret comp1_un\\n\"",
"\" password: !secret comp1_pw\\n\"",
"\"\"",
",",
")",
"assert",
"expected",
"==",
"self",
".",
"_yaml",
"[",
"\"http\"",
"]"
] | [
355,
4
] | [
368,
45
] | python | en | ['en', 'da', 'en'] | True |
TestSecrets.test_secret_overrides_parent | (self) | Test loading current directory secret overrides the parent. | Test loading current directory secret overrides the parent. | def test_secret_overrides_parent(self):
"""Test loading current directory secret overrides the parent."""
expected = {"api_password": "override"}
load_yaml(
os.path.join(self._sub_folder_path, yaml.SECRET_YAML), "http_pw: override"
)
self._yaml = load_yaml(
os.path.join(self._sub_folder_path, "sub.yaml"),
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
)
assert expected == self._yaml["http"] | [
"def",
"test_secret_overrides_parent",
"(",
"self",
")",
":",
"expected",
"=",
"{",
"\"api_password\"",
":",
"\"override\"",
"}",
"load_yaml",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_sub_folder_path",
",",
"yaml",
".",
"SECRET_YAML",
")",
",",
"\"http_pw: override\"",
")",
"self",
".",
"_yaml",
"=",
"load_yaml",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_sub_folder_path",
",",
"\"sub.yaml\"",
")",
",",
"\"http:\\n\"",
"\" api_password: !secret http_pw\\n\"",
"\"component:\\n\"",
"\" username: !secret comp1_un\\n\"",
"\" password: !secret comp1_pw\\n\"",
"\"\"",
",",
")",
"assert",
"expected",
"==",
"self",
".",
"_yaml",
"[",
"\"http\"",
"]"
] | [
370,
4
] | [
386,
45
] | python | en | ['en', 'da', 'en'] | True |
TestSecrets.test_secrets_from_unrelated_fails | (self) | Test loading secrets from unrelated folder fails. | Test loading secrets from unrelated folder fails. | def test_secrets_from_unrelated_fails(self):
"""Test loading secrets from unrelated folder fails."""
load_yaml(os.path.join(self._unrelated_path, yaml.SECRET_YAML), "test: failure")
with pytest.raises(HomeAssistantError):
load_yaml(
os.path.join(self._sub_folder_path, "sub.yaml"),
"http:\n api_password: !secret test",
) | [
"def",
"test_secrets_from_unrelated_fails",
"(",
"self",
")",
":",
"load_yaml",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_unrelated_path",
",",
"yaml",
".",
"SECRET_YAML",
")",
",",
"\"test: failure\"",
")",
"with",
"pytest",
".",
"raises",
"(",
"HomeAssistantError",
")",
":",
"load_yaml",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_sub_folder_path",
",",
"\"sub.yaml\"",
")",
",",
"\"http:\\n api_password: !secret test\"",
",",
")"
] | [
388,
4
] | [
395,
13
] | python | en | ['en', 'en', 'en'] | True |
TestSecrets.test_secrets_keyring | (self) | Test keyring fallback & get_password. | Test keyring fallback & get_password. | def test_secrets_keyring(self):
"""Test keyring fallback & get_password."""
yaml_loader.keyring = None  # Ensure it's not there
yaml_str = "http:\n api_password: !secret http_pw_keyring"
with pytest.raises(HomeAssistantError):
load_yaml(self._yaml_path, yaml_str)
yaml_loader.keyring = FakeKeyring({"http_pw_keyring": "yeah"})
_yaml = load_yaml(self._yaml_path, yaml_str)
assert {"http": {"api_password": "yeah"}} == _yaml | [
"def",
"test_secrets_keyring",
"(",
"self",
")",
":",
"yaml_loader",
".",
"keyring",
"=",
"None",
"# Ensure its not there",
"yaml_str",
"=",
"\"http:\\n api_password: !secret http_pw_keyring\"",
"with",
"pytest",
".",
"raises",
"(",
"HomeAssistantError",
")",
":",
"load_yaml",
"(",
"self",
".",
"_yaml_path",
",",
"yaml_str",
")",
"yaml_loader",
".",
"keyring",
"=",
"FakeKeyring",
"(",
"{",
"\"http_pw_keyring\"",
":",
"\"yeah\"",
"}",
")",
"_yaml",
"=",
"load_yaml",
"(",
"self",
".",
"_yaml_path",
",",
"yaml_str",
")",
"assert",
"{",
"\"http\"",
":",
"{",
"\"api_password\"",
":",
"\"yeah\"",
"}",
"}",
"==",
"_yaml"
] | [
397,
4
] | [
406,
58
] | python | de | ['de', 'jv', 'en'] | False |
TestSecrets.test_secrets_credstash | (self, mock_credstash) | Test credstash fallback & get_password. | Test credstash fallback & get_password. | def test_secrets_credstash(self, mock_credstash):
"""Test credstash fallback & get_password."""
mock_credstash.getSecret.return_value = "yeah"
yaml_str = "http:\n api_password: !secret http_pw_credstash"
_yaml = load_yaml(self._yaml_path, yaml_str)
log = logging.getLogger()
log.error(_yaml["http"])
assert {"api_password": "yeah"} == _yaml["http"] | [
"def",
"test_secrets_credstash",
"(",
"self",
",",
"mock_credstash",
")",
":",
"mock_credstash",
".",
"getSecret",
".",
"return_value",
"=",
"\"yeah\"",
"yaml_str",
"=",
"\"http:\\n api_password: !secret http_pw_credstash\"",
"_yaml",
"=",
"load_yaml",
"(",
"self",
".",
"_yaml_path",
",",
"yaml_str",
")",
"log",
"=",
"logging",
".",
"getLogger",
"(",
")",
"log",
".",
"error",
"(",
"_yaml",
"[",
"\"http\"",
"]",
")",
"assert",
"{",
"\"api_password\"",
":",
"\"yeah\"",
"}",
"==",
"_yaml",
"[",
"\"http\"",
"]"
] | [
409,
4
] | [
416,
56
] | python | da | ['de', 'da', 'en'] | False |
TestSecrets.test_secrets_logger_removed | (self) | Ensure logger: debug was removed. | Ensure logger: debug was removed. | def test_secrets_logger_removed(self):
"""Ensure logger: debug was removed."""
with pytest.raises(HomeAssistantError):
load_yaml(self._yaml_path, "api_password: !secret logger") | [
"def",
"test_secrets_logger_removed",
"(",
"self",
")",
":",
"with",
"pytest",
".",
"raises",
"(",
"HomeAssistantError",
")",
":",
"load_yaml",
"(",
"self",
".",
"_yaml_path",
",",
"\"api_password: !secret logger\"",
")"
] | [
418,
4
] | [
421,
70
] | python | en | ['en', 'ceb', 'en'] | True |
TestSecrets.test_bad_logger_value | (self, mock_error) | Ensure logger: debug was removed. | Ensure logger: debug was removed. | def test_bad_logger_value(self, mock_error):
"""Ensure logger: debug was removed."""
yaml.clear_secret_cache()
load_yaml(self._secret_path, "logger: info\npw: abc")
load_yaml(self._yaml_path, "api_password: !secret pw")
assert mock_error.call_count == 1, "Expected an error about logger: value" | [
"def",
"test_bad_logger_value",
"(",
"self",
",",
"mock_error",
")",
":",
"yaml",
".",
"clear_secret_cache",
"(",
")",
"load_yaml",
"(",
"self",
".",
"_secret_path",
",",
"\"logger: info\\npw: abc\"",
")",
"load_yaml",
"(",
"self",
".",
"_yaml_path",
",",
"\"api_password: !secret pw\"",
")",
"assert",
"mock_error",
".",
"call_count",
"==",
"1",
",",
"\"Expected an error about logger: value\""
] | [
424,
4
] | [
429,
82
] | python | en | ['en', 'ceb', 'en'] | True |
TestSecrets.test_secrets_are_not_dict | (self) | Did secrets handle non-dict file. | Did secrets handle non-dict file. | def test_secrets_are_not_dict(self):
"""Did secrets handle non-dict file."""
FILES[
self._secret_path
] = "- http_pw: pwhttp\n comp1_un: un1\n comp1_pw: pw1\n"
yaml.clear_secret_cache()
with pytest.raises(HomeAssistantError):
load_yaml(
self._yaml_path,
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
) | [
"def",
"test_secrets_are_not_dict",
"(",
"self",
")",
":",
"FILES",
"[",
"self",
".",
"_secret_path",
"]",
"=",
"\"- http_pw: pwhttp\\n comp1_un: un1\\n comp1_pw: pw1\\n\"",
"yaml",
".",
"clear_secret_cache",
"(",
")",
"with",
"pytest",
".",
"raises",
"(",
"HomeAssistantError",
")",
":",
"load_yaml",
"(",
"self",
".",
"_yaml_path",
",",
"\"http:\\n\"",
"\" api_password: !secret http_pw\\n\"",
"\"component:\\n\"",
"\" username: !secret comp1_un\\n\"",
"\" password: !secret comp1_pw\\n\"",
"\"\"",
",",
")"
] | [
431,
4
] | [
446,
13
] | python | it | ['en', 'it', 'it'] | True |
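
Editorial sketch (not part of the dataset): how the `TestSecrets` fixtures fit together — `!secret` is resolved from a `secrets.yaml` in the same folder as (or a parent of) the file being loaded. The helper import locations are assumptions mirroring the usage above.

import os

from homeassistant.config import load_yaml_config_file  # assumed location
from tests.common import get_test_config_dir, patch_yaml_files  # assumed location

config_dir = get_test_config_dir()
files = {
    os.path.join(config_dir, "secrets.yaml"): "http_pw: pwhttp",
    os.path.join(config_dir, "configuration.yaml"): (
        "http:\n  api_password: !secret http_pw"
    ),
}
with patch_yaml_files(files):
    conf = load_yaml_config_file(os.path.join(config_dir, "configuration.yaml"))
assert conf["http"]["api_password"] == "pwhttp"
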
test_air_con_create_sensors | (hass) | Test creation of aircon sensors. | Test creation of aircon sensors. | async def test_air_con_create_sensors(hass):
"""Test creation of aircon sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.air_conditioning_power")
assert state.state == "ON"
state = hass.states.get("sensor.air_conditioning_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.air_conditioning_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.air_conditioning_tado_mode")
assert state.state == "HOME"
state = hass.states.get("sensor.air_conditioning_temperature")
assert state.state == "24.76"
state = hass.states.get("sensor.air_conditioning_ac")
assert state.state == "ON"
state = hass.states.get("sensor.air_conditioning_overlay")
assert state.state == "True"
state = hass.states.get("sensor.air_conditioning_humidity")
assert state.state == "60.9"
state = hass.states.get("sensor.air_conditioning_open_window")
assert state.state == "False" | [
"async",
"def",
"test_air_con_create_sensors",
"(",
"hass",
")",
":",
"await",
"async_init_integration",
"(",
"hass",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_power\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ON\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_link\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ONLINE\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_link\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ONLINE\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_tado_mode\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"HOME\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_temperature\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"24.76\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_ac\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ON\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_overlay\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"True\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_humidity\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"60.9\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.air_conditioning_open_window\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"False\""
] | [
5,
0
] | [
35,
33
] | python | en | ['en', 'en', 'en'] | True |
test_heater_create_sensors | (hass) | Test creation of heater sensors. | Test creation of heater sensors. | async def test_heater_create_sensors(hass):
"""Test creation of heater sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.baseboard_heater_power")
assert state.state == "ON"
state = hass.states.get("sensor.baseboard_heater_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.baseboard_heater_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.baseboard_heater_tado_mode")
assert state.state == "HOME"
state = hass.states.get("sensor.baseboard_heater_temperature")
assert state.state == "20.65"
state = hass.states.get("sensor.baseboard_heater_early_start")
assert state.state == "False"
state = hass.states.get("sensor.baseboard_heater_overlay")
assert state.state == "True"
state = hass.states.get("sensor.baseboard_heater_humidity")
assert state.state == "45.2"
state = hass.states.get("sensor.baseboard_heater_open_window")
assert state.state == "False" | [
"async",
"def",
"test_heater_create_sensors",
"(",
"hass",
")",
":",
"await",
"async_init_integration",
"(",
"hass",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_power\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ON\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_link\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ONLINE\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_link\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ONLINE\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_tado_mode\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"HOME\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_temperature\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"20.65\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_early_start\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"False\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_overlay\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"True\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_humidity\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"45.2\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.baseboard_heater_open_window\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"False\""
] | [
38,
0
] | [
68,
33
] | python | en | ['en', 'fi', 'en'] | True |
test_water_heater_create_sensors | (hass) | Test creation of water heater sensors. | Test creation of water heater sensors. | async def test_water_heater_create_sensors(hass):
"""Test creation of water heater sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.water_heater_tado_mode")
assert state.state == "HOME"
state = hass.states.get("sensor.water_heater_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.water_heater_overlay")
assert state.state == "False"
state = hass.states.get("sensor.water_heater_power")
assert state.state == "ON" | [
"async",
"def",
"test_water_heater_create_sensors",
"(",
"hass",
")",
":",
"await",
"async_init_integration",
"(",
"hass",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.water_heater_tado_mode\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"HOME\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.water_heater_link\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ONLINE\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.water_heater_overlay\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"False\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.water_heater_power\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"ON\""
] | [
71,
0
] | [
86,
30
] | python | en | ['en', 'en', 'en'] | True |
test_home_create_sensors | (hass) | Test creation of home sensors. | Test creation of home sensors. | async def test_home_create_sensors(hass):
"""Test creation of home sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.home_name_tado_bridge_status")
assert state.state == "True" | [
"async",
"def",
"test_home_create_sensors",
"(",
"hass",
")",
":",
"await",
"async_init_integration",
"(",
"hass",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"sensor.home_name_tado_bridge_status\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"True\""
] | [
89,
0
] | [
95,
32
] | python | en | ['en', 'en', 'en'] | True |
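
Editorial sketch (not part of the dataset): the Tado sensor tests all follow one pattern — set up the integration against fixture data, then assert on entity state. A hedged extension of that pattern to state attributes; whether this sensor sets `unit_of_measurement` is an assumption, hence the permissive check. `async_init_integration` is assumed to come from the Tado test suite's shared setup module.

async def test_air_con_temperature_attributes(hass):
    """Sketch: same setup pattern, extended to attributes."""
    await async_init_integration(hass)  # helper from the Tado test suite
    state = hass.states.get("sensor.air_conditioning_temperature")
    assert state.state == "24.76"
    assert state.attributes.get("unit_of_measurement") in ("°C", None)
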
async_setup_platform | (hass, config, async_add_entities, discovery_info=None) | Set up the media player demo platform. | Set up the media player demo platform. | async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the media player demo platform."""
async_add_entities(
[
DemoYoutubePlayer(
"Living Room",
"eyU3bRy2x44",
"♥♥ The Best Fireplace Video (3 hours)",
300,
),
DemoYoutubePlayer(
"Bedroom", "kxopViU98Xo", "Epic sax guy 10 hours", 360000
),
DemoMusicPlayer(),
DemoTVShowPlayer(),
]
) | [
"async",
"def",
"async_setup_platform",
"(",
"hass",
",",
"config",
",",
"async_add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"async_add_entities",
"(",
"[",
"DemoYoutubePlayer",
"(",
"\"Living Room\"",
",",
"\"eyU3bRy2x44\"",
",",
"\"♥♥ The Best Fireplace Video (3 hours)\",",
"",
"300",
",",
")",
",",
"DemoYoutubePlayer",
"(",
"\"Bedroom\"",
",",
"\"kxopViU98Xo\"",
",",
"\"Epic sax guy 10 hours\"",
",",
"360000",
")",
",",
"DemoMusicPlayer",
"(",
")",
",",
"DemoTVShowPlayer",
"(",
")",
",",
"]",
")"
] | [
28,
0
] | [
44,
5
] | python | en | ['en', 'da', 'en'] | True |
async_setup_entry | (hass, config_entry, async_add_entities) | Set up the Demo config entry. | Set up the Demo config entry. | async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"config_entry",
",",
"async_add_entities",
")",
":",
"await",
"async_setup_platform",
"(",
"hass",
",",
"{",
"}",
",",
"async_add_entities",
")"
] | [
47,
0
] | [
49,
60
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.__init__ | (self, name, device_class=None) | Initialize the demo device. | Initialize the demo device. | def __init__(self, name, device_class=None):
"""Initialize the demo device."""
self._name = name
self._player_state = STATE_PLAYING
self._volume_level = 1.0
self._volume_muted = False
self._shuffle = False
self._sound_mode_list = SOUND_MODE_LIST
self._sound_mode = DEFAULT_SOUND_MODE
self._device_class = device_class | [
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"device_class",
"=",
"None",
")",
":",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_player_state",
"=",
"STATE_PLAYING",
"self",
".",
"_volume_level",
"=",
"1.0",
"self",
".",
"_volume_muted",
"=",
"False",
"self",
".",
"_shuffle",
"=",
"False",
"self",
".",
"_sound_mode_list",
"=",
"SOUND_MODE_LIST",
"self",
".",
"_sound_mode",
"=",
"DEFAULT_SOUND_MODE",
"self",
".",
"_device_class",
"=",
"device_class"
] | [
102,
4
] | [
111,
41
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.should_poll | (self) | Push an update after each command. | Push an update after each command. | def should_poll(self):
"""Push an update after each command."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
114,
4
] | [
116,
20
] | python | en | ['en', 'lb', 'en'] | True |
AbstractDemoPlayer.name | (self) | Return the name of the media player. | Return the name of the media player. | def name(self):
"""Return the name of the media player."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
119,
4
] | [
121,
25
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.state | (self) | Return the state of the player. | Return the state of the player. | def state(self):
"""Return the state of the player."""
return self._player_state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_player_state"
] | [
124,
4
] | [
126,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.volume_level | (self) | Return the volume level of the media player (0..1). | Return the volume level of the media player (0..1). | def volume_level(self):
"""Return the volume level of the media player (0..1)."""
return self._volume_level | [
"def",
"volume_level",
"(",
"self",
")",
":",
"return",
"self",
".",
"_volume_level"
] | [
129,
4
] | [
131,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.is_volume_muted | (self) | Return boolean if volume is currently muted. | Return boolean if volume is currently muted. | def is_volume_muted(self):
"""Return boolean if volume is currently muted."""
return self._volume_muted | [
"def",
"is_volume_muted",
"(",
"self",
")",
":",
"return",
"self",
".",
"_volume_muted"
] | [
134,
4
] | [
136,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.shuffle | (self) | Boolean if shuffling is enabled. | Boolean if shuffling is enabled. | def shuffle(self):
"""Boolean if shuffling is enabled."""
return self._shuffle | [
"def",
"shuffle",
"(",
"self",
")",
":",
"return",
"self",
".",
"_shuffle"
] | [
139,
4
] | [
141,
28
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.sound_mode | (self) | Return the current sound mode. | Return the current sound mode. | def sound_mode(self):
"""Return the current sound mode."""
return self._sound_mode | [
"def",
"sound_mode",
"(",
"self",
")",
":",
"return",
"self",
".",
"_sound_mode"
] | [
144,
4
] | [
146,
31
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.sound_mode_list | (self) | Return a list of available sound modes. | Return a list of available sound modes. | def sound_mode_list(self):
"""Return a list of available sound modes."""
return self._sound_mode_list | [
"def",
"sound_mode_list",
"(",
"self",
")",
":",
"return",
"self",
".",
"_sound_mode_list"
] | [
149,
4
] | [
151,
36
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.device_class | (self) | Return the device class of the media player. | Return the device class of the media player. | def device_class(self):
"""Return the device class of the media player."""
return self._device_class | [
"def",
"device_class",
"(",
"self",
")",
":",
"return",
"self",
".",
"_device_class"
] | [
154,
4
] | [
156,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.turn_on | (self) | Turn the media player on. | Turn the media player on. | def turn_on(self):
"""Turn the media player on."""
self._player_state = STATE_PLAYING
self.schedule_update_ha_state() | [
"def",
"turn_on",
"(",
"self",
")",
":",
"self",
".",
"_player_state",
"=",
"STATE_PLAYING",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
158,
4
] | [
161,
39
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.turn_off | (self) | Turn the media player off. | Turn the media player off. | def turn_off(self):
"""Turn the media player off."""
self._player_state = STATE_OFF
self.schedule_update_ha_state() | [
"def",
"turn_off",
"(",
"self",
")",
":",
"self",
".",
"_player_state",
"=",
"STATE_OFF",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
163,
4
] | [
166,
39
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.mute_volume | (self, mute) | Mute the volume. | Mute the volume. | def mute_volume(self, mute):
"""Mute the volume."""
self._volume_muted = mute
self.schedule_update_ha_state() | [
"def",
"mute_volume",
"(",
"self",
",",
"mute",
")",
":",
"self",
".",
"_volume_muted",
"=",
"mute",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
168,
4
] | [
171,
39
] | python | en | ['en', 'sn', 'en'] | True |
AbstractDemoPlayer.volume_up | (self) | Increase volume. | Increase volume. | def volume_up(self):
"""Increase volume."""
self._volume_level = min(1.0, self._volume_level + 0.1)
self.schedule_update_ha_state() | [
"def",
"volume_up",
"(",
"self",
")",
":",
"self",
".",
"_volume_level",
"=",
"min",
"(",
"1.0",
",",
"self",
".",
"_volume_level",
"+",
"0.1",
")",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
173,
4
] | [
176,
39
] | python | en | ['en', 'af', 'en'] | False |
AbstractDemoPlayer.volume_down | (self) | Decrease volume. | Decrease volume. | def volume_down(self):
"""Decrease volume."""
self._volume_level = max(0.0, self._volume_level - 0.1)
self.schedule_update_ha_state() | [
"def",
"volume_down",
"(",
"self",
")",
":",
"self",
".",
"_volume_level",
"=",
"max",
"(",
"0.0",
",",
"self",
".",
"_volume_level",
"-",
"0.1",
")",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
178,
4
] | [
181,
39
] | python | en | ['en', 'de', 'en'] | False |
AbstractDemoPlayer.set_volume_level | (self, volume) | Set the volume level, range 0..1. | Set the volume level, range 0..1. | def set_volume_level(self, volume):
"""Set the volume level, range 0..1."""
self._volume_level = volume
self.schedule_update_ha_state() | [
"def",
"set_volume_level",
"(",
"self",
",",
"volume",
")",
":",
"self",
".",
"_volume_level",
"=",
"volume",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
183,
4
] | [
186,
39
] | python | en | ['en', 'zu', 'en'] | True |
AbstractDemoPlayer.media_play | (self) | Send play command. | Send play command. | def media_play(self):
"""Send play command."""
self._player_state = STATE_PLAYING
self.schedule_update_ha_state() | [
"def",
"media_play",
"(",
"self",
")",
":",
"self",
".",
"_player_state",
"=",
"STATE_PLAYING",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
188,
4
] | [
191,
39
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.media_pause | (self) | Send pause command. | Send pause command. | def media_pause(self):
"""Send pause command."""
self._player_state = STATE_PAUSED
self.schedule_update_ha_state() | [
"def",
"media_pause",
"(",
"self",
")",
":",
"self",
".",
"_player_state",
"=",
"STATE_PAUSED",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
193,
4
] | [
196,
39
] | python | en | ['en', 'en', 'en'] | True |
AbstractDemoPlayer.set_shuffle | (self, shuffle) | Enable/disable shuffle mode. | Enable/disable shuffle mode. | def set_shuffle(self, shuffle):
"""Enable/disable shuffle mode."""
self._shuffle = shuffle
self.schedule_update_ha_state() | [
"def",
"set_shuffle",
"(",
"self",
",",
"shuffle",
")",
":",
"self",
".",
"_shuffle",
"=",
"shuffle",
"self",
".",
"schedule_update_ha_state",
"(",
")"
] | [
198,
4
] | [
201,
39
] | python | en | ['en', 'st', 'en'] | True |