identifier (string, 1-155 chars) | parameters (string, 2-6.09k chars) | docstring (string, 11-63.4k chars) | docstring_summary (string, 0-63.4k chars) | function (string, 29-99.8k chars) | function_tokens (list) | start_point (list) | end_point (list) | language (1 class) | docstring_language (string, 2-7 chars) | docstring_language_predictions (string, 18-23 chars) | is_langid_reliable (2 classes) |
---|---|---|---|---|---|---|---|---|---|---|---|
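As a reading aid, the sketch below shows how one record of this dump — the `get_params` row further down — maps onto the columns above. It is illustrative only (not an extra row of the dataset); the long `function` and `function_tokens` values are abbreviated with `...`.

```python
# Illustrative only: one record of this dump expressed as a Python mapping.
# The function/function_tokens values are truncated here for brevity.
example_row = {
    "identifier": "get_params",
    "parameters": "()",
    "docstring": "Get parameters from command line",
    "docstring_summary": "Get parameters from command line",
    "function": "def get_params():\n    ''' Get parameters from command line '''\n    ...",
    "function_tokens": ["def", "get_params", "(", ")", ":", "..."],
    "start_point": [208, 0],   # (row, column) where the function begins in its source file
    "end_point": [223, 15],    # (row, column) where the function ends
    "language": "python",
    "docstring_language": "en",
    "docstring_language_predictions": "['en', 'en', 'en']",
    "is_langid_reliable": "True",
}
```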
main | (params) | Main function, build mnist network, run and send result to NNI. | Main function, build mnist network, run and send result to NNI. | def main(params):
    '''
    Main function, build mnist network, run and send result to NNI.
    '''
    # Import data
    mnist = download_mnist_retry(params['data_dir'])
    print('Mnist download data done.')
    logger.debug('Mnist download data done.')

    # Create the model
    # Build the graph for the deep net
    mnist_network = MnistNetwork(channel_1_num=params['channel_1_num'],
                                 channel_2_num=params['channel_2_num'],
                                 conv_size=params['conv_size'],
                                 hidden_size=params['hidden_size'],
                                 pool_size=params['pool_size'],
                                 learning_rate=params['learning_rate'])
    mnist_network.build_network()
    logger.debug('Mnist build network done.')

    # Write log
    graph_location = tempfile.mkdtemp()
    logger.debug('Saving graph to: %s', graph_location)
    train_writer = tf.summary.FileWriter(graph_location)
    train_writer.add_graph(tf.get_default_graph())

    test_acc = 0.0
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for i in range(params['batch_num']):
            batch = mnist.train.next_batch(params['batch_size'])
            mnist_network.train_step.run(feed_dict={mnist_network.images: batch[0],
                                                    mnist_network.labels: batch[1],
                                                    mnist_network.keep_prob: 1 - params['dropout_rate']})

            if i % 100 == 0:
                test_acc = mnist_network.accuracy.eval(
                    feed_dict={mnist_network.images: mnist.test.images,
                               mnist_network.labels: mnist.test.labels,
                               mnist_network.keep_prob: 1.0})
                logger.debug('test accuracy %g', test_acc)
                logger.debug('Pipe send intermediate result done.')

        test_acc = mnist_network.accuracy.eval(
            feed_dict={mnist_network.images: mnist.test.images,
                       mnist_network.labels: mnist.test.labels,
                       mnist_network.keep_prob: 1.0})
        logger.debug('Final result is %g', test_acc)
        logger.debug('Send final result done.') | [
"def",
"main",
"(",
"params",
")",
":",
"# Import data",
"mnist",
"=",
"download_mnist_retry",
"(",
"params",
"[",
"'data_dir'",
"]",
")",
"print",
"(",
"'Mnist download data done.'",
")",
"logger",
".",
"debug",
"(",
"'Mnist download data done.'",
")",
"# Create the model",
"# Build the graph for the deep net",
"mnist_network",
"=",
"MnistNetwork",
"(",
"channel_1_num",
"=",
"params",
"[",
"'channel_1_num'",
"]",
",",
"channel_2_num",
"=",
"params",
"[",
"'channel_2_num'",
"]",
",",
"conv_size",
"=",
"params",
"[",
"'conv_size'",
"]",
",",
"hidden_size",
"=",
"params",
"[",
"'hidden_size'",
"]",
",",
"pool_size",
"=",
"params",
"[",
"'pool_size'",
"]",
",",
"learning_rate",
"=",
"params",
"[",
"'learning_rate'",
"]",
")",
"mnist_network",
".",
"build_network",
"(",
")",
"logger",
".",
"debug",
"(",
"'Mnist build network done.'",
")",
"# Write log",
"graph_location",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"logger",
".",
"debug",
"(",
"'Saving graph to: %s'",
",",
"graph_location",
")",
"train_writer",
"=",
"tf",
".",
"summary",
".",
"FileWriter",
"(",
"graph_location",
")",
"train_writer",
".",
"add_graph",
"(",
"tf",
".",
"get_default_graph",
"(",
")",
")",
"test_acc",
"=",
"0.0",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"sess",
":",
"sess",
".",
"run",
"(",
"tf",
".",
"global_variables_initializer",
"(",
")",
")",
"for",
"i",
"in",
"range",
"(",
"params",
"[",
"'batch_num'",
"]",
")",
":",
"batch",
"=",
"mnist",
".",
"train",
".",
"next_batch",
"(",
"params",
"[",
"'batch_size'",
"]",
")",
"mnist_network",
".",
"train_step",
".",
"run",
"(",
"feed_dict",
"=",
"{",
"mnist_network",
".",
"images",
":",
"batch",
"[",
"0",
"]",
",",
"mnist_network",
".",
"labels",
":",
"batch",
"[",
"1",
"]",
",",
"mnist_network",
".",
"keep_prob",
":",
"1",
"-",
"params",
"[",
"'dropout_rate'",
"]",
"}",
")",
"if",
"i",
"%",
"100",
"==",
"0",
":",
"test_acc",
"=",
"mnist_network",
".",
"accuracy",
".",
"eval",
"(",
"feed_dict",
"=",
"{",
"mnist_network",
".",
"images",
":",
"mnist",
".",
"test",
".",
"images",
",",
"mnist_network",
".",
"labels",
":",
"mnist",
".",
"test",
".",
"labels",
",",
"mnist_network",
".",
"keep_prob",
":",
"1.0",
"}",
")",
"logger",
".",
"debug",
"(",
"'test accuracy %g'",
",",
"test_acc",
")",
"logger",
".",
"debug",
"(",
"'Pipe send intermediate result done.'",
")",
"test_acc",
"=",
"mnist_network",
".",
"accuracy",
".",
"eval",
"(",
"feed_dict",
"=",
"{",
"mnist_network",
".",
"images",
":",
"mnist",
".",
"test",
".",
"images",
",",
"mnist_network",
".",
"labels",
":",
"mnist",
".",
"test",
".",
"labels",
",",
"mnist_network",
".",
"keep_prob",
":",
"1.0",
"}",
")",
"logger",
".",
"debug",
"(",
"'Final result is %g'",
",",
"test_acc",
")",
"logger",
".",
"debug",
"(",
"'Send final result done.'",
")"
] | [
155,
0
] | [
206,
47
] | python | en | ['en', 'error', 'th'] | False |
get_params | () | Get parameters from command line | Get parameters from command line | def get_params():
    ''' Get parameters from command line '''
    parser = argparse.ArgumentParser()
    parser.add_argument("--data_dir", type=str, default='/tmp/tensorflow/mnist/input_data', help="data directory")
    parser.add_argument("--dropout_rate", type=float, default=0.5, help="dropout rate")
    parser.add_argument("--channel_1_num", type=int, default=32)
    parser.add_argument("--channel_2_num", type=int, default=64)
    parser.add_argument("--conv_size", type=int, default=5)
    parser.add_argument("--pool_size", type=int, default=2)
    parser.add_argument("--hidden_size", type=int, default=1024)
    parser.add_argument("--learning_rate", type=float, default=1e-4)
    parser.add_argument("--batch_num", type=int, default=2000)
    parser.add_argument("--batch_size", type=int, default=32)

    args, _ = parser.parse_known_args()
    return args | [
"def",
"get_params",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
")",
"parser",
".",
"add_argument",
"(",
"\"--data_dir\"",
",",
"type",
"=",
"str",
",",
"default",
"=",
"'/tmp/tensorflow/mnist/input_data'",
",",
"help",
"=",
"\"data directory\"",
")",
"parser",
".",
"add_argument",
"(",
"\"--dropout_rate\"",
",",
"type",
"=",
"float",
",",
"default",
"=",
"0.5",
",",
"help",
"=",
"\"dropout rate\"",
")",
"parser",
".",
"add_argument",
"(",
"\"--channel_1_num\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"32",
")",
"parser",
".",
"add_argument",
"(",
"\"--channel_2_num\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"64",
")",
"parser",
".",
"add_argument",
"(",
"\"--conv_size\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"5",
")",
"parser",
".",
"add_argument",
"(",
"\"--pool_size\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"2",
")",
"parser",
".",
"add_argument",
"(",
"\"--hidden_size\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"1024",
")",
"parser",
".",
"add_argument",
"(",
"\"--learning_rate\"",
",",
"type",
"=",
"float",
",",
"default",
"=",
"1e-4",
")",
"parser",
".",
"add_argument",
"(",
"\"--batch_num\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"2000",
")",
"parser",
".",
"add_argument",
"(",
"\"--batch_size\"",
",",
"type",
"=",
"int",
",",
"default",
"=",
"32",
")",
"args",
",",
"_",
"=",
"parser",
".",
"parse_known_args",
"(",
")",
"return",
"args"
] | [
208,
0
] | [
223,
15
] | python | en | ['en', 'en', 'en'] | True |
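The `main` and `get_params` rows above come from an NNI MNIST trial script. For context, a hedged sketch of how such an entry point is usually driven is shown below; the `nni`/`logger` wiring is an assumption based on the docstring ("send result to NNI"), not content of this dataset row.

```python
# Hedged sketch (assumed wiring, not part of the dataset): an NNI trial
# typically merges tuner-suggested hyperparameters into the CLI defaults
# and then runs the training entry point. Assumes main() and get_params()
# from the rows above are defined in the same script.
import logging

import nni

logger = logging.getLogger('mnist_AutoML')

if __name__ == '__main__':
    try:
        # Hyperparameters proposed by the NNI tuner for this trial.
        tuner_params = nni.get_next_parameter()
        params = vars(get_params())   # command-line defaults
        params.update(tuner_params)   # tuner values override the defaults
        main(params)                  # trains and reports results back to NNI
    except Exception as exception:
        logger.exception(exception)
        raise
```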
MnistNetwork.build_network | (self) | Building network for mnist | Building network for mnist | def build_network(self):
    '''
    Building network for mnist
    '''
    # Reshape to use within a convolutional neural net.
    # Last dimension is for "features" - there is only one here, since images are
    # grayscale -- it would be 3 for an RGB image, 4 for RGBA, etc.
    with tf.name_scope('reshape'):
        try:
            input_dim = int(math.sqrt(self.x_dim))
        except:
            print(
                'input dim cannot be sqrt and reshape. input dim: ' + str(self.x_dim))
            logger.debug(
                'input dim cannot be sqrt and reshape. input dim: %s', str(self.x_dim))
            raise
        x_image = tf.reshape(self.images, [-1, input_dim, input_dim, 1])

    # First convolutional layer - maps one grayscale image to 32 feature maps.
    with tf.name_scope('conv1'):
        w_conv1 = weight_variable(
            [self.conv_size, self.conv_size, 1, self.channel_1_num])
        b_conv1 = bias_variable([self.channel_1_num])
        h_conv1 = tf.nn.relu(conv2d(x_image, w_conv1) + b_conv1)

    # Pooling layer - downsamples by 2X.
    with tf.name_scope('pool1'):
        h_pool1 = max_pool(h_conv1, self.pool_size)

    # Second convolutional layer -- maps 32 feature maps to 64.
    with tf.name_scope('conv2'):
        w_conv2 = weight_variable([self.conv_size, self.conv_size,
                                   self.channel_1_num, self.channel_2_num])
        b_conv2 = bias_variable([self.channel_2_num])
        h_conv2 = tf.nn.relu(conv2d(h_pool1, w_conv2) + b_conv2)

    # Second pooling layer.
    with tf.name_scope('pool2'):
        h_pool2 = max_pool(h_conv2, self.pool_size)

    # Fully connected layer 1 -- after 2 round of downsampling, our 28x28 image
    # is down to 7x7x64 feature maps -- maps this to 1024 features.
    last_dim = int(input_dim / (self.pool_size * self.pool_size))
    with tf.name_scope('fc1'):
        w_fc1 = weight_variable(
            [last_dim * last_dim * self.channel_2_num, self.hidden_size])
        b_fc1 = bias_variable([self.hidden_size])
        h_pool2_flat = tf.reshape(
            h_pool2, [-1, last_dim * last_dim * self.channel_2_num])
        h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, w_fc1) + b_fc1)

    # Dropout - controls the complexity of the model, prevents co-adaptation of features.
    with tf.name_scope('dropout'):
        h_fc1_drop = tf.nn.dropout(h_fc1, self.keep_prob)

    # Map the 1024 features to 10 classes, one for each digit
    with tf.name_scope('fc2'):
        w_fc2 = weight_variable([self.hidden_size, self.y_dim])
        b_fc2 = bias_variable([self.y_dim])
        y_conv = tf.matmul(h_fc1_drop, w_fc2) + b_fc2

    with tf.name_scope('loss'):
        cross_entropy = tf.reduce_mean(
            tf.nn.softmax_cross_entropy_with_logits(labels=self.labels, logits=y_conv))
    with tf.name_scope('adam_optimizer'):
        self.train_step = tf.train.AdamOptimizer(
            self.learning_rate).minimize(cross_entropy)

    with tf.name_scope('accuracy'):
        correct_prediction = tf.equal(
            tf.argmax(y_conv, 1), tf.argmax(self.labels, 1))
        self.accuracy = tf.reduce_mean(
            tf.cast(correct_prediction, tf.float32)) | [
"def",
"build_network",
"(",
"self",
")",
":",
"# Reshape to use within a convolutional neural net.",
"# Last dimension is for \"features\" - there is only one here, since images are",
"# grayscale -- it would be 3 for an RGB image, 4 for RGBA, etc.",
"with",
"tf",
".",
"name_scope",
"(",
"'reshape'",
")",
":",
"try",
":",
"input_dim",
"=",
"int",
"(",
"math",
".",
"sqrt",
"(",
"self",
".",
"x_dim",
")",
")",
"except",
":",
"print",
"(",
"'input dim cannot be sqrt and reshape. input dim: '",
"+",
"str",
"(",
"self",
".",
"x_dim",
")",
")",
"logger",
".",
"debug",
"(",
"'input dim cannot be sqrt and reshape. input dim: %s'",
",",
"str",
"(",
"self",
".",
"x_dim",
")",
")",
"raise",
"x_image",
"=",
"tf",
".",
"reshape",
"(",
"self",
".",
"images",
",",
"[",
"-",
"1",
",",
"input_dim",
",",
"input_dim",
",",
"1",
"]",
")",
"# First convolutional layer - maps one grayscale image to 32 feature maps.",
"with",
"tf",
".",
"name_scope",
"(",
"'conv1'",
")",
":",
"w_conv1",
"=",
"weight_variable",
"(",
"[",
"self",
".",
"conv_size",
",",
"self",
".",
"conv_size",
",",
"1",
",",
"self",
".",
"channel_1_num",
"]",
")",
"b_conv1",
"=",
"bias_variable",
"(",
"[",
"self",
".",
"channel_1_num",
"]",
")",
"h_conv1",
"=",
"tf",
".",
"nn",
".",
"relu",
"(",
"conv2d",
"(",
"x_image",
",",
"w_conv1",
")",
"+",
"b_conv1",
")",
"# Pooling layer - downsamples by 2X.",
"with",
"tf",
".",
"name_scope",
"(",
"'pool1'",
")",
":",
"h_pool1",
"=",
"max_pool",
"(",
"h_conv1",
",",
"self",
".",
"pool_size",
")",
"# Second convolutional layer -- maps 32 feature maps to 64.",
"with",
"tf",
".",
"name_scope",
"(",
"'conv2'",
")",
":",
"w_conv2",
"=",
"weight_variable",
"(",
"[",
"self",
".",
"conv_size",
",",
"self",
".",
"conv_size",
",",
"self",
".",
"channel_1_num",
",",
"self",
".",
"channel_2_num",
"]",
")",
"b_conv2",
"=",
"bias_variable",
"(",
"[",
"self",
".",
"channel_2_num",
"]",
")",
"h_conv2",
"=",
"tf",
".",
"nn",
".",
"relu",
"(",
"conv2d",
"(",
"h_pool1",
",",
"w_conv2",
")",
"+",
"b_conv2",
")",
"# Second pooling layer.",
"with",
"tf",
".",
"name_scope",
"(",
"'pool2'",
")",
":",
"h_pool2",
"=",
"max_pool",
"(",
"h_conv2",
",",
"self",
".",
"pool_size",
")",
"# Fully connected layer 1 -- after 2 round of downsampling, our 28x28 image",
"# is down to 7x7x64 feature maps -- maps this to 1024 features.",
"last_dim",
"=",
"int",
"(",
"input_dim",
"/",
"(",
"self",
".",
"pool_size",
"*",
"self",
".",
"pool_size",
")",
")",
"with",
"tf",
".",
"name_scope",
"(",
"'fc1'",
")",
":",
"w_fc1",
"=",
"weight_variable",
"(",
"[",
"last_dim",
"*",
"last_dim",
"*",
"self",
".",
"channel_2_num",
",",
"self",
".",
"hidden_size",
"]",
")",
"b_fc1",
"=",
"bias_variable",
"(",
"[",
"self",
".",
"hidden_size",
"]",
")",
"h_pool2_flat",
"=",
"tf",
".",
"reshape",
"(",
"h_pool2",
",",
"[",
"-",
"1",
",",
"last_dim",
"*",
"last_dim",
"*",
"self",
".",
"channel_2_num",
"]",
")",
"h_fc1",
"=",
"tf",
".",
"nn",
".",
"relu",
"(",
"tf",
".",
"matmul",
"(",
"h_pool2_flat",
",",
"w_fc1",
")",
"+",
"b_fc1",
")",
"# Dropout - controls the complexity of the model, prevents co-adaptation of features.",
"with",
"tf",
".",
"name_scope",
"(",
"'dropout'",
")",
":",
"h_fc1_drop",
"=",
"tf",
".",
"nn",
".",
"dropout",
"(",
"h_fc1",
",",
"self",
".",
"keep_prob",
")",
"# Map the 1024 features to 10 classes, one for each digit",
"with",
"tf",
".",
"name_scope",
"(",
"'fc2'",
")",
":",
"w_fc2",
"=",
"weight_variable",
"(",
"[",
"self",
".",
"hidden_size",
",",
"self",
".",
"y_dim",
"]",
")",
"b_fc2",
"=",
"bias_variable",
"(",
"[",
"self",
".",
"y_dim",
"]",
")",
"y_conv",
"=",
"tf",
".",
"matmul",
"(",
"h_fc1_drop",
",",
"w_fc2",
")",
"+",
"b_fc2",
"with",
"tf",
".",
"name_scope",
"(",
"'loss'",
")",
":",
"cross_entropy",
"=",
"tf",
".",
"reduce_mean",
"(",
"tf",
".",
"nn",
".",
"softmax_cross_entropy_with_logits",
"(",
"labels",
"=",
"self",
".",
"labels",
",",
"logits",
"=",
"y_conv",
")",
")",
"with",
"tf",
".",
"name_scope",
"(",
"'adam_optimizer'",
")",
":",
"self",
".",
"train_step",
"=",
"tf",
".",
"train",
".",
"AdamOptimizer",
"(",
"self",
".",
"learning_rate",
")",
".",
"minimize",
"(",
"cross_entropy",
")",
"with",
"tf",
".",
"name_scope",
"(",
"'accuracy'",
")",
":",
"correct_prediction",
"=",
"tf",
".",
"equal",
"(",
"tf",
".",
"argmax",
"(",
"y_conv",
",",
"1",
")",
",",
"tf",
".",
"argmax",
"(",
"self",
".",
"labels",
",",
"1",
")",
")",
"self",
".",
"accuracy",
"=",
"tf",
".",
"reduce_mean",
"(",
"tf",
".",
"cast",
"(",
"correct_prediction",
",",
"tf",
".",
"float32",
")",
")"
] | [
47,
4
] | [
121,
56
] | python | en | ['en', 'error', 'th'] | False |
create_rfx_test_cfg | (device="abcd", automatic_add=False, devices=None) | Create rfxtrx config entry data. | Create rfxtrx config entry data. | def create_rfx_test_cfg(device="abcd", automatic_add=False, devices=None):
    """Create rfxtrx config entry data."""
    return {
        "device": device,
        "host": None,
        "port": None,
        "automatic_add": automatic_add,
        "debug": False,
        "devices": devices,
    } | [
"def",
"create_rfx_test_cfg",
"(",
"device",
"=",
"\"abcd\"",
",",
"automatic_add",
"=",
"False",
",",
"devices",
"=",
"None",
")",
":",
"return",
"{",
"\"device\"",
":",
"device",
",",
"\"host\"",
":",
"None",
",",
"\"port\"",
":",
"None",
",",
"\"automatic_add\"",
":",
"automatic_add",
",",
"\"debug\"",
":",
"False",
",",
"\"devices\"",
":",
"devices",
",",
"}"
] | [
14,
0
] | [
23,
5
] | python | en | ['en', 'cy', 'en'] | True |
rfxtrx_fixture | (hass) | Fixture that cleans up threads from integration. | Fixture that cleans up threads from integration. | async def rfxtrx_fixture(hass):
    """Fixture that cleans up threads from integration."""
    with patch("RFXtrx.Connect") as connect, patch("RFXtrx.DummyTransport2"):
        rfx = connect.return_value

        async def _signal_event(packet_id):
            event = rfxtrx.get_rfx_object(packet_id)
            await hass.async_add_executor_job(
                rfx.event_callback,
                event,
            )
            await hass.async_block_till_done()
            await hass.async_block_till_done()
            return event

        rfx.signal = _signal_event
        yield rfx | [
"async",
"def",
"rfxtrx_fixture",
"(",
"hass",
")",
":",
"with",
"patch",
"(",
"\"RFXtrx.Connect\"",
")",
"as",
"connect",
",",
"patch",
"(",
"\"RFXtrx.DummyTransport2\"",
")",
":",
"rfx",
"=",
"connect",
".",
"return_value",
"async",
"def",
"_signal_event",
"(",
"packet_id",
")",
":",
"event",
"=",
"rfxtrx",
".",
"get_rfx_object",
"(",
"packet_id",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"rfx",
".",
"event_callback",
",",
"event",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"return",
"event",
"rfx",
".",
"signal",
"=",
"_signal_event",
"yield",
"rfx"
] | [
27,
0
] | [
46,
17
] | python | en | ['en', 'en', 'en'] | True |
rfxtrx_automatic_fixture | (hass, rfxtrx) | Fixture that starts up with automatic additions. | Fixture that starts up with automatic additions. | async def rfxtrx_automatic_fixture(hass, rfxtrx):
    """Fixture that starts up with automatic additions."""
    entry_data = create_rfx_test_cfg(automatic_add=True, devices={})
    mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)

    mock_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(mock_entry.entry_id)
    await hass.async_block_till_done()
    await hass.async_start()

    yield rfxtrx | [
"async",
"def",
"rfxtrx_automatic_fixture",
"(",
"hass",
",",
"rfxtrx",
")",
":",
"entry_data",
"=",
"create_rfx_test_cfg",
"(",
"automatic_add",
"=",
"True",
",",
"devices",
"=",
"{",
"}",
")",
"mock_entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"\"rfxtrx\"",
",",
"unique_id",
"=",
"DOMAIN",
",",
"data",
"=",
"entry_data",
")",
"mock_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"mock_entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_start",
"(",
")",
"yield",
"rfxtrx"
] | [
50,
0
] | [
60,
16
] | python | en | ['en', 'en', 'en'] | True |
timestep | (hass) | Step system time forward. | Step system time forward. | async def timestep(hass):
    """Step system time forward."""
    with patch("homeassistant.core.dt_util.utcnow") as mock_utcnow:
        mock_utcnow.return_value = utcnow()

        async def delay(seconds):
            """Trigger delay in system."""
            mock_utcnow.return_value += timedelta(seconds=seconds)
            async_fire_time_changed(hass, mock_utcnow.return_value)
            await hass.async_block_till_done()

        yield delay | [
"async",
"def",
"timestep",
"(",
"hass",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.core.dt_util.utcnow\"",
")",
"as",
"mock_utcnow",
":",
"mock_utcnow",
".",
"return_value",
"=",
"utcnow",
"(",
")",
"async",
"def",
"delay",
"(",
"seconds",
")",
":",
"\"\"\"Trigger delay in system.\"\"\"",
"mock_utcnow",
".",
"return_value",
"+=",
"timedelta",
"(",
"seconds",
"=",
"seconds",
")",
"async_fire_time_changed",
"(",
"hass",
",",
"mock_utcnow",
".",
"return_value",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"yield",
"delay"
] | [
64,
0
] | [
76,
19
] | python | en | ['en', 'en', 'en'] | True |
mock_legacy_time | (legacy_patchable_time) | Make time patchable for all the tests. | Make time patchable for all the tests. | def mock_legacy_time(legacy_patchable_time):
    """Make time patchable for all the tests."""
    yield | [
"def",
"mock_legacy_time",
"(",
"legacy_patchable_time",
")",
":",
"yield"
] | [
25,
0
] | [
27,
9
] | python | en | ['en', 'en', 'en'] | True |
test_setup_platform | (hass) | Test setup of demo platform via configuration. | Test setup of demo platform via configuration. | async def test_setup_platform(hass):
    """Test setup of demo platform via configuration."""
    utcnow = dt_util.utcnow()
    # Patching 'utcnow' to gain more control over the timed update.
    with patch("homeassistant.util.dt.utcnow", return_value=utcnow):
        with assert_setup_component(1, geo_location.DOMAIN):
            assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
            await hass.async_block_till_done()

        # In this test, one zone and geolocation entities have been
        # generated.
        all_states = [
            hass.states.get(entity_id)
            for entity_id in hass.states.async_entity_ids(geo_location.DOMAIN)
        ]
        assert len(all_states) == NUMBER_OF_DEMO_DEVICES

        for state in all_states:
            # Check a single device's attributes.
            if state.domain != geo_location.DOMAIN:
                # ignore home zone state
                continue
            assert abs(state.attributes[ATTR_LATITUDE] - hass.config.latitude) < 1.0
            assert abs(state.attributes[ATTR_LONGITUDE] - hass.config.longitude) < 1.0
            assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == LENGTH_KILOMETERS

        # Update (replaces 1 device).
        async_fire_time_changed(hass, utcnow + DEFAULT_UPDATE_INTERVAL)
        await hass.async_block_till_done()

        # Get all states again, ensure that the number of states is still
        # the same, but the lists are different.
        all_states_updated = [
            hass.states.get(entity_id)
            for entity_id in hass.states.async_entity_ids(geo_location.DOMAIN)
        ]
        assert len(all_states_updated) == NUMBER_OF_DEMO_DEVICES
        assert all_states != all_states_updated | [
"async",
"def",
"test_setup_platform",
"(",
"hass",
")",
":",
"utcnow",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"# Patching 'utcnow' to gain more control over the timed update.",
"with",
"patch",
"(",
"\"homeassistant.util.dt.utcnow\"",
",",
"return_value",
"=",
"utcnow",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
",",
"geo_location",
".",
"DOMAIN",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"geo_location",
".",
"DOMAIN",
",",
"CONFIG",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"# In this test, one zone and geolocation entities have been",
"# generated.",
"all_states",
"=",
"[",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"for",
"entity_id",
"in",
"hass",
".",
"states",
".",
"async_entity_ids",
"(",
"geo_location",
".",
"DOMAIN",
")",
"]",
"assert",
"len",
"(",
"all_states",
")",
"==",
"NUMBER_OF_DEMO_DEVICES",
"for",
"state",
"in",
"all_states",
":",
"# Check a single device's attributes.",
"if",
"state",
".",
"domain",
"!=",
"geo_location",
".",
"DOMAIN",
":",
"# ignore home zone state",
"continue",
"assert",
"abs",
"(",
"state",
".",
"attributes",
"[",
"ATTR_LATITUDE",
"]",
"-",
"hass",
".",
"config",
".",
"latitude",
")",
"<",
"1.0",
"assert",
"abs",
"(",
"state",
".",
"attributes",
"[",
"ATTR_LONGITUDE",
"]",
"-",
"hass",
".",
"config",
".",
"longitude",
")",
"<",
"1.0",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"LENGTH_KILOMETERS",
"# Update (replaces 1 device).",
"async_fire_time_changed",
"(",
"hass",
",",
"utcnow",
"+",
"DEFAULT_UPDATE_INTERVAL",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"# Get all states again, ensure that the number of states is still",
"# the same, but the lists are different.",
"all_states_updated",
"=",
"[",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"for",
"entity_id",
"in",
"hass",
".",
"states",
".",
"async_entity_ids",
"(",
"geo_location",
".",
"DOMAIN",
")",
"]",
"assert",
"len",
"(",
"all_states_updated",
")",
"==",
"NUMBER_OF_DEMO_DEVICES",
"assert",
"all_states",
"!=",
"all_states_updated"
] | [
30,
0
] | [
66,
47
] | python | en | ['en', 'pt', 'en'] | True |
async_get_service | (hass: HomeAssistantType, config: Dict[str, Any], discovery_info: Optional[Dict[str, Any]] = None) | Get the notification service. | Get the notification service. | async def async_get_service(
    hass: HomeAssistantType,
    config: Dict[str, Any],
    discovery_info: Optional[Dict[str, Any]] = None,
) -> Optional["HuaweiLteSmsNotificationService"]:
    """Get the notification service."""
    if discovery_info is None:
        return None

    router = hass.data[DOMAIN].routers[discovery_info[CONF_URL]]
    default_targets = discovery_info[CONF_RECIPIENT] or []

    return HuaweiLteSmsNotificationService(router, default_targets) | [
"async",
"def",
"async_get_service",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config",
":",
"Dict",
"[",
"str",
",",
"Any",
"]",
",",
"discovery_info",
":",
"Optional",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
"=",
"None",
",",
")",
"->",
"Optional",
"[",
"\"HuaweiLteSmsNotificationService\"",
"]",
":",
"if",
"discovery_info",
"is",
"None",
":",
"return",
"None",
"router",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
".",
"routers",
"[",
"discovery_info",
"[",
"CONF_URL",
"]",
"]",
"default_targets",
"=",
"discovery_info",
"[",
"CONF_RECIPIENT",
"]",
"or",
"[",
"]",
"return",
"HuaweiLteSmsNotificationService",
"(",
"router",
",",
"default_targets",
")"
] | [
19,
0
] | [
31,
67
] | python | en | ['en', 'en', 'en'] | True |
get_scanner | (hass, config) | Return the Swisscom device scanner. | Return the Swisscom device scanner. | def get_scanner(hass, config):
    """Return the Swisscom device scanner."""
    scanner = SwisscomDeviceScanner(config[DOMAIN])

    return scanner if scanner.success_init else None | [
"def",
"get_scanner",
"(",
"hass",
",",
"config",
")",
":",
"scanner",
"=",
"SwisscomDeviceScanner",
"(",
"config",
"[",
"DOMAIN",
"]",
")",
"return",
"scanner",
"if",
"scanner",
".",
"success_init",
"else",
"None"
] | [
24,
0
] | [
28,
52
] | python | en | ['en', 'no', 'en'] | True |
SwisscomDeviceScanner.__init__ | (self, config) | Initialize the scanner. | Initialize the scanner. | def __init__(self, config):
    """Initialize the scanner."""
    self.host = config[CONF_HOST]
    self.last_results = {}

    # Test the router is accessible.
    data = self.get_swisscom_data()
    self.success_init = data is not None | [
"def",
"__init__",
"(",
"self",
",",
"config",
")",
":",
"self",
".",
"host",
"=",
"config",
"[",
"CONF_HOST",
"]",
"self",
".",
"last_results",
"=",
"{",
"}",
"# Test the router is accessible.",
"data",
"=",
"self",
".",
"get_swisscom_data",
"(",
")",
"self",
".",
"success_init",
"=",
"data",
"is",
"not",
"None"
] | [
34,
4
] | [
41,
44
] | python | en | ['en', 'en', 'en'] | True |
SwisscomDeviceScanner.scan_devices | (self) | Scan for new devices and return a list with found device IDs. | Scan for new devices and return a list with found device IDs. | def scan_devices(self):
    """Scan for new devices and return a list with found device IDs."""
    self._update_info()
    return [client["mac"] for client in self.last_results] | [
"def",
"scan_devices",
"(",
"self",
")",
":",
"self",
".",
"_update_info",
"(",
")",
"return",
"[",
"client",
"[",
"\"mac\"",
"]",
"for",
"client",
"in",
"self",
".",
"last_results",
"]"
] | [
43,
4
] | [
46,
62
] | python | en | ['en', 'en', 'en'] | True |
SwisscomDeviceScanner.get_device_name | (self, device) | Return the name of the given device or None if we don't know. | Return the name of the given device or None if we don't know. | def get_device_name(self, device):
    """Return the name of the given device or None if we don't know."""
    if not self.last_results:
        return None
    for client in self.last_results:
        if client["mac"] == device:
            return client["host"]
    return None | [
"def",
"get_device_name",
"(",
"self",
",",
"device",
")",
":",
"if",
"not",
"self",
".",
"last_results",
":",
"return",
"None",
"for",
"client",
"in",
"self",
".",
"last_results",
":",
"if",
"client",
"[",
"\"mac\"",
"]",
"==",
"device",
":",
"return",
"client",
"[",
"\"host\"",
"]",
"return",
"None"
] | [
48,
4
] | [
55,
19
] | python | en | ['en', 'en', 'en'] | True |
SwisscomDeviceScanner._update_info | (self) | Ensure the information from the Swisscom router is up to date. Return boolean if scanning successful. | Ensure the information from the Swisscom router is up to date. | def _update_info(self):
    """Ensure the information from the Swisscom router is up to date.

    Return boolean if scanning successful.
    """
    if not self.success_init:
        return False

    _LOGGER.info("Loading data from Swisscom Internet Box")
    data = self.get_swisscom_data()
    if not data:
        return False

    active_clients = [client for client in data.values() if client["status"]]
    self.last_results = active_clients
    return True | [
"def",
"_update_info",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"success_init",
":",
"return",
"False",
"_LOGGER",
".",
"info",
"(",
"\"Loading data from Swisscom Internet Box\"",
")",
"data",
"=",
"self",
".",
"get_swisscom_data",
"(",
")",
"if",
"not",
"data",
":",
"return",
"False",
"active_clients",
"=",
"[",
"client",
"for",
"client",
"in",
"data",
".",
"values",
"(",
")",
"if",
"client",
"[",
"\"status\"",
"]",
"]",
"self",
".",
"last_results",
"=",
"active_clients",
"return",
"True"
] | [
57,
4
] | [
72,
19
] | python | en | ['en', 'en', 'en'] | True |
SwisscomDeviceScanner.get_swisscom_data | (self) | Retrieve data from Swisscom and return parsed result. | Retrieve data from Swisscom and return parsed result. | def get_swisscom_data(self):
    """Retrieve data from Swisscom and return parsed result."""
    url = f"http://{self.host}/ws"
    headers = {CONTENT_TYPE: "application/x-sah-ws-4-call+json"}
    data = """
        {"service":"Devices", "method":"get",
         "parameters":{"expression":"lan and not self"}}"""

    devices = {}

    try:
        request = requests.post(url, headers=headers, data=data, timeout=10)
    except (
        requests.exceptions.ConnectionError,
        requests.exceptions.Timeout,
        requests.exceptions.ConnectTimeout,
    ):
        _LOGGER.info("No response from Swisscom Internet Box")
        return devices

    if "status" not in request.json():
        _LOGGER.info("No status in response from Swisscom Internet Box")
        return devices

    for device in request.json()["status"]:
        try:
            devices[device["Key"]] = {
                "ip": device["IPAddress"],
                "mac": device["PhysAddress"],
                "host": device["Name"],
                "status": device["Active"],
            }
        except (KeyError, requests.exceptions.RequestException):
            pass

    return devices | [
"def",
"get_swisscom_data",
"(",
"self",
")",
":",
"url",
"=",
"f\"http://{self.host}/ws\"",
"headers",
"=",
"{",
"CONTENT_TYPE",
":",
"\"application/x-sah-ws-4-call+json\"",
"}",
"data",
"=",
"\"\"\"\n {\"service\":\"Devices\", \"method\":\"get\",\n \"parameters\":{\"expression\":\"lan and not self\"}}\"\"\"",
"devices",
"=",
"{",
"}",
"try",
":",
"request",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"data",
",",
"timeout",
"=",
"10",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"ConnectionError",
",",
"requests",
".",
"exceptions",
".",
"Timeout",
",",
"requests",
".",
"exceptions",
".",
"ConnectTimeout",
",",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"No response from Swisscom Internet Box\"",
")",
"return",
"devices",
"if",
"\"status\"",
"not",
"in",
"request",
".",
"json",
"(",
")",
":",
"_LOGGER",
".",
"info",
"(",
"\"No status in response from Swisscom Internet Box\"",
")",
"return",
"devices",
"for",
"device",
"in",
"request",
".",
"json",
"(",
")",
"[",
"\"status\"",
"]",
":",
"try",
":",
"devices",
"[",
"device",
"[",
"\"Key\"",
"]",
"]",
"=",
"{",
"\"ip\"",
":",
"device",
"[",
"\"IPAddress\"",
"]",
",",
"\"mac\"",
":",
"device",
"[",
"\"PhysAddress\"",
"]",
",",
"\"host\"",
":",
"device",
"[",
"\"Name\"",
"]",
",",
"\"status\"",
":",
"device",
"[",
"\"Active\"",
"]",
",",
"}",
"except",
"(",
"KeyError",
",",
"requests",
".",
"exceptions",
".",
"RequestException",
")",
":",
"pass",
"return",
"devices"
] | [
74,
4
] | [
108,
22
] | python | en | ['en', 'en', 'en'] | True |
Flattener.__init__ | (self) | Flattens last 3 dimensions to make it only batch size, -1 | Flattens last 3 dimensions to make it only batch size, -1 | def __init__(self):
    """
    Flattens last 3 dimensions to make it only batch size, -1
    """
    super(Flattener, self).__init__() | [
"def",
"__init__",
"(",
"self",
")",
":",
"super",
"(",
"Flattener",
",",
"self",
")",
".",
"__init__",
"(",
")"
] | [
4,
4
] | [
8,
41
] | python | en | ['en', 'error', 'th'] | False |
async_setup | (hass: HomeAssistant, config: dict) | Set up the Network UPS Tools (NUT) component. | Set up the Network UPS Tools (NUT) component. | async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the Network UPS Tools (NUT) component."""
    hass.data.setdefault(DOMAIN, {})
    return True | [
"async",
"def",
"async_setup",
"(",
"hass",
":",
"HomeAssistant",
",",
"config",
":",
"dict",
")",
":",
"hass",
".",
"data",
".",
"setdefault",
"(",
"DOMAIN",
",",
"{",
"}",
")",
"return",
"True"
] | [
39,
0
] | [
43,
15
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass: HomeAssistant, entry: ConfigEntry) | Set up Network UPS Tools (NUT) from a config entry. | Set up Network UPS Tools (NUT) from a config entry. | async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up Network UPS Tools (NUT) from a config entry."""
    config = entry.data
    host = config[CONF_HOST]
    port = config[CONF_PORT]

    alias = config.get(CONF_ALIAS)
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)
    scan_interval = entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)

    data = PyNUTData(host, port, alias, username, password)

    async def async_update_data():
        """Fetch data from NUT."""
        async with async_timeout.timeout(10):
            await hass.async_add_executor_job(data.update)
            if not data.status:
                raise UpdateFailed("Error fetching UPS state")

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="NUT resource status",
        update_method=async_update_data,
        update_interval=timedelta(seconds=scan_interval),
    )

    # Fetch initial data so we have data when entities subscribe
    await coordinator.async_refresh()

    status = data.status
    if not status:
        _LOGGER.error("NUT Sensor has no data, unable to set up")
        raise ConfigEntryNotReady

    _LOGGER.debug("NUT Sensors Available: %s", status)

    undo_listener = entry.add_update_listener(_async_update_listener)

    unique_id = _unique_id_from_status(status)
    if unique_id is None:
        unique_id = entry.entry_id

    hass.data[DOMAIN][entry.entry_id] = {
        COORDINATOR: coordinator,
        PYNUT_DATA: data,
        PYNUT_UNIQUE_ID: unique_id,
        PYNUT_MANUFACTURER: _manufacturer_from_status(status),
        PYNUT_MODEL: _model_from_status(status),
        PYNUT_FIRMWARE: _firmware_from_status(status),
        PYNUT_NAME: data.name,
        UNDO_UPDATE_LISTENER: undo_listener,
    }

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistant",
",",
"entry",
":",
"ConfigEntry",
")",
":",
"config",
"=",
"entry",
".",
"data",
"host",
"=",
"config",
"[",
"CONF_HOST",
"]",
"port",
"=",
"config",
"[",
"CONF_PORT",
"]",
"alias",
"=",
"config",
".",
"get",
"(",
"CONF_ALIAS",
")",
"username",
"=",
"config",
".",
"get",
"(",
"CONF_USERNAME",
")",
"password",
"=",
"config",
".",
"get",
"(",
"CONF_PASSWORD",
")",
"scan_interval",
"=",
"entry",
".",
"options",
".",
"get",
"(",
"CONF_SCAN_INTERVAL",
",",
"DEFAULT_SCAN_INTERVAL",
")",
"data",
"=",
"PyNUTData",
"(",
"host",
",",
"port",
",",
"alias",
",",
"username",
",",
"password",
")",
"async",
"def",
"async_update_data",
"(",
")",
":",
"\"\"\"Fetch data from NUT.\"\"\"",
"async",
"with",
"async_timeout",
".",
"timeout",
"(",
"10",
")",
":",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"data",
".",
"update",
")",
"if",
"not",
"data",
".",
"status",
":",
"raise",
"UpdateFailed",
"(",
"\"Error fetching UPS state\"",
")",
"coordinator",
"=",
"DataUpdateCoordinator",
"(",
"hass",
",",
"_LOGGER",
",",
"name",
"=",
"\"NUT resource status\"",
",",
"update_method",
"=",
"async_update_data",
",",
"update_interval",
"=",
"timedelta",
"(",
"seconds",
"=",
"scan_interval",
")",
",",
")",
"# Fetch initial data so we have data when entities subscribe",
"await",
"coordinator",
".",
"async_refresh",
"(",
")",
"status",
"=",
"data",
".",
"status",
"if",
"not",
"status",
":",
"_LOGGER",
".",
"error",
"(",
"\"NUT Sensor has no data, unable to set up\"",
")",
"raise",
"ConfigEntryNotReady",
"_LOGGER",
".",
"debug",
"(",
"\"NUT Sensors Available: %s\"",
",",
"status",
")",
"undo_listener",
"=",
"entry",
".",
"add_update_listener",
"(",
"_async_update_listener",
")",
"unique_id",
"=",
"_unique_id_from_status",
"(",
"status",
")",
"if",
"unique_id",
"is",
"None",
":",
"unique_id",
"=",
"entry",
".",
"entry_id",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"entry",
".",
"entry_id",
"]",
"=",
"{",
"COORDINATOR",
":",
"coordinator",
",",
"PYNUT_DATA",
":",
"data",
",",
"PYNUT_UNIQUE_ID",
":",
"unique_id",
",",
"PYNUT_MANUFACTURER",
":",
"_manufacturer_from_status",
"(",
"status",
")",
",",
"PYNUT_MODEL",
":",
"_model_from_status",
"(",
"status",
")",
",",
"PYNUT_FIRMWARE",
":",
"_firmware_from_status",
"(",
"status",
")",
",",
"PYNUT_NAME",
":",
"data",
".",
"name",
",",
"UNDO_UPDATE_LISTENER",
":",
"undo_listener",
",",
"}",
"for",
"component",
"in",
"PLATFORMS",
":",
"hass",
".",
"async_create_task",
"(",
"hass",
".",
"config_entries",
".",
"async_forward_entry_setup",
"(",
"entry",
",",
"component",
")",
")",
"return",
"True"
] | [
46,
0
] | [
108,
15
] | python | en | ['en', 'en', 'en'] | True |
_async_update_listener | (hass: HomeAssistant, entry: ConfigEntry) | Handle options update. | Handle options update. | async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry):
    """Handle options update."""
    await hass.config_entries.async_reload(entry.entry_id) | [
"async",
"def",
"_async_update_listener",
"(",
"hass",
":",
"HomeAssistant",
",",
"entry",
":",
"ConfigEntry",
")",
":",
"await",
"hass",
".",
"config_entries",
".",
"async_reload",
"(",
"entry",
".",
"entry_id",
")"
] | [
111,
0
] | [
113,
58
] | python | en | ['en', 'nl', 'en'] | True |
_manufacturer_from_status | (status) | Find the best manufacturer value from the status. | Find the best manufacturer value from the status. | def _manufacturer_from_status(status):
    """Find the best manufacturer value from the status."""
    return (
        status.get("device.mfr")
        or status.get("ups.mfr")
        or status.get("ups.vendorid")
        or status.get("driver.version.data")
    ) | [
"def",
"_manufacturer_from_status",
"(",
"status",
")",
":",
"return",
"(",
"status",
".",
"get",
"(",
"\"device.mfr\"",
")",
"or",
"status",
".",
"get",
"(",
"\"ups.mfr\"",
")",
"or",
"status",
".",
"get",
"(",
"\"ups.vendorid\"",
")",
"or",
"status",
".",
"get",
"(",
"\"driver.version.data\"",
")",
")"
] | [
116,
0
] | [
123,
5
] | python | en | ['en', 'en', 'en'] | True |
_model_from_status | (status) | Find the best model value from the status. | Find the best model value from the status. | def _model_from_status(status):
    """Find the best model value from the status."""
    return (
        status.get("device.model")
        or status.get("ups.model")
        or status.get("ups.productid")
    ) | [
"def",
"_model_from_status",
"(",
"status",
")",
":",
"return",
"(",
"status",
".",
"get",
"(",
"\"device.model\"",
")",
"or",
"status",
".",
"get",
"(",
"\"ups.model\"",
")",
"or",
"status",
".",
"get",
"(",
"\"ups.productid\"",
")",
")"
] | [
126,
0
] | [
132,
5
] | python | en | ['en', 'en', 'en'] | True |
_firmware_from_status | (status) | Find the best firmware value from the status. | Find the best firmware value from the status. | def _firmware_from_status(status):
    """Find the best firmware value from the status."""
    return status.get("ups.firmware") or status.get("ups.firmware.aux") | [
"def",
"_firmware_from_status",
"(",
"status",
")",
":",
"return",
"status",
".",
"get",
"(",
"\"ups.firmware\"",
")",
"or",
"status",
".",
"get",
"(",
"\"ups.firmware.aux\"",
")"
] | [
135,
0
] | [
137,
71
] | python | en | ['en', 'en', 'en'] | True |
_serial_from_status | (status) | Find the best serial value from the status. | Find the best serial value from the status. | def _serial_from_status(status):
    """Find the best serial value from the status."""
    serial = status.get("device.serial") or status.get("ups.serial")
    if serial and (serial.lower() == "unknown" or serial.count("0") == len(serial)):
        return None
    return serial | [
"def",
"_serial_from_status",
"(",
"status",
")",
":",
"serial",
"=",
"status",
".",
"get",
"(",
"\"device.serial\"",
")",
"or",
"status",
".",
"get",
"(",
"\"ups.serial\"",
")",
"if",
"serial",
"and",
"(",
"serial",
".",
"lower",
"(",
")",
"==",
"\"unknown\"",
"or",
"serial",
".",
"count",
"(",
"\"0\"",
")",
"==",
"len",
"(",
"serial",
")",
")",
":",
"return",
"None",
"return",
"serial"
] | [
140,
0
] | [
145,
17
] | python | en | ['en', 'en', 'en'] | True |
_unique_id_from_status | (status) | Find the best unique id value from the status. | Find the best unique id value from the status. | def _unique_id_from_status(status):
    """Find the best unique id value from the status."""
    serial = _serial_from_status(status)
    # We must have a serial for this to be unique
    if not serial:
        return None

    manufacturer = _manufacturer_from_status(status)
    model = _model_from_status(status)

    unique_id_group = []
    if manufacturer:
        unique_id_group.append(manufacturer)
    if model:
        unique_id_group.append(model)
    if serial:
        unique_id_group.append(serial)
    return "_".join(unique_id_group) | [
"def",
"_unique_id_from_status",
"(",
"status",
")",
":",
"serial",
"=",
"_serial_from_status",
"(",
"status",
")",
"# We must have a serial for this to be unique",
"if",
"not",
"serial",
":",
"return",
"None",
"manufacturer",
"=",
"_manufacturer_from_status",
"(",
"status",
")",
"model",
"=",
"_model_from_status",
"(",
"status",
")",
"unique_id_group",
"=",
"[",
"]",
"if",
"manufacturer",
":",
"unique_id_group",
".",
"append",
"(",
"manufacturer",
")",
"if",
"model",
":",
"unique_id_group",
".",
"append",
"(",
"model",
")",
"if",
"serial",
":",
"unique_id_group",
".",
"append",
"(",
"serial",
")",
"return",
"\"_\"",
".",
"join",
"(",
"unique_id_group",
")"
] | [
148,
0
] | [
165,
36
] | python | en | ['en', 'en', 'en'] | True |
find_resources_in_config_entry | (config_entry) | Find the configured resources in the config entry. | Find the configured resources in the config entry. | def find_resources_in_config_entry(config_entry):
    """Find the configured resources in the config entry."""
    if CONF_RESOURCES in config_entry.options:
        return config_entry.options[CONF_RESOURCES]
    return config_entry.data[CONF_RESOURCES] | [
"def",
"find_resources_in_config_entry",
"(",
"config_entry",
")",
":",
"if",
"CONF_RESOURCES",
"in",
"config_entry",
".",
"options",
":",
"return",
"config_entry",
".",
"options",
"[",
"CONF_RESOURCES",
"]",
"return",
"config_entry",
".",
"data",
"[",
"CONF_RESOURCES",
"]"
] | [
168,
0
] | [
172,
44
] | python | en | ['en', 'en', 'en'] | True |
async_unload_entry | (hass: HomeAssistant, entry: ConfigEntry) | Unload a config entry. | Unload a config entry. | async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry."""
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, component)
                for component in PLATFORMS
            ]
        )
    )

    hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()

    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok | [
"async",
"def",
"async_unload_entry",
"(",
"hass",
":",
"HomeAssistant",
",",
"entry",
":",
"ConfigEntry",
")",
":",
"unload_ok",
"=",
"all",
"(",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"[",
"hass",
".",
"config_entries",
".",
"async_forward_entry_unload",
"(",
"entry",
",",
"component",
")",
"for",
"component",
"in",
"PLATFORMS",
"]",
")",
")",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"entry",
".",
"entry_id",
"]",
"[",
"UNDO_UPDATE_LISTENER",
"]",
"(",
")",
"if",
"unload_ok",
":",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
".",
"pop",
"(",
"entry",
".",
"entry_id",
")",
"return",
"unload_ok"
] | [
175,
0
] | [
191,
20
] | python | en | ['en', 'es', 'en'] | True |
PyNUTData.__init__ | (self, host, port, alias, username, password) | Initialize the data object. | Initialize the data object. | def __init__(self, host, port, alias, username, password):
    """Initialize the data object."""
    self._host = host
    self._alias = alias

    # Establish client with persistent=False to open/close connection on
    # each update call. This is more reliable with async.
    self._client = PyNUTClient(self._host, port, username, password, 5, False)
    self.ups_list = None
    self._status = None | [
"def",
"__init__",
"(",
"self",
",",
"host",
",",
"port",
",",
"alias",
",",
"username",
",",
"password",
")",
":",
"self",
".",
"_host",
"=",
"host",
"self",
".",
"_alias",
"=",
"alias",
"# Establish client with persistent=False to open/close connection on",
"# each update call. This is more reliable with async.",
"self",
".",
"_client",
"=",
"PyNUTClient",
"(",
"self",
".",
"_host",
",",
"port",
",",
"username",
",",
"password",
",",
"5",
",",
"False",
")",
"self",
".",
"ups_list",
"=",
"None",
"self",
".",
"_status",
"=",
"None"
] | [
201,
4
] | [
211,
27
] | python | en | ['en', 'en', 'en'] | True |
PyNUTData.status | (self) | Get latest update if throttle allows. Return status. | Get latest update if throttle allows. Return status. | def status(self):
    """Get latest update if throttle allows. Return status."""
    return self._status | [
"def",
"status",
"(",
"self",
")",
":",
"return",
"self",
".",
"_status"
] | [
214,
4
] | [
216,
27
] | python | en | ['en', 'en', 'en'] | True |
PyNUTData.name | (self) | Return the name of the ups. | Return the name of the ups. | def name(self):
    """Return the name of the ups."""
    return self._alias | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_alias"
] | [
219,
4
] | [
221,
26
] | python | en | ['en', 'en', 'en'] | True |
PyNUTData._get_alias | (self) | Get the ups alias from NUT. | Get the ups alias from NUT. | def _get_alias(self):
    """Get the ups alias from NUT."""
    try:
        ups_list = self._client.list_ups()
    except PyNUTError as err:
        _LOGGER.error("Failure getting NUT ups alias, %s", err)
        return None

    if not ups_list:
        _LOGGER.error("Empty list while getting NUT ups aliases")
        return None

    self.ups_list = ups_list
    return list(ups_list)[0] | [
"def",
"_get_alias",
"(",
"self",
")",
":",
"try",
":",
"ups_list",
"=",
"self",
".",
"_client",
".",
"list_ups",
"(",
")",
"except",
"PyNUTError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Failure getting NUT ups alias, %s\"",
",",
"err",
")",
"return",
"None",
"if",
"not",
"ups_list",
":",
"_LOGGER",
".",
"error",
"(",
"\"Empty list while getting NUT ups aliases\"",
")",
"return",
"None",
"self",
".",
"ups_list",
"=",
"ups_list",
"return",
"list",
"(",
"ups_list",
")",
"[",
"0",
"]"
] | [
223,
4
] | [
236,
32
] | python | en | ['en', 'en', 'en'] | True |
PyNUTData._get_status | (self) | Get the ups status from NUT. | Get the ups status from NUT. | def _get_status(self):
    """Get the ups status from NUT."""
    if self._alias is None:
        self._alias = self._get_alias()

    try:
        return self._client.list_vars(self._alias)
    except (PyNUTError, ConnectionResetError) as err:
        _LOGGER.debug("Error getting NUT vars for host %s: %s", self._host, err)
        return None | [
"def",
"_get_status",
"(",
"self",
")",
":",
"if",
"self",
".",
"_alias",
"is",
"None",
":",
"self",
".",
"_alias",
"=",
"self",
".",
"_get_alias",
"(",
")",
"try",
":",
"return",
"self",
".",
"_client",
".",
"list_vars",
"(",
"self",
".",
"_alias",
")",
"except",
"(",
"PyNUTError",
",",
"ConnectionResetError",
")",
"as",
"err",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Error getting NUT vars for host %s: %s\"",
",",
"self",
".",
"_host",
",",
"err",
")",
"return",
"None"
] | [
238,
4
] | [
247,
23
] | python | en | ['en', 'en', 'en'] | True |
PyNUTData.update | (self, **kwargs) | Fetch the latest status from NUT. | Fetch the latest status from NUT. | def update(self, **kwargs):
    """Fetch the latest status from NUT."""
    self._status = self._get_status() | [
"def",
"update",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_status",
"=",
"self",
".",
"_get_status",
"(",
")"
] | [
249,
4
] | [
251,
41
] | python | en | ['en', 'en', 'en'] | True |
async_setup_platform | (hass, config, async_add_entities, discovery_info=None) | Set up the Demo camera platform. | Set up the Demo camera platform. | async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Demo camera platform."""
    async_add_entities([DemoCamera("Demo camera")]) | [
"async",
"def",
"async_setup_platform",
"(",
"hass",
",",
"config",
",",
"async_add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"async_add_entities",
"(",
"[",
"DemoCamera",
"(",
"\"Demo camera\"",
")",
"]",
")"
] | [
6,
0
] | [
8,
51
] | python | en | ['en', 'pt', 'en'] | True |
async_setup_entry | (hass, config_entry, async_add_entities) | Set up the Demo config entry. | Set up the Demo config entry. | async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Demo config entry."""
    await async_setup_platform(hass, {}, async_add_entities) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"config_entry",
",",
"async_add_entities",
")",
":",
"await",
"async_setup_platform",
"(",
"hass",
",",
"{",
"}",
",",
"async_add_entities",
")"
] | [
11,
0
] | [
13,
60
] | python | en | ['en', 'en', 'en'] | True |
DemoCamera.__init__ | (self, name) | Initialize demo camera component. | Initialize demo camera component. | def __init__(self, name):
    """Initialize demo camera component."""
    super().__init__()
    self._name = name
    self._motion_status = False
    self.is_streaming = True
    self._images_index = 0 | [
"def",
"__init__",
"(",
"self",
",",
"name",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
")",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_motion_status",
"=",
"False",
"self",
".",
"is_streaming",
"=",
"True",
"self",
".",
"_images_index",
"=",
"0"
] | [
19,
4
] | [
25,
30
] | python | es | ['es', 'pt', 'it'] | False |
DemoCamera.async_camera_image | (self) | Return a faked still image response. | Return a faked still image response. | async def async_camera_image(self):
    """Return a faked still image response."""
    self._images_index = (self._images_index + 1) % 4
    image_path = Path(__file__).parent / f"demo_{self._images_index}.jpg"
    return await self.hass.async_add_executor_job(image_path.read_bytes) | [
"async",
"def",
"async_camera_image",
"(",
"self",
")",
":",
"self",
".",
"_images_index",
"=",
"(",
"self",
".",
"_images_index",
"+",
"1",
")",
"%",
"4",
"image_path",
"=",
"Path",
"(",
"__file__",
")",
".",
"parent",
"/",
"f\"demo_{self._images_index}.jpg\"",
"return",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"image_path",
".",
"read_bytes",
")"
] | [
27,
4
] | [
32,
76
] | python | en | ['en', 'sv', 'en'] | True |
DemoCamera.name | (self) | Return the name of this camera. | Return the name of this camera. | def name(self):
    """Return the name of this camera."""
    return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
35,
4
] | [
37,
25
] | python | en | ['en', 'en', 'en'] | True |
DemoCamera.supported_features | (self) | Camera support turn on/off features. | Camera support turn on/off features. | def supported_features(self):
    """Camera support turn on/off features."""
    return SUPPORT_ON_OFF | [
"def",
"supported_features",
"(",
"self",
")",
":",
"return",
"SUPPORT_ON_OFF"
] | [
40,
4
] | [
42,
29
] | python | en | ['es', 'en', 'en'] | True |
DemoCamera.is_on | (self) | Whether camera is on (streaming). | Whether camera is on (streaming). | def is_on(self):
    """Whether camera is on (streaming)."""
    return self.is_streaming | [
"def",
"is_on",
"(",
"self",
")",
":",
"return",
"self",
".",
"is_streaming"
] | [
45,
4
] | [
47,
32
] | python | en | ['en', 'en', 'en'] | True |
DemoCamera.motion_detection_enabled | (self) | Camera Motion Detection Status. | Camera Motion Detection Status. | def motion_detection_enabled(self):
    """Camera Motion Detection Status."""
    return self._motion_status | [
"def",
"motion_detection_enabled",
"(",
"self",
")",
":",
"return",
"self",
".",
"_motion_status"
] | [
50,
4
] | [
52,
34
] | python | en | ['sv', 'ja', 'en'] | False |
DemoCamera.async_enable_motion_detection | (self) | Enable the Motion detection in base station (Arm). | Enable the Motion detection in base station (Arm). | async def async_enable_motion_detection(self):
    """Enable the Motion detection in base station (Arm)."""
    self._motion_status = True
    self.async_write_ha_state() | [
"async",
"def",
"async_enable_motion_detection",
"(",
"self",
")",
":",
"self",
".",
"_motion_status",
"=",
"True",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
54,
4
] | [
57,
35
] | python | en | ['en', 'en', 'en'] | True |
DemoCamera.async_disable_motion_detection | (self) | Disable the motion detection in base station (Disarm). | Disable the motion detection in base station (Disarm). | async def async_disable_motion_detection(self):
    """Disable the motion detection in base station (Disarm)."""
    self._motion_status = False
    self.async_write_ha_state() | [
"async",
"def",
"async_disable_motion_detection",
"(",
"self",
")",
":",
"self",
".",
"_motion_status",
"=",
"False",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
59,
4
] | [
62,
35
] | python | en | ['en', 'en', 'en'] | True |
DemoCamera.async_turn_off | (self) | Turn off camera. | Turn off camera. | async def async_turn_off(self):
    """Turn off camera."""
    self.is_streaming = False
    self.async_write_ha_state() | [
"async",
"def",
"async_turn_off",
"(",
"self",
")",
":",
"self",
".",
"is_streaming",
"=",
"False",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
64,
4
] | [
67,
35
] | python | en | ['en', 'ja', 'en'] | True |
DemoCamera.async_turn_on | (self) | Turn on camera. | Turn on camera. | async def async_turn_on(self):
    """Turn on camera."""
    self.is_streaming = True
    self.async_write_ha_state() | [
"async",
"def",
"async_turn_on",
"(",
"self",
")",
":",
"self",
".",
"is_streaming",
"=",
"True",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
69,
4
] | [
72,
35
] | python | en | ['en', 'et', 'en'] | True |
create_security_system_service | (accessory) | Define security-system characteristics as per page 219 of HAP spec. | Define security-system characteristics as per page 219 of HAP spec. | def create_security_system_service(accessory):
    """Define security-system characteristics as per page 219 of HAP spec."""
    service = accessory.add_service(ServicesTypes.SECURITY_SYSTEM)

    cur_state = service.add_char(CharacteristicsTypes.SECURITY_SYSTEM_STATE_CURRENT)
    cur_state.value = 0

    targ_state = service.add_char(CharacteristicsTypes.SECURITY_SYSTEM_STATE_TARGET)
    targ_state.value = 0

    # According to the spec, a battery-level characteristic is normally
    # part of a separate service. However as the code was written (which
    # predates this test) the battery level would have to be part of the lock
    # service as it is here.
    targ_state = service.add_char(CharacteristicsTypes.BATTERY_LEVEL)
    targ_state.value = 50 | [
"def",
"create_security_system_service",
"(",
"accessory",
")",
":",
"service",
"=",
"accessory",
".",
"add_service",
"(",
"ServicesTypes",
".",
"SECURITY_SYSTEM",
")",
"cur_state",
"=",
"service",
".",
"add_char",
"(",
"CharacteristicsTypes",
".",
"SECURITY_SYSTEM_STATE_CURRENT",
")",
"cur_state",
".",
"value",
"=",
"0",
"targ_state",
"=",
"service",
".",
"add_char",
"(",
"CharacteristicsTypes",
".",
"SECURITY_SYSTEM_STATE_TARGET",
")",
"targ_state",
".",
"value",
"=",
"0",
"# According to the spec, a battery-level characteristic is normally",
"# part of a separate service. However as the code was written (which",
"# predates this test) the battery level would have to be part of the lock",
"# service as it is here.",
"targ_state",
"=",
"service",
".",
"add_char",
"(",
"CharacteristicsTypes",
".",
"BATTERY_LEVEL",
")",
"targ_state",
".",
"value",
"=",
"50"
] | [
10,
0
] | [
25,
25
] | python | en | ['en', 'en', 'en'] | True |
test_switch_change_alarm_state | (hass, utcnow) | Test that we can turn a HomeKit alarm on and off again. | Test that we can turn a HomeKit alarm on and off again. | async def test_switch_change_alarm_state(hass, utcnow):
    """Test that we can turn a HomeKit alarm on and off again."""
    helper = await setup_test_component(hass, create_security_system_service)

    await hass.services.async_call(
        "alarm_control_panel",
        "alarm_arm_home",
        {"entity_id": "alarm_control_panel.testdevice"},
        blocking=True,
    )
    assert helper.characteristics[TARGET_STATE].value == 0

    await hass.services.async_call(
        "alarm_control_panel",
        "alarm_arm_away",
        {"entity_id": "alarm_control_panel.testdevice"},
        blocking=True,
    )
    assert helper.characteristics[TARGET_STATE].value == 1

    await hass.services.async_call(
        "alarm_control_panel",
        "alarm_arm_night",
        {"entity_id": "alarm_control_panel.testdevice"},
        blocking=True,
    )
    assert helper.characteristics[TARGET_STATE].value == 2

    await hass.services.async_call(
        "alarm_control_panel",
        "alarm_disarm",
        {"entity_id": "alarm_control_panel.testdevice"},
        blocking=True,
    )
    assert helper.characteristics[TARGET_STATE].value == 3 | [
"async",
"def",
"test_switch_change_alarm_state",
"(",
"hass",
",",
"utcnow",
")",
":",
"helper",
"=",
"await",
"setup_test_component",
"(",
"hass",
",",
"create_security_system_service",
")",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"alarm_control_panel\"",
",",
"\"alarm_arm_home\"",
",",
"{",
"\"entity_id\"",
":",
"\"alarm_control_panel.testdevice\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"helper",
".",
"characteristics",
"[",
"TARGET_STATE",
"]",
".",
"value",
"==",
"0",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"alarm_control_panel\"",
",",
"\"alarm_arm_away\"",
",",
"{",
"\"entity_id\"",
":",
"\"alarm_control_panel.testdevice\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"helper",
".",
"characteristics",
"[",
"TARGET_STATE",
"]",
".",
"value",
"==",
"1",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"alarm_control_panel\"",
",",
"\"alarm_arm_night\"",
",",
"{",
"\"entity_id\"",
":",
"\"alarm_control_panel.testdevice\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"helper",
".",
"characteristics",
"[",
"TARGET_STATE",
"]",
".",
"value",
"==",
"2",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"alarm_control_panel\"",
",",
"\"alarm_disarm\"",
",",
"{",
"\"entity_id\"",
":",
"\"alarm_control_panel.testdevice\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"helper",
".",
"characteristics",
"[",
"TARGET_STATE",
"]",
".",
"value",
"==",
"3"
] | [
28,
0
] | [
62,
58
] | python | en | ['en', 'en', 'en'] | True |
test_switch_read_alarm_state | (hass, utcnow) | Test that we can read the state of a HomeKit alarm accessory. | Test that we can read the state of a HomeKit alarm accessory. | async def test_switch_read_alarm_state(hass, utcnow):
"""Test that we can read the state of a HomeKit alarm accessory."""
helper = await setup_test_component(hass, create_security_system_service)
helper.characteristics[CURRENT_STATE].value = 0
state = await helper.poll_and_get_state()
assert state.state == "armed_home"
assert state.attributes["battery_level"] == 50
helper.characteristics[CURRENT_STATE].value = 1
state = await helper.poll_and_get_state()
assert state.state == "armed_away"
helper.characteristics[CURRENT_STATE].value = 2
state = await helper.poll_and_get_state()
assert state.state == "armed_night"
helper.characteristics[CURRENT_STATE].value = 3
state = await helper.poll_and_get_state()
assert state.state == "disarmed"
helper.characteristics[CURRENT_STATE].value = 4
state = await helper.poll_and_get_state()
assert state.state == "triggered" | [ ...function tokens... ] | [65, 0] | [88, 37] | python | en | ['en', 'en', 'en'] | True
_generate_mock_feed_entry | (external_id, title, distance_to_home, coordinates, region=None, attribution=None, published=None, magnitude=None, image_url=None) | Construct a mock feed entry for testing purposes. | Construct a mock feed entry for testing purposes. | def _generate_mock_feed_entry(
external_id,
title,
distance_to_home,
coordinates,
region=None,
attribution=None,
published=None,
magnitude=None,
image_url=None,
):
"""Construct a mock feed entry for testing purposes."""
feed_entry = MagicMock()
feed_entry.external_id = external_id
feed_entry.title = title
feed_entry.distance_to_home = distance_to_home
feed_entry.coordinates = coordinates
feed_entry.region = region
feed_entry.attribution = attribution
feed_entry.published = published
feed_entry.magnitude = magnitude
feed_entry.image_url = image_url
return feed_entry | [ ...function tokens... ] | [47, 0] | [69, 21] | python | en | ['en', 'en', 'en'] | True
test_setup | (hass) | Test the general setup of the platform. | Test the general setup of the platform. | async def test_setup(hass):
"""Test the general setup of the platform."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Title 1",
15.5,
(38.0, -3.0),
region="Region 1",
attribution="Attribution 1",
published=datetime.datetime(2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc),
magnitude=5.7,
image_url="http://image.url/map.jpg",
)
mock_entry_2 = _generate_mock_feed_entry(
"2345", "Title 2", 20.5, (38.1, -3.1), magnitude=4.6
)
mock_entry_3 = _generate_mock_feed_entry(
"3456", "Title 3", 25.5, (38.2, -3.2), region="Region 3"
)
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (38.3, -3.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"georss_ign_sismologia_client.IgnSismologiaFeed"
) as mock_feed:
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_2, mock_entry_3],
)
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
state = hass.states.get("geo_location.m_5_7_region_1")
assert state is not None
assert state.name == "M 5.7 - Region 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "M 5.7 - Region 1",
ATTR_TITLE: "Title 1",
ATTR_REGION: "Region 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_PUBLICATION_DATE: datetime.datetime(
2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_IMAGE_URL: "http://image.url/map.jpg",
ATTR_MAGNITUDE: 5.7,
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "ign_sismologia",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 15.5
state = hass.states.get("geo_location.m_4_6")
assert state is not None
assert state.name == "M 4.6"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: 38.1,
ATTR_LONGITUDE: -3.1,
ATTR_FRIENDLY_NAME: "M 4.6",
ATTR_TITLE: "Title 2",
ATTR_MAGNITUDE: 4.6,
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "ign_sismologia",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 20.5
state = hass.states.get("geo_location.region_3")
assert state is not None
assert state.name == "Region 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: 38.2,
ATTR_LONGITUDE: -3.2,
ATTR_FRIENDLY_NAME: "Region 3",
ATTR_TITLE: "Title 3",
ATTR_REGION: "Region 3",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "ign_sismologia",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 25.5
# Simulate an update - one existing, one new entry,
# one outdated entry
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_4, mock_entry_3],
)
async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed.return_value.update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate an update - empty data, removes all entities
mock_feed.return_value.update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 0 | [ ...function tokens... ] | [72, 0] | [195, 39] | python | en | ['en', 'da', 'en'] | True
test_setup_with_custom_location | (hass) | Test the setup with a custom location. | Test the setup with a custom location. | async def test_setup_with_custom_location(hass):
"""Test the setup with a custom location."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 20.5, (38.1, -3.1))
with patch("georss_ign_sismologia_client.IgnSismologiaFeed") as mock_feed:
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(
hass, geo_location.DOMAIN, CONFIG_WITH_CUSTOM_LOCATION
)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert mock_feed.call_args == call(
(40.4, -3.7), filter_minimum_magnitude=0.0, filter_radius=200.0
) | [ ...function tokens... ] | [198, 0] | [222, 13] | python | en | ['en', 'en', 'en'] | True
test_homeassistant_location_exists | () | Test if Home Assistant location exists it should return True. | Test if Home Assistant location exists it should return True. | async def test_homeassistant_location_exists() -> None:
"""Test if Home Assistant location exists it should return True."""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
with patch.object(flow, "_check_location", return_value=True):
# Test exists
hass.config.location_name = "Home"
hass.config.latitude = 17.8419
hass.config.longitude = 59.3262
assert await flow._homeassistant_location_exists() is True
# Test not exists
hass.config.location_name = None
hass.config.latitude = 0
hass.config.longitude = 0
assert await flow._homeassistant_location_exists() is False | [ ...function tokens... ] | [10, 0] | [28, 67] | python | en | ['en', 'en', 'en'] | True
test_name_in_configuration_exists | () | Test if home location exists in configuration. | Test if home location exists in configuration. | async def test_name_in_configuration_exists() -> None:
"""Test if home location exists in configuration."""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
# Test exists
hass.config.location_name = "Home"
hass.config.latitude = 17.8419
hass.config.longitude = 59.3262
# Check not exists
with patch.object(
config_flow,
"smhi_locations",
return_value={"test": "something", "test2": "something else"},
):
assert flow._name_in_configuration_exists("no_exist_name") is False
# Check exists
with patch.object(
config_flow,
"smhi_locations",
return_value={"test": "something", "name_exist": "config"},
):
assert flow._name_in_configuration_exists("name_exist") is True | [ ...function tokens... ] | [31, 0] | [58, 71] | python | en | ['en', 'en', 'en'] | True
test_smhi_locations | (hass) | Test return empty set. | Test return empty set. | def test_smhi_locations(hass) -> None:
"""Test return empty set."""
locations = config_flow.smhi_locations(hass)
assert not locations | [ ...function tokens... ] | [61, 0] | [64, 24] | python | en | ['en', 'hu', 'en'] | True
test_show_config_form | () | Test show configuration form. | Test show configuration form. | async def test_show_config_form() -> None:
"""Test show configuration form."""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
result = await flow._show_config_form()
assert result["type"] == "form"
assert result["step_id"] == "user" | [ ...function tokens... ] | [67, 0] | [76, 38] | python | en | ['en', 'fr', 'en'] | True
test_show_config_form_default_values | () | Test show configuration form. | Test show configuration form. | async def test_show_config_form_default_values() -> None:
"""Test show configuration form."""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
result = await flow._show_config_form(name="test", latitude="65", longitude="17")
assert result["type"] == "form"
assert result["step_id"] == "user" | [ ...function tokens... ] | [79, 0] | [88, 38] | python | en | ['en', 'fr', 'en'] | True
test_flow_with_home_location | (hass) | Test config flow . Tests the flow when a default location is configured then it should return a form with default values | Test config flow . | async def test_flow_with_home_location(hass) -> None:
"""Test config flow .
Tests the flow when a default location is configured
then it should return a form with default values
"""
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
with patch.object(flow, "_check_location", return_value=True):
hass.config.location_name = "Home"
hass.config.latitude = 17.8419
hass.config.longitude = 59.3262
result = await flow.async_step_user()
assert result["type"] == "form"
assert result["step_id"] == "user" | [ ...function tokens... ] | [91, 0] | [107, 42] | python | en | ['en', 'da', 'en'] | True
test_flow_show_form | () | Test show form scenarios first time. Test when the form should show when no configurations exists | Test show form scenarios first time. | async def test_flow_show_form() -> None:
"""Test show form scenarios first time.
Test when the form should show when no configurations exists
"""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
# Test show form when Home Assistant config exists and
# home is already configured, then new config is allowed
with patch.object(
flow, "_show_config_form", return_value=None
) as config_form, patch.object(
flow, "_homeassistant_location_exists", return_value=True
), patch.object(
config_flow,
"smhi_locations",
return_value={"test": "something", "name_exist": "config"},
):
await flow.async_step_user()
assert len(config_form.mock_calls) == 1
# Test show form when Home Assistant config not and
# home is not configured
with patch.object(
flow, "_show_config_form", return_value=None
) as config_form, patch.object(
flow, "_homeassistant_location_exists", return_value=False
), patch.object(
config_flow,
"smhi_locations",
return_value={"test": "something", "name_exist": "config"},
):
await flow.async_step_user()
assert len(config_form.mock_calls) == 1 | [ ...function tokens... ] | [110, 0] | [146, 47] | python | en | ['es', 'en', 'en'] | True
test_flow_show_form_name_exists | () | Test show form if name already exists. Test when the form should show when no configurations exists | Test show form if name already exists. | async def test_flow_show_form_name_exists() -> None:
"""Test show form if name already exists.
Test when the form should show when no configurations exists
"""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
test_data = {"name": "home", CONF_LONGITUDE: "0", CONF_LATITUDE: "0"}
# Test show form when Home Assistant config exists and
# home is already configured, then new config is allowed
with patch.object(
flow, "_show_config_form", return_value=None
) as config_form, patch.object(
flow, "_name_in_configuration_exists", return_value=True
), patch.object(
config_flow,
"smhi_locations",
return_value={"test": "something", "name_exist": "config"},
), patch.object(
flow, "_check_location", return_value=True
):
await flow.async_step_user(user_input=test_data)
assert len(config_form.mock_calls) == 1
assert len(flow._errors) == 1 | [ ...function tokens... ] | [149, 0] | [175, 37] | python | en | ['en', 'en', 'en'] | True
test_flow_entry_created_from_user_input | () | Test that create data from user input. Test when the form should show when no configurations exists | Test that create data from user input. | async def test_flow_entry_created_from_user_input() -> None:
"""Test that create data from user input.
Test when the form should show when no configurations exists
"""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
test_data = {"name": "home", CONF_LONGITUDE: "0", CONF_LATITUDE: "0"}
# Test that entry created when user_input name not exists
with patch.object(
flow, "_show_config_form", return_value=None
) as config_form, patch.object(
flow, "_name_in_configuration_exists", return_value=False
), patch.object(
flow, "_homeassistant_location_exists", return_value=False
), patch.object(
config_flow,
"smhi_locations",
return_value={"test": "something", "name_exist": "config"},
), patch.object(
flow, "_check_location", return_value=True
):
result = await flow.async_step_user(user_input=test_data)
assert result["type"] == "create_entry"
assert result["data"] == test_data
assert not config_form.mock_calls | [ ...function tokens... ] | [178, 0] | [208, 41] | python | en | ['en', 'en', 'en'] | True
test_flow_entry_created_user_input_faulty | () | Test that create data from user input and are faulty. Test when the form should show when user puts faulty location in the config gui. Then the form should show with error | Test that create data from user input and are faulty. | async def test_flow_entry_created_user_input_faulty() -> None:
"""Test that create data from user input and are faulty.
Test when the form should show when user puts faulty location
in the config gui. Then the form should show with error
"""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
test_data = {"name": "home", CONF_LONGITUDE: "0", CONF_LATITUDE: "0"}
# Test that entry created when user_input name not exists
with patch.object(flow, "_check_location", return_value=True), patch.object(
flow, "_show_config_form", return_value=None
) as config_form, patch.object(
flow, "_name_in_configuration_exists", return_value=False
), patch.object(
flow, "_homeassistant_location_exists", return_value=False
), patch.object(
config_flow,
"smhi_locations",
return_value={"test": "something", "name_exist": "config"},
), patch.object(
flow, "_check_location", return_value=False
):
await flow.async_step_user(user_input=test_data)
assert len(config_form.mock_calls) == 1
assert len(flow._errors) == 1 | [ ...function tokens... ] | [211, 0] | [241, 37] | python | en | ['en', 'en', 'en'] | True
test_check_location_correct | () | Test check location when correct input. | Test check location when correct input. | async def test_check_location_correct() -> None:
"""Test check location when correct input."""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
with patch.object(
config_flow.aiohttp_client, "async_get_clientsession"
), patch.object(SmhiApi, "async_get_forecast", return_value=None):
assert await flow._check_location("58", "17") is True | [ ...function tokens... ] | [244, 0] | [254, 61] | python | en | ['en', 'en', 'en'] | True
test_check_location_faulty | () | Test check location when faulty input. | Test check location when faulty input. | async def test_check_location_faulty() -> None:
"""Test check location when faulty input."""
hass = Mock()
flow = config_flow.SmhiFlowHandler()
flow.hass = hass
with patch.object(
config_flow.aiohttp_client, "async_get_clientsession"
), patch.object(SmhiApi, "async_get_forecast", side_effect=SmhiForecastException()):
assert await flow._check_location("58", "17") is False | [ ...function tokens... ] | [257, 0] | [267, 62] | python | en | ['en', 'en', 'en'] | True
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the OhmConnect sensor. | Set up the OhmConnect sensor. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the OhmConnect sensor."""
name = config.get(CONF_NAME)
ohmid = config.get(CONF_ID)
add_entities([OhmconnectSensor(name, ohmid)], True) | [ ...function tokens... ] | [30, 0] | [35, 55] | python | en | ['en', 'haw', 'en'] | True
OhmconnectSensor.__init__ | (self, name, ohmid) | Initialize the sensor. | Initialize the sensor. | def __init__(self, name, ohmid):
"""Initialize the sensor."""
self._name = name
self._ohmid = ohmid
self._data = {} | [ ...function tokens... ] | [41, 4] | [45, 23] | python | en | ['en', 'en', 'en'] | True
OhmconnectSensor.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return self._name | [ ...function tokens... ] | [48, 4] | [50, 25] | python | en | ['en', 'mi', 'en'] | True
OhmconnectSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
if self._data.get("active") == "True":
return "Active"
return "Inactive" | [ ...function tokens... ] | [53, 4] | [57, 25] | python | en | ['en', 'en', 'en'] | True
OhmconnectSensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
return {"Address": self._data.get("address"), "ID": self._ohmid} | [ ...function tokens... ] | [60, 4] | [62, 72] | python | en | ['en', 'en', 'en'] | True
OhmconnectSensor.update | (self) | Get the latest data from OhmConnect. | Get the latest data from OhmConnect. | def update(self):
"""Get the latest data from OhmConnect."""
try:
url = f"https://login.ohmconnect.com/verify-ohm-hour/{self._ohmid}"
response = requests.get(url, timeout=10)
root = ET.fromstring(response.text)
for child in root:
self._data[child.tag] = child.text
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to host/endpoint: %s", url)
self._data = {} | [ ...function tokens... ] | [65, 4] | [76, 27] | python | en | ['en', 'en', 'en'] | True
async_setup | (hass: HomeAssistant, config: Config) | Set up Awair integration. | Set up Awair integration. | async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up Awair integration."""
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
":",
"HomeAssistant",
",",
"config",
":",
"Config",
")",
"->",
"bool",
":",
"return",
"True"
] | [
20,
0
] | [
22,
15
] | python | en | ['en', 'su', 'en'] | True |
async_setup_entry | (hass, config_entry) | Set up Awair integration from a config entry. | Set up Awair integration from a config entry. | async def async_setup_entry(hass, config_entry) -> bool:
"""Set up Awair integration from a config entry."""
session = async_get_clientsession(hass)
coordinator = AwairDataUpdateCoordinator(hass, config_entry, session)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = coordinator
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, platform)
)
return True | [ ...function tokens... ] | [25, 0] | [43, 15] | python | en | ['en', 'en', 'en'] | True
async_unload_entry | (hass, config_entry) | Unload Awair configuration. | Unload Awair configuration. | async def async_unload_entry(hass, config_entry) -> bool:
"""Unload Awair configuration."""
tasks = []
for platform in PLATFORMS:
tasks.append(
hass.config_entries.async_forward_entry_unload(config_entry, platform)
)
unload_ok = all(await gather(*tasks))
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok | [ ...function tokens... ] | [46, 0] | [58, 20] | python | en | ['fr', 'lb', 'en'] | False
AwairDataUpdateCoordinator.__init__ | (self, hass, config_entry, session) | Set up the AwairDataUpdateCoordinator class. | Set up the AwairDataUpdateCoordinator class. | def __init__(self, hass, config_entry, session) -> None:
"""Set up the AwairDataUpdateCoordinator class."""
access_token = config_entry.data[CONF_ACCESS_TOKEN]
self._awair = Awair(access_token=access_token, session=session)
self._config_entry = config_entry
super().__init__(hass, LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL) | [ ...function tokens... ] | [64, 4] | [70, 84] | python | en | ['en', 'sn', 'en'] | True
AwairDataUpdateCoordinator._async_update_data | (self) | Update data via Awair client library. | Update data via Awair client library. | async def _async_update_data(self) -> Optional[Any]:
"""Update data via Awair client library."""
with timeout(API_TIMEOUT):
try:
LOGGER.debug("Fetching users and devices")
user = await self._awair.user()
devices = await user.devices()
results = await gather(
*[self._fetch_air_data(device) for device in devices]
)
return {result.device.uuid: result for result in results}
except AuthError as err:
flow_context = {
"source": "reauth",
"unique_id": self._config_entry.unique_id,
}
matching_flows = [
flow
for flow in self.hass.config_entries.flow.async_progress()
if flow["context"] == flow_context
]
if not matching_flows:
self.hass.async_create_task(
self.hass.config_entries.flow.async_init(
DOMAIN,
context=flow_context,
data=self._config_entry.data,
)
)
raise UpdateFailed(err) from err
except Exception as err:
raise UpdateFailed(err) from err | [ ...function tokens... ] | [72, 4] | [106, 48] | python | en | ['fr', 'lb', 'en'] | False
AwairDataUpdateCoordinator._fetch_air_data | (self, device) | Fetch latest air quality data. | Fetch latest air quality data. | async def _fetch_air_data(self, device):
"""Fetch latest air quality data."""
LOGGER.debug("Fetching data for %s", device.uuid)
air_data = await device.air_data_latest()
LOGGER.debug(air_data)
return AwairResult(device=device, air_data=air_data) | [ ...function tokens... ] | [108, 4] | [113, 60] | python | en | ['en', 'cy', 'en'] | True
async_setup_entry | (hass: HomeAssistant, entry: ConfigEntry, async_add_entities) | Set up the Tado sensor platform. | Set up the Tado sensor platform. | async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities
):
"""Set up the Tado sensor platform."""
tado = hass.data[DOMAIN][entry.entry_id][DATA]
# Create zone sensors
zones = tado.zones
devices = tado.devices
entities = []
for zone in zones:
zone_type = zone["type"]
if zone_type not in ZONE_SENSORS:
_LOGGER.warning("Unknown zone type skipped: %s", zone_type)
continue
entities.extend(
[
TadoZoneSensor(
tado, zone["name"], zone["id"], variable, zone["devices"][0]
)
for variable in ZONE_SENSORS[zone_type]
]
)
# Create device sensors
for device in devices:
entities.extend(
[
TadoDeviceSensor(tado, device["name"], device["id"], variable, device)
for variable in DEVICE_SENSORS
]
)
if entities:
async_add_entities(entities, True) | [ ...function tokens... ] | [51, 0] | [87, 42] | python | en | ['en', 'pt', 'en'] | True
TadoZoneSensor.__init__ | (self, tado, zone_name, zone_id, zone_variable, device_info) | Initialize of the Tado Sensor. | Initialize of the Tado Sensor. | def __init__(self, tado, zone_name, zone_id, zone_variable, device_info):
"""Initialize of the Tado Sensor."""
self._tado = tado
super().__init__(zone_name, device_info, tado.device_id, zone_id)
self.zone_id = zone_id
self.zone_variable = zone_variable
self._unique_id = f"{zone_variable} {zone_id} {tado.device_id}"
self._state = None
self._state_attributes = None
self._tado_zone_data = None | [ ...function tokens... ] | [93, 4] | [105, 35] | python | en | ['en', 'pt', 'en'] | True
TadoZoneSensor.async_added_to_hass | (self) | Register for sensor updates. | Register for sensor updates. | async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.device_id, "zone", self.zone_id
),
self._async_update_callback,
)
)
self._async_update_zone_data() | [ ...function tokens... ] | [107, 4] | [119, 38] | python | da | ['da', 'no', 'en'] | False
TadoZoneSensor.unique_id | (self) | Return the unique id. | Return the unique id. | def unique_id(self):
"""Return the unique id."""
return self._unique_id | [ ...function tokens... ] | [122, 4] | [124, 30] | python | en | ['en', 'la', 'en'] | True
TadoZoneSensor.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return f"{self.zone_name} {self.zone_variable}" | [ ...function tokens... ] | [127, 4] | [129, 55] | python | en | ['en', 'mi', 'en'] | True
TadoZoneSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
return self._state | [ ...function tokens... ] | [132, 4] | [134, 26] | python | en | ['en', 'en', 'en'] | True
TadoZoneSensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
return self._state_attributes | [ ...function tokens... ] | [137, 4] | [139, 37] | python | en | ['en', 'en', 'en'] | True
TadoZoneSensor.unit_of_measurement | (self) | Return the unit of measurement. | Return the unit of measurement. | def unit_of_measurement(self):
"""Return the unit of measurement."""
if self.zone_variable == "temperature":
return self.hass.config.units.temperature_unit
if self.zone_variable == "humidity":
return PERCENTAGE
if self.zone_variable == "heating":
return PERCENTAGE
if self.zone_variable == "ac":
return None | [ ...function tokens... ] | [142, 4] | [151, 23] | python | en | ['en', 'la', 'en'] | True
TadoZoneSensor.icon | (self) | Icon for the sensor. | Icon for the sensor. | def icon(self):
"""Icon for the sensor."""
if self.zone_variable == "temperature":
return "mdi:thermometer"
if self.zone_variable == "humidity":
return "mdi:water-percent" | [ ...function tokens... ] | [154, 4] | [159, 38] | python | en | ['en', 'en', 'en'] | True
TadoZoneSensor._async_update_callback | (self) | Update and write state. | Update and write state. | def _async_update_callback(self):
"""Update and write state."""
self._async_update_zone_data()
self.async_write_ha_state() | [ ...function tokens... ] | [162, 4] | [165, 35] | python | en | ['en', 'en', 'en'] | True
TadoZoneSensor._async_update_zone_data | (self) | Handle update callbacks. | Handle update callbacks. | def _async_update_zone_data(self):
"""Handle update callbacks."""
try:
self._tado_zone_data = self._tado.data["zone"][self.zone_id]
except KeyError:
return
if self.zone_variable == "temperature":
self._state = self.hass.config.units.temperature(
self._tado_zone_data.current_temp, TEMP_CELSIUS
)
self._state_attributes = {
"time": self._tado_zone_data.current_temp_timestamp,
"setting": 0, # setting is used in climate device
}
elif self.zone_variable == "humidity":
self._state = self._tado_zone_data.current_humidity
self._state_attributes = {
"time": self._tado_zone_data.current_humidity_timestamp
}
elif self.zone_variable == "power":
self._state = self._tado_zone_data.power
elif self.zone_variable == "link":
self._state = self._tado_zone_data.link
elif self.zone_variable == "heating":
self._state = self._tado_zone_data.heating_power_percentage
self._state_attributes = {
"time": self._tado_zone_data.heating_power_timestamp
}
elif self.zone_variable == "ac":
self._state = self._tado_zone_data.ac_power
self._state_attributes = {"time": self._tado_zone_data.ac_power_timestamp}
elif self.zone_variable == "tado bridge status":
self._state = self._tado_zone_data.connection
elif self.zone_variable == "tado mode":
self._state = self._tado_zone_data.tado_mode
elif self.zone_variable == "overlay":
self._state = self._tado_zone_data.overlay_active
self._state_attributes = (
{"termination": self._tado_zone_data.overlay_termination_type}
if self._tado_zone_data.overlay_active
else {}
)
elif self.zone_variable == "early start":
self._state = self._tado_zone_data.preparation
elif self.zone_variable == "open window":
self._state = bool(
self._tado_zone_data.open_window
or self._tado_zone_data.open_window_detected
)
self._state_attributes = self._tado_zone_data.open_window_attr | [
"def",
"_async_update_zone_data",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"_tado_zone_data",
"=",
"self",
".",
"_tado",
".",
"data",
"[",
"\"zone\"",
"]",
"[",
"self",
".",
"zone_id",
"]",
"except",
"KeyError",
":",
"return",
"if",
"self",
".",
"zone_variable",
"==",
"\"temperature\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"hass",
".",
"config",
".",
"units",
".",
"temperature",
"(",
"self",
".",
"_tado_zone_data",
".",
"current_temp",
",",
"TEMP_CELSIUS",
")",
"self",
".",
"_state_attributes",
"=",
"{",
"\"time\"",
":",
"self",
".",
"_tado_zone_data",
".",
"current_temp_timestamp",
",",
"\"setting\"",
":",
"0",
",",
"# setting is used in climate device",
"}",
"elif",
"self",
".",
"zone_variable",
"==",
"\"humidity\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"current_humidity",
"self",
".",
"_state_attributes",
"=",
"{",
"\"time\"",
":",
"self",
".",
"_tado_zone_data",
".",
"current_humidity_timestamp",
"}",
"elif",
"self",
".",
"zone_variable",
"==",
"\"power\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"power",
"elif",
"self",
".",
"zone_variable",
"==",
"\"link\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"link",
"elif",
"self",
".",
"zone_variable",
"==",
"\"heating\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"heating_power_percentage",
"self",
".",
"_state_attributes",
"=",
"{",
"\"time\"",
":",
"self",
".",
"_tado_zone_data",
".",
"heating_power_timestamp",
"}",
"elif",
"self",
".",
"zone_variable",
"==",
"\"ac\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"ac_power",
"self",
".",
"_state_attributes",
"=",
"{",
"\"time\"",
":",
"self",
".",
"_tado_zone_data",
".",
"ac_power_timestamp",
"}",
"elif",
"self",
".",
"zone_variable",
"==",
"\"tado bridge status\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"connection",
"elif",
"self",
".",
"zone_variable",
"==",
"\"tado mode\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"tado_mode",
"elif",
"self",
".",
"zone_variable",
"==",
"\"overlay\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"overlay_active",
"self",
".",
"_state_attributes",
"=",
"(",
"{",
"\"termination\"",
":",
"self",
".",
"_tado_zone_data",
".",
"overlay_termination_type",
"}",
"if",
"self",
".",
"_tado_zone_data",
".",
"overlay_active",
"else",
"{",
"}",
")",
"elif",
"self",
".",
"zone_variable",
"==",
"\"early start\"",
":",
"self",
".",
"_state",
"=",
"self",
".",
"_tado_zone_data",
".",
"preparation",
"elif",
"self",
".",
"zone_variable",
"==",
"\"open window\"",
":",
"self",
".",
"_state",
"=",
"bool",
"(",
"self",
".",
"_tado_zone_data",
".",
"open_window",
"or",
"self",
".",
"_tado_zone_data",
".",
"open_window_detected",
")",
"self",
".",
"_state_attributes",
"=",
"self",
".",
"_tado_zone_data",
".",
"open_window_attr"
] | [
168,
4
] | [
228,
74
] | python | en | ['en', 'xh', 'en'] | True |
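
The `_async_update_zone_data` callback above selects the sensor state by branching on `self.zone_variable`. The sketch below expresses the non-temperature branches as a dispatch table over the same attribute names the method reads from the Tado zone data object; the table name, the helper function, and the lambda-based layout are assumptions for illustration, and temperature is omitted because it additionally needs unit conversion and extra attributes.

# Hypothetical dispatch table keyed by zone variable. Each entry extracts the
# corresponding state from a Tado zone data object (same attributes as above).
ZONE_STATE_GETTERS = {
    "humidity": lambda zone: zone.current_humidity,
    "power": lambda zone: zone.power,
    "link": lambda zone: zone.link,
    "heating": lambda zone: zone.heating_power_percentage,
    "ac": lambda zone: zone.ac_power,
    "tado bridge status": lambda zone: zone.connection,
    "tado mode": lambda zone: zone.tado_mode,
    "overlay": lambda zone: zone.overlay_active,
    "early start": lambda zone: zone.preparation,
    "open window": lambda zone: bool(zone.open_window or zone.open_window_detected),
}


def zone_state(zone_variable, zone_data):
    """Return the state for a zone variable, or None for unknown variables."""
    getter = ZONE_STATE_GETTERS.get(zone_variable)
    return getter(zone_data) if getter is not None else None
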
TadoDeviceSensor.__init__ | (self, tado, device_name, device_id, device_variable, device_info) | Initialize of the Tado Sensor. | Initialize of the Tado Sensor. | def __init__(self, tado, device_name, device_id, device_variable, device_info):
"""Initialize of the Tado Sensor."""
self._tado = tado
self._device_info = device_info
self.device_name = device_name
self.device_id = device_id
self.device_variable = device_variable
self._unique_id = f"{device_variable} {device_id} {tado.device_id}"
self._state = None
self._state_attributes = None
self._tado_device_data = None | [
"def",
"__init__",
"(",
"self",
",",
"tado",
",",
"device_name",
",",
"device_id",
",",
"device_variable",
",",
"device_info",
")",
":",
"self",
".",
"_tado",
"=",
"tado",
"self",
".",
"_device_info",
"=",
"device_info",
"self",
".",
"device_name",
"=",
"device_name",
"self",
".",
"device_id",
"=",
"device_id",
"self",
".",
"device_variable",
"=",
"device_variable",
"self",
".",
"_unique_id",
"=",
"f\"{device_variable} {device_id} {tado.device_id}\"",
"self",
".",
"_state",
"=",
"None",
"self",
".",
"_state_attributes",
"=",
"None",
"self",
".",
"_tado_device_data",
"=",
"None"
] | [
234,
4
] | [
247,
37
] | python | en | ['en', 'pt', 'en'] | True |
TadoDeviceSensor.async_added_to_hass | (self) | Register for sensor updates. | Register for sensor updates. | async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.device_id, "device", self.device_id
),
self._async_update_callback,
)
)
self._async_update_device_data() | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"async_on_remove",
"(",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"SIGNAL_TADO_UPDATE_RECEIVED",
".",
"format",
"(",
"self",
".",
"_tado",
".",
"device_id",
",",
"\"device\"",
",",
"self",
".",
"device_id",
")",
",",
"self",
".",
"_async_update_callback",
",",
")",
")",
"self",
".",
"_async_update_device_data",
"(",
")"
] | [
249,
4
] | [
261,
40
] | python | da | ['da', 'no', 'en'] | False |
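
Both `TadoZoneSensor` and `TadoDeviceSensor` subscribe to push updates the same way: `async_dispatcher_connect` registers a callback for a formatted signal, and wrapping the returned unsubscribe function in `async_on_remove` guarantees cleanup when the entity is removed. A stripped-down sketch of that pattern for a generic entity follows; the signal name, class, and placeholder state are assumptions used only to show the wiring.

from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity

SIGNAL_EXAMPLE_UPDATE = "example_component_update"  # placeholder signal name


class ExampleDispatcherEntity(Entity):
    """Hypothetical entity that refreshes its state on a dispatcher signal."""

    def __init__(self):
        """Initialize with no state; real data arrives via the dispatcher."""
        self._state = None

    async def async_added_to_hass(self):
        """Subscribe to updates; the listener is removed with the entity."""
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass, SIGNAL_EXAMPLE_UPDATE, self._async_update_callback
            )
        )

    @callback
    def _async_update_callback(self):
        """Refresh local data and push the new state to Home Assistant."""
        self._state = "updated"  # placeholder for reading the real data source
        self.async_write_ha_state()
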
TadoDeviceSensor.unique_id | (self) | Return the unique id. | Return the unique id. | def unique_id(self):
"""Return the unique id."""
return self._unique_id | [
"def",
"unique_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unique_id"
] | [
264,
4
] | [
266,
30
] | python | en | ['en', 'la', 'en'] | True |
TadoDeviceSensor.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return f"{self.device_name} {self.device_variable}" | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"f\"{self.device_name} {self.device_variable}\""
] | [
269,
4
] | [
271,
59
] | python | en | ['en', 'mi', 'en'] | True |
TadoDeviceSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
return self._state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
274,
4
] | [
276,
26
] | python | en | ['en', 'en', 'en'] | True |
TadoDeviceSensor.should_poll | (self) | Do not poll. | Do not poll. | def should_poll(self):
"""Do not poll."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
279,
4
] | [
281,
20
] | python | en | ['en', 'lb', 'en'] | True |
TadoDeviceSensor._async_update_callback | (self) | Update and write state. | Update and write state. | def _async_update_callback(self):
"""Update and write state."""
self._async_update_device_data()
self.async_write_ha_state() | [
"def",
"_async_update_callback",
"(",
"self",
")",
":",
"self",
".",
"_async_update_device_data",
"(",
")",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
284,
4
] | [
287,
35
] | python | en | ['en', 'en', 'en'] | True |
TadoDeviceSensor._async_update_device_data | (self) | Handle update callbacks. | Handle update callbacks. | def _async_update_device_data(self):
"""Handle update callbacks."""
try:
data = self._tado.data["device"][self.device_id]
except KeyError:
return
if self.device_variable == "tado bridge status":
self._state = data.get("connectionState", {}).get("value", False) | [
"def",
"_async_update_device_data",
"(",
"self",
")",
":",
"try",
":",
"data",
"=",
"self",
".",
"_tado",
".",
"data",
"[",
"\"device\"",
"]",
"[",
"self",
".",
"device_id",
"]",
"except",
"KeyError",
":",
"return",
"if",
"self",
".",
"device_variable",
"==",
"\"tado bridge status\"",
":",
"self",
".",
"_state",
"=",
"data",
".",
"get",
"(",
"\"connectionState\"",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"value\"",
",",
"False",
")"
] | [
290,
4
] | [
298,
77
] | python | en | ['en', 'xh', 'en'] | True |
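
The device update above guards against missing data twice: a `KeyError` check for a device that is absent from the shared data, and chained `dict.get` calls with defaults for the `connectionState` payload. A small self-contained illustration with a made-up payload follows; the payload shape is an assumption inferred from the keys the method reads.

# Hypothetical device payload shaped like the one the method above consumes.
device_payload = {"connectionState": {"value": True}}

# Chained .get with defaults: a missing "connectionState" yields {}, and a
# missing "value" falls back to False instead of raising KeyError.
bridge_online = device_payload.get("connectionState", {}).get("value", False)
assert bridge_online is True

missing = {}.get("connectionState", {}).get("value", False)
assert missing is False
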
TadoDeviceSensor.device_info | (self) | Return the device_info of the device. | Return the device_info of the device. | def device_info(self):
"""Return the device_info of the device."""
return {
"identifiers": {(DOMAIN, self.device_id)},
"name": self.device_name,
"manufacturer": DEFAULT_NAME,
"model": TADO_BRIDGE,
} | [
"def",
"device_info",
"(",
"self",
")",
":",
"return",
"{",
"\"identifiers\"",
":",
"{",
"(",
"DOMAIN",
",",
"self",
".",
"device_id",
")",
"}",
",",
"\"name\"",
":",
"self",
".",
"device_name",
",",
"\"manufacturer\"",
":",
"DEFAULT_NAME",
",",
"\"model\"",
":",
"TADO_BRIDGE",
",",
"}"
] | [
301,
4
] | [
308,
9
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the RaspyRFM switch. | Set up the RaspyRFM switch. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the RaspyRFM switch."""
gateway_manufacturer = config.get(
CONF_GATEWAY_MANUFACTURER, Manufacturer.SEEGEL_SYSTEME.value
)
gateway_model = config.get(CONF_GATEWAY_MODEL, GatewayModel.RASPYRFM.value)
host = config[CONF_HOST]
port = config.get(CONF_PORT)
switches = config[CONF_SWITCHES]
raspyrfm_client = RaspyRFMClient()
gateway = raspyrfm_client.get_gateway(
Manufacturer(gateway_manufacturer), GatewayModel(gateway_model), host, port
)
switch_entities = []
for switch in switches:
name = switch[CONF_NAME]
controlunit_manufacturer = switch[CONF_CONTROLUNIT_MANUFACTURER]
controlunit_model = switch[CONF_CONTROLUNIT_MODEL]
channel_config = switch[CONF_CHANNEL_CONFIG]
controlunit = raspyrfm_client.get_controlunit(
Manufacturer(controlunit_manufacturer), ControlUnitModel(controlunit_model)
)
controlunit.set_channel_config(**channel_config)
switch = RaspyRFMSwitch(raspyrfm_client, name, gateway, controlunit)
switch_entities.append(switch)
add_entities(switch_entities) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"gateway_manufacturer",
"=",
"config",
".",
"get",
"(",
"CONF_GATEWAY_MANUFACTURER",
",",
"Manufacturer",
".",
"SEEGEL_SYSTEME",
".",
"value",
")",
"gateway_model",
"=",
"config",
".",
"get",
"(",
"CONF_GATEWAY_MODEL",
",",
"GatewayModel",
".",
"RASPYRFM",
".",
"value",
")",
"host",
"=",
"config",
"[",
"CONF_HOST",
"]",
"port",
"=",
"config",
".",
"get",
"(",
"CONF_PORT",
")",
"switches",
"=",
"config",
"[",
"CONF_SWITCHES",
"]",
"raspyrfm_client",
"=",
"RaspyRFMClient",
"(",
")",
"gateway",
"=",
"raspyrfm_client",
".",
"get_gateway",
"(",
"Manufacturer",
"(",
"gateway_manufacturer",
")",
",",
"GatewayModel",
"(",
"gateway_model",
")",
",",
"host",
",",
"port",
")",
"switch_entities",
"=",
"[",
"]",
"for",
"switch",
"in",
"switches",
":",
"name",
"=",
"switch",
"[",
"CONF_NAME",
"]",
"controlunit_manufacturer",
"=",
"switch",
"[",
"CONF_CONTROLUNIT_MANUFACTURER",
"]",
"controlunit_model",
"=",
"switch",
"[",
"CONF_CONTROLUNIT_MODEL",
"]",
"channel_config",
"=",
"switch",
"[",
"CONF_CHANNEL_CONFIG",
"]",
"controlunit",
"=",
"raspyrfm_client",
".",
"get_controlunit",
"(",
"Manufacturer",
"(",
"controlunit_manufacturer",
")",
",",
"ControlUnitModel",
"(",
"controlunit_model",
")",
")",
"controlunit",
".",
"set_channel_config",
"(",
"*",
"*",
"channel_config",
")",
"switch",
"=",
"RaspyRFMSwitch",
"(",
"raspyrfm_client",
",",
"name",
",",
"gateway",
",",
"controlunit",
")",
"switch_entities",
".",
"append",
"(",
"switch",
")",
"add_entities",
"(",
"switch_entities",
")"
] | [
51,
0
] | [
82,
33
] | python | en | ['en', 'pl', 'en'] | True |
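
The RaspyRFM `setup_platform` above reads the gateway settings and a list of switch definitions from the platform config and builds one `RaspyRFMSwitch` per entry. The dict below sketches the shape of such a config as the function would consume it; only the host, port, name, and switches keys correspond to standard Home Assistant constants, while the remaining key names and all values are assumptions for illustration, since the custom `CONF_*` constants and the supported manufacturer/model strings are defined outside the code shown here.

# Hypothetical platform config as consumed by setup_platform above.
example_config = {
    "host": "192.168.1.10",      # CONF_HOST
    "port": 9876,                # CONF_PORT (value is a made-up example)
    "switches": [                # CONF_SWITCHES
        {
            "name": "Desk Lamp",                        # CONF_NAME
            "controlunit_manufacturer": "Intertechno",  # assumed key and value
            "controlunit_model": "CMR 1000",            # assumed key and value
            "channel_config": {"master": "A", "slave": 1},  # assumed shape
        }
    ],
}
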
RaspyRFMSwitch.__init__ | (self, raspyrfm_client, name: str, gateway, controlunit) | Initialize the switch. | Initialize the switch. | def __init__(self, raspyrfm_client, name: str, gateway, controlunit):
"""Initialize the switch."""
self._raspyrfm_client = raspyrfm_client
self._name = name
self._gateway = gateway
self._controlunit = controlunit
self._state = None | [
"def",
"__init__",
"(",
"self",
",",
"raspyrfm_client",
",",
"name",
":",
"str",
",",
"gateway",
",",
"controlunit",
")",
":",
"self",
".",
"_raspyrfm_client",
"=",
"raspyrfm_client",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_gateway",
"=",
"gateway",
"self",
".",
"_controlunit",
"=",
"controlunit",
"self",
".",
"_state",
"=",
"None"
] | [
88,
4
] | [
96,
26
] | python | en | ['en', 'en', 'en'] | True |
RaspyRFMSwitch.name | (self) | Return the name of the device if any. | Return the name of the device if any. | def name(self):
"""Return the name of the device if any."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
99,
4
] | [
101,
25
] | python | en | ['en', 'en', 'en'] | True |
RaspyRFMSwitch.should_poll | (self) | Return True if polling should be used. | Return True if polling should be used. | def should_poll(self):
"""Return True if polling should be used."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
104,
4
] | [
106,
20
] | python | en | ['en', 'en', 'en'] | True |
RaspyRFMSwitch.assumed_state | (self) | Return True when the current state can not be queried. | Return True when the current state can not be queried. | def assumed_state(self):
"""Return True when the current state can not be queried."""
return True | [
"def",
"assumed_state",
"(",
"self",
")",
":",
"return",
"True"
] | [
109,
4
] | [
111,
19
] | python | en | ['en', 'en', 'en'] | True |