Dataset columns (name, type, length/value range):

  identifier                      string     1 to 155 chars
  parameters                      string     2 to 6.09k chars
  docstring                       string     11 to 63.4k chars
  docstring_summary               string     0 to 63.4k chars
  function                        string     29 to 99.8k chars
  function_tokens                 sequence
  start_point                     sequence
  end_point                       sequence
  language                        string     1 class
  docstring_language              string     2 to 7 chars
  docstring_language_predictions  string     18 to 23 chars
  is_langid_reliable              string     2 classes
identifier: _expand_mask
parameters: (mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0)
docstring: Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
docstring_summary: same as the docstring above
function:

def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0):
    """
    Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
    """
    src_len = shape_list(mask)[1]
    tgt_len = tgt_len if tgt_len is not None else src_len
    one_cst = tf.constant(1.0)
    mask = tf.cast(mask, dtype=one_cst.dtype)
    expanded_mask = tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1))

    return (one_cst - expanded_mask) * LARGE_NEGATIVE
[ "def", "_expand_mask", "(", "mask", ":", "tf", ".", "Tensor", ",", "tgt_len", ":", "Optional", "[", "int", "]", "=", "None", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "src_len", "=", "shape_list", "(", "mask", ")", "[", "1", "]", "tgt_len", "=", "tgt_len", "if", "tgt_len", "is", "not", "None", "else", "src_len", "one_cst", "=", "tf", ".", "constant", "(", "1.0", ")", "mask", "=", "tf", ".", "cast", "(", "mask", ",", "dtype", "=", "one_cst", ".", "dtype", ")", "expanded_mask", "=", "tf", ".", "tile", "(", "mask", "[", ":", ",", "None", ",", "None", ",", ":", "]", ",", "(", "1", ",", "1", ",", "tgt_len", ",", "1", ")", ")", "return", "(", "one_cst", "-", "expanded_mask", ")", "*", "LARGE_NEGATIVE" ]
start_point: [93, 0]
end_point: [103, 53]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'error', 'th']
is_langid_reliable: False
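For orientation, a minimal sketch of the shape arithmetic this helper performs, with a stand-in value for `LARGE_NEGATIVE` (in the source module it is a large negative constant; -1e8 here is an assumption):

import tensorflow as tf

LARGE_NEGATIVE = -1e8  # assumed stand-in for the module-level constant

mask = tf.constant([[1.0, 1.0, 0.0]])                     # [bsz=1, seq_len=3]; 0 marks padding
expanded = tf.tile(mask[:, None, None, :], (1, 1, 3, 1))  # [1, 1, tgt_len=3, src_len=3]
additive_bias = (1.0 - expanded) * LARGE_NEGATIVE         # 0 where attendable, -1e8 where padded
print(additive_bias.shape)                                # (1, 1, 3, 3)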
identifier: TFLEDLearnedPositionalEmbedding.call
parameters: (self, input_shape: tf.TensorShape, past_key_values_length: int = 0)
docstring: Input is expected to be of size [bsz x seqlen].
docstring_summary: same as the docstring above
function:

def call(self, input_shape: tf.TensorShape, past_key_values_length: int = 0):
    """Input is expected to be of size [bsz x seqlen]."""
    bsz, seq_len = input_shape[:2]

    positions = tf.range(past_key_values_length, seq_len + past_key_values_length, delta=1, name="range")
    return super().call(positions)
[ "def", "call", "(", "self", ",", "input_shape", ":", "tf", ".", "TensorShape", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "bsz", ",", "seq_len", "=", "input_shape", "[", ":", "2", "]", "positions", "=", "tf", ".", "range", "(", "past_key_values_length", ",", "seq_len", "+", "past_key_values_length", ",", "delta", "=", "1", ",", "name", "=", "\"range\"", ")", "return", "super", "(", ")", ".", "call", "(", "positions", ")" ]
start_point: [114, 4]
end_point: [119, 38]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
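A small sketch of the position ids this produces during incremental decoding: with a cache of past key/value states, the new positions simply continue from the cache length.

import tensorflow as tf

past_key_values_length, seq_len = 10, 2
positions = tf.range(past_key_values_length, seq_len + past_key_values_length, delta=1)
print(positions.numpy())  # [10 11] -- the two new tokens sit after 10 cached ones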
identifier: TFLEDEncoderSelfAttention.call
parameters: (self, inputs, training=False)
docstring:
  LongformerSelfAttention expects `len(hidden_states)` to be a multiple of `attention_window`. Padding to
  `attention_window` happens in LongformerModel.forward to avoid redoing the padding on each layer. The
  `attention_mask` is changed in :meth:`LongformerModel.forward` from 0, 1, 2 to:
    * -10000: no attention
    * 0: local attention
    * +10000: global attention
docstring_summary: LongformerSelfAttention expects `len(hidden_states)` to be a multiple of `attention_window`. Padding to `attention_window` happens in LongformerModel.forward to avoid redoing the padding on each layer.
function:

def call(
    self,
    inputs,
    training=False,
):
    """
    LongformerSelfAttention expects `len(hidden_states)` to be a multiple of `attention_window`. Padding to
    `attention_window` happens in LongformerModel.forward to avoid redoing the padding on each layer.

    The `attention_mask` is changed in :meth:`LongformerModel.forward` from 0, 1, 2 to:

        * -10000: no attention
        * 0: local attention
        * +10000: global attention
    """
    # retrieve input args
    (
        hidden_states,
        attention_mask,
        layer_head_mask,
        is_index_masked,
        is_index_global_attn,
        is_global_attn,
    ) = inputs

    # project hidden states
    query_vectors = self.query(hidden_states)
    key_vectors = self.key(hidden_states)
    value_vectors = self.value(hidden_states)
    batch_size, seq_len, embed_dim = shape_list(hidden_states)

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            embed_dim,
            self.embed_dim,
            message=f"hidden_states should have embed_dim = {self.embed_dim}, but has {embed_dim}",
        )

    # normalize query
    query_vectors /= tf.math.sqrt(tf.cast(self.head_dim, dtype=query_vectors.dtype))
    query_vectors = tf.reshape(query_vectors, (batch_size, seq_len, self.num_heads, self.head_dim))
    key_vectors = tf.reshape(key_vectors, (batch_size, seq_len, self.num_heads, self.head_dim))

    # attn_probs = (batch_size, seq_len, num_heads, window*2+1)
    attn_scores = self._sliding_chunks_query_key_matmul(
        query_vectors, key_vectors, self.one_sided_attn_window_size
    )

    # diagonal mask with zeros everywhere and -inf in place of padding
    diagonal_mask = self._sliding_chunks_query_key_matmul(
        tf.ones(shape_list(attention_mask)),
        attention_mask,
        self.one_sided_attn_window_size,
    )

    # pad local attention probs
    attn_scores += diagonal_mask

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(attn_scores),
            [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + 1],
            message=f"attn_probs should be of size ({batch_size}, {seq_len}, {self.num_heads}, {self.one_sided_attn_window_size * 2 + 1}), but is of size {shape_list(attn_scores)}",
        )

    # compute global attn indices required throughout the forward fn
    (
        max_num_global_attn_indices,
        is_index_global_attn_nonzero,
        is_local_index_global_attn_nonzero,
        is_local_index_no_global_attn_nonzero,
    ) = self._get_global_attn_indices(is_index_global_attn)

    # this function is only relevant for global attention
    attn_scores = tf.cond(
        is_global_attn,
        lambda: self._concat_with_global_key_attn_probs(
            attn_scores=attn_scores,
            query_vectors=query_vectors,
            key_vectors=key_vectors,
            max_num_global_attn_indices=max_num_global_attn_indices,
            is_index_global_attn_nonzero=is_index_global_attn_nonzero,
            is_local_index_global_attn_nonzero=is_local_index_global_attn_nonzero,
            is_local_index_no_global_attn_nonzero=is_local_index_no_global_attn_nonzero,
        ),
        lambda: attn_scores,
    )

    attn_probs = tf.nn.softmax(attn_scores, axis=-1)

    # softmax sometimes inserts NaN if all positions are masked, replace them with 0
    # Make sure to create a mask with the proper shape:
    # if is_global_attn==True => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + max_num_global_attn_indices + 1]
    # if is_global_attn==False => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + 1]
    masked_index = tf.cond(
        is_global_attn,
        lambda: tf.tile(
            is_index_masked[:, :, None, None],
            (1, 1, self.num_heads, self.one_sided_attn_window_size * 2 + max_num_global_attn_indices + 1),
        ),
        lambda: tf.tile(
            is_index_masked[:, :, None, None],
            (1, 1, self.num_heads, self.one_sided_attn_window_size * 2 + 1),
        ),
    )
    attn_probs = tf.where(
        masked_index,
        tf.zeros(shape_list(masked_index), dtype=attn_probs.dtype),
        attn_probs,
    )

    if layer_head_mask is not None:
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(layer_head_mask),
                [self.num_heads],
                message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}",
            )

        attn_probs = tf.reshape(layer_head_mask, (1, 1, -1, 1)) * attn_probs

    # apply dropout
    attn_probs = self.dropout(attn_probs, training=training)
    value_vectors = tf.reshape(value_vectors, (batch_size, seq_len, self.num_heads, self.head_dim))

    # if global attention, compute sum of global and local attn
    attn_output = tf.cond(
        is_global_attn,
        lambda: self._compute_attn_output_with_global_indices(
            value_vectors=value_vectors,
            attn_probs=attn_probs,
            max_num_global_attn_indices=max_num_global_attn_indices,
            is_index_global_attn_nonzero=is_index_global_attn_nonzero,
            is_local_index_global_attn_nonzero=is_local_index_global_attn_nonzero,
        ),
        lambda: self._sliding_chunks_matmul_attn_probs_value(
            attn_probs, value_vectors, self.one_sided_attn_window_size
        ),
    )

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(attn_output),
            [batch_size, seq_len, self.num_heads, self.head_dim],
            message="Unexpected size",
        )

    attn_output = tf.reshape(attn_output, (batch_size, seq_len, embed_dim))

    # compute value for global attention and overwrite to attention output
    # TODO: remove the redundant computation
    attn_output, global_attn_probs = tf.cond(
        is_global_attn,
        lambda: self._compute_global_attn_output_from_hidden(
            attn_output=attn_output,
            hidden_states=hidden_states,
            max_num_global_attn_indices=max_num_global_attn_indices,
            layer_head_mask=layer_head_mask,
            is_local_index_global_attn_nonzero=is_local_index_global_attn_nonzero,
            is_index_global_attn_nonzero=is_index_global_attn_nonzero,
            is_local_index_no_global_attn_nonzero=is_local_index_no_global_attn_nonzero,
            is_index_masked=is_index_masked,
            training=training,
        ),
        lambda: (attn_output, tf.zeros((batch_size, self.num_heads, max_num_global_attn_indices, seq_len))),
    )

    # make sure that local attention probabilities are set to 0 for indices of global attn
    # Make sure to create a mask with the proper shape:
    # if is_global_attn==True => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + max_num_global_attn_indices + 1]
    # if is_global_attn==False => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + 1]
    masked_global_attn_index = tf.cond(
        is_global_attn,
        lambda: tf.tile(
            is_index_global_attn[:, :, None, None],
            (1, 1, self.num_heads, self.one_sided_attn_window_size * 2 + max_num_global_attn_indices + 1),
        ),
        lambda: tf.tile(
            is_index_global_attn[:, :, None, None],
            (1, 1, self.num_heads, self.one_sided_attn_window_size * 2 + 1),
        ),
    )
    attn_probs = tf.where(
        masked_global_attn_index,
        tf.zeros(shape_list(masked_global_attn_index), dtype=attn_probs.dtype),
        attn_probs,
    )

    outputs = (attn_output, attn_probs, global_attn_probs)

    return outputs
[ "def", "call", "(", "self", ",", "inputs", ",", "training", "=", "False", ",", ")", ":", "# retrieve input args", "(", "hidden_states", ",", "attention_mask", ",", "layer_head_mask", ",", "is_index_masked", ",", "is_index_global_attn", ",", "is_global_attn", ",", ")", "=", "inputs", "# project hidden states", "query_vectors", "=", "self", ".", "query", "(", "hidden_states", ")", "key_vectors", "=", "self", ".", "key", "(", "hidden_states", ")", "value_vectors", "=", "self", ".", "value", "(", "hidden_states", ")", "batch_size", ",", "seq_len", ",", "embed_dim", "=", "shape_list", "(", "hidden_states", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "embed_dim", ",", "self", ".", "embed_dim", ",", "message", "=", "f\"hidden_states should have embed_dim = {self.embed_dim}, but has {embed_dim}\"", ",", ")", "# normalize query", "query_vectors", "/=", "tf", ".", "math", ".", "sqrt", "(", "tf", ".", "cast", "(", "self", ".", "head_dim", ",", "dtype", "=", "query_vectors", ".", "dtype", ")", ")", "query_vectors", "=", "tf", ".", "reshape", "(", "query_vectors", ",", "(", "batch_size", ",", "seq_len", ",", "self", ".", "num_heads", ",", "self", ".", "head_dim", ")", ")", "key_vectors", "=", "tf", ".", "reshape", "(", "key_vectors", ",", "(", "batch_size", ",", "seq_len", ",", "self", ".", "num_heads", ",", "self", ".", "head_dim", ")", ")", "# attn_probs = (batch_size, seq_len, num_heads, window*2+1)", "attn_scores", "=", "self", ".", "_sliding_chunks_query_key_matmul", "(", "query_vectors", ",", "key_vectors", ",", "self", ".", "one_sided_attn_window_size", ")", "# diagonal mask with zeros everywhere and -inf inplace of padding", "diagonal_mask", "=", "self", ".", "_sliding_chunks_query_key_matmul", "(", "tf", ".", "ones", "(", "shape_list", "(", "attention_mask", ")", ")", ",", "attention_mask", ",", "self", ".", "one_sided_attn_window_size", ",", ")", "# pad local attention probs", "attn_scores", "+=", "diagonal_mask", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_scores", ")", ",", "[", "batch_size", ",", "seq_len", ",", "self", ".", "num_heads", ",", "self", ".", "one_sided_attn_window_size", "*", "2", "+", "1", "]", ",", "message", "=", "f\"attn_probs should be of size ({batch_size}, {seq_len}, {self.num_heads}, {self.one_sided_attn_window_size * 2 + 1}), but is of size {shape_list(attn_scores)}\"", ",", ")", "# compute global attn indices required through out forward fn", "(", "max_num_global_attn_indices", ",", "is_index_global_attn_nonzero", ",", "is_local_index_global_attn_nonzero", ",", "is_local_index_no_global_attn_nonzero", ",", ")", "=", "self", ".", "_get_global_attn_indices", "(", "is_index_global_attn", ")", "# this function is only relevant for global attention", "attn_scores", "=", "tf", ".", "cond", "(", "is_global_attn", ",", "lambda", ":", "self", ".", "_concat_with_global_key_attn_probs", "(", "attn_scores", "=", "attn_scores", ",", "query_vectors", "=", "query_vectors", ",", "key_vectors", "=", "key_vectors", ",", "max_num_global_attn_indices", "=", "max_num_global_attn_indices", ",", "is_index_global_attn_nonzero", "=", "is_index_global_attn_nonzero", ",", "is_local_index_global_attn_nonzero", "=", "is_local_index_global_attn_nonzero", ",", "is_local_index_no_global_attn_nonzero", "=", "is_local_index_no_global_attn_nonzero", ",", ")", ",", "lambda", ":", "attn_scores", ",", ")", "attn_probs", "=", "tf", ".", "nn", ".", 
"softmax", "(", "attn_scores", ",", "axis", "=", "-", "1", ")", "# softmax sometimes inserts NaN if all positions are masked, replace them with 0", "# Make sure to create a mask with the proper shape:", "# if is_global_attn==True => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + max_num_global_attn_indices + 1]", "# if is_global_attn==False => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + 1]", "masked_index", "=", "tf", ".", "cond", "(", "is_global_attn", ",", "lambda", ":", "tf", ".", "tile", "(", "is_index_masked", "[", ":", ",", ":", ",", "None", ",", "None", "]", ",", "(", "1", ",", "1", ",", "self", ".", "num_heads", ",", "self", ".", "one_sided_attn_window_size", "*", "2", "+", "max_num_global_attn_indices", "+", "1", ")", ",", ")", ",", "lambda", ":", "tf", ".", "tile", "(", "is_index_masked", "[", ":", ",", ":", ",", "None", ",", "None", "]", ",", "(", "1", ",", "1", ",", "self", ".", "num_heads", ",", "self", ".", "one_sided_attn_window_size", "*", "2", "+", "1", ")", ",", ")", ",", ")", "attn_probs", "=", "tf", ".", "where", "(", "masked_index", ",", "tf", ".", "zeros", "(", "shape_list", "(", "masked_index", ")", ",", "dtype", "=", "attn_probs", ".", "dtype", ")", ",", "attn_probs", ",", ")", "if", "layer_head_mask", "is", "not", "None", ":", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "layer_head_mask", ")", ",", "[", "self", ".", "num_heads", "]", ",", "message", "=", "f\"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\"", ",", ")", "attn_probs", "=", "tf", ".", "reshape", "(", "layer_head_mask", ",", "(", "1", ",", "1", ",", "-", "1", ",", "1", ")", ")", "*", "attn_probs", "# apply dropout", "attn_probs", "=", "self", ".", "dropout", "(", "attn_probs", ",", "training", "=", "training", ")", "value_vectors", "=", "tf", ".", "reshape", "(", "value_vectors", ",", "(", "batch_size", ",", "seq_len", ",", "self", ".", "num_heads", ",", "self", ".", "head_dim", ")", ")", "# if global attention, compute sum of global and local attn", "attn_output", "=", "tf", ".", "cond", "(", "is_global_attn", ",", "lambda", ":", "self", ".", "_compute_attn_output_with_global_indices", "(", "value_vectors", "=", "value_vectors", ",", "attn_probs", "=", "attn_probs", ",", "max_num_global_attn_indices", "=", "max_num_global_attn_indices", ",", "is_index_global_attn_nonzero", "=", "is_index_global_attn_nonzero", ",", "is_local_index_global_attn_nonzero", "=", "is_local_index_global_attn_nonzero", ",", ")", ",", "lambda", ":", "self", ".", "_sliding_chunks_matmul_attn_probs_value", "(", "attn_probs", ",", "value_vectors", ",", "self", ".", "one_sided_attn_window_size", ")", ",", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_output", ")", ",", "[", "batch_size", ",", "seq_len", ",", "self", ".", "num_heads", ",", "self", ".", "head_dim", "]", ",", "message", "=", "\"Unexpected size\"", ",", ")", "attn_output", "=", "tf", ".", "reshape", "(", "attn_output", ",", "(", "batch_size", ",", "seq_len", ",", "embed_dim", ")", ")", "# compute value for global attention and overwrite to attention output", "# TODO: remove the redundant computation", "attn_output", ",", "global_attn_probs", "=", "tf", ".", "cond", "(", "is_global_attn", ",", "lambda", ":", "self", ".", "_compute_global_attn_output_from_hidden", "(", "attn_output", "=", 
"attn_output", ",", "hidden_states", "=", "hidden_states", ",", "max_num_global_attn_indices", "=", "max_num_global_attn_indices", ",", "layer_head_mask", "=", "layer_head_mask", ",", "is_local_index_global_attn_nonzero", "=", "is_local_index_global_attn_nonzero", ",", "is_index_global_attn_nonzero", "=", "is_index_global_attn_nonzero", ",", "is_local_index_no_global_attn_nonzero", "=", "is_local_index_no_global_attn_nonzero", ",", "is_index_masked", "=", "is_index_masked", ",", "training", "=", "training", ",", ")", ",", "lambda", ":", "(", "attn_output", ",", "tf", ".", "zeros", "(", "(", "batch_size", ",", "self", ".", "num_heads", ",", "max_num_global_attn_indices", ",", "seq_len", ")", ")", ")", ",", ")", "# make sure that local attention probabilities are set to 0 for indices of global attn", "# Make sure to create a mask with the proper shape:", "# if is_global_attn==True => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + max_num_global_attn_indices + 1]", "# if is_global_attn==False => [batch_size, seq_len, self.num_heads, self.one_sided_attn_window_size * 2 + 1]", "masked_global_attn_index", "=", "tf", ".", "cond", "(", "is_global_attn", ",", "lambda", ":", "tf", ".", "tile", "(", "is_index_global_attn", "[", ":", ",", ":", ",", "None", ",", "None", "]", ",", "(", "1", ",", "1", ",", "self", ".", "num_heads", ",", "self", ".", "one_sided_attn_window_size", "*", "2", "+", "max_num_global_attn_indices", "+", "1", ")", ",", ")", ",", "lambda", ":", "tf", ".", "tile", "(", "is_index_global_attn", "[", ":", ",", ":", ",", "None", ",", "None", "]", ",", "(", "1", ",", "1", ",", "self", ".", "num_heads", ",", "self", ".", "one_sided_attn_window_size", "*", "2", "+", "1", ")", ",", ")", ",", ")", "attn_probs", "=", "tf", ".", "where", "(", "masked_global_attn_index", ",", "tf", ".", "zeros", "(", "shape_list", "(", "masked_global_attn_index", ")", ",", "dtype", "=", "attn_probs", ".", "dtype", ")", ",", "attn_probs", ",", ")", "outputs", "=", "(", "attn_output", ",", "attn_probs", ",", "global_attn_probs", ")", "return", "outputs" ]
start_point: [182, 4]
end_point: [371, 22]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'error', 'th']
is_langid_reliable: False
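The 0/1/2 to -10000/0/+10000 convention described in the docstring can be realized with a one-line affine map; a hedged sketch (illustrative only, not necessarily the exact transformation used in `LongformerModel.forward`):

import tensorflow as tf

raw_mask = tf.constant([[2, 1, 1, 0]])   # 2 = global, 1 = local, 0 = padding
converted = (raw_mask - 1) * 10000       # -> [[ 10000      0      0 -10000]]
print(converted.numpy())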
identifier: TFLEDEncoderSelfAttention._sliding_chunks_query_key_matmul
parameters: (self, query, key, window_overlap)
docstring: Matrix multiplication of query and key tensors using a sliding window attention pattern. This implementation splits the input into overlapping chunks of size 2w (e.g. 512 for pretrained Longformer) with an overlap of size window_overlap.
docstring_summary: same as the docstring above
function:

def _sliding_chunks_query_key_matmul(self, query, key, window_overlap):
    """
    Matrix multiplication of query and key tensors using a sliding window attention pattern. This implementation
    splits the input into overlapping chunks of size 2w (e.g. 512 for pretrained Longformer) with an overlap of
    size window_overlap.
    """
    batch_size, seq_len, num_heads, head_dim = shape_list(query)

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            seq_len % (window_overlap * 2),
            0,
            message=f"Sequence length should be multiple of {window_overlap * 2}. Given {seq_len}",
        )
        tf.debugging.assert_equal(
            shape_list(query),
            shape_list(key),
            message=f"Shape of query and key should be equal, but got query: {shape_list(query)} and key: {shape_list(key)}",
        )

    chunks_count = seq_len // window_overlap - 1

    # group batch_size and num_heads dimensions into one, then chunk seq_len into chunks of size window_overlap * 2
    query = tf.reshape(
        tf.transpose(query, (0, 2, 1, 3)),
        (batch_size * num_heads, seq_len, head_dim),
    )
    key = tf.reshape(tf.transpose(key, (0, 2, 1, 3)), (batch_size * num_heads, seq_len, head_dim))
    chunked_query = self._chunk(query, window_overlap)
    chunked_key = self._chunk(key, window_overlap)

    # matrix multiplication
    # bcxd: batch_size * num_heads x chunks x 2window_overlap x head_dim
    # bcyd: batch_size * num_heads x chunks x 2window_overlap x head_dim
    # bcxy: batch_size * num_heads x chunks x 2window_overlap x 2window_overlap
    chunked_query = tf.cast(chunked_query, dtype=chunked_key.dtype)
    chunked_attention_scores = tf.einsum("bcxd,bcyd->bcxy", chunked_query, chunked_key)  # multiply

    # convert diagonals into columns
    paddings = tf.convert_to_tensor([[0, 0], [0, 0], [0, 1], [0, 0]])
    diagonal_chunked_attention_scores = self._pad_and_transpose_last_two_dims(chunked_attention_scores, paddings)

    # allocate space for the overall attention matrix where the chunks are combined. The last dimension
    # has (window_overlap * 2 + 1) columns. The first (window_overlap) columns are the window_overlap lower triangles
    # (attention from a word to window_overlap previous words). The following column is the attention score from each
    # word to itself, then followed by window_overlap columns for the upper triangle.

    # copy parts from diagonal_chunked_attention_scores into the combined matrix of attentions
    # - copying the main diagonal and the upper triangle
    # TODO: This code is most likely not very efficient and should be improved
    diagonal_attn_scores_up_triang = tf.concat(
        [
            diagonal_chunked_attention_scores[:, :, :window_overlap, : window_overlap + 1],
            diagonal_chunked_attention_scores[:, -1:, window_overlap:, : window_overlap + 1],
        ],
        axis=1,
    )

    # - copying the lower triangle
    diagonal_attn_scores_low_triang = tf.concat(
        [
            tf.zeros(
                (batch_size * num_heads, 1, window_overlap, window_overlap),
                dtype=diagonal_chunked_attention_scores.dtype,
            ),
            diagonal_chunked_attention_scores[:, :, -(window_overlap + 1) : -1, window_overlap + 1 :],
        ],
        axis=1,
    )
    diagonal_attn_scores_first_chunk = tf.concat(
        [
            tf.roll(
                diagonal_chunked_attention_scores,
                shift=[1, window_overlap],
                axis=[2, 3],
            )[:, :, :window_overlap, :window_overlap],
            tf.zeros(
                (batch_size * num_heads, 1, window_overlap, window_overlap),
                dtype=diagonal_chunked_attention_scores.dtype,
            ),
        ],
        axis=1,
    )
    first_chunk_mask = (
        tf.tile(
            tf.range(chunks_count + 1)[None, :, None, None],
            (batch_size * num_heads, 1, window_overlap, window_overlap),
        )
        < 1
    )
    diagonal_attn_scores_low_triang = tf.where(
        first_chunk_mask,
        diagonal_attn_scores_first_chunk,
        diagonal_attn_scores_low_triang,
    )

    # merging upper and lower triangle
    diagonal_attention_scores = tf.concat(
        [diagonal_attn_scores_low_triang, diagonal_attn_scores_up_triang], axis=-1
    )

    # separate batch_size and num_heads dimensions again
    diagonal_attention_scores = tf.transpose(
        tf.reshape(
            diagonal_attention_scores,
            (batch_size, num_heads, seq_len, 2 * window_overlap + 1),
        ),
        (0, 2, 1, 3),
    )

    diagonal_attention_scores = self._mask_invalid_locations(diagonal_attention_scores, window_overlap)

    return diagonal_attention_scores
[ "def", "_sliding_chunks_query_key_matmul", "(", "self", ",", "query", ",", "key", ",", "window_overlap", ")", ":", "batch_size", ",", "seq_len", ",", "num_heads", ",", "head_dim", "=", "shape_list", "(", "query", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "seq_len", "%", "(", "window_overlap", "*", "2", ")", ",", "0", ",", "message", "=", "f\"Sequence length should be multiple of {window_overlap * 2}. Given {seq_len}\"", ",", ")", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "query", ")", ",", "shape_list", "(", "key", ")", ",", "message", "=", "f\"Shape of query and key should be equal, but got query: {shape_list(query)} and key: {shape_list(key)}\"", ",", ")", "chunks_count", "=", "seq_len", "//", "window_overlap", "-", "1", "# group batch_size and num_heads dimensions into one, then chunk seq_len into chunks of size window_overlap * 2", "query", "=", "tf", ".", "reshape", "(", "tf", ".", "transpose", "(", "query", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ")", ",", "(", "batch_size", "*", "num_heads", ",", "seq_len", ",", "head_dim", ")", ",", ")", "key", "=", "tf", ".", "reshape", "(", "tf", ".", "transpose", "(", "key", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ")", ",", "(", "batch_size", "*", "num_heads", ",", "seq_len", ",", "head_dim", ")", ")", "chunked_query", "=", "self", ".", "_chunk", "(", "query", ",", "window_overlap", ")", "chunked_key", "=", "self", ".", "_chunk", "(", "key", ",", "window_overlap", ")", "# matrix multiplication", "# bcxd: batch_size * num_heads x chunks x 2window_overlap x head_dim", "# bcyd: batch_size * num_heads x chunks x 2window_overlap x head_dim", "# bcxy: batch_size * num_heads x chunks x 2window_overlap x 2window_overlap", "chunked_query", "=", "tf", ".", "cast", "(", "chunked_query", ",", "dtype", "=", "chunked_key", ".", "dtype", ")", "chunked_attention_scores", "=", "tf", ".", "einsum", "(", "\"bcxd,bcyd->bcxy\"", ",", "chunked_query", ",", "chunked_key", ")", "# multiply", "# convert diagonals into columns", "paddings", "=", "tf", ".", "convert_to_tensor", "(", "[", "[", "0", ",", "0", "]", ",", "[", "0", ",", "0", "]", ",", "[", "0", ",", "1", "]", ",", "[", "0", ",", "0", "]", "]", ")", "diagonal_chunked_attention_scores", "=", "self", ".", "_pad_and_transpose_last_two_dims", "(", "chunked_attention_scores", ",", "paddings", ")", "# allocate space for the overall attention matrix where the chunks are combined. The last dimension", "# has (window_overlap * 2 + 1) columns. The first (window_overlap) columns are the window_overlap lower triangles (attention from a word to", "# window_overlap previous words). 
The following column is attention score from each word to itself, then", "# followed by window_overlap columns for the upper triangle.", "# copy parts from diagonal_chunked_attention_scores into the combined matrix of attentions", "# - copying the main diagonal and the upper triangle", "# TODO: This code is most likely not very efficient and should be improved", "diagonal_attn_scores_up_triang", "=", "tf", ".", "concat", "(", "[", "diagonal_chunked_attention_scores", "[", ":", ",", ":", ",", ":", "window_overlap", ",", ":", "window_overlap", "+", "1", "]", ",", "diagonal_chunked_attention_scores", "[", ":", ",", "-", "1", ":", ",", "window_overlap", ":", ",", ":", "window_overlap", "+", "1", "]", ",", "]", ",", "axis", "=", "1", ",", ")", "# - copying the lower triangle", "diagonal_attn_scores_low_triang", "=", "tf", ".", "concat", "(", "[", "tf", ".", "zeros", "(", "(", "batch_size", "*", "num_heads", ",", "1", ",", "window_overlap", ",", "window_overlap", ")", ",", "dtype", "=", "diagonal_chunked_attention_scores", ".", "dtype", ",", ")", ",", "diagonal_chunked_attention_scores", "[", ":", ",", ":", ",", "-", "(", "window_overlap", "+", "1", ")", ":", "-", "1", ",", "window_overlap", "+", "1", ":", "]", ",", "]", ",", "axis", "=", "1", ",", ")", "diagonal_attn_scores_first_chunk", "=", "tf", ".", "concat", "(", "[", "tf", ".", "roll", "(", "diagonal_chunked_attention_scores", ",", "shift", "=", "[", "1", ",", "window_overlap", "]", ",", "axis", "=", "[", "2", ",", "3", "]", ",", ")", "[", ":", ",", ":", ",", ":", "window_overlap", ",", ":", "window_overlap", "]", ",", "tf", ".", "zeros", "(", "(", "batch_size", "*", "num_heads", ",", "1", ",", "window_overlap", ",", "window_overlap", ")", ",", "dtype", "=", "diagonal_chunked_attention_scores", ".", "dtype", ",", ")", ",", "]", ",", "axis", "=", "1", ",", ")", "first_chunk_mask", "=", "(", "tf", ".", "tile", "(", "tf", ".", "range", "(", "chunks_count", "+", "1", ")", "[", "None", ",", ":", ",", "None", ",", "None", "]", ",", "(", "batch_size", "*", "num_heads", ",", "1", ",", "window_overlap", ",", "window_overlap", ")", ",", ")", "<", "1", ")", "diagonal_attn_scores_low_triang", "=", "tf", ".", "where", "(", "first_chunk_mask", ",", "diagonal_attn_scores_first_chunk", ",", "diagonal_attn_scores_low_triang", ",", ")", "# merging upper and lower triangle", "diagonal_attention_scores", "=", "tf", ".", "concat", "(", "[", "diagonal_attn_scores_low_triang", ",", "diagonal_attn_scores_up_triang", "]", ",", "axis", "=", "-", "1", ")", "# separate batch_size and num_heads dimensions again", "diagonal_attention_scores", "=", "tf", ".", "transpose", "(", "tf", ".", "reshape", "(", "diagonal_attention_scores", ",", "(", "batch_size", ",", "num_heads", ",", "seq_len", ",", "2", "*", "window_overlap", "+", "1", ")", ",", ")", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ",", ")", "diagonal_attention_scores", "=", "self", ".", "_mask_invalid_locations", "(", "diagonal_attention_scores", ",", "window_overlap", ")", "return", "diagonal_attention_scores" ]
start_point: [373, 4]
end_point: [485, 40]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'error', 'th']
is_langid_reliable: False
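To make the banded result concrete: for `window_overlap` w, position i only scores against positions i-w .. i+w, which is why the output's last dimension has 2*w+1 slots. A dense reference sketch using `tf.linalg.band_part` (for intuition only; the function above computes the same band without materializing the full matrix):

import tensorflow as tf

w, seq_len, head_dim = 1, 4, 8
q = tf.random.normal((seq_len, head_dim))
k = tf.random.normal((seq_len, head_dim))
full_scores = tf.matmul(q, k, transpose_b=True)   # (4, 4) dense scores
banded = tf.linalg.band_part(full_scores, w, w)   # zero outside |i - j| <= w
print(banded.numpy())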
identifier: TFLEDEncoderSelfAttention._sliding_chunks_matmul_attn_probs_value
parameters: (self, attn_probs, value, window_overlap)
docstring: Same as _sliding_chunks_query_key_matmul but for attn_probs and value tensors. The returned tensor will be of the same shape as `attn_probs`.
docstring_summary: same as the docstring above
function:

def _sliding_chunks_matmul_attn_probs_value(self, attn_probs, value, window_overlap):
    """
    Same as _sliding_chunks_query_key_matmul but for attn_probs and value tensors. The returned tensor will be of
    the same shape as `attn_probs`.
    """
    batch_size, seq_len, num_heads, head_dim = shape_list(value)

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            seq_len % (window_overlap * 2),
            0,
            message="Seq_len has to be multiple of 2 * window_overlap",
        )
        tf.debugging.assert_equal(
            shape_list(attn_probs)[:3],
            shape_list(value)[:3],
            message="value and attn_probs must have same dims (except head_dim)",
        )
        tf.debugging.assert_equal(
            shape_list(attn_probs)[3],
            2 * window_overlap + 1,
            message="attn_probs last dim has to be 2 * window_overlap + 1",
        )

    chunks_count = seq_len // window_overlap - 1

    # group batch_size and num_heads dimensions into one, then chunk seq_len into chunks of size 2 window overlap
    chunked_attn_probs = tf.reshape(
        tf.transpose(attn_probs, (0, 2, 1, 3)),
        (
            batch_size * num_heads,
            seq_len // window_overlap,
            window_overlap,
            2 * window_overlap + 1,
        ),
    )

    # group batch_size and num_heads dimensions into one
    value = tf.reshape(
        tf.transpose(value, (0, 2, 1, 3)),
        (batch_size * num_heads, seq_len, head_dim),
    )

    # pad seq_len with window_overlap at the beginning of the sequence and another window_overlap at the end
    paddings = tf.convert_to_tensor([[0, 0], [window_overlap, window_overlap], [0, 0]])
    padded_value = tf.pad(value, paddings, constant_values=-1)

    # chunk padded_value into chunks of size 3 window overlap and an overlap of size window overlap
    frame_size = 3 * window_overlap * head_dim
    frame_hop_size = (shape_list(padded_value)[1] * head_dim - frame_size) // chunks_count
    chunked_value = tf.signal.frame(
        tf.reshape(padded_value, (batch_size * num_heads, -1)),
        frame_size,
        frame_hop_size,
    )
    chunked_value = tf.reshape(
        chunked_value,
        (batch_size * num_heads, chunks_count + 1, 3 * window_overlap, head_dim),
    )

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(chunked_value),
            [batch_size * num_heads, chunks_count + 1, 3 * window_overlap, head_dim],
            message="Chunked value has the wrong shape",
        )

    chunked_attn_probs = self._pad_and_diagonalize(chunked_attn_probs)
    context = tf.einsum("bcwd,bcdh->bcwh", chunked_attn_probs, chunked_value)
    context = tf.transpose(
        tf.reshape(context, (batch_size, num_heads, seq_len, head_dim)),
        (0, 2, 1, 3),
    )

    return context
[ "def", "_sliding_chunks_matmul_attn_probs_value", "(", "self", ",", "attn_probs", ",", "value", ",", "window_overlap", ")", ":", "batch_size", ",", "seq_len", ",", "num_heads", ",", "head_dim", "=", "shape_list", "(", "value", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "seq_len", "%", "(", "window_overlap", "*", "2", ")", ",", "0", ",", "message", "=", "\"Seq_len has to be multiple of 2 * window_overlap\"", ",", ")", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_probs", ")", "[", ":", "3", "]", ",", "shape_list", "(", "value", ")", "[", ":", "3", "]", ",", "message", "=", "\"value and attn_probs must have same dims (except head_dim)\"", ",", ")", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_probs", ")", "[", "3", "]", ",", "2", "*", "window_overlap", "+", "1", ",", "message", "=", "\"attn_probs last dim has to be 2 * window_overlap + 1\"", ",", ")", "chunks_count", "=", "seq_len", "//", "window_overlap", "-", "1", "# group batch_size and num_heads dimensions into one, then chunk seq_len into chunks of size 2 window overlap", "chunked_attn_probs", "=", "tf", ".", "reshape", "(", "tf", ".", "transpose", "(", "attn_probs", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ")", ",", "(", "batch_size", "*", "num_heads", ",", "seq_len", "//", "window_overlap", ",", "window_overlap", ",", "2", "*", "window_overlap", "+", "1", ",", ")", ",", ")", "# group batch_size and num_heads dimensions into one", "value", "=", "tf", ".", "reshape", "(", "tf", ".", "transpose", "(", "value", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ")", ",", "(", "batch_size", "*", "num_heads", ",", "seq_len", ",", "head_dim", ")", ",", ")", "# pad seq_len with w at the beginning of the sequence and another window overlap at the end", "paddings", "=", "tf", ".", "convert_to_tensor", "(", "[", "[", "0", ",", "0", "]", ",", "[", "window_overlap", ",", "window_overlap", "]", ",", "[", "0", ",", "0", "]", "]", ")", "padded_value", "=", "tf", ".", "pad", "(", "value", ",", "paddings", ",", "constant_values", "=", "-", "1", ")", "# chunk padded_value into chunks of size 3 window overlap and an overlap of size window overlap", "frame_size", "=", "3", "*", "window_overlap", "*", "head_dim", "frame_hop_size", "=", "(", "shape_list", "(", "padded_value", ")", "[", "1", "]", "*", "head_dim", "-", "frame_size", ")", "//", "chunks_count", "chunked_value", "=", "tf", ".", "signal", ".", "frame", "(", "tf", ".", "reshape", "(", "padded_value", ",", "(", "batch_size", "*", "num_heads", ",", "-", "1", ")", ")", ",", "frame_size", ",", "frame_hop_size", ",", ")", "chunked_value", "=", "tf", ".", "reshape", "(", "chunked_value", ",", "(", "batch_size", "*", "num_heads", ",", "chunks_count", "+", "1", ",", "3", "*", "window_overlap", ",", "head_dim", ")", ",", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "chunked_value", ")", ",", "[", "batch_size", "*", "num_heads", ",", "chunks_count", "+", "1", ",", "3", "*", "window_overlap", ",", "head_dim", "]", ",", "message", "=", "\"Chunked value has the wrong shape\"", ",", ")", "chunked_attn_probs", "=", "self", ".", "_pad_and_diagonalize", "(", "chunked_attn_probs", ")", "context", "=", "tf", ".", "einsum", "(", "\"bcwd,bcdh->bcwh\"", ",", "chunked_attn_probs", ",", "chunked_value", ")", "context", "=", "tf", ".", "transpose", "(", "tf", ".", "reshape", "(", "context", ",", "(", 
"batch_size", ",", "num_heads", ",", "seq_len", ",", "head_dim", ")", ")", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ",", ")", "return", "context" ]
start_point: [517, 4]
end_point: [592, 22]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'error', 'th']
is_langid_reliable: False
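A sketch of the chunk bookkeeping above with toy numbers, assuming `seq_len` is a multiple of `2 * window_overlap` as the assertions require:

# seq_len=8, window_overlap=2, head_dim=1
seq_len, w, head_dim = 8, 2, 1
chunks_count = seq_len // w - 1                                         # 3
padded_len = seq_len + 2 * w                                            # 12 (w padding on each side)
frame_size = 3 * w * head_dim                                           # 6
frame_hop_size = (padded_len * head_dim - frame_size) // chunks_count   # 2 == w * head_dim
# tf.signal.frame then yields chunks_count + 1 = 4 value chunks of length 3*w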
identifier: TFLEDEncoderSelfAttention._pad_and_transpose_last_two_dims
parameters: (hidden_states_padded, paddings)
docstring: pads rows and then flips rows and columns
docstring_summary: same as the docstring above
function:

def _pad_and_transpose_last_two_dims(hidden_states_padded, paddings):
    """pads rows and then flips rows and columns"""
    hidden_states_padded = tf.pad(
        hidden_states_padded, paddings
    )  # padding value is not important because it will be overwritten
    batch_size, chunk_size, seq_length, hidden_dim = shape_list(hidden_states_padded)
    hidden_states_padded = tf.reshape(hidden_states_padded, (batch_size, chunk_size, hidden_dim, seq_length))

    return hidden_states_padded
[ "def", "_pad_and_transpose_last_two_dims", "(", "hidden_states_padded", ",", "paddings", ")", ":", "hidden_states_padded", "=", "tf", ".", "pad", "(", "hidden_states_padded", ",", "paddings", ")", "# padding value is not important because it will be overwritten", "batch_size", ",", "chunk_size", ",", "seq_length", ",", "hidden_dim", "=", "shape_list", "(", "hidden_states_padded", ")", "hidden_states_padded", "=", "tf", ".", "reshape", "(", "hidden_states_padded", ",", "(", "batch_size", ",", "chunk_size", ",", "hidden_dim", ",", "seq_length", ")", ")", "return", "hidden_states_padded" ]
start_point: [595, 4]
end_point: [603, 35]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
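Note the reshape here is not `tf.transpose`: after padding one extra row, re-reading the same buffer with the last two sizes swapped shifts each row, which is what realigns diagonals into columns. A toy demonstration:

import tensorflow as tf

x = tf.reshape(tf.range(9, dtype=tf.float32), (1, 1, 3, 3))   # rows 012 / 345 / 678
padded = tf.pad(x, [[0, 0], [0, 0], [0, 1], [0, 0]])          # one extra zero row -> (1, 1, 4, 3)
shifted = tf.reshape(padded, (1, 1, 3, 4))                    # same buffer, new row length
print(shifted.numpy()[0, 0])
# [[0. 1. 2. 3.]
#  [4. 5. 6. 7.]
#  [8. 0. 0. 0.]]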
identifier: TFLEDEncoderSelfAttention._pad_and_diagonalize
parameters: (chunked_hidden_states)
docstring:
  shift every row 1 step right, converting columns into diagonals.

  Example::

      chunked_hidden_states: [  0.4983,  2.6918, -0.0071,  1.0492,
                               -1.8348,  0.7672,  0.2986,  0.0285,
                               -0.7584,  0.4206, -0.0405,  0.1599,
                                2.0514, -1.1600,  0.5372,  0.2629 ]
      window_overlap = num_rows = 4
      (pad & diagonalize) =>
          [ 0.4983,  2.6918, -0.0071,  1.0492,  0.0000,  0.0000,  0.0000
            0.0000, -1.8348,  0.7672,  0.2986,  0.0285,  0.0000,  0.0000
            0.0000,  0.0000, -0.7584,  0.4206, -0.0405,  0.1599,  0.0000
            0.0000,  0.0000,  0.0000,  2.0514, -1.1600,  0.5372,  0.2629 ]
docstring_summary: shift every row 1 step right, converting columns into diagonals.
function:

def _pad_and_diagonalize(chunked_hidden_states):
    """
    shift every row 1 step right, converting columns into diagonals.

    Example::

        chunked_hidden_states: [ 0.4983, 2.6918, -0.0071, 1.0492,
                                 -1.8348, 0.7672, 0.2986, 0.0285,
                                 -0.7584, 0.4206, -0.0405, 0.1599,
                                 2.0514, -1.1600, 0.5372, 0.2629 ]
        window_overlap = num_rows = 4
        (pad & diagonalize) => [ 0.4983, 2.6918, -0.0071, 1.0492, 0.0000, 0.0000, 0.0000
                                 0.0000, -1.8348, 0.7672, 0.2986, 0.0285, 0.0000, 0.0000
                                 0.0000, 0.0000, -0.7584, 0.4206, -0.0405, 0.1599, 0.0000
                                 0.0000, 0.0000, 0.0000, 2.0514, -1.1600, 0.5372, 0.2629 ]
    """
    total_num_heads, num_chunks, window_overlap, hidden_dim = shape_list(chunked_hidden_states)
    paddings = tf.convert_to_tensor([[0, 0], [0, 0], [0, 0], [0, window_overlap + 1]])
    chunked_hidden_states = tf.pad(
        chunked_hidden_states, paddings
    )  # total_num_heads x num_chunks x window_overlap x (hidden_dim + window_overlap + 1). Padding value is not important because it'll be overwritten
    chunked_hidden_states = tf.reshape(
        chunked_hidden_states, (total_num_heads, num_chunks, -1)
    )  # total_num_heads x num_chunks x (window_overlap * hidden_dim + window_overlap**2 + window_overlap)
    chunked_hidden_states = chunked_hidden_states[
        :, :, :-window_overlap
    ]  # total_num_heads x num_chunks x (window_overlap * hidden_dim + window_overlap**2)
    chunked_hidden_states = tf.reshape(
        chunked_hidden_states,
        (total_num_heads, num_chunks, window_overlap, window_overlap + hidden_dim),
    )  # total_num_heads x num_chunks x window_overlap x (hidden_dim + window_overlap)
    chunked_hidden_states = chunked_hidden_states[:, :, :, :-1]

    return chunked_hidden_states
[ "def", "_pad_and_diagonalize", "(", "chunked_hidden_states", ")", ":", "total_num_heads", ",", "num_chunks", ",", "window_overlap", ",", "hidden_dim", "=", "shape_list", "(", "chunked_hidden_states", ")", "paddings", "=", "tf", ".", "convert_to_tensor", "(", "[", "[", "0", ",", "0", "]", ",", "[", "0", ",", "0", "]", ",", "[", "0", ",", "0", "]", ",", "[", "0", ",", "window_overlap", "+", "1", "]", "]", ")", "chunked_hidden_states", "=", "tf", ".", "pad", "(", "chunked_hidden_states", ",", "paddings", ")", "# total_num_heads x num_chunks x window_overlap x (hidden_dim+window_overlap+1). Padding value is not important because it'll be overwritten", "chunked_hidden_states", "=", "tf", ".", "reshape", "(", "chunked_hidden_states", ",", "(", "total_num_heads", ",", "num_chunks", ",", "-", "1", ")", ")", "# total_num_heads x num_chunks x window_overlapL+window_overlapwindow_overlap+window_overlap", "chunked_hidden_states", "=", "chunked_hidden_states", "[", ":", ",", ":", ",", ":", "-", "window_overlap", "]", "# total_num_heads x num_chunks x window_overlapL+window_overlapwindow_overlap", "chunked_hidden_states", "=", "tf", ".", "reshape", "(", "chunked_hidden_states", ",", "(", "total_num_heads", ",", "num_chunks", ",", "window_overlap", ",", "window_overlap", "+", "hidden_dim", ")", ",", ")", "# total_num_heads x num_chunks, window_overlap x hidden_dim+window_overlap", "chunked_hidden_states", "=", "chunked_hidden_states", "[", ":", ",", ":", ",", ":", ",", ":", "-", "1", "]", "return", "chunked_hidden_states" ]
start_point: [606, 4]
end_point: [640, 36]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'error', 'th']
is_langid_reliable: False
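The docstring example can be reproduced with plain TF ops; a self-contained sketch of the pad/flatten/trim/reshape pipeline for one head and one chunk:

import tensorflow as tf

x = tf.constant([[[[0.4983, 2.6918, -0.0071, 1.0492],
                   [-1.8348, 0.7672, 0.2986, 0.0285],
                   [-0.7584, 0.4206, -0.0405, 0.1599],
                   [2.0514, -1.1600, 0.5372, 0.2629]]]])          # (1, 1, 4, 4)
w = 4                                                             # window_overlap = num_rows
padded = tf.pad(x, [[0, 0], [0, 0], [0, 0], [0, w + 1]])          # (1, 1, 4, 9)
flat = tf.reshape(padded, (1, 1, -1))[:, :, :-w]                  # drop trailing pad
diagonalized = tf.reshape(flat, (1, 1, w, w + 4))[:, :, :, :-1]   # (1, 1, 4, 7)
print(diagonalized.numpy()[0, 0])   # row k shifted k steps right, as in the docstring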
identifier: TFLEDEncoderSelfAttention._chunk
parameters: (hidden_states, window_overlap)
docstring: convert into overlapping chunks. Chunk size = 2w, overlap size = w
docstring_summary: same as the docstring above
function:

def _chunk(hidden_states, window_overlap):
    """convert into overlapping chunks. Chunk size = 2w, overlap size = w"""
    batch_size, seq_length, hidden_dim = shape_list(hidden_states)
    num_output_chunks = 2 * (seq_length // (2 * window_overlap)) - 1

    # define frame size and frame stride (similar to convolution)
    frame_hop_size = window_overlap * hidden_dim
    frame_size = 2 * frame_hop_size
    hidden_states = tf.reshape(hidden_states, (batch_size, seq_length * hidden_dim))

    # chunk with overlap
    chunked_hidden_states = tf.signal.frame(hidden_states, frame_size, frame_hop_size)

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(chunked_hidden_states),
            [batch_size, num_output_chunks, frame_size],
            message=f"Make sure chunking is correctly applied. Chunked hidden states should have output dimension {[batch_size, num_output_chunks, frame_size]}, but got {shape_list(chunked_hidden_states)}.",
        )

    chunked_hidden_states = tf.reshape(
        chunked_hidden_states,
        (batch_size, num_output_chunks, 2 * window_overlap, hidden_dim),
    )

    return chunked_hidden_states
[ "def", "_chunk", "(", "hidden_states", ",", "window_overlap", ")", ":", "batch_size", ",", "seq_length", ",", "hidden_dim", "=", "shape_list", "(", "hidden_states", ")", "num_output_chunks", "=", "2", "*", "(", "seq_length", "//", "(", "2", "*", "window_overlap", ")", ")", "-", "1", "# define frame size and frame stride (similar to convolution)", "frame_hop_size", "=", "window_overlap", "*", "hidden_dim", "frame_size", "=", "2", "*", "frame_hop_size", "hidden_states", "=", "tf", ".", "reshape", "(", "hidden_states", ",", "(", "batch_size", ",", "seq_length", "*", "hidden_dim", ")", ")", "# chunk with overlap", "chunked_hidden_states", "=", "tf", ".", "signal", ".", "frame", "(", "hidden_states", ",", "frame_size", ",", "frame_hop_size", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "chunked_hidden_states", ")", ",", "[", "batch_size", ",", "num_output_chunks", ",", "frame_size", "]", ",", "message", "=", "f\"Make sure chunking is correctly applied. `Chunked hidden states should have output dimension {[batch_size, frame_size, num_output_chunks]}, but got {shape_list(chunked_hidden_states)}.\"", ",", ")", "chunked_hidden_states", "=", "tf", ".", "reshape", "(", "chunked_hidden_states", ",", "(", "batch_size", ",", "num_output_chunks", ",", "2", "*", "window_overlap", ",", "hidden_dim", ")", ",", ")", "return", "chunked_hidden_states" ]
start_point: [643, 4]
end_point: [668, 36]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
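A toy run of the same frame-based chunking, with seq_length=8, window_overlap=2, hidden_dim=1, giving 2*(8//4)-1 = 3 overlapping chunks:

import tensorflow as tf

w, hidden_dim = 2, 1
hidden_states = tf.reshape(tf.range(8, dtype=tf.float32), (1, 8 * hidden_dim))
chunks = tf.signal.frame(hidden_states, frame_length=2 * w * hidden_dim, frame_step=w * hidden_dim)
print(chunks.numpy())
# [[[0. 1. 2. 3.]
#   [2. 3. 4. 5.]
#   [4. 5. 6. 7.]]] -- each chunk of size 2w overlaps its neighbor by w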
identifier: TFLEDEncoderSelfAttention._get_global_attn_indices
parameters: (is_index_global_attn)
docstring: compute global attn indices required throughout the forward pass
docstring_summary: same as the docstring above
function:

def _get_global_attn_indices(is_index_global_attn):
    """compute global attn indices required throughout the forward pass"""
    # helper variable
    num_global_attn_indices = tf.math.count_nonzero(is_index_global_attn, axis=1)
    num_global_attn_indices = tf.cast(num_global_attn_indices, dtype=tf.constant(1).dtype)

    # max number of global attn indices in batch
    max_num_global_attn_indices = tf.reduce_max(num_global_attn_indices)

    # indices of global attn
    is_index_global_attn_nonzero = tf.where(is_index_global_attn)

    # helper variable
    is_local_index_global_attn = tf.range(max_num_global_attn_indices) < tf.expand_dims(
        num_global_attn_indices, axis=-1
    )

    # location of the non-padding values within global attention indices
    is_local_index_global_attn_nonzero = tf.where(is_local_index_global_attn)

    # location of the padding values within global attention indices
    is_local_index_no_global_attn_nonzero = tf.where(tf.math.logical_not(is_local_index_global_attn))

    return (
        max_num_global_attn_indices,
        is_index_global_attn_nonzero,
        is_local_index_global_attn_nonzero,
        is_local_index_no_global_attn_nonzero,
    )
[ "def", "_get_global_attn_indices", "(", "is_index_global_attn", ")", ":", "# helper variable", "num_global_attn_indices", "=", "tf", ".", "math", ".", "count_nonzero", "(", "is_index_global_attn", ",", "axis", "=", "1", ")", "num_global_attn_indices", "=", "tf", ".", "cast", "(", "num_global_attn_indices", ",", "dtype", "=", "tf", ".", "constant", "(", "1", ")", ".", "dtype", ")", "# max number of global attn indices in batch", "max_num_global_attn_indices", "=", "tf", ".", "reduce_max", "(", "num_global_attn_indices", ")", "# indices of global attn", "is_index_global_attn_nonzero", "=", "tf", ".", "where", "(", "is_index_global_attn", ")", "# helper variable", "is_local_index_global_attn", "=", "tf", ".", "range", "(", "max_num_global_attn_indices", ")", "<", "tf", ".", "expand_dims", "(", "num_global_attn_indices", ",", "axis", "=", "-", "1", ")", "# location of the non-padding values within global attention indices", "is_local_index_global_attn_nonzero", "=", "tf", ".", "where", "(", "is_local_index_global_attn", ")", "# location of the padding values within global attention indices", "is_local_index_no_global_attn_nonzero", "=", "tf", ".", "where", "(", "tf", ".", "math", ".", "logical_not", "(", "is_local_index_global_attn", ")", ")", "return", "(", "max_num_global_attn_indices", ",", "is_index_global_attn_nonzero", ",", "is_local_index_global_attn_nonzero", ",", "is_local_index_no_global_attn_nonzero", ",", ")" ]
start_point: [671, 4]
end_point: [699, 9]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
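A toy batch makes the return values concrete; a minimal sketch:

import tensorflow as tf

is_index_global_attn = tf.constant([[True, False, True],
                                    [False, False, True]])
num_global = tf.math.count_nonzero(is_index_global_attn, axis=1)   # [2 1]
max_num = tf.reduce_max(num_global)                                # 2
print(tf.where(is_index_global_attn).numpy())  # [[0 0] [0 2] [1 2]] -- (batch, position) pairs
is_local = tf.range(max_num) < num_global[:, None]
print(is_local.numpy())  # [[ True  True] [ True False]] -- second row has one pad slot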
identifier: TFLEDDecoderAttention.call
parameters: (self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, training=False)
docstring: Input shape: Batch x Time x Channel
docstring_summary: same as the docstring above
function:

def call(
    self,
    hidden_states: tf.Tensor,
    key_value_states: Optional[tf.Tensor] = None,
    past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None,
    attention_mask: Optional[tf.Tensor] = None,
    layer_head_mask: Optional[tf.Tensor] = None,
    training=False,
) -> Tuple[tf.Tensor, Optional[tf.Tensor]]:
    """Input shape: Batch x Time x Channel"""

    # if key_value_states are provided this layer is used as a cross-attention layer
    # for the decoder
    is_cross_attention = key_value_states is not None
    bsz, tgt_len, embed_dim = shape_list(hidden_states)

    # get query proj
    query_states = self.q_proj(hidden_states) * self.scaling
    # get key, value proj
    if is_cross_attention and past_key_value is not None:
        # reuse k,v, cross_attentions
        key_states = past_key_value[0]
        value_states = past_key_value[1]
    elif is_cross_attention:
        # cross_attentions
        key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
        value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
    elif past_key_value is not None:
        # reuse k, v, self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
        key_states = tf.concat([past_key_value[0], key_states], axis=2)
        value_states = tf.concat([past_key_value[1], value_states], axis=2)
    else:
        # self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)

    if self.is_decoder:
        # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states.
        # Further calls to the cross_attention layer can then reuse all cross-attention
        # key/value_states (first "if" case)
        # if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of
        # all previous decoder key/value_states. Further calls to uni-directional self-attention
        # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
        # if encoder bi-directional self-attention `past_key_value` is always `None`
        past_key_value = (key_states, value_states)

    proj_shape = (bsz * self.num_heads, -1, self.head_dim)
    query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape)
    key_states = tf.reshape(key_states, proj_shape)
    value_states = tf.reshape(value_states, proj_shape)

    src_len = shape_list(key_states)[1]
    attn_weights = tf.matmul(query_states, key_states, transpose_b=True)

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(attn_weights),
            [bsz * self.num_heads, tgt_len, src_len],
            message=f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}",
        )

    if attention_mask is not None:
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(attention_mask),
                [bsz, 1, tgt_len, src_len],
                message=f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}",
            )

        attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + tf.cast(
            attention_mask, dtype=attn_weights.dtype
        )
        attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))

    attn_weights = tf.nn.softmax(attn_weights, axis=-1)

    if layer_head_mask is not None:
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(layer_head_mask),
                [self.num_heads],
                message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}",
            )

        attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape(
            attn_weights, (bsz, self.num_heads, tgt_len, src_len)
        )
        attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))

    attn_probs = self.dropout(attn_weights, training=training)
    attn_output = tf.matmul(attn_probs, value_states)

    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(attn_output),
            [bsz * self.num_heads, tgt_len, self.head_dim],
            message=f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}",
        )

    attn_output = tf.transpose(
        tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3)
    )
    attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim))

    attn_output = self.out_proj(attn_output)
    attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len))

    return attn_output, attn_weights, past_key_value
[ "def", "call", "(", "self", ",", "hidden_states", ":", "tf", ".", "Tensor", ",", "key_value_states", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "Tuple", "[", "tf", ".", "Tensor", "]", "]", "]", "=", "None", ",", "attention_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "training", "=", "False", ",", ")", "->", "Tuple", "[", "tf", ".", "Tensor", ",", "Optional", "[", "tf", ".", "Tensor", "]", "]", ":", "# if key_value_states are provided this layer is used as a cross-attention layer", "# for the decoder", "is_cross_attention", "=", "key_value_states", "is", "not", "None", "bsz", ",", "tgt_len", ",", "embed_dim", "=", "shape_list", "(", "hidden_states", ")", "# get query proj", "query_states", "=", "self", ".", "q_proj", "(", "hidden_states", ")", "*", "self", ".", "scaling", "# get key, value proj", "if", "is_cross_attention", "and", "past_key_value", "is", "not", "None", ":", "# reuse k,v, cross_attentions", "key_states", "=", "past_key_value", "[", "0", "]", "value_states", "=", "past_key_value", "[", "1", "]", "elif", "is_cross_attention", ":", "# cross_attentions", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "elif", "past_key_value", "is", "not", "None", ":", "# reuse k, v, self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "key_states", "=", "tf", ".", "concat", "(", "[", "past_key_value", "[", "0", "]", ",", "key_states", "]", ",", "axis", "=", "2", ")", "value_states", "=", "tf", ".", "concat", "(", "[", "past_key_value", "[", "1", "]", ",", "value_states", "]", ",", "axis", "=", "2", ")", "else", ":", "# self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "if", "self", ".", "is_decoder", ":", "# if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states.", "# Further calls to cross_attention layer can then reuse all cross-attention", "# key/value_states (first \"if\" case)", "# if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of", "# all previous decoder key/value_states. 
Further calls to uni-directional self-attention", "# can concat previous decoder key/value_states to current projected key/value_states (third \"elif\" case)", "# if encoder bi-directional self-attention `past_key_value` is always `None`", "past_key_value", "=", "(", "key_states", ",", "value_states", ")", "proj_shape", "=", "(", "bsz", "*", "self", ".", "num_heads", ",", "-", "1", ",", "self", ".", "head_dim", ")", "query_states", "=", "tf", ".", "reshape", "(", "self", ".", "_shape", "(", "query_states", ",", "tgt_len", ",", "bsz", ")", ",", "proj_shape", ")", "key_states", "=", "tf", ".", "reshape", "(", "key_states", ",", "proj_shape", ")", "value_states", "=", "tf", ".", "reshape", "(", "value_states", ",", "proj_shape", ")", "src_len", "=", "shape_list", "(", "key_states", ")", "[", "1", "]", "attn_weights", "=", "tf", ".", "matmul", "(", "query_states", ",", "key_states", ",", "transpose_b", "=", "True", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_weights", ")", ",", "[", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", "]", ",", "message", "=", "f\"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}\"", ",", ")", "if", "attention_mask", "is", "not", "None", ":", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attention_mask", ")", ",", "[", "bsz", ",", "1", ",", "tgt_len", ",", "src_len", "]", ",", "message", "=", "f\"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}\"", ",", ")", "attn_weights", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "+", "tf", ".", "cast", "(", "attention_mask", ",", "dtype", "=", "attn_weights", ".", "dtype", ")", "attn_weights", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "attn_weights", "=", "tf", ".", "nn", ".", "softmax", "(", "attn_weights", ",", "axis", "=", "-", "1", ")", "if", "layer_head_mask", "is", "not", "None", ":", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "layer_head_mask", ")", ",", "[", "self", ".", "num_heads", "]", ",", "message", "=", "f\"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\"", ",", ")", "attn_weights", "=", "tf", ".", "reshape", "(", "layer_head_mask", ",", "(", "1", ",", "-", "1", ",", "1", ",", "1", ")", ")", "*", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "attn_weights", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "attn_probs", "=", "self", ".", "dropout", "(", "attn_weights", ",", "training", "=", "training", ")", "attn_output", "=", "tf", ".", "matmul", "(", "attn_probs", ",", "value_states", ")", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_output", ")", ",", "[", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", "]", ",", "message", "=", "f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, 
self.head_dim)}, but is {shape_list(attn_output)}\"", ",", ")", "attn_output", "=", "tf", ".", "transpose", "(", "tf", ".", "reshape", "(", "attn_output", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", ")", ")", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ")", "attn_output", "=", "tf", ".", "reshape", "(", "attn_output", ",", "(", "bsz", ",", "tgt_len", ",", "embed_dim", ")", ")", "attn_output", "=", "self", ".", "out_proj", "(", "attn_output", ")", "attn_weights", ":", "tf", ".", "Tensor", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "return", "attn_output", ",", "attn_weights", ",", "past_key_value" ]
[ 992, 4 ]
[ 1102, 56 ]
python
en
['en', 'pl', 'en']
True
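The tokenized attention `call` above implements the usual decoder cache: keys and values are projected once, and later steps concatenate the cached tensors along the sequence axis before attending. A minimal NumPy sketch of that caching pattern — the shapes, names, and plain softmax here are illustrative, not the model's actual API:

import numpy as np

def attend_with_cache(query, new_key, new_value, past_key_value=None):
    # Reuse cached keys/values from earlier steps, then extend the cache.
    if past_key_value is not None:
        key = np.concatenate([past_key_value[0], new_key], axis=1)    # (bsz, src_len, dim)
        value = np.concatenate([past_key_value[1], new_value], axis=1)
    else:
        key, value = new_key, new_value
    scores = query @ key.transpose(0, 2, 1) / np.sqrt(query.shape[-1])
    weights = np.exp(scores - scores.max(axis=-1, keepdims=True))
    weights = weights / weights.sum(axis=-1, keepdims=True)           # softmax
    return weights @ value, (key, value)                              # output + updated cache

q = np.random.rand(1, 1, 8)                       # one new target position
k = v = np.random.rand(1, 1, 8)
out, cache = attend_with_cache(q, k, v)           # step 1: no cache yet
out, cache = attend_with_cache(q, k, v, cache)    # step 2: reuses step-1 keys/values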
TFLEDEncoderLayer.call
( self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, is_index_masked: tf.Tensor, is_index_global_attn: tf.Tensor, is_global_attn: bool, training=False, )
Args: hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`.
Args: hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`.
def call( self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, is_index_masked: tf.Tensor, is_index_global_attn: tf.Tensor, is_global_attn: bool, training=False, ): """ Args: hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. """ residual = hidden_states layer_outputs = self.self_attn( [hidden_states, attention_mask, layer_head_mask, is_index_masked, is_index_global_attn, is_global_attn], training=training, ) hidden_states = layer_outputs[0] if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(hidden_states), shape_list(residual), message=f"Self attn modified the shape of query {shape_list(residual)} to {shape_list(hidden_states)}", ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = self.activation_dropout(hidden_states, training=training) hidden_states = self.fc2(hidden_states) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.final_layer_norm(hidden_states) return (hidden_states,) + layer_outputs[1:]
[ "def", "call", "(", "self", ",", "hidden_states", ":", "tf", ".", "Tensor", ",", "attention_mask", ":", "tf", ".", "Tensor", ",", "layer_head_mask", ":", "tf", ".", "Tensor", ",", "is_index_masked", ":", "tf", ".", "Tensor", ",", "is_index_global_attn", ":", "tf", ".", "Tensor", ",", "is_global_attn", ":", "bool", ",", "training", "=", "False", ",", ")", ":", "residual", "=", "hidden_states", "layer_outputs", "=", "self", ".", "self_attn", "(", "[", "hidden_states", ",", "attention_mask", ",", "layer_head_mask", ",", "is_index_masked", ",", "is_index_global_attn", ",", "is_global_attn", "]", ",", "training", "=", "training", ",", ")", "hidden_states", "=", "layer_outputs", "[", "0", "]", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "hidden_states", ")", ",", "shape_list", "(", "residual", ")", ",", "message", "=", "f\"Self attn modified the shape of query {shape_list(residual)} to {shape_list(hidden_states)}\"", ",", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "self", ".", "activation_dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "return", "(", "hidden_states", ",", ")", "+", "layer_outputs", "[", "1", ":", "]" ]
[ 1118, 4 ]
[ 1162, 51 ]
python
en
['en', 'error', 'th']
False
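The encoder layer record above is a post-layer-norm Transformer block: self-attention plus residual, then feed-forward plus residual, each sum passed through layer normalisation. Its control flow compresses to the sketch below, with the framework layers replaced by plain callables and a scalar standing in for the tensor:

def encoder_block(x, attn, ffn, norm1, norm2, dropout):
    # Post-LN block: y = LN1(x + Drop(Attn(x))); z = LN2(y + Drop(FFN(y)))
    y = norm1(x + dropout(attn(x)))
    return norm2(y + dropout(ffn(y)))

identity = lambda t: t   # no-op stand-ins for the real layers
assert encoder_block(1.0, identity, identity, identity, identity, identity) == 4.0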
TFLEDDecoderLayer.call
( self, hidden_states, attention_mask: Optional[tf.Tensor] = None, encoder_hidden_states: Optional[tf.Tensor] = None, encoder_attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, encoder_layer_head_mask: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[tf.Tensor]] = None, training=False, )
Args: hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size `(config.encoder_attention_heads,)`. past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states
Args: hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size `(config.encoder_attention_heads,)`. past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states
def call( self, hidden_states, attention_mask: Optional[tf.Tensor] = None, encoder_hidden_states: Optional[tf.Tensor] = None, encoder_attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, encoder_layer_head_mask: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[tf.Tensor]] = None, training=False, ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: """ Args: hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size `(config.encoder_attention_heads,)`. past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states """ residual = hidden_states # Self Attention # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None # add present self-attn cache to positions 1,2 of present_key_value tuple hidden_states, self_attn_weights, present_key_value = self.self_attn( hidden_states=hidden_states, past_key_value=self_attn_past_key_value, attention_mask=attention_mask, layer_head_mask=layer_head_mask, ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.self_attn_layer_norm(hidden_states) # Cross-Attention Block cross_attn_present_key_value = None if encoder_hidden_states is not None: residual = hidden_states # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None hidden_states, _, cross_attn_present_key_value = self.encoder_attn( hidden_states=hidden_states, key_value_states=encoder_hidden_states, attention_mask=encoder_attention_mask, layer_head_mask=encoder_layer_head_mask, past_key_value=cross_attn_past_key_value, ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.encoder_attn_layer_norm(hidden_states) # add cross-attn to positions 3,4 of present_key_value tuple present_key_value = present_key_value + cross_attn_present_key_value # Fully Connected residual = hidden_states hidden_states = self.activation_fn(self.fc1(hidden_states)) hidden_states = self.activation_dropout(hidden_states, training=training) hidden_states = self.fc2(hidden_states) hidden_states = self.dropout(hidden_states, training=training) hidden_states = residual + hidden_states hidden_states = self.final_layer_norm(hidden_states) return ( hidden_states, self_attn_weights, present_key_value, )
[ "def", "call", "(", "self", ",", "hidden_states", ",", "attention_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "encoder_hidden_states", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "encoder_attention_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "encoder_layer_head_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "tf", ".", "Tensor", "]", "]", "=", "None", ",", "training", "=", "False", ",", ")", "->", "Tuple", "[", "tf", ".", "Tensor", ",", "tf", ".", "Tensor", ",", "Tuple", "[", "Tuple", "[", "tf", ".", "Tensor", "]", "]", "]", ":", "residual", "=", "hidden_states", "# Self Attention", "# decoder uni-directional self-attention cached key/values tuple is at positions 1,2", "self_attn_past_key_value", "=", "past_key_value", "[", ":", "2", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "# add present self-attn cache to positions 1,2 of present_key_value tuple", "hidden_states", ",", "self_attn_weights", ",", "present_key_value", "=", "self", ".", "self_attn", "(", "hidden_states", "=", "hidden_states", ",", "past_key_value", "=", "self_attn_past_key_value", ",", "attention_mask", "=", "attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ",", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "# Cross-Attention Block", "cross_attn_present_key_value", "=", "None", "if", "encoder_hidden_states", "is", "not", "None", ":", "residual", "=", "hidden_states", "# cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple", "cross_attn_past_key_value", "=", "past_key_value", "[", "-", "2", ":", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "hidden_states", ",", "_", ",", "cross_attn_present_key_value", "=", "self", ".", "encoder_attn", "(", "hidden_states", "=", "hidden_states", ",", "key_value_states", "=", "encoder_hidden_states", ",", "attention_mask", "=", "encoder_attention_mask", ",", "layer_head_mask", "=", "encoder_layer_head_mask", ",", "past_key_value", "=", "cross_attn_past_key_value", ",", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "encoder_attn_layer_norm", "(", "hidden_states", ")", "# add cross-attn to positions 3,4 of present_key_value tuple", "present_key_value", "=", "present_key_value", "+", "cross_attn_present_key_value", "# Fully Connected", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "self", ".", "activation_dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "return", "(", "hidden_states", ",", "self_attn_weights", ",", "present_key_value", ",", ")" ]
[ 1193, 4 ]
[ 1268, 9 ]
python
en
['en', 'error', 'th']
False
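The decoder layer's `past_key_value` is a four-tensor tuple per layer: the self-attention key/value pair first and the cross-attention pair last (the source comments count them one-based as positions 1,2 and 3,4). A small sketch of how such a tuple can be split on the way in and rebuilt on the way out; the string placeholders stand in for tensors:

from typing import Optional, Tuple

Cache = Tuple[object, object, object, object]   # (self_k, self_v, cross_k, cross_v)

def split_cache(past_key_value: Optional[Cache]):
    # Self-attention cache is the first pair, cross-attention cache the last pair.
    if past_key_value is None:
        return None, None
    return past_key_value[:2], past_key_value[-2:]

def merge_cache(self_kv, cross_kv) -> Cache:
    # The layer returns present_key_value = self pair + cross pair.
    return tuple(self_kv) + tuple(cross_kv)

self_kv, cross_kv = split_cache(("sk", "sv", "ck", "cv"))
assert merge_cache(self_kv, cross_kv) == ("sk", "sv", "ck", "cv")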
async_setup
(hass: HomeAssistant, config: dict)
Set up the xbox component.
Set up the xbox component.
async def async_setup(hass: HomeAssistant, config: dict): """Set up the xbox component.""" hass.data[DOMAIN] = {} if DOMAIN not in config: return True config_flow.OAuth2FlowHandler.async_register_implementation( hass, config_entry_oauth2_flow.LocalOAuth2Implementation( hass, DOMAIN, config[DOMAIN][CONF_CLIENT_ID], config[DOMAIN][CONF_CLIENT_SECRET], OAUTH2_AUTHORIZE, OAUTH2_TOKEN, ), ) return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "dict", ")", ":", "hass", ".", "data", "[", "DOMAIN", "]", "=", "{", "}", "if", "DOMAIN", "not", "in", "config", ":", "return", "True", "config_flow", ".", "OAuth2FlowHandler", ".", "async_register_implementation", "(", "hass", ",", "config_entry_oauth2_flow", ".", "LocalOAuth2Implementation", "(", "hass", ",", "DOMAIN", ",", "config", "[", "DOMAIN", "]", "[", "CONF_CLIENT_ID", "]", ",", "config", "[", "DOMAIN", "]", "[", "CONF_CLIENT_SECRET", "]", ",", "OAUTH2_AUTHORIZE", ",", "OAUTH2_TOKEN", ",", ")", ",", ")", "return", "True" ]
[ 52, 0 ]
[ 71, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
Set up xbox from a config entry.
Set up xbox from a config entry.
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up xbox from a config entry.""" implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( hass, entry ) ) session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) auth = api.AsyncConfigEntryAuth( aiohttp_client.async_get_clientsession(hass), session ) client = XboxLiveClient(auth) consoles: SmartglassConsoleList = await client.smartglass.get_console_list() _LOGGER.debug( "Found %d consoles: %s", len(consoles.result), consoles.dict(), ) coordinator = XboxUpdateCoordinator(hass, client, consoles) await coordinator.async_refresh() hass.data[DOMAIN][entry.entry_id] = { "client": XboxLiveClient(auth), "consoles": consoles, "coordinator": coordinator, } for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "implementation", "=", "(", "await", "config_entry_oauth2_flow", ".", "async_get_config_entry_implementation", "(", "hass", ",", "entry", ")", ")", "session", "=", "config_entry_oauth2_flow", ".", "OAuth2Session", "(", "hass", ",", "entry", ",", "implementation", ")", "auth", "=", "api", ".", "AsyncConfigEntryAuth", "(", "aiohttp_client", ".", "async_get_clientsession", "(", "hass", ")", ",", "session", ")", "client", "=", "XboxLiveClient", "(", "auth", ")", "consoles", ":", "SmartglassConsoleList", "=", "await", "client", ".", "smartglass", ".", "get_console_list", "(", ")", "_LOGGER", ".", "debug", "(", "\"Found %d consoles: %s\"", ",", "len", "(", "consoles", ".", "result", ")", ",", "consoles", ".", "dict", "(", ")", ",", ")", "coordinator", "=", "XboxUpdateCoordinator", "(", "hass", ",", "client", ",", "consoles", ")", "await", "coordinator", ".", "async_refresh", "(", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "=", "{", "\"client\"", ":", "XboxLiveClient", "(", "auth", ")", ",", "\"consoles\"", ":", "consoles", ",", "\"coordinator\"", ":", "coordinator", ",", "}", "for", "component", "in", "PLATFORMS", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "entry", ",", "component", ")", ")", "return", "True" ]
[ 74, 0 ]
[ 108, 15 ]
python
en
['en', 'en', 'en']
True
async_unload_entry
(hass: HomeAssistant, entry: ConfigEntry)
Unload a config entry.
Unload a config entry.
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) ) if unload_ok: # Unsub from coordinator updates hass.data[DOMAIN][entry.entry_id]["sensor_unsub"]() hass.data[DOMAIN][entry.entry_id]["binary_sensor_unsub"]() hass.data[DOMAIN].pop(entry.entry_id) return unload_ok
[ "async", "def", "async_unload_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "unload_ok", "=", "all", "(", "await", "asyncio", ".", "gather", "(", "*", "[", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "entry", ",", "component", ")", "for", "component", "in", "PLATFORMS", "]", ")", ")", "if", "unload_ok", ":", "# Unsub from coordinator updates", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "[", "\"sensor_unsub\"", "]", "(", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "[", "\"binary_sensor_unsub\"", "]", "(", ")", "hass", ".", "data", "[", "DOMAIN", "]", ".", "pop", "(", "entry", ".", "entry_id", ")", "return", "unload_ok" ]
[ 111, 0 ]
[ 127, 20 ]
python
en
['en', 'es', 'en']
True
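The unload routine above forwards the entry to every platform concurrently and only tears shared state down once all of them report success. A minimal asyncio sketch of that gather-then-all pattern; the platform list and the unload coroutine are placeholders, not Home Assistant's API:

import asyncio

PLATFORMS = ["binary_sensor", "media_player", "sensor"]   # illustrative

async def unload_platform(platform: str) -> bool:
    # Placeholder for hass.config_entries.async_forward_entry_unload(entry, platform).
    await asyncio.sleep(0)
    return True

async def unload_entry() -> bool:
    results = await asyncio.gather(*[unload_platform(p) for p in PLATFORMS])
    return all(results)   # clean up shared state only when every platform unloaded

assert asyncio.run(unload_entry())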
_build_presence_data
(person: Person)
Build presence data from a person.
Build presence data from a person.
def _build_presence_data(person: Person) -> PresenceData: """Build presence data from a person.""" active_app: Optional[PresenceDetail] = None try: active_app = next( presence for presence in person.presence_details if presence.is_primary ) except StopIteration: pass return PresenceData( xuid=person.xuid, gamertag=person.gamertag, display_pic=person.display_pic_raw, online=person.presence_state == "Online", status=person.presence_text, in_party=person.multiplayer_summary.in_party > 0, in_game=active_app and active_app.is_game, in_multiplayer=person.multiplayer_summary.in_multiplayer_session, gamer_score=person.gamer_score, gold_tenure=person.detail.tenure, account_tier=person.detail.account_tier, )
[ "def", "_build_presence_data", "(", "person", ":", "Person", ")", "->", "PresenceData", ":", "active_app", ":", "Optional", "[", "PresenceDetail", "]", "=", "None", "try", ":", "active_app", "=", "next", "(", "presence", "for", "presence", "in", "person", ".", "presence_details", "if", "presence", ".", "is_primary", ")", "except", "StopIteration", ":", "pass", "return", "PresenceData", "(", "xuid", "=", "person", ".", "xuid", ",", "gamertag", "=", "person", ".", "gamertag", ",", "display_pic", "=", "person", ".", "display_pic_raw", ",", "online", "=", "person", ".", "presence_state", "==", "\"Online\"", ",", "status", "=", "person", ".", "presence_text", ",", "in_party", "=", "person", ".", "multiplayer_summary", ".", "in_party", ">", "0", ",", "in_game", "=", "active_app", "and", "active_app", ".", "is_game", ",", "in_multiplayer", "=", "person", ".", "multiplayer_summary", ".", "in_multiplayer_session", ",", "gamer_score", "=", "person", ".", "gamer_score", ",", "gold_tenure", "=", "person", ".", "detail", ".", "tenure", ",", "account_tier", "=", "person", ".", "detail", ".", "account_tier", ",", ")" ]
[ 246, 0 ]
[ 268, 5 ]
python
en
['en', 'en', 'en']
True
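The builder above picks the first presence detail flagged as primary and quietly tolerates there being none. A stripped-down sketch of that next()-over-a-generator idiom; the dataclass is a stand-in for the xbox library's models:

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Detail:
    title: str
    is_primary: bool

def primary_detail(details: List[Detail]) -> Optional[Detail]:
    # next() returns the first match; StopIteration means no primary app is active.
    try:
        return next(d for d in details if d.is_primary)
    except StopIteration:
        return None

details = [Detail("Home", False), Detail("Halo", True)]
assert primary_detail(details).title == "Halo"
assert primary_detail([]) is None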
XboxUpdateCoordinator.__init__
( self, hass: HomeAssistantType, client: XboxLiveClient, consoles: SmartglassConsoleList, )
Initialize.
Initialize.
def __init__( self, hass: HomeAssistantType, client: XboxLiveClient, consoles: SmartglassConsoleList, ) -> None: """Initialize.""" super().__init__( hass, _LOGGER, name=DOMAIN, update_interval=timedelta(seconds=10), ) self.data: XboxData = XboxData({}, []) self.client: XboxLiveClient = client self.consoles: SmartglassConsoleList = consoles
[ "def", "__init__", "(", "self", ",", "hass", ":", "HomeAssistantType", ",", "client", ":", "XboxLiveClient", ",", "consoles", ":", "SmartglassConsoleList", ",", ")", "->", "None", ":", "super", "(", ")", ".", "__init__", "(", "hass", ",", "_LOGGER", ",", "name", "=", "DOMAIN", ",", "update_interval", "=", "timedelta", "(", "seconds", "=", "10", ")", ",", ")", "self", ".", "data", ":", "XboxData", "=", "XboxData", "(", "{", "}", ",", "[", "]", ")", "self", ".", "client", ":", "XboxLiveClient", "=", "client", "self", ".", "consoles", ":", "SmartglassConsoleList", "=", "consoles" ]
[ 166, 4 ]
[ 181, 55 ]
python
en
['en', 'en', 'it']
False
XboxUpdateCoordinator._async_update_data
(self)
Fetch the latest console status.
Fetch the latest console status.
async def _async_update_data(self) -> XboxData: """Fetch the latest console status.""" # Update Console Status new_console_data: Dict[str, ConsoleData] = {} for console in self.consoles.result: current_state: Optional[ConsoleData] = self.data.consoles.get(console.id) status: SmartglassConsoleStatus = ( await self.client.smartglass.get_console_status(console.id) ) _LOGGER.debug( "%s status: %s", console.name, status.dict(), ) # Setup focus app app_details: Optional[Product] = None if current_state is not None: app_details = current_state.app_details if status.focus_app_aumid: if ( not current_state or status.focus_app_aumid != current_state.status.focus_app_aumid ): app_id = status.focus_app_aumid.split("!")[0] id_type = AlternateIdType.PACKAGE_FAMILY_NAME if app_id in SYSTEM_PFN_ID_MAP: id_type = AlternateIdType.LEGACY_XBOX_PRODUCT_ID app_id = SYSTEM_PFN_ID_MAP[app_id][id_type] catalog_result = ( await self.client.catalog.get_product_from_alternate_id( app_id, id_type ) ) if catalog_result and catalog_result.products: app_details = catalog_result.products[0] else: app_details = None new_console_data[console.id] = ConsoleData( status=status, app_details=app_details ) # Update user presence presence_data = {} batch: PeopleResponse = await self.client.people.get_friends_own_batch( [self.client.xuid] ) own_presence: Person = batch.people[0] presence_data[own_presence.xuid] = _build_presence_data(own_presence) friends: PeopleResponse = await self.client.people.get_friends_own() for friend in friends.people: if not friend.is_favorite: continue presence_data[friend.xuid] = _build_presence_data(friend) return XboxData(new_console_data, presence_data)
[ "async", "def", "_async_update_data", "(", "self", ")", "->", "XboxData", ":", "# Update Console Status", "new_console_data", ":", "Dict", "[", "str", ",", "ConsoleData", "]", "=", "{", "}", "for", "console", "in", "self", ".", "consoles", ".", "result", ":", "current_state", ":", "Optional", "[", "ConsoleData", "]", "=", "self", ".", "data", ".", "consoles", ".", "get", "(", "console", ".", "id", ")", "status", ":", "SmartglassConsoleStatus", "=", "(", "await", "self", ".", "client", ".", "smartglass", ".", "get_console_status", "(", "console", ".", "id", ")", ")", "_LOGGER", ".", "debug", "(", "\"%s status: %s\"", ",", "console", ".", "name", ",", "status", ".", "dict", "(", ")", ",", ")", "# Setup focus app", "app_details", ":", "Optional", "[", "Product", "]", "=", "None", "if", "current_state", "is", "not", "None", ":", "app_details", "=", "current_state", ".", "app_details", "if", "status", ".", "focus_app_aumid", ":", "if", "(", "not", "current_state", "or", "status", ".", "focus_app_aumid", "!=", "current_state", ".", "status", ".", "focus_app_aumid", ")", ":", "app_id", "=", "status", ".", "focus_app_aumid", ".", "split", "(", "\"!\"", ")", "[", "0", "]", "id_type", "=", "AlternateIdType", ".", "PACKAGE_FAMILY_NAME", "if", "app_id", "in", "SYSTEM_PFN_ID_MAP", ":", "id_type", "=", "AlternateIdType", ".", "LEGACY_XBOX_PRODUCT_ID", "app_id", "=", "SYSTEM_PFN_ID_MAP", "[", "app_id", "]", "[", "id_type", "]", "catalog_result", "=", "(", "await", "self", ".", "client", ".", "catalog", ".", "get_product_from_alternate_id", "(", "app_id", ",", "id_type", ")", ")", "if", "catalog_result", "and", "catalog_result", ".", "products", ":", "app_details", "=", "catalog_result", ".", "products", "[", "0", "]", "else", ":", "app_details", "=", "None", "new_console_data", "[", "console", ".", "id", "]", "=", "ConsoleData", "(", "status", "=", "status", ",", "app_details", "=", "app_details", ")", "# Update user presence", "presence_data", "=", "{", "}", "batch", ":", "PeopleResponse", "=", "await", "self", ".", "client", ".", "people", ".", "get_friends_own_batch", "(", "[", "self", ".", "client", ".", "xuid", "]", ")", "own_presence", ":", "Person", "=", "batch", ".", "people", "[", "0", "]", "presence_data", "[", "own_presence", ".", "xuid", "]", "=", "_build_presence_data", "(", "own_presence", ")", "friends", ":", "PeopleResponse", "=", "await", "self", ".", "client", ".", "people", ".", "get_friends_own", "(", ")", "for", "friend", "in", "friends", ".", "people", ":", "if", "not", "friend", ".", "is_favorite", ":", "continue", "presence_data", "[", "friend", ".", "xuid", "]", "=", "_build_presence_data", "(", "friend", ")", "return", "XboxData", "(", "new_console_data", ",", "presence_data", ")" ]
[ 183, 4 ]
[ 243, 56 ]
python
en
['en', 'en', 'en']
True
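One detail worth noting in the update loop above: the catalog is queried again only when a console's focused-app id has changed since the previous poll. A minimal sketch of that change-detection cache; the lookup callable stands in for the catalog client:

class FocusAppCache:
    """Re-query app details only when the focused app id changes."""

    def __init__(self, lookup):
        self._lookup = lookup
        self._app_id = None
        self._details = None

    def get(self, app_id):
        if app_id != self._app_id:
            self._details = self._lookup(app_id) if app_id else None
            self._app_id = app_id
        return self._details

calls = []
cache = FocusAppCache(lambda a: (calls.append(a), {"id": a})[1])
assert cache.get("Game!App") == {"id": "Game!App"}
assert cache.get("Game!App") == {"id": "Game!App"}   # served from cache, no second call
assert len(calls) == 1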
async_get_service
(hass, config, discovery_info=None)
Get the notification service.
Get the notification service.
async def async_get_service(hass, config, discovery_info=None): """Get the notification service.""" if discovery_info is None: return return NetgearNotifyService(hass, discovery_info)
[ "async", "def", "async_get_service", "(", "hass", ",", "config", ",", "discovery_info", "=", "None", ")", ":", "if", "discovery_info", "is", "None", ":", "return", "return", "NetgearNotifyService", "(", "hass", ",", "discovery_info", ")" ]
[ 13, 0 ]
[ 18, 53 ]
python
en
['en', 'en', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the demo light platform.
Set up the demo light platform.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the demo light platform.""" async_add_entities( [ DemoLight( unique_id="light_1", name="Bed Light", state=False, available=True, effect_list=LIGHT_EFFECT_LIST, effect=LIGHT_EFFECT_LIST[0], ), DemoLight( unique_id="light_2", name="Ceiling Lights", state=True, available=True, ct=LIGHT_TEMPS[1], ), DemoLight( unique_id="light_3", name="Kitchen Lights", state=True, available=True, hs_color=LIGHT_COLORS[1], ct=LIGHT_TEMPS[0], ), ] )
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "async_add_entities", "(", "[", "DemoLight", "(", "unique_id", "=", "\"light_1\"", ",", "name", "=", "\"Bed Light\"", ",", "state", "=", "False", ",", "available", "=", "True", ",", "effect_list", "=", "LIGHT_EFFECT_LIST", ",", "effect", "=", "LIGHT_EFFECT_LIST", "[", "0", "]", ",", ")", ",", "DemoLight", "(", "unique_id", "=", "\"light_2\"", ",", "name", "=", "\"Ceiling Lights\"", ",", "state", "=", "True", ",", "available", "=", "True", ",", "ct", "=", "LIGHT_TEMPS", "[", "1", "]", ",", ")", ",", "DemoLight", "(", "unique_id", "=", "\"light_3\"", ",", "name", "=", "\"Kitchen Lights\"", ",", "state", "=", "True", ",", "available", "=", "True", ",", "hs_color", "=", "LIGHT_COLORS", "[", "1", "]", ",", "ct", "=", "LIGHT_TEMPS", "[", "0", "]", ",", ")", ",", "]", ")" ]
[ 30, 0 ]
[ 58, 5 ]
python
en
['en', 'da', 'en']
True
async_setup_entry
(hass, config_entry, async_add_entities)
Set up the Demo config entry.
Set up the Demo config entry.
async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Demo config entry.""" await async_setup_platform(hass, {}, async_add_entities)
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "await", "async_setup_platform", "(", "hass", ",", "{", "}", ",", "async_add_entities", ")" ]
[ 61, 0 ]
[ 63, 60 ]
python
en
['en', 'en', 'en']
True
DemoLight.__init__
( self, unique_id, name, state, available=False, hs_color=None, ct=None, brightness=180, white=200, effect_list=None, effect=None, )
Initialize the light.
Initialize the light.
def __init__( self, unique_id, name, state, available=False, hs_color=None, ct=None, brightness=180, white=200, effect_list=None, effect=None, ): """Initialize the light.""" self._unique_id = unique_id self._name = name self._state = state self._hs_color = hs_color self._ct = ct or random.choice(LIGHT_TEMPS) self._brightness = brightness self._white = white self._features = SUPPORT_DEMO self._effect_list = effect_list self._effect = effect self._available = True self._color_mode = "ct" if ct is not None and hs_color is None else "hs" if self._effect_list is not None: self._features |= SUPPORT_EFFECT
[ "def", "__init__", "(", "self", ",", "unique_id", ",", "name", ",", "state", ",", "available", "=", "False", ",", "hs_color", "=", "None", ",", "ct", "=", "None", ",", "brightness", "=", "180", ",", "white", "=", "200", ",", "effect_list", "=", "None", ",", "effect", "=", "None", ",", ")", ":", "self", ".", "_unique_id", "=", "unique_id", "self", ".", "_name", "=", "name", "self", ".", "_state", "=", "state", "self", ".", "_hs_color", "=", "hs_color", "self", ".", "_ct", "=", "ct", "or", "random", ".", "choice", "(", "LIGHT_TEMPS", ")", "self", ".", "_brightness", "=", "brightness", "self", ".", "_white", "=", "white", "self", ".", "_features", "=", "SUPPORT_DEMO", "self", ".", "_effect_list", "=", "effect_list", "self", ".", "_effect", "=", "effect", "self", ".", "_available", "=", "True", "self", ".", "_color_mode", "=", "\"ct\"", "if", "ct", "is", "not", "None", "and", "hs_color", "is", "None", "else", "\"hs\"", "if", "self", ".", "_effect_list", "is", "not", "None", ":", "self", ".", "_features", "|=", "SUPPORT_EFFECT" ]
[ 69, 4 ]
[ 96, 44 ]
python
en
['en', 'en', 'en']
True
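The constructor above derives the light's initial color mode from its arguments: color temperature wins only when a temperature is supplied and no hue/saturation color is. The rule in isolation:

def initial_color_mode(ct, hs_color) -> str:
    # "ct" only when a temperature is given and no hs color is; otherwise "hs".
    return "ct" if ct is not None and hs_color is None else "hs"

assert initial_color_mode(350, None) == "ct"
assert initial_color_mode(None, (30.0, 100.0)) == "hs"
assert initial_color_mode(350, (30.0, 100.0)) == "hs"   # hs color takes precedence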
DemoLight.device_info
(self)
Return device info.
Return device info.
def device_info(self): """Return device info.""" return { "identifiers": { # Serial numbers are unique identifiers within a specific domain (DOMAIN, self.unique_id) }, "name": self.name, }
[ "def", "device_info", "(", "self", ")", ":", "return", "{", "\"identifiers\"", ":", "{", "# Serial numbers are unique identifiers within a specific domain", "(", "DOMAIN", ",", "self", ".", "unique_id", ")", "}", ",", "\"name\"", ":", "self", ".", "name", ",", "}" ]
[ 99, 4 ]
[ 107, 9 ]
python
en
['es', 'hr', 'en']
False
DemoLight.should_poll
(self)
No polling needed for a demo light.
No polling needed for a demo light.
def should_poll(self) -> bool: """No polling needed for a demo light.""" return False
[ "def", "should_poll", "(", "self", ")", "->", "bool", ":", "return", "False" ]
[ 110, 4 ]
[ 112, 20 ]
python
en
['en', 'en', 'en']
True
DemoLight.name
(self)
Return the name of the light if any.
Return the name of the light if any.
def name(self) -> str: """Return the name of the light if any.""" return self._name
[ "def", "name", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_name" ]
[ 115, 4 ]
[ 117, 25 ]
python
en
['en', 'en', 'en']
True
DemoLight.unique_id
(self)
Return unique ID for light.
Return unique ID for light.
def unique_id(self): """Return unique ID for light.""" return self._unique_id
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_unique_id" ]
[ 120, 4 ]
[ 122, 30 ]
python
en
['fr', 'la', 'en']
False
DemoLight.available
(self)
Return availability.
Return availability.
def available(self) -> bool: """Return availability.""" # This demo light is always available, but well-behaving components # should implement this to inform Home Assistant accordingly. return self._available
[ "def", "available", "(", "self", ")", "->", "bool", ":", "# This demo light is always available, but well-behaving components", "# should implement this to inform Home Assistant accordingly.", "return", "self", ".", "_available" ]
[ 125, 4 ]
[ 129, 30 ]
python
en
['fr', 'ga', 'en']
False
DemoLight.brightness
(self)
Return the brightness of this light between 0..255.
Return the brightness of this light between 0..255.
def brightness(self) -> int: """Return the brightness of this light between 0..255.""" return self._brightness
[ "def", "brightness", "(", "self", ")", "->", "int", ":", "return", "self", ".", "_brightness" ]
[ 132, 4 ]
[ 134, 31 ]
python
en
['en', 'en', 'en']
True
DemoLight.hs_color
(self)
Return the hs color value.
Return the hs color value.
def hs_color(self) -> tuple: """Return the hs color value.""" if self._color_mode == "hs": return self._hs_color return None
[ "def", "hs_color", "(", "self", ")", "->", "tuple", ":", "if", "self", ".", "_color_mode", "==", "\"hs\"", ":", "return", "self", ".", "_hs_color", "return", "None" ]
[ 137, 4 ]
[ 141, 19 ]
python
en
['en', 'en', 'en']
True
DemoLight.color_temp
(self)
Return the CT color temperature.
Return the CT color temperature.
def color_temp(self) -> int: """Return the CT color temperature.""" if self._color_mode == "ct": return self._ct return None
[ "def", "color_temp", "(", "self", ")", "->", "int", ":", "if", "self", ".", "_color_mode", "==", "\"ct\"", ":", "return", "self", ".", "_ct", "return", "None" ]
[ 144, 4 ]
[ 148, 19 ]
python
en
['en', 'la', 'en']
True
DemoLight.white_value
(self)
Return the white value of this light between 0..255.
Return the white value of this light between 0..255.
def white_value(self) -> int: """Return the white value of this light between 0..255.""" return self._white
[ "def", "white_value", "(", "self", ")", "->", "int", ":", "return", "self", ".", "_white" ]
[ 151, 4 ]
[ 153, 26 ]
python
en
['en', 'en', 'en']
True
DemoLight.effect_list
(self)
Return the list of supported effects.
Return the list of supported effects.
def effect_list(self) -> list: """Return the list of supported effects.""" return self._effect_list
[ "def", "effect_list", "(", "self", ")", "->", "list", ":", "return", "self", ".", "_effect_list" ]
[ 156, 4 ]
[ 158, 32 ]
python
en
['en', 'en', 'en']
True
DemoLight.effect
(self)
Return the current effect.
Return the current effect.
def effect(self) -> str: """Return the current effect.""" return self._effect
[ "def", "effect", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_effect" ]
[ 161, 4 ]
[ 163, 27 ]
python
en
['en', 'en', 'en']
True
DemoLight.is_on
(self)
Return true if light is on.
Return true if light is on.
def is_on(self) -> bool: """Return true if light is on.""" return self._state
[ "def", "is_on", "(", "self", ")", "->", "bool", ":", "return", "self", ".", "_state" ]
[ 166, 4 ]
[ 168, 26 ]
python
en
['en', 'et', 'en']
True
DemoLight.supported_features
(self)
Flag supported features.
Flag supported features.
def supported_features(self) -> int: """Flag supported features.""" return self._features
[ "def", "supported_features", "(", "self", ")", "->", "int", ":", "return", "self", ".", "_features" ]
[ 171, 4 ]
[ 173, 29 ]
python
en
['da', 'en', 'en']
True
DemoLight.async_turn_on
(self, **kwargs)
Turn the light on.
Turn the light on.
async def async_turn_on(self, **kwargs) -> None: """Turn the light on.""" self._state = True if ATTR_HS_COLOR in kwargs: self._color_mode = "hs" self._hs_color = kwargs[ATTR_HS_COLOR] if ATTR_COLOR_TEMP in kwargs: self._color_mode = "ct" self._ct = kwargs[ATTR_COLOR_TEMP] if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] if ATTR_WHITE_VALUE in kwargs: self._white = kwargs[ATTR_WHITE_VALUE] if ATTR_EFFECT in kwargs: self._effect = kwargs[ATTR_EFFECT] # As we have disabled polling, we need to inform # Home Assistant about updates in our state ourselves. self.async_write_ha_state()
[ "async", "def", "async_turn_on", "(", "self", ",", "*", "*", "kwargs", ")", "->", "None", ":", "self", ".", "_state", "=", "True", "if", "ATTR_HS_COLOR", "in", "kwargs", ":", "self", ".", "_color_mode", "=", "\"hs\"", "self", ".", "_hs_color", "=", "kwargs", "[", "ATTR_HS_COLOR", "]", "if", "ATTR_COLOR_TEMP", "in", "kwargs", ":", "self", ".", "_color_mode", "=", "\"ct\"", "self", ".", "_ct", "=", "kwargs", "[", "ATTR_COLOR_TEMP", "]", "if", "ATTR_BRIGHTNESS", "in", "kwargs", ":", "self", ".", "_brightness", "=", "kwargs", "[", "ATTR_BRIGHTNESS", "]", "if", "ATTR_WHITE_VALUE", "in", "kwargs", ":", "self", ".", "_white", "=", "kwargs", "[", "ATTR_WHITE_VALUE", "]", "if", "ATTR_EFFECT", "in", "kwargs", ":", "self", ".", "_effect", "=", "kwargs", "[", "ATTR_EFFECT", "]", "# As we have disabled polling, we need to inform", "# Home Assistant about updates in our state ourselves.", "self", ".", "async_write_ha_state", "(", ")" ]
[ 175, 4 ]
[ 198, 35 ]
python
en
['en', 'et', 'en']
True
DemoLight.async_turn_off
(self, **kwargs)
Turn the light off.
Turn the light off.
async def async_turn_off(self, **kwargs) -> None: """Turn the light off.""" self._state = False # As we have disabled polling, we need to inform # Home Assistant about updates in our state ourselves. self.async_write_ha_state()
[ "async", "def", "async_turn_off", "(", "self", ",", "*", "*", "kwargs", ")", "->", "None", ":", "self", ".", "_state", "=", "False", "# As we have disabled polling, we need to inform", "# Home Assistant about updates in our state ourselves.", "self", ".", "async_write_ha_state", "(", ")" ]
[ 200, 4 ]
[ 206, 35 ]
python
en
['en', 'zh', 'en']
True
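Both handlers above follow the same pattern: flip the state, update only the attributes present in the service call's kwargs, then push the new state manually because polling is disabled. A self-contained sketch with plain strings standing in for Home Assistant's ATTR_* constants:

ATTR_BRIGHTNESS = "brightness"    # stand-ins for homeassistant.components.light constants
ATTR_HS_COLOR = "hs_color"

class SketchLight:
    def __init__(self):
        self.state = False
        self.brightness = 180
        self.hs_color = None

    def turn_on(self, **kwargs):
        self.state = True
        if ATTR_HS_COLOR in kwargs:                    # touch only what the call provides
            self.hs_color = kwargs[ATTR_HS_COLOR]
        if ATTR_BRIGHTNESS in kwargs:
            self.brightness = kwargs[ATTR_BRIGHTNESS]

light = SketchLight()
light.turn_on(brightness=255)
assert light.state and light.brightness == 255 and light.hs_color is None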
valid_isy_commands
(value: Any)
Validate the command is valid.
Validate the command is valid.
def valid_isy_commands(value: Any) -> str: """Validate the command is valid.""" value = str(value).upper() if value in COMMAND_FRIENDLY_NAME: return value raise vol.Invalid("Invalid ISY Command.")
[ "def", "valid_isy_commands", "(", "value", ":", "Any", ")", "->", "str", ":", "value", "=", "str", "(", "value", ")", ".", "upper", "(", ")", "if", "value", "in", "COMMAND_FRIENDLY_NAME", ":", "return", "value", "raise", "vol", ".", "Invalid", "(", "\"Invalid ISY Command.\"", ")" ]
[ 86, 0 ]
[ 91, 45 ]
python
en
['en', 'en', 'en']
True
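The validator above is designed to sit inside a voluptuous schema: it upper-cases the input and rejects anything outside the known command table. A self-contained sketch with a toy command table standing in for pyisy's COMMAND_FRIENDLY_NAME:

from typing import Any
import voluptuous as vol

COMMAND_FRIENDLY_NAME = {"ON": "On", "OFF": "Off"}   # illustrative subset

def valid_isy_commands(value: Any) -> str:
    value = str(value).upper()
    if value in COMMAND_FRIENDLY_NAME:
        return value
    raise vol.Invalid("Invalid ISY Command.")

schema = vol.Schema({"command": valid_isy_commands})
assert schema({"command": "on"}) == {"command": "ON"}   # normalised to upper case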
async_setup_services
(hass: HomeAssistantType)
Create and register services for the ISY integration.
Create and register services for the ISY integration.
def async_setup_services(hass: HomeAssistantType): """Create and register services for the ISY integration.""" existing_services = hass.services.async_services().get(DOMAIN) if existing_services and any( service in INTEGRATION_SERVICES for service in existing_services ): # Integration-level services have already been added. Return. return async def async_system_query_service_handler(service): """Handle a system query service call.""" address = service.data.get(CONF_ADDRESS) isy_name = service.data.get(CONF_ISY) for config_entry_id in hass.data[DOMAIN]: isy = hass.data[DOMAIN][config_entry_id][ISY994_ISY] if isy_name and not isy_name == isy.configuration["name"]: continue # If an address is provided, make sure we query the correct ISY. # Otherwise, query the whole system on all ISY's connected. if address and isy.nodes.get_by_id(address) is not None: _LOGGER.debug( "Requesting query of device %s on ISY %s", address, isy.configuration["uuid"], ) await hass.async_add_executor_job(isy.query, address) return _LOGGER.debug( "Requesting system query of ISY %s", isy.configuration["uuid"] ) await hass.async_add_executor_job(isy.query) async def async_run_network_resource_service_handler(service): """Handle a network resource service call.""" address = service.data.get(CONF_ADDRESS) name = service.data.get(CONF_NAME) isy_name = service.data.get(CONF_ISY) for config_entry_id in hass.data[DOMAIN]: isy = hass.data[DOMAIN][config_entry_id][ISY994_ISY] if isy_name and not isy_name == isy.configuration["name"]: continue if not hasattr(isy, "networking") or isy.networking is None: continue command = None if address: command = isy.networking.get_by_id(address) if name: command = isy.networking.get_by_name(name) if command is not None: await hass.async_add_executor_job(command.run) return _LOGGER.error( "Could not run network resource command. Not found or enabled on the ISY" ) async def async_send_program_command_service_handler(service): """Handle a send program command service call.""" address = service.data.get(CONF_ADDRESS) name = service.data.get(CONF_NAME) command = service.data.get(CONF_COMMAND) isy_name = service.data.get(CONF_ISY) for config_entry_id in hass.data[DOMAIN]: isy = hass.data[DOMAIN][config_entry_id][ISY994_ISY] if isy_name and not isy_name == isy.configuration["name"]: continue program = None if address: program = isy.programs.get_by_id(address) if name: program = isy.programs.get_by_name(name) if program is not None: await hass.async_add_executor_job(getattr(program, command)) return _LOGGER.error("Could not send program command. Not found or enabled on the ISY") async def async_set_variable_service_handler(service): """Handle a set variable service call.""" address = service.data.get(CONF_ADDRESS) vtype = service.data.get(CONF_TYPE) name = service.data.get(CONF_NAME) value = service.data.get(CONF_VALUE) init = service.data.get(CONF_INIT, False) isy_name = service.data.get(CONF_ISY) for config_entry_id in hass.data[DOMAIN]: isy = hass.data[DOMAIN][config_entry_id][ISY994_ISY] if isy_name and not isy_name == isy.configuration["name"]: continue variable = None if name: variable = isy.variables.get_by_name(name) if address and vtype: variable = isy.variables.vobjs[vtype].get(address) if variable is not None: await hass.async_add_executor_job(variable.set_value, value, init) return _LOGGER.error("Could not set variable value. Not found or enabled on the ISY") async def async_cleanup_registry_entries(service) -> None: """Remove extra entities that are no longer part of the integration.""" entity_registry = await er.async_get_registry(hass) config_ids = [] current_unique_ids = [] for config_entry_id in hass.data[DOMAIN]: entries_for_this_config = er.async_entries_for_config_entry( entity_registry, config_entry_id ) config_ids.extend( [ (entity.unique_id, entity.entity_id) for entity in entries_for_this_config ] ) hass_isy_data = hass.data[DOMAIN][config_entry_id] uuid = hass_isy_data[ISY994_ISY].configuration["uuid"] for platform in SUPPORTED_PLATFORMS: for node in hass_isy_data[ISY994_NODES][platform]: if hasattr(node, "address"): current_unique_ids.append(f"{uuid}_{node.address}") for platform in SUPPORTED_PROGRAM_PLATFORMS: for _, node, _ in hass_isy_data[ISY994_PROGRAMS][platform]: if hasattr(node, "address"): current_unique_ids.append(f"{uuid}_{node.address}") for node in hass_isy_data[ISY994_VARIABLES]: if hasattr(node, "address"): current_unique_ids.append(f"{uuid}_{node.address}") extra_entities = [ entity_id for unique_id, entity_id in config_ids if unique_id not in current_unique_ids ] for entity_id in extra_entities: if entity_registry.async_is_registered(entity_id): entity_registry.async_remove(entity_id) _LOGGER.debug( "Cleaning up ISY994 Entities and devices: Config Entries: %s, Current Entries: %s, " "Extra Entries Removed: %s", len(config_ids), len(current_unique_ids), len(extra_entities), ) async def async_reload_config_entries(service) -> None: """Trigger a reload of all ISY994 config entries.""" for config_entry_id in hass.data[DOMAIN]: hass.async_create_task(hass.config_entries.async_reload(config_entry_id)) hass.services.async_register( domain=DOMAIN, service=SERVICE_SYSTEM_QUERY, service_func=async_system_query_service_handler, schema=SERVICE_SYSTEM_QUERY_SCHEMA, ) hass.services.async_register( domain=DOMAIN, service=SERVICE_RUN_NETWORK_RESOURCE, service_func=async_run_network_resource_service_handler, schema=SERVICE_RUN_NETWORK_RESOURCE_SCHEMA, ) hass.services.async_register( domain=DOMAIN, service=SERVICE_SEND_PROGRAM_COMMAND, service_func=async_send_program_command_service_handler, schema=SERVICE_SEND_PROGRAM_COMMAND_SCHEMA, ) hass.services.async_register( domain=DOMAIN, service=SERVICE_SET_VARIABLE, service_func=async_set_variable_service_handler, schema=SERVICE_SET_VARIABLE_SCHEMA, ) hass.services.async_register( domain=DOMAIN, service=SERVICE_CLEANUP, service_func=async_cleanup_registry_entries, ) hass.services.async_register( domain=DOMAIN, service=SERVICE_RELOAD, service_func=async_reload_config_entries ) async def _async_send_raw_node_command(call: ServiceCall): await hass.helpers.service.entity_service_call( async_get_platforms(hass, DOMAIN), SERVICE_SEND_RAW_NODE_COMMAND, call ) hass.services.async_register( domain=DOMAIN, service=SERVICE_SEND_RAW_NODE_COMMAND, schema=cv.make_entity_service_schema(SERVICE_SEND_RAW_NODE_COMMAND_SCHEMA), service_func=_async_send_raw_node_command, ) async def _async_send_node_command(call: ServiceCall): await hass.helpers.service.entity_service_call( async_get_platforms(hass, DOMAIN), SERVICE_SEND_NODE_COMMAND, call ) hass.services.async_register( domain=DOMAIN, service=SERVICE_SEND_NODE_COMMAND, schema=cv.make_entity_service_schema(SERVICE_SEND_NODE_COMMAND_SCHEMA), service_func=_async_send_node_command, )
[ "def", "async_setup_services", "(", "hass", ":", "HomeAssistantType", ")", ":", "existing_services", "=", "hass", ".", "services", ".", "async_services", "(", ")", ".", "get", "(", "DOMAIN", ")", "if", "existing_services", "and", "any", "(", "service", "in", "INTEGRATION_SERVICES", "for", "service", "in", "existing_services", ")", ":", "# Integration-level services have already been added. Return.", "return", "async", "def", "async_system_query_service_handler", "(", "service", ")", ":", "\"\"\"Handle a system query service call.\"\"\"", "address", "=", "service", ".", "data", ".", "get", "(", "CONF_ADDRESS", ")", "isy_name", "=", "service", ".", "data", ".", "get", "(", "CONF_ISY", ")", "for", "config_entry_id", "in", "hass", ".", "data", "[", "DOMAIN", "]", ":", "isy", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry_id", "]", "[", "ISY994_ISY", "]", "if", "isy_name", "and", "not", "isy_name", "==", "isy", ".", "configuration", "[", "\"name\"", "]", ":", "continue", "# If an address is provided, make sure we query the correct ISY.", "# Otherwise, query the whole system on all ISY's connected.", "if", "address", "and", "isy", ".", "nodes", ".", "get_by_id", "(", "address", ")", "is", "not", "None", ":", "_LOGGER", ".", "debug", "(", "\"Requesting query of device %s on ISY %s\"", ",", "address", ",", "isy", ".", "configuration", "[", "\"uuid\"", "]", ",", ")", "await", "hass", ".", "async_add_executor_job", "(", "isy", ".", "query", ",", "address", ")", "return", "_LOGGER", ".", "debug", "(", "\"Requesting system query of ISY %s\"", ",", "isy", ".", "configuration", "[", "\"uuid\"", "]", ")", "await", "hass", ".", "async_add_executor_job", "(", "isy", ".", "query", ")", "async", "def", "async_run_network_resource_service_handler", "(", "service", ")", ":", "\"\"\"Handle a network resource service call.\"\"\"", "address", "=", "service", ".", "data", ".", "get", "(", "CONF_ADDRESS", ")", "name", "=", "service", ".", "data", ".", "get", "(", "CONF_NAME", ")", "isy_name", "=", "service", ".", "data", ".", "get", "(", "CONF_ISY", ")", "for", "config_entry_id", "in", "hass", ".", "data", "[", "DOMAIN", "]", ":", "isy", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry_id", "]", "[", "ISY994_ISY", "]", "if", "isy_name", "and", "not", "isy_name", "==", "isy", ".", "configuration", "[", "\"name\"", "]", ":", "continue", "if", "not", "hasattr", "(", "isy", ",", "\"networking\"", ")", "or", "isy", ".", "networking", "is", "None", ":", "continue", "command", "=", "None", "if", "address", ":", "command", "=", "isy", ".", "networking", ".", "get_by_id", "(", "address", ")", "if", "name", ":", "command", "=", "isy", ".", "networking", ".", "get_by_name", "(", "name", ")", "if", "command", "is", "not", "None", ":", "await", "hass", ".", "async_add_executor_job", "(", "command", ".", "run", ")", "return", "_LOGGER", ".", "error", "(", "\"Could not run network resource command. 
Not found or enabled on the ISY\"", ")", "async", "def", "async_send_program_command_service_handler", "(", "service", ")", ":", "\"\"\"Handle a send program command service call.\"\"\"", "address", "=", "service", ".", "data", ".", "get", "(", "CONF_ADDRESS", ")", "name", "=", "service", ".", "data", ".", "get", "(", "CONF_NAME", ")", "command", "=", "service", ".", "data", ".", "get", "(", "CONF_COMMAND", ")", "isy_name", "=", "service", ".", "data", ".", "get", "(", "CONF_ISY", ")", "for", "config_entry_id", "in", "hass", ".", "data", "[", "DOMAIN", "]", ":", "isy", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry_id", "]", "[", "ISY994_ISY", "]", "if", "isy_name", "and", "not", "isy_name", "==", "isy", ".", "configuration", "[", "\"name\"", "]", ":", "continue", "program", "=", "None", "if", "address", ":", "program", "=", "isy", ".", "programs", ".", "get_by_id", "(", "address", ")", "if", "name", ":", "program", "=", "isy", ".", "programs", ".", "get_by_name", "(", "name", ")", "if", "program", "is", "not", "None", ":", "await", "hass", ".", "async_add_executor_job", "(", "getattr", "(", "program", ",", "command", ")", ")", "return", "_LOGGER", ".", "error", "(", "\"Could not send program command. Not found or enabled on the ISY\"", ")", "async", "def", "async_set_variable_service_handler", "(", "service", ")", ":", "\"\"\"Handle a set variable service call.\"\"\"", "address", "=", "service", ".", "data", ".", "get", "(", "CONF_ADDRESS", ")", "vtype", "=", "service", ".", "data", ".", "get", "(", "CONF_TYPE", ")", "name", "=", "service", ".", "data", ".", "get", "(", "CONF_NAME", ")", "value", "=", "service", ".", "data", ".", "get", "(", "CONF_VALUE", ")", "init", "=", "service", ".", "data", ".", "get", "(", "CONF_INIT", ",", "False", ")", "isy_name", "=", "service", ".", "data", ".", "get", "(", "CONF_ISY", ")", "for", "config_entry_id", "in", "hass", ".", "data", "[", "DOMAIN", "]", ":", "isy", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry_id", "]", "[", "ISY994_ISY", "]", "if", "isy_name", "and", "not", "isy_name", "==", "isy", ".", "configuration", "[", "\"name\"", "]", ":", "continue", "variable", "=", "None", "if", "name", ":", "variable", "=", "isy", ".", "variables", ".", "get_by_name", "(", "name", ")", "if", "address", "and", "vtype", ":", "variable", "=", "isy", ".", "variables", ".", "vobjs", "[", "vtype", "]", ".", "get", "(", "address", ")", "if", "variable", "is", "not", "None", ":", "await", "hass", ".", "async_add_executor_job", "(", "variable", ".", "set_value", ",", "value", ",", "init", ")", "return", "_LOGGER", ".", "error", "(", "\"Could not set variable value. 
Not found or enabled on the ISY\"", ")", "async", "def", "async_cleanup_registry_entries", "(", "service", ")", "->", "None", ":", "\"\"\"Remove extra entities that are no longer part of the integration.\"\"\"", "entity_registry", "=", "await", "er", ".", "async_get_registry", "(", "hass", ")", "config_ids", "=", "[", "]", "current_unique_ids", "=", "[", "]", "for", "config_entry_id", "in", "hass", ".", "data", "[", "DOMAIN", "]", ":", "entries_for_this_config", "=", "er", ".", "async_entries_for_config_entry", "(", "entity_registry", ",", "config_entry_id", ")", "config_ids", ".", "extend", "(", "[", "(", "entity", ".", "unique_id", ",", "entity", ".", "entity_id", ")", "for", "entity", "in", "entries_for_this_config", "]", ")", "hass_isy_data", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry_id", "]", "uuid", "=", "hass_isy_data", "[", "ISY994_ISY", "]", ".", "configuration", "[", "\"uuid\"", "]", "for", "platform", "in", "SUPPORTED_PLATFORMS", ":", "for", "node", "in", "hass_isy_data", "[", "ISY994_NODES", "]", "[", "platform", "]", ":", "if", "hasattr", "(", "node", ",", "\"address\"", ")", ":", "current_unique_ids", ".", "append", "(", "f\"{uuid}_{node.address}\"", ")", "for", "platform", "in", "SUPPORTED_PROGRAM_PLATFORMS", ":", "for", "_", ",", "node", ",", "_", "in", "hass_isy_data", "[", "ISY994_PROGRAMS", "]", "[", "platform", "]", ":", "if", "hasattr", "(", "node", ",", "\"address\"", ")", ":", "current_unique_ids", ".", "append", "(", "f\"{uuid}_{node.address}\"", ")", "for", "node", "in", "hass_isy_data", "[", "ISY994_VARIABLES", "]", ":", "if", "hasattr", "(", "node", ",", "\"address\"", ")", ":", "current_unique_ids", ".", "append", "(", "f\"{uuid}_{node.address}\"", ")", "extra_entities", "=", "[", "entity_id", "for", "unique_id", ",", "entity_id", "in", "config_ids", "if", "unique_id", "not", "in", "current_unique_ids", "]", "for", "entity_id", "in", "extra_entities", ":", "if", "entity_registry", ".", "async_is_registered", "(", "entity_id", ")", ":", "entity_registry", ".", "async_remove", "(", "entity_id", ")", "_LOGGER", ".", "debug", "(", "\"Cleaning up ISY994 Entities and devices: Config Entries: %s, Current Entries: %s, \"", "\"Extra Entries Removed: %s\"", ",", "len", "(", "config_ids", ")", ",", "len", "(", "current_unique_ids", ")", ",", "len", "(", "extra_entities", ")", ",", ")", "async", "def", "async_reload_config_entries", "(", "service", ")", "->", "None", ":", "\"\"\"Trigger a reload of all ISY994 config entries.\"\"\"", "for", "config_entry_id", "in", "hass", ".", "data", "[", "DOMAIN", "]", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_reload", "(", "config_entry_id", ")", ")", "hass", ".", "services", ".", "async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SYSTEM_QUERY", ",", "service_func", "=", "async_system_query_service_handler", ",", "schema", "=", "SERVICE_SYSTEM_QUERY_SCHEMA", ",", ")", "hass", ".", "services", ".", "async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_RUN_NETWORK_RESOURCE", ",", "service_func", "=", "async_run_network_resource_service_handler", ",", "schema", "=", "SERVICE_RUN_NETWORK_RESOURCE_SCHEMA", ",", ")", "hass", ".", "services", ".", "async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SEND_PROGRAM_COMMAND", ",", "service_func", "=", "async_send_program_command_service_handler", ",", "schema", "=", "SERVICE_SEND_PROGRAM_COMMAND_SCHEMA", ",", ")", "hass", ".", "services", ".", 
"async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SET_VARIABLE", ",", "service_func", "=", "async_set_variable_service_handler", ",", "schema", "=", "SERVICE_SET_VARIABLE_SCHEMA", ",", ")", "hass", ".", "services", ".", "async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_CLEANUP", ",", "service_func", "=", "async_cleanup_registry_entries", ",", ")", "hass", ".", "services", ".", "async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_RELOAD", ",", "service_func", "=", "async_reload_config_entries", ")", "async", "def", "_async_send_raw_node_command", "(", "call", ":", "ServiceCall", ")", ":", "await", "hass", ".", "helpers", ".", "service", ".", "entity_service_call", "(", "async_get_platforms", "(", "hass", ",", "DOMAIN", ")", ",", "SERVICE_SEND_RAW_NODE_COMMAND", ",", "call", ")", "hass", ".", "services", ".", "async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SEND_RAW_NODE_COMMAND", ",", "schema", "=", "cv", ".", "make_entity_service_schema", "(", "SERVICE_SEND_RAW_NODE_COMMAND_SCHEMA", ")", ",", "service_func", "=", "_async_send_raw_node_command", ",", ")", "async", "def", "_async_send_node_command", "(", "call", ":", "ServiceCall", ")", ":", "await", "hass", ".", "helpers", ".", "service", ".", "entity_service_call", "(", "async_get_platforms", "(", "hass", ",", "DOMAIN", ")", ",", "SERVICE_SEND_NODE_COMMAND", ",", "call", ")", "hass", ".", "services", ".", "async_register", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SEND_NODE_COMMAND", ",", "schema", "=", "cv", ".", "make_entity_service_schema", "(", "SERVICE_SEND_NODE_COMMAND_SCHEMA", ")", ",", "service_func", "=", "_async_send_node_command", ",", ")" ]
[ 160, 0 ]
[ 378, 5 ]
python
en
['en', 'en', 'en']
True
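A usage sketch for the service registrations visible above. This is a minimal, hypothetical example: "isy994" and "cleanup_entities" are assumed string values for the DOMAIN and SERVICE_CLEANUP constants, whose actual values are not shown in the record.

from homeassistant.core import HomeAssistant

async def trigger_isy_cleanup(hass: HomeAssistant) -> None:
    # Invoke the registry-cleanup service registered above; that handler
    # takes no fields, so an empty payload is passed.
    # "isy994" / "cleanup_entities" are assumed constant values.
    await hass.services.async_call("isy994", "cleanup_entities", {}, blocking=True)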
async_unload_services
(hass: HomeAssistantType)
Unload services for the ISY integration.
Unload services for the ISY integration.
def async_unload_services(hass: HomeAssistantType):
    """Unload services for the ISY integration."""
    if hass.data[DOMAIN]:
        # There is still another config entry for this domain, don't remove services.
        return

    existing_services = hass.services.async_services().get(DOMAIN)
    if not existing_services or not any(
        service in INTEGRATION_SERVICES for service in existing_services
    ):
        return

    _LOGGER.info("Unloading ISY994 Services")
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SYSTEM_QUERY)
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_RUN_NETWORK_RESOURCE)
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_PROGRAM_COMMAND)
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SET_VARIABLE)
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_CLEANUP)
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_RELOAD)
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_RAW_NODE_COMMAND)
    hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_NODE_COMMAND)
[ "def", "async_unload_services", "(", "hass", ":", "HomeAssistantType", ")", ":", "if", "hass", ".", "data", "[", "DOMAIN", "]", ":", "# There is still another config entry for this domain, don't remove services.", "return", "existing_services", "=", "hass", ".", "services", ".", "async_services", "(", ")", ".", "get", "(", "DOMAIN", ")", "if", "not", "existing_services", "or", "not", "any", "(", "service", "in", "INTEGRATION_SERVICES", "for", "service", "in", "existing_services", ")", ":", "return", "_LOGGER", ".", "info", "(", "\"Unloading ISY994 Services\"", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SYSTEM_QUERY", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_RUN_NETWORK_RESOURCE", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SEND_PROGRAM_COMMAND", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SET_VARIABLE", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_CLEANUP", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_RELOAD", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SEND_RAW_NODE_COMMAND", ")", "hass", ".", "services", ".", "async_remove", "(", "domain", "=", "DOMAIN", ",", "service", "=", "SERVICE_SEND_NODE_COMMAND", ")" ]
[ 382, 0 ]
[ 402, 80 ]
python
en
['en', 'en', 'en']
True
async_setup_light_services
(hass: HomeAssistantType)
Create device-specific services for the ISY Integration.
Create device-specific services for the ISY Integration.
def async_setup_light_services(hass: HomeAssistantType):
    """Create device-specific services for the ISY Integration."""
    platform = entity_platform.current_platform.get()

    platform.async_register_entity_service(
        SERVICE_SET_ON_LEVEL, SERVICE_SET_VALUE_SCHEMA, SERVICE_SET_ON_LEVEL
    )
    platform.async_register_entity_service(
        SERVICE_SET_RAMP_RATE, SERVICE_SET_RAMP_RATE_SCHEMA, SERVICE_SET_RAMP_RATE
    )
[ "def", "async_setup_light_services", "(", "hass", ":", "HomeAssistantType", ")", ":", "platform", "=", "entity_platform", ".", "current_platform", ".", "get", "(", ")", "platform", ".", "async_register_entity_service", "(", "SERVICE_SET_ON_LEVEL", ",", "SERVICE_SET_VALUE_SCHEMA", ",", "SERVICE_SET_ON_LEVEL", ")", "platform", ".", "async_register_entity_service", "(", "SERVICE_SET_RAMP_RATE", ",", "SERVICE_SET_RAMP_RATE_SCHEMA", ",", "SERVICE_SET_RAMP_RATE", ")" ]
[ 406, 0 ]
[ 415, 5 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the Ted5000 sensor.
Set up the Ted5000 sensor.
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Ted5000 sensor."""
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    name = config.get(CONF_NAME)
    url = f"http://{host}:{port}/api/LiveData.xml"

    gateway = Ted5000Gateway(url)

    # Get MTU information to create the sensors.
    gateway.update()

    dev = []
    for mtu in gateway.data:
        dev.append(Ted5000Sensor(gateway, name, mtu, POWER_WATT))
        dev.append(Ted5000Sensor(gateway, name, mtu, VOLT))

    add_entities(dev)
    return True
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "host", "=", "config", ".", "get", "(", "CONF_HOST", ")", "port", "=", "config", ".", "get", "(", "CONF_PORT", ")", "name", "=", "config", ".", "get", "(", "CONF_NAME", ")", "url", "=", "f\"http://{host}:{port}/api/LiveData.xml\"", "gateway", "=", "Ted5000Gateway", "(", "url", ")", "# Get MUT information to create the sensors.", "gateway", ".", "update", "(", ")", "dev", "=", "[", "]", "for", "mtu", "in", "gateway", ".", "data", ":", "dev", ".", "append", "(", "Ted5000Sensor", "(", "gateway", ",", "name", ",", "mtu", ",", "POWER_WATT", ")", ")", "dev", ".", "append", "(", "Ted5000Sensor", "(", "gateway", ",", "name", ",", "mtu", ",", "VOLT", ")", ")", "add_entities", "(", "dev", ")", "return", "True" ]
[ 30, 0 ]
[ 48, 15 ]
python
en
['en', 'da', 'en']
True
Ted5000Sensor.__init__
(self, gateway, name, mtu, unit)
Initialize the sensor.
Initialize the sensor.
def __init__(self, gateway, name, mtu, unit):
    """Initialize the sensor."""
    units = {POWER_WATT: "power", VOLT: "voltage"}
    self._gateway = gateway
    self._name = "{} mtu{} {}".format(name, mtu, units[unit])
    self._mtu = mtu
    self._unit = unit
    self.update()
[ "def", "__init__", "(", "self", ",", "gateway", ",", "name", ",", "mtu", ",", "unit", ")", ":", "units", "=", "{", "POWER_WATT", ":", "\"power\"", ",", "VOLT", ":", "\"voltage\"", "}", "self", ".", "_gateway", "=", "gateway", "self", ".", "_name", "=", "\"{} mtu{} {}\"", ".", "format", "(", "name", ",", "mtu", ",", "units", "[", "unit", "]", ")", "self", ".", "_mtu", "=", "mtu", "self", ".", "_unit", "=", "unit", "self", ".", "update", "(", ")" ]
[ 54, 4 ]
[ 61, 21 ]
python
en
['en', 'en', 'en']
True
Ted5000Sensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self):
    """Return the name of the sensor."""
    return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 64, 4 ]
[ 66, 25 ]
python
en
['en', 'mi', 'en']
True
Ted5000Sensor.unit_of_measurement
(self)
Return the unit the value is expressed in.
Return the unit the value is expressed in.
def unit_of_measurement(self):
    """Return the unit the value is expressed in."""
    return self._unit
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit" ]
[ 69, 4 ]
[ 71, 25 ]
python
en
['en', 'en', 'en']
True
Ted5000Sensor.state
(self)
Return the state of the resources.
Return the state of the resources.
def state(self):
    """Return the state of the resources."""
    try:
        return self._gateway.data[self._mtu][self._unit]
    except KeyError:
        pass
[ "def", "state", "(", "self", ")", ":", "try", ":", "return", "self", ".", "_gateway", ".", "data", "[", "self", ".", "_mtu", "]", "[", "self", ".", "_unit", "]", "except", "KeyError", ":", "pass" ]
[ 74, 4 ]
[ 79, 16 ]
python
en
['en', 'en', 'en']
True
Ted5000Sensor.update
(self)
Get the latest data from REST API.
Get the latest data from REST API.
def update(self):
    """Get the latest data from REST API."""
    self._gateway.update()
[ "def", "update", "(", "self", ")", ":", "self", ".", "_gateway", ".", "update", "(", ")" ]
[ 81, 4 ]
[ 83, 30 ]
python
en
['en', 'en', 'en']
True
Ted5000Gateway.__init__
(self, url)
Initialize the data object.
Initialize the data object.
def __init__(self, url):
    """Initialize the data object."""
    self.url = url
    self.data = {}
[ "def", "__init__", "(", "self", ",", "url", ")", ":", "self", ".", "url", "=", "url", "self", ".", "data", "=", "{", "}" ]
[ 89, 4 ]
[ 92, 22 ]
python
en
['en', 'en', 'en']
True
Ted5000Gateway.update
(self)
Get the latest data from the Ted5000 XML API.
Get the latest data from the Ted5000 XML API.
def update(self):
    """Get the latest data from the Ted5000 XML API."""
    try:
        request = requests.get(self.url, timeout=10)
    except requests.exceptions.RequestException as err:
        _LOGGER.error("No connection to endpoint: %s", err)
    else:
        doc = xmltodict.parse(request.text)

        mtus = int(doc["LiveData"]["System"]["NumberMTU"])

        for mtu in range(1, mtus + 1):
            power = int(doc["LiveData"]["Power"]["MTU%d" % mtu]["PowerNow"])
            voltage = int(doc["LiveData"]["Voltage"]["MTU%d" % mtu]["VoltageNow"])

            self.data[mtu] = {POWER_WATT: power, VOLT: voltage / 10}
[ "def", "update", "(", "self", ")", ":", "try", ":", "request", "=", "requests", ".", "get", "(", "self", ".", "url", ",", "timeout", "=", "10", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"No connection to endpoint: %s\"", ",", "err", ")", "else", ":", "doc", "=", "xmltodict", ".", "parse", "(", "request", ".", "text", ")", "mtus", "=", "int", "(", "doc", "[", "\"LiveData\"", "]", "[", "\"System\"", "]", "[", "\"NumberMTU\"", "]", ")", "for", "mtu", "in", "range", "(", "1", ",", "mtus", "+", "1", ")", ":", "power", "=", "int", "(", "doc", "[", "\"LiveData\"", "]", "[", "\"Power\"", "]", "[", "\"MTU%d\"", "%", "mtu", "]", "[", "\"PowerNow\"", "]", ")", "voltage", "=", "int", "(", "doc", "[", "\"LiveData\"", "]", "[", "\"Voltage\"", "]", "[", "\"MTU%d\"", "%", "mtu", "]", "[", "\"VoltageNow\"", "]", ")", "self", ".", "data", "[", "mtu", "]", "=", "{", "POWER_WATT", ":", "power", ",", "VOLT", ":", "voltage", "/", "10", "}" ]
[ 95, 4 ]
[ 110, 72 ]
python
en
['en', 'en', 'en']
True
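A standalone sketch of the parsing logic above, fed a hand-written payload whose shape is inferred from the key lookups in the code. The sample XML and its values are assumptions, and "W" / "V" stand in for the POWER_WATT and VOLT constants.

import xmltodict

SAMPLE = """
<LiveData>
  <System><NumberMTU>1</NumberMTU></System>
  <Power><MTU1><PowerNow>1200</PowerNow></MTU1></Power>
  <Voltage><MTU1><VoltageNow>1198</VoltageNow></MTU1></Voltage>
</LiveData>
"""

doc = xmltodict.parse(SAMPLE)
mtus = int(doc["LiveData"]["System"]["NumberMTU"])
data = {}
for mtu in range(1, mtus + 1):
    power = int(doc["LiveData"]["Power"]["MTU%d" % mtu]["PowerNow"])
    voltage = int(doc["LiveData"]["Voltage"]["MTU%d" % mtu]["VoltageNow"])
    # The gateway reports voltage in tenths of a volt, hence the division.
    data[mtu] = {"W": power, "V": voltage / 10}
print(data)  # {1: {'W': 1200, 'V': 119.8}}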
test_duplicate_error
(hass)
Test that errors are shown when duplicate entries are added.
Test that errors are shown when duplicate entries are added.
async def test_duplicate_error(hass):
    """Test that errors are shown when duplicate entries are added."""
    geography_conf = {
        CONF_API_KEY: "abcde12345",
        CONF_LATITUDE: 51.528308,
        CONF_LONGITUDE: -0.3817765,
    }

    MockConfigEntry(
        domain=DOMAIN, unique_id="51.528308, -0.3817765", data=geography_conf
    ).add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data={"type": "Geographical Location"}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=geography_conf
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"

    node_pro_conf = {CONF_IP_ADDRESS: "192.168.1.100", CONF_PASSWORD: "12345"}

    MockConfigEntry(
        domain=DOMAIN, unique_id="192.168.1.100", data=node_pro_conf
    ).add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data={"type": "AirVisual Node/Pro"}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=node_pro_conf
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
[ "async", "def", "test_duplicate_error", "(", "hass", ")", ":", "geography_conf", "=", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", ",", "}", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "\"51.528308, -0.3817765\"", ",", "data", "=", "geography_conf", ")", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "\"Geographical Location\"", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "geography_conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"", "node_pro_conf", "=", "{", "CONF_IP_ADDRESS", ":", "\"192.168.1.100\"", ",", "CONF_PASSWORD", ":", "\"12345\"", "}", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "\"192.168.1.100\"", ",", "data", "=", "node_pro_conf", ")", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "\"AirVisual Node/Pro\"", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "node_pro_conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"" ]
[ 26, 0 ]
[ 62, 51 ]
python
en
['en', 'en', 'en']
True
test_invalid_identifier
(hass)
Test that an invalid API key or Node/Pro ID throws an error.
Test that an invalid API key or Node/Pro ID throws an error.
async def test_invalid_identifier(hass):
    """Test that an invalid API key or Node/Pro ID throws an error."""
    geography_conf = {
        CONF_API_KEY: "abcde12345",
        CONF_LATITUDE: 51.528308,
        CONF_LONGITUDE: -0.3817765,
    }

    with patch(
        "pyairvisual.air_quality.AirQuality.nearest_city",
        side_effect=InvalidKeyError,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_USER},
            data={"type": "Geographical Location"},
        )
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=geography_conf
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["errors"] == {CONF_API_KEY: "invalid_api_key"}
[ "async", "def", "test_invalid_identifier", "(", "hass", ")", ":", "geography_conf", "=", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", ",", "}", "with", "patch", "(", "\"pyairvisual.air_quality.AirQuality.nearest_city\"", ",", "side_effect", "=", "InvalidKeyError", ",", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "\"Geographical Location\"", "}", ",", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "geography_conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "CONF_API_KEY", ":", "\"invalid_api_key\"", "}" ]
[ 65, 0 ]
[ 87, 68 ]
python
en
['en', 'en', 'en']
True
test_migration
(hass)
Test migrating from version 1 to the current version.
Test migrating from version 1 to the current version.
async def test_migration(hass):
    """Test migrating from version 1 to the current version."""
    conf = {
        CONF_API_KEY: "abcde12345",
        CONF_GEOGRAPHIES: [
            {CONF_LATITUDE: 51.528308, CONF_LONGITUDE: -0.3817765},
            {CONF_LATITUDE: 35.48847, CONF_LONGITUDE: 137.5263065},
        ],
    }

    config_entry = MockConfigEntry(
        domain=DOMAIN, version=1, unique_id="abcde12345", data=conf
    )
    config_entry.add_to_hass(hass)

    assert len(hass.config_entries.async_entries(DOMAIN)) == 1

    with patch("pyairvisual.air_quality.AirQuality.nearest_city"), patch.object(
        hass.config_entries, "async_forward_entry_setup"
    ):
        assert await async_setup_component(hass, DOMAIN, {DOMAIN: conf})
        await hass.async_block_till_done()

    config_entries = hass.config_entries.async_entries(DOMAIN)

    assert len(config_entries) == 2

    assert config_entries[0].unique_id == "51.528308, -0.3817765"
    assert config_entries[0].title == "Cloud API (51.528308, -0.3817765)"
    assert config_entries[0].data == {
        CONF_API_KEY: "abcde12345",
        CONF_LATITUDE: 51.528308,
        CONF_LONGITUDE: -0.3817765,
        CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
    }

    assert config_entries[1].unique_id == "35.48847, 137.5263065"
    assert config_entries[1].title == "Cloud API (35.48847, 137.5263065)"
    assert config_entries[1].data == {
        CONF_API_KEY: "abcde12345",
        CONF_LATITUDE: 35.48847,
        CONF_LONGITUDE: 137.5263065,
        CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
    }
[ "async", "def", "test_migration", "(", "hass", ")", ":", "conf", "=", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_GEOGRAPHIES", ":", "[", "{", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", "}", ",", "{", "CONF_LATITUDE", ":", "35.48847", ",", "CONF_LONGITUDE", ":", "137.5263065", "}", ",", "]", ",", "}", "config_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "version", "=", "1", ",", "unique_id", "=", "\"abcde12345\"", ",", "data", "=", "conf", ")", "config_entry", ".", "add_to_hass", "(", "hass", ")", "assert", "len", "(", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", ")", "==", "1", "with", "patch", "(", "\"pyairvisual.air_quality.AirQuality.nearest_city\"", ")", ",", "patch", ".", "object", "(", "hass", ".", "config_entries", ",", "\"async_forward_entry_setup\"", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "conf", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "config_entries", "=", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", "assert", "len", "(", "config_entries", ")", "==", "2", "assert", "config_entries", "[", "0", "]", ".", "unique_id", "==", "\"51.528308, -0.3817765\"", "assert", "config_entries", "[", "0", "]", ".", "title", "==", "\"Cloud API (51.528308, -0.3817765)\"", "assert", "config_entries", "[", "0", "]", ".", "data", "==", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", ",", "CONF_INTEGRATION_TYPE", ":", "INTEGRATION_TYPE_GEOGRAPHY", ",", "}", "assert", "config_entries", "[", "1", "]", ".", "unique_id", "==", "\"35.48847, 137.5263065\"", "assert", "config_entries", "[", "1", "]", ".", "title", "==", "\"Cloud API (35.48847, 137.5263065)\"", "assert", "config_entries", "[", "1", "]", ".", "data", "==", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "35.48847", ",", "CONF_LONGITUDE", ":", "137.5263065", ",", "CONF_INTEGRATION_TYPE", ":", "INTEGRATION_TYPE_GEOGRAPHY", ",", "}" ]
[ 90, 0 ]
[ 133, 5 ]
python
en
['en', 'en', 'en']
True
test_node_pro_error
(hass)
Test that an invalid Node/Pro ID shows an error.
Test that an invalid Node/Pro ID shows an error.
async def test_node_pro_error(hass):
    """Test that an invalid Node/Pro ID shows an error."""
    node_pro_conf = {CONF_IP_ADDRESS: "192.168.1.100", CONF_PASSWORD: "my_password"}

    with patch(
        "pyairvisual.node.NodeSamba.async_connect",
        side_effect=NodeProError,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_USER}, data={"type": "AirVisual Node/Pro"}
        )
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=node_pro_conf
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["errors"] == {CONF_IP_ADDRESS: "cannot_connect"}
[ "async", "def", "test_node_pro_error", "(", "hass", ")", ":", "node_pro_conf", "=", "{", "CONF_IP_ADDRESS", ":", "\"192.168.1.100\"", ",", "CONF_PASSWORD", ":", "\"my_password\"", "}", "with", "patch", "(", "\"pyairvisual.node.NodeSamba.async_connect\"", ",", "side_effect", "=", "NodeProError", ",", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "\"AirVisual Node/Pro\"", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "node_pro_conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "CONF_IP_ADDRESS", ":", "\"cannot_connect\"", "}" ]
[ 136, 0 ]
[ 151, 70 ]
python
en
['en', 'en', 'en']
True
test_options_flow
(hass)
Test config flow options.
Test config flow options.
async def test_options_flow(hass):
    """Test config flow options."""
    geography_conf = {
        CONF_API_KEY: "abcde12345",
        CONF_LATITUDE: 51.528308,
        CONF_LONGITUDE: -0.3817765,
    }

    config_entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id="51.528308, -0.3817765",
        data=geography_conf,
        options={CONF_SHOW_ON_MAP: True},
    )
    config_entry.add_to_hass(hass)

    with patch(
        "homeassistant.components.airvisual.async_setup_entry", return_value=True
    ):
        await hass.config_entries.async_setup(config_entry.entry_id)
        result = await hass.config_entries.options.async_init(config_entry.entry_id)

        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["step_id"] == "init"

        result = await hass.config_entries.options.async_configure(
            result["flow_id"], user_input={CONF_SHOW_ON_MAP: False}
        )

        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert config_entry.options == {CONF_SHOW_ON_MAP: False}
[ "async", "def", "test_options_flow", "(", "hass", ")", ":", "geography_conf", "=", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", ",", "}", "config_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "\"51.528308, -0.3817765\"", ",", "data", "=", "geography_conf", ",", "options", "=", "{", "CONF_SHOW_ON_MAP", ":", "True", "}", ",", ")", "config_entry", ".", "add_to_hass", "(", "hass", ")", "with", "patch", "(", "\"homeassistant.components.airvisual.async_setup_entry\"", ",", "return_value", "=", "True", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "config_entry", ".", "entry_id", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_init", "(", "config_entry", ".", "entry_id", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"init\"", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "{", "CONF_SHOW_ON_MAP", ":", "False", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "config_entry", ".", "options", "==", "{", "CONF_SHOW_ON_MAP", ":", "False", "}" ]
[ 154, 0 ]
[ 184, 64 ]
python
en
['en', 'fr', 'en']
True
test_step_geography
(hass)
Test the geography (cloud API) step.
Test the geography (cloud API) step.
async def test_step_geography(hass):
    """Test the geography (cloud API) step."""
    conf = {
        CONF_API_KEY: "abcde12345",
        CONF_LATITUDE: 51.528308,
        CONF_LONGITUDE: -0.3817765,
    }

    with patch(
        "homeassistant.components.airvisual.async_setup_entry", return_value=True
    ), patch("pyairvisual.air_quality.AirQuality.nearest_city"):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_USER},
            data={"type": "Geographical Location"},
        )
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=conf
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["title"] == "Cloud API (51.528308, -0.3817765)"
        assert result["data"] == {
            CONF_API_KEY: "abcde12345",
            CONF_LATITUDE: 51.528308,
            CONF_LONGITUDE: -0.3817765,
            CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
        }
[ "async", "def", "test_step_geography", "(", "hass", ")", ":", "conf", "=", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", ",", "}", "with", "patch", "(", "\"homeassistant.components.airvisual.async_setup_entry\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"pyairvisual.air_quality.AirQuality.nearest_city\"", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "\"Geographical Location\"", "}", ",", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "\"Cloud API (51.528308, -0.3817765)\"", "assert", "result", "[", "\"data\"", "]", "==", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", ",", "CONF_INTEGRATION_TYPE", ":", "INTEGRATION_TYPE_GEOGRAPHY", ",", "}" ]
[ 187, 0 ]
[ 214, 9 ]
python
en
['en', 'en', 'en']
True
test_step_node_pro
(hass)
Test the Node/Pro step.
Test the Node/Pro step.
async def test_step_node_pro(hass):
    """Test the Node/Pro step."""
    conf = {CONF_IP_ADDRESS: "192.168.1.100", CONF_PASSWORD: "my_password"}

    with patch(
        "homeassistant.components.airvisual.async_setup_entry", return_value=True
    ), patch("pyairvisual.node.NodeSamba.async_connect"), patch(
        "pyairvisual.node.NodeSamba.async_get_latest_measurements"
    ), patch(
        "pyairvisual.node.NodeSamba.async_disconnect"
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_USER}, data={"type": "AirVisual Node/Pro"}
        )
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=conf
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["title"] == "Node/Pro (192.168.1.100)"
        assert result["data"] == {
            CONF_IP_ADDRESS: "192.168.1.100",
            CONF_PASSWORD: "my_password",
            CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO,
        }
[ "async", "def", "test_step_node_pro", "(", "hass", ")", ":", "conf", "=", "{", "CONF_IP_ADDRESS", ":", "\"192.168.1.100\"", ",", "CONF_PASSWORD", ":", "\"my_password\"", "}", "with", "patch", "(", "\"homeassistant.components.airvisual.async_setup_entry\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"pyairvisual.node.NodeSamba.async_connect\"", ")", ",", "patch", "(", "\"pyairvisual.node.NodeSamba.async_get_latest_measurements\"", ")", ",", "patch", "(", "\"pyairvisual.node.NodeSamba.async_disconnect\"", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "\"AirVisual Node/Pro\"", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "\"Node/Pro (192.168.1.100)\"", "assert", "result", "[", "\"data\"", "]", "==", "{", "CONF_IP_ADDRESS", ":", "\"192.168.1.100\"", ",", "CONF_PASSWORD", ":", "\"my_password\"", ",", "CONF_INTEGRATION_TYPE", ":", "INTEGRATION_TYPE_NODE_PRO", ",", "}" ]
[ 217, 0 ]
[ 240, 9 ]
python
en
['en', 'en', 'en']
True
test_step_reauth
(hass)
Test that the reauth step works.
Test that the reauth step works.
async def test_step_reauth(hass):
    """Test that the reauth step works."""
    geography_conf = {
        CONF_API_KEY: "abcde12345",
        CONF_LATITUDE: 51.528308,
        CONF_LONGITUDE: -0.3817765,
    }

    MockConfigEntry(
        domain=DOMAIN, unique_id="51.528308, -0.3817765", data=geography_conf
    ).add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "reauth"}, data=geography_conf
    )
    assert result["step_id"] == "reauth_confirm"

    result = await hass.config_entries.flow.async_configure(result["flow_id"])
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "reauth_confirm"

    with patch(
        "homeassistant.components.airvisual.async_setup_entry", return_value=True
    ), patch("pyairvisual.air_quality.AirQuality.nearest_city", return_value=True):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input={CONF_API_KEY: "defgh67890"}
        )

    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "reauth_successful"

    assert len(hass.config_entries.async_entries()) == 1
[ "async", "def", "test_step_reauth", "(", "hass", ")", ":", "geography_conf", "=", "{", "CONF_API_KEY", ":", "\"abcde12345\"", ",", "CONF_LATITUDE", ":", "51.528308", ",", "CONF_LONGITUDE", ":", "-", "0.3817765", ",", "}", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "\"51.528308, -0.3817765\"", ",", "data", "=", "geography_conf", ")", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "\"reauth\"", "}", ",", "data", "=", "geography_conf", ")", "assert", "result", "[", "\"step_id\"", "]", "==", "\"reauth_confirm\"", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"reauth_confirm\"", "with", "patch", "(", "\"homeassistant.components.airvisual.async_setup_entry\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"pyairvisual.air_quality.AirQuality.nearest_city\"", ",", "return_value", "=", "True", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "{", "CONF_API_KEY", ":", "\"defgh67890\"", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"reauth_successful\"", "assert", "len", "(", "hass", ".", "config_entries", ".", "async_entries", "(", ")", ")", "==", "1" ]
[ 243, 0 ]
[ 273, 56 ]
python
en
['en', 'en', 'en']
True
test_step_user
(hass)
Test the user ("pick the integration type") step.
Test the user ("pick the integration type") step.
async def test_step_user(hass):
    """Test the user ("pick the integration type") step."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={"type": INTEGRATION_TYPE_GEOGRAPHY},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "geography"

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
        data={"type": INTEGRATION_TYPE_NODE_PRO},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "node_pro"
[ "async", "def", "test_step_user", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"user\"", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "INTEGRATION_TYPE_GEOGRAPHY", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"geography\"", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_USER", "}", ",", "data", "=", "{", "\"type\"", ":", "INTEGRATION_TYPE_NODE_PRO", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"node_pro\"" ]
[ 276, 0 ]
[ 301, 42 ]
python
en
['en', 'en', 'en']
True
async_setup
(hass: HomeAssistant, config: Dict)
Set up the component.
Set up the component.
async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
    """Set up the component."""
    hass.data.setdefault(DOMAIN, {})

    if len(hass.config_entries.async_entries(DOMAIN)) > 0:
        return True

    if DOMAIN in config and CONF_API_KEY in config[DOMAIN]:
        persistent_notification.async_create(
            hass,
            "Cloudflare integration now requires an API Token. Please go to the integrations page to setup.",
            "Cloudflare Setup",
            "cloudflare_setup",
        )

    return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "Dict", ")", "->", "bool", ":", "hass", ".", "data", ".", "setdefault", "(", "DOMAIN", ",", "{", "}", ")", "if", "len", "(", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", ")", ">", "0", ":", "return", "True", "if", "DOMAIN", "in", "config", "and", "CONF_API_KEY", "in", "config", "[", "DOMAIN", "]", ":", "persistent_notification", ".", "async_create", "(", "hass", ",", "\"Cloudflare integration now requires an API Token. Please go to the integrations page to setup.\"", ",", "\"Cloudflare Setup\"", ",", "\"cloudflare_setup\"", ",", ")", "return", "True" ]
[ 53, 0 ]
[ 68, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
Set up Cloudflare from a config entry.
Set up Cloudflare from a config entry.
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Cloudflare from a config entry."""
    cfupdate = CloudflareUpdater(
        async_get_clientsession(hass),
        entry.data[CONF_API_TOKEN],
        entry.data[CONF_ZONE],
        entry.data[CONF_RECORDS],
    )

    try:
        zone_id = await cfupdate.get_zone_id()
    except CloudflareAuthenticationException:
        _LOGGER.error("API access forbidden. Please reauthenticate")
        return False
    except CloudflareConnectionException as error:
        raise ConfigEntryNotReady from error

    async def update_records(now):
        """Set up recurring update."""
        try:
            await _async_update_cloudflare(cfupdate, zone_id)
        except CloudflareException as error:
            _LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error)

    async def update_records_service(call):
        """Set up service for manual trigger."""
        try:
            await _async_update_cloudflare(cfupdate, zone_id)
        except CloudflareException as error:
            _LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error)

    update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
    undo_interval = async_track_time_interval(hass, update_records, update_interval)

    hass.data[DOMAIN][entry.entry_id] = {
        DATA_UNDO_UPDATE_INTERVAL: undo_interval,
    }

    hass.services.async_register(DOMAIN, SERVICE_UPDATE_RECORDS, update_records_service)

    return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", "->", "bool", ":", "cfupdate", "=", "CloudflareUpdater", "(", "async_get_clientsession", "(", "hass", ")", ",", "entry", ".", "data", "[", "CONF_API_TOKEN", "]", ",", "entry", ".", "data", "[", "CONF_ZONE", "]", ",", "entry", ".", "data", "[", "CONF_RECORDS", "]", ",", ")", "try", ":", "zone_id", "=", "await", "cfupdate", ".", "get_zone_id", "(", ")", "except", "CloudflareAuthenticationException", ":", "_LOGGER", ".", "error", "(", "\"API access forbidden. Please reauthenticate\"", ")", "return", "False", "except", "CloudflareConnectionException", "as", "error", ":", "raise", "ConfigEntryNotReady", "from", "error", "async", "def", "update_records", "(", "now", ")", ":", "\"\"\"Set up recurring update.\"\"\"", "try", ":", "await", "_async_update_cloudflare", "(", "cfupdate", ",", "zone_id", ")", "except", "CloudflareException", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Error updating zone %s: %s\"", ",", "entry", ".", "data", "[", "CONF_ZONE", "]", ",", "error", ")", "async", "def", "update_records_service", "(", "call", ")", ":", "\"\"\"Set up service for manual trigger.\"\"\"", "try", ":", "await", "_async_update_cloudflare", "(", "cfupdate", ",", "zone_id", ")", "except", "CloudflareException", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Error updating zone %s: %s\"", ",", "entry", ".", "data", "[", "CONF_ZONE", "]", ",", "error", ")", "update_interval", "=", "timedelta", "(", "minutes", "=", "DEFAULT_UPDATE_INTERVAL", ")", "undo_interval", "=", "async_track_time_interval", "(", "hass", ",", "update_records", ",", "update_interval", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "=", "{", "DATA_UNDO_UPDATE_INTERVAL", ":", "undo_interval", ",", "}", "hass", ".", "services", ".", "async_register", "(", "DOMAIN", ",", "SERVICE_UPDATE_RECORDS", ",", "update_records_service", ")", "return", "True" ]
[ 71, 0 ]
[ 111, 15 ]
python
en
['en', 'en', 'en']
True
async_unload_entry
(hass: HomeAssistant, entry: ConfigEntry)
Unload Cloudflare config entry.
Unload Cloudflare config entry.
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload Cloudflare config entry."""
    hass.data[DOMAIN][entry.entry_id][DATA_UNDO_UPDATE_INTERVAL]()
    hass.data[DOMAIN].pop(entry.entry_id)

    return True
[ "async", "def", "async_unload_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", "->", "bool", ":", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "[", "DATA_UNDO_UPDATE_INTERVAL", "]", "(", ")", "hass", ".", "data", "[", "DOMAIN", "]", ".", "pop", "(", "entry", ".", "entry_id", ")", "return", "True" ]
[ 114, 0 ]
[ 119, 15 ]
python
en
['en', 'en', 'en']
True
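A minimal sketch of the schedule-and-undo pattern spanning the two entries above: async_track_time_interval returns a cancel callable, which setup stores in hass.data and unload later invokes. The 60-minute interval and the "cloudflare" / "undo_update_interval" keys are assumptions standing in for DEFAULT_UPDATE_INTERVAL, DOMAIN, and DATA_UNDO_UPDATE_INTERVAL, whose values are not shown.

from datetime import timedelta

from homeassistant.core import HomeAssistant
from homeassistant.helpers.event import async_track_time_interval

def schedule_polling(hass: HomeAssistant, update_records) -> None:
    # async_track_time_interval returns a callable that cancels the listener.
    undo = async_track_time_interval(hass, update_records, timedelta(minutes=60))
    hass.data.setdefault("cloudflare", {})["undo_update_interval"] = undo

def cancel_polling(hass: HomeAssistant) -> None:
    # Mirrors async_unload_entry: look up the stored callable and invoke it.
    hass.data["cloudflare"].pop("undo_update_interval")()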
test_cached_event_message
(hass)
Test that we cache event messages.
Test that we cache event messages.
async def test_cached_event_message(hass):
    """Test that we cache event messages."""
    events = []

    @callback
    def _event_listener(event):
        events.append(event)

    hass.bus.async_listen(EVENT_STATE_CHANGED, _event_listener)

    hass.states.async_set("light.window", "on")
    hass.states.async_set("light.window", "off")
    await hass.async_block_till_done()

    assert len(events) == 2

    lru_event_cache.cache_clear()

    msg0 = cached_event_message(2, events[0])
    assert msg0 == cached_event_message(2, events[0])

    msg1 = cached_event_message(2, events[1])
    assert msg1 == cached_event_message(2, events[1])

    assert msg0 != msg1

    cache_info = lru_event_cache.cache_info()
    assert cache_info.hits == 2
    assert cache_info.misses == 2
    assert cache_info.currsize == 2

    cached_event_message(2, events[1])
    cache_info = lru_event_cache.cache_info()
    assert cache_info.hits == 3
    assert cache_info.misses == 2
    assert cache_info.currsize == 2
[ "async", "def", "test_cached_event_message", "(", "hass", ")", ":", "events", "=", "[", "]", "@", "callback", "def", "_event_listener", "(", "event", ")", ":", "events", ".", "append", "(", "event", ")", "hass", ".", "bus", ".", "async_listen", "(", "EVENT_STATE_CHANGED", ",", "_event_listener", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.window\"", ",", "\"on\"", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.window\"", ",", "\"off\"", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "events", ")", "==", "2", "lru_event_cache", ".", "cache_clear", "(", ")", "msg0", "=", "cached_event_message", "(", "2", ",", "events", "[", "0", "]", ")", "assert", "msg0", "==", "cached_event_message", "(", "2", ",", "events", "[", "0", "]", ")", "msg1", "=", "cached_event_message", "(", "2", ",", "events", "[", "1", "]", ")", "assert", "msg1", "==", "cached_event_message", "(", "2", ",", "events", "[", "1", "]", ")", "assert", "msg0", "!=", "msg1", "cache_info", "=", "lru_event_cache", ".", "cache_info", "(", ")", "assert", "cache_info", ".", "hits", "==", "2", "assert", "cache_info", ".", "misses", "==", "2", "assert", "cache_info", ".", "currsize", "==", "2", "cached_event_message", "(", "2", ",", "events", "[", "1", "]", ")", "cache_info", "=", "lru_event_cache", ".", "cache_info", "(", ")", "assert", "cache_info", ".", "hits", "==", "3", "assert", "cache_info", ".", "misses", "==", "2", "assert", "cache_info", ".", "currsize", "==", "2" ]
[ 11, 0 ]
[ 46, 35 ]
python
en
['en', 'en', 'en']
True
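The counters asserted above are the standard functools.lru_cache statistics. A self-contained analogue of the same hit/miss accounting, where the render function is a made-up stand-in for the real event-message serializer:

from functools import lru_cache

@lru_cache(maxsize=1024)
def render(iden: int, event_id: int) -> str:
    # A toy serializer; distinct (iden, event_id) pairs are distinct cache keys.
    return f'{{"id": {iden}, "event": {event_id}}}'

render(2, 0); render(2, 0)   # first call misses, repeat hits
render(2, 1); render(2, 1)   # second key: one more miss, one more hit
info = render.cache_info()
assert (info.hits, info.misses, info.currsize) == (2, 2, 2)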
test_cached_event_message_with_different_idens
(hass)
Test that we cache event messages when the subscription idens differ.
Test that we cache event messages when the subscription idens differ.
async def test_cached_event_message_with_different_idens(hass):
    """Test that we cache event messages when the subscription idens differ."""
    events = []

    @callback
    def _event_listener(event):
        events.append(event)

    hass.bus.async_listen(EVENT_STATE_CHANGED, _event_listener)

    hass.states.async_set("light.window", "on")
    await hass.async_block_till_done()

    assert len(events) == 1

    lru_event_cache.cache_clear()

    msg0 = cached_event_message(2, events[0])
    msg1 = cached_event_message(3, events[0])
    msg2 = cached_event_message(4, events[0])

    assert msg0 != msg1
    assert msg0 != msg2

    cache_info = lru_event_cache.cache_info()
    assert cache_info.hits == 2
    assert cache_info.misses == 1
    assert cache_info.currsize == 1
[ "async", "def", "test_cached_event_message_with_different_idens", "(", "hass", ")", ":", "events", "=", "[", "]", "@", "callback", "def", "_event_listener", "(", "event", ")", ":", "events", ".", "append", "(", "event", ")", "hass", ".", "bus", ".", "async_listen", "(", "EVENT_STATE_CHANGED", ",", "_event_listener", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.window\"", ",", "\"on\"", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "events", ")", "==", "1", "lru_event_cache", ".", "cache_clear", "(", ")", "msg0", "=", "cached_event_message", "(", "2", ",", "events", "[", "0", "]", ")", "msg1", "=", "cached_event_message", "(", "3", ",", "events", "[", "0", "]", ")", "msg2", "=", "cached_event_message", "(", "4", ",", "events", "[", "0", "]", ")", "assert", "msg0", "!=", "msg1", "assert", "msg0", "!=", "msg2", "cache_info", "=", "lru_event_cache", ".", "cache_info", "(", ")", "assert", "cache_info", ".", "hits", "==", "2", "assert", "cache_info", ".", "misses", "==", "1", "assert", "cache_info", ".", "currsize", "==", "1" ]
[ 49, 0 ]
[ 77, 35 ]
python
en
['en', 'en', 'en']
True
test_message_to_json
(caplog)
Test we can serialize websocket messages.
Test we can serialize websocket messages.
async def test_message_to_json(caplog):
    """Test we can serialize websocket messages."""
    json_str = message_to_json({"id": 1, "message": "xyz"})

    assert json_str == '{"id": 1, "message": "xyz"}'

    json_str2 = message_to_json({"id": 1, "message": _Unserializeable()})

    assert (
        json_str2
        == '{"id": 1, "type": "result", "success": false, "error": {"code": "unknown_error", "message": "Invalid JSON in response"}}'
    )
    assert "Unable to serialize to JSON" in caplog.text
[ "async", "def", "test_message_to_json", "(", "caplog", ")", ":", "json_str", "=", "message_to_json", "(", "{", "\"id\"", ":", "1", ",", "\"message\"", ":", "\"xyz\"", "}", ")", "assert", "json_str", "==", "'{\"id\": 1, \"message\": \"xyz\"}'", "json_str2", "=", "message_to_json", "(", "{", "\"id\"", ":", "1", ",", "\"message\"", ":", "_Unserializeable", "(", ")", "}", ")", "assert", "(", "json_str2", "==", "'{\"id\": 1, \"type\": \"result\", \"success\": false, \"error\": {\"code\": \"unknown_error\", \"message\": \"Invalid JSON in response\"}}'", ")", "assert", "\"Unable to serialize to JSON\"", "in", "caplog", ".", "text" ]
[ 80, 0 ]
[ 93, 55 ]
python
en
['en', 'da', 'en']
True
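A sketch of the serialize-or-error-envelope behavior the test pins down, using the standard json module. message_to_json's real implementation is not shown here; the fallback payload below is copied from the test's assertion, and the function name is illustrative.

import json

def to_json_or_error(message: dict) -> str:
    try:
        return json.dumps(message)
    except TypeError:
        # Fall back to the error envelope quoted in the test's assertion.
        return json.dumps(
            {
                "id": message.get("id"),
                "type": "result",
                "success": False,
                "error": {"code": "unknown_error", "message": "Invalid JSON in response"},
            }
        )

print(to_json_or_error({"id": 1, "message": "xyz"}))     # {"id": 1, "message": "xyz"}
print(to_json_or_error({"id": 1, "message": object()}))  # error envelope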
test_create_doorbell
(hass, aiohttp_client)
Test creation of a doorbell.
Test creation of a doorbell.
async def test_create_doorbell(hass, aiohttp_client):
    """Test creation of a doorbell."""
    doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json")

    with patch.object(
        doorbell_one, "async_get_doorbell_image", create=False, return_value="image"
    ):
        await _create_august_with_devices(hass, [doorbell_one])

        camera_k98gidt45gul_name_camera = hass.states.get(
            "camera.k98gidt45gul_name_camera"
        )
        assert camera_k98gidt45gul_name_camera.state == STATE_IDLE

        url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[
            "entity_picture"
        ]

        client = await aiohttp_client(hass.http.app)
        resp = await client.get(url)
        assert resp.status == 200
        body = await resp.text()
        assert body == "image"
[ "async", "def", "test_create_doorbell", "(", "hass", ",", "aiohttp_client", ")", ":", "doorbell_one", "=", "await", "_mock_doorbell_from_fixture", "(", "hass", ",", "\"get_doorbell.json\"", ")", "with", "patch", ".", "object", "(", "doorbell_one", ",", "\"async_get_doorbell_image\"", ",", "create", "=", "False", ",", "return_value", "=", "\"image\"", ")", ":", "await", "_create_august_with_devices", "(", "hass", ",", "[", "doorbell_one", "]", ")", "camera_k98gidt45gul_name_camera", "=", "hass", ".", "states", ".", "get", "(", "\"camera.k98gidt45gul_name_camera\"", ")", "assert", "camera_k98gidt45gul_name_camera", ".", "state", "==", "STATE_IDLE", "url", "=", "hass", ".", "states", ".", "get", "(", "\"camera.k98gidt45gul_name_camera\"", ")", ".", "attributes", "[", "\"entity_picture\"", "]", "client", "=", "await", "aiohttp_client", "(", "hass", ".", "http", ".", "app", ")", "resp", "=", "await", "client", ".", "get", "(", "url", ")", "assert", "resp", ".", "status", "==", "200", "body", "=", "await", "resp", ".", "text", "(", ")", "assert", "body", "==", "\"image\"" ]
[ 11, 0 ]
[ 33, 30 ]
python
en
['en', 'lb', 'en']
True
test_onewiredirect_setup_valid_device
(hass, device_id)
Test that sysbus config entry works correctly.
Test that sysbus config entry works correctly.
async def test_onewiredirect_setup_valid_device(hass, device_id):
    """Test that sysbus config entry works correctly."""
    entity_registry = mock_registry(hass)
    device_registry = mock_device_registry(hass)

    mock_device_sensor = MOCK_DEVICE_SENSORS[device_id]

    glob_result = [f"/{DEFAULT_SYSBUS_MOUNT_DIR}/{device_id}"]
    read_side_effect = []
    expected_sensors = mock_device_sensor["sensors"]
    for expected_sensor in expected_sensors:
        read_side_effect.append(expected_sensor["injected_value"])

    # Ensure enough read side effect
    read_side_effect.extend([FileNotFoundError("Missing injected value")] * 20)

    with patch(
        "homeassistant.components.onewire.onewirehub.os.path.isdir", return_value=True
    ), patch("pi1wire._finder.glob.glob", return_value=glob_result,), patch(
        "pi1wire.OneWire.get_temperature",
        side_effect=read_side_effect,
    ):
        assert await async_setup_component(hass, SENSOR_DOMAIN, MOCK_CONFIG)
        await hass.async_block_till_done()

    assert len(entity_registry.entities) == len(expected_sensors)

    if len(expected_sensors) > 0:
        device_info = mock_device_sensor["device_info"]
        assert len(device_registry.devices) == 1
        registry_entry = device_registry.async_get_device({(DOMAIN, device_id)}, set())
        assert registry_entry is not None
        assert registry_entry.identifiers == {(DOMAIN, device_id)}
        assert registry_entry.manufacturer == device_info["manufacturer"]
        assert registry_entry.name == device_info["name"]
        assert registry_entry.model == device_info["model"]

    for expected_sensor in expected_sensors:
        entity_id = expected_sensor["entity_id"]
        registry_entry = entity_registry.entities.get(entity_id)
        assert registry_entry is not None
        assert registry_entry.unique_id == expected_sensor["unique_id"]
        assert registry_entry.unit_of_measurement == expected_sensor["unit"]
        assert registry_entry.device_class == expected_sensor["class"]
        state = hass.states.get(entity_id)
        assert state.state == expected_sensor["result"]
[ "async", "def", "test_onewiredirect_setup_valid_device", "(", "hass", ",", "device_id", ")", ":", "entity_registry", "=", "mock_registry", "(", "hass", ")", "device_registry", "=", "mock_device_registry", "(", "hass", ")", "mock_device_sensor", "=", "MOCK_DEVICE_SENSORS", "[", "device_id", "]", "glob_result", "=", "[", "f\"/{DEFAULT_SYSBUS_MOUNT_DIR}/{device_id}\"", "]", "read_side_effect", "=", "[", "]", "expected_sensors", "=", "mock_device_sensor", "[", "\"sensors\"", "]", "for", "expected_sensor", "in", "expected_sensors", ":", "read_side_effect", ".", "append", "(", "expected_sensor", "[", "\"injected_value\"", "]", ")", "# Ensure enough read side effect", "read_side_effect", ".", "extend", "(", "[", "FileNotFoundError", "(", "\"Missing injected value\"", ")", "]", "*", "20", ")", "with", "patch", "(", "\"homeassistant.components.onewire.onewirehub.os.path.isdir\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"pi1wire._finder.glob.glob\"", ",", "return_value", "=", "glob_result", ",", ")", ",", "patch", "(", "\"pi1wire.OneWire.get_temperature\"", ",", "side_effect", "=", "read_side_effect", ",", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "SENSOR_DOMAIN", ",", "MOCK_CONFIG", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "entity_registry", ".", "entities", ")", "==", "len", "(", "expected_sensors", ")", "if", "len", "(", "expected_sensors", ")", ">", "0", ":", "device_info", "=", "mock_device_sensor", "[", "\"device_info\"", "]", "assert", "len", "(", "device_registry", ".", "devices", ")", "==", "1", "registry_entry", "=", "device_registry", ".", "async_get_device", "(", "{", "(", "DOMAIN", ",", "device_id", ")", "}", ",", "set", "(", ")", ")", "assert", "registry_entry", "is", "not", "None", "assert", "registry_entry", ".", "identifiers", "==", "{", "(", "DOMAIN", ",", "device_id", ")", "}", "assert", "registry_entry", ".", "manufacturer", "==", "device_info", "[", "\"manufacturer\"", "]", "assert", "registry_entry", ".", "name", "==", "device_info", "[", "\"name\"", "]", "assert", "registry_entry", ".", "model", "==", "device_info", "[", "\"model\"", "]", "for", "expected_sensor", "in", "expected_sensors", ":", "entity_id", "=", "expected_sensor", "[", "\"entity_id\"", "]", "registry_entry", "=", "entity_registry", ".", "entities", ".", "get", "(", "entity_id", ")", "assert", "registry_entry", "is", "not", "None", "assert", "registry_entry", ".", "unique_id", "==", "expected_sensor", "[", "\"unique_id\"", "]", "assert", "registry_entry", ".", "unit_of_measurement", "==", "expected_sensor", "[", "\"unit\"", "]", "assert", "registry_entry", ".", "device_class", "==", "expected_sensor", "[", "\"class\"", "]", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "expected_sensor", "[", "\"result\"", "]" ]
[ 128, 0 ]
[ 173, 55 ]
python
en
['en', 'en', 'en']
True
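A self-contained illustration of the side_effect technique the test relies on: a Mock configured with an iterable returns successive items and raises any exception instances it encounters, which is why the test pads its list with FileNotFoundError once the real readings run out.

from unittest.mock import Mock

read = Mock(side_effect=[25.123, FileNotFoundError("Missing injected value")])
print(read())        # 25.123
try:
    read()           # second call raises the queued exception
except FileNotFoundError as err:
    print(err)       # Missing injected value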
test_reload_platform
(hass)
Test reloading a platform.
Test reloading a platform.
async def test_reload_platform(hass):
    """Test reloading a platform."""
    component_setup = Mock(return_value=True)

    setup_called = []

    async def setup_platform(*args):
        setup_called.append(args)

    mock_integration(hass, MockModule(DOMAIN, setup=component_setup))
    mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN]))

    mock_platform = MockPlatform(async_setup_platform=setup_platform)
    mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform)

    component = EntityComponent(_LOGGER, DOMAIN, hass)

    await component.async_setup({DOMAIN: {"platform": PLATFORM, "sensors": None}})
    await hass.async_block_till_done()

    assert component_setup.called

    assert f"{DOMAIN}.{PLATFORM}" in hass.config.components
    assert len(setup_called) == 1

    platform = async_get_platform_without_config_entry(hass, PLATFORM, DOMAIN)
    assert platform.platform_name == PLATFORM
    assert platform.domain == DOMAIN

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "helpers/reload_configuration.yaml",
    )
    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await async_reload_integration_platforms(hass, PLATFORM, [DOMAIN])

    assert len(setup_called) == 2

    existing_platforms = async_get_platforms(hass, PLATFORM)
    for existing_platform in existing_platforms:
        existing_platform.config_entry = "abc"
    assert not async_get_platform_without_config_entry(hass, PLATFORM, DOMAIN)
[ "async", "def", "test_reload_platform", "(", "hass", ")", ":", "component_setup", "=", "Mock", "(", "return_value", "=", "True", ")", "setup_called", "=", "[", "]", "async", "def", "setup_platform", "(", "*", "args", ")", ":", "setup_called", ".", "append", "(", "args", ")", "mock_integration", "(", "hass", ",", "MockModule", "(", "DOMAIN", ",", "setup", "=", "component_setup", ")", ")", "mock_integration", "(", "hass", ",", "MockModule", "(", "PLATFORM", ",", "dependencies", "=", "[", "DOMAIN", "]", ")", ")", "mock_platform", "=", "MockPlatform", "(", "async_setup_platform", "=", "setup_platform", ")", "mock_entity_platform", "(", "hass", ",", "f\"{DOMAIN}.{PLATFORM}\"", ",", "mock_platform", ")", "component", "=", "EntityComponent", "(", "_LOGGER", ",", "DOMAIN", ",", "hass", ")", "await", "component", ".", "async_setup", "(", "{", "DOMAIN", ":", "{", "\"platform\"", ":", "PLATFORM", ",", "\"sensors\"", ":", "None", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "component_setup", ".", "called", "assert", "f\"{DOMAIN}.{PLATFORM}\"", "in", "hass", ".", "config", ".", "components", "assert", "len", "(", "setup_called", ")", "==", "1", "platform", "=", "async_get_platform_without_config_entry", "(", "hass", ",", "PLATFORM", ",", "DOMAIN", ")", "assert", "platform", ".", "platform_name", "==", "PLATFORM", "assert", "platform", ".", "domain", "==", "DOMAIN", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"helpers/reload_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "async_reload_integration_platforms", "(", "hass", ",", "PLATFORM", ",", "[", "DOMAIN", "]", ")", "assert", "len", "(", "setup_called", ")", "==", "2", "existing_platforms", "=", "async_get_platforms", "(", "hass", ",", "PLATFORM", ")", "for", "existing_platform", "in", "existing_platforms", ":", "existing_platform", ".", "config_entry", "=", "\"abc\"", "assert", "not", "async_get_platform_without_config_entry", "(", "hass", ",", "PLATFORM", ",", "DOMAIN", ")" ]
[ 31, 0 ]
[ 72, 78 ]
python
en
['en', 'en', 'en']
True
test_setup_reload_service
(hass)
Test setting up a reload service.
Test setting up a reload service.
async def test_setup_reload_service(hass):
    """Test setting up a reload service."""
    component_setup = Mock(return_value=True)

    setup_called = []

    async def setup_platform(*args):
        setup_called.append(args)

    mock_integration(hass, MockModule(DOMAIN, setup=component_setup))
    mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN]))

    mock_platform = MockPlatform(async_setup_platform=setup_platform)
    mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform)

    component = EntityComponent(_LOGGER, DOMAIN, hass)

    await component.async_setup({DOMAIN: {"platform": PLATFORM, "sensors": None}})
    await hass.async_block_till_done()

    assert component_setup.called

    assert f"{DOMAIN}.{PLATFORM}" in hass.config.components
    assert len(setup_called) == 1

    await async_setup_reload_service(hass, PLATFORM, [DOMAIN])

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "helpers/reload_configuration.yaml",
    )

    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            PLATFORM,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(setup_called) == 2
[ "async", "def", "test_setup_reload_service", "(", "hass", ")", ":", "component_setup", "=", "Mock", "(", "return_value", "=", "True", ")", "setup_called", "=", "[", "]", "async", "def", "setup_platform", "(", "*", "args", ")", ":", "setup_called", ".", "append", "(", "args", ")", "mock_integration", "(", "hass", ",", "MockModule", "(", "DOMAIN", ",", "setup", "=", "component_setup", ")", ")", "mock_integration", "(", "hass", ",", "MockModule", "(", "PLATFORM", ",", "dependencies", "=", "[", "DOMAIN", "]", ")", ")", "mock_platform", "=", "MockPlatform", "(", "async_setup_platform", "=", "setup_platform", ")", "mock_entity_platform", "(", "hass", ",", "f\"{DOMAIN}.{PLATFORM}\"", ",", "mock_platform", ")", "component", "=", "EntityComponent", "(", "_LOGGER", ",", "DOMAIN", ",", "hass", ")", "await", "component", ".", "async_setup", "(", "{", "DOMAIN", ":", "{", "\"platform\"", ":", "PLATFORM", ",", "\"sensors\"", ":", "None", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "component_setup", ".", "called", "assert", "f\"{DOMAIN}.{PLATFORM}\"", "in", "hass", ".", "config", ".", "components", "assert", "len", "(", "setup_called", ")", "==", "1", "await", "async_setup_reload_service", "(", "hass", ",", "PLATFORM", ",", "[", "DOMAIN", "]", ")", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"helpers/reload_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "PLATFORM", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "setup_called", ")", "==", "2" ]
[ 75, 0 ]
[ 115, 33 ]
python
en
['en', 'en', 'en']
True
test_setup_reload_service_when_async_process_component_config_fails
(hass)
Test setting up a reload service with the config processing failing.
Test setting up a reload service with the config processing failing.
async def test_setup_reload_service_when_async_process_component_config_fails(hass):
    """Test setting up a reload service with the config processing failing."""
    component_setup = Mock(return_value=True)

    setup_called = []

    async def setup_platform(*args):
        setup_called.append(args)

    mock_integration(hass, MockModule(DOMAIN, setup=component_setup))
    mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN]))

    mock_platform = MockPlatform(async_setup_platform=setup_platform)
    mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform)

    component = EntityComponent(_LOGGER, DOMAIN, hass)

    await component.async_setup({DOMAIN: {"platform": PLATFORM, "sensors": None}})
    await hass.async_block_till_done()

    assert component_setup.called
    assert f"{DOMAIN}.{PLATFORM}" in hass.config.components
    assert len(setup_called) == 1

    await async_setup_reload_service(hass, PLATFORM, [DOMAIN])

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "helpers/reload_configuration.yaml",
    )
    with patch.object(config, "YAML_CONFIG_FILE", yaml_path), patch.object(
        config, "async_process_component_config", return_value=None
    ):
        await hass.services.async_call(
            PLATFORM,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(setup_called) == 1
[ "async", "def", "test_setup_reload_service_when_async_process_component_config_fails", "(", "hass", ")", ":", "component_setup", "=", "Mock", "(", "return_value", "=", "True", ")", "setup_called", "=", "[", "]", "async", "def", "setup_platform", "(", "*", "args", ")", ":", "setup_called", ".", "append", "(", "args", ")", "mock_integration", "(", "hass", ",", "MockModule", "(", "DOMAIN", ",", "setup", "=", "component_setup", ")", ")", "mock_integration", "(", "hass", ",", "MockModule", "(", "PLATFORM", ",", "dependencies", "=", "[", "DOMAIN", "]", ")", ")", "mock_platform", "=", "MockPlatform", "(", "async_setup_platform", "=", "setup_platform", ")", "mock_entity_platform", "(", "hass", ",", "f\"{DOMAIN}.{PLATFORM}\"", ",", "mock_platform", ")", "component", "=", "EntityComponent", "(", "_LOGGER", ",", "DOMAIN", ",", "hass", ")", "await", "component", ".", "async_setup", "(", "{", "DOMAIN", ":", "{", "\"platform\"", ":", "PLATFORM", ",", "\"sensors\"", ":", "None", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "component_setup", ".", "called", "assert", "f\"{DOMAIN}.{PLATFORM}\"", "in", "hass", ".", "config", ".", "components", "assert", "len", "(", "setup_called", ")", "==", "1", "await", "async_setup_reload_service", "(", "hass", ",", "PLATFORM", ",", "[", "DOMAIN", "]", ")", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"helpers/reload_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ",", "patch", ".", "object", "(", "config", ",", "\"async_process_component_config\"", ",", "return_value", "=", "None", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "PLATFORM", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "setup_called", ")", "==", "1" ]
[ 118, 0 ]
[ 160, 33 ]
python
en
['en', 'en', 'en']
True
test_setup_reload_service_with_platform_that_provides_async_reset_platform
( hass, )
Test setting up a reload service using a platform that has its own async_reset_platform.
Test setting up a reload service using a platform that has its own async_reset_platform.
async def test_setup_reload_service_with_platform_that_provides_async_reset_platform(
    hass,
):
    """Test setting up a reload service using a platform that has its own async_reset_platform."""
    component_setup = AsyncMock(return_value=True)

    setup_called = []
    async_reset_platform_called = []

    async def setup_platform(*args):
        setup_called.append(args)

    async def async_reset_platform(*args):
        async_reset_platform_called.append(args)

    mock_integration(hass, MockModule(DOMAIN, async_setup=component_setup))
    integration = await async_get_integration(hass, DOMAIN)
    integration.get_component().async_reset_platform = async_reset_platform

    mock_integration(hass, MockModule(PLATFORM, dependencies=[DOMAIN]))

    mock_platform = MockPlatform(async_setup_platform=setup_platform)
    mock_entity_platform(hass, f"{DOMAIN}.{PLATFORM}", mock_platform)

    component = EntityComponent(_LOGGER, DOMAIN, hass)

    await component.async_setup({DOMAIN: {"platform": PLATFORM, "name": "xyz"}})
    await hass.async_block_till_done()

    assert component_setup.called
    assert f"{DOMAIN}.{PLATFORM}" in hass.config.components
    assert len(setup_called) == 1

    await async_setup_reload_service(hass, PLATFORM, [DOMAIN])

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "helpers/reload_configuration.yaml",
    )
    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            PLATFORM,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()

    assert len(setup_called) == 1
    assert len(async_reset_platform_called) == 1
[ "async", "def", "test_setup_reload_service_with_platform_that_provides_async_reset_platform", "(", "hass", ",", ")", ":", "component_setup", "=", "AsyncMock", "(", "return_value", "=", "True", ")", "setup_called", "=", "[", "]", "async_reset_platform_called", "=", "[", "]", "async", "def", "setup_platform", "(", "*", "args", ")", ":", "setup_called", ".", "append", "(", "args", ")", "async", "def", "async_reset_platform", "(", "*", "args", ")", ":", "async_reset_platform_called", ".", "append", "(", "args", ")", "mock_integration", "(", "hass", ",", "MockModule", "(", "DOMAIN", ",", "async_setup", "=", "component_setup", ")", ")", "integration", "=", "await", "async_get_integration", "(", "hass", ",", "DOMAIN", ")", "integration", ".", "get_component", "(", ")", ".", "async_reset_platform", "=", "async_reset_platform", "mock_integration", "(", "hass", ",", "MockModule", "(", "PLATFORM", ",", "dependencies", "=", "[", "DOMAIN", "]", ")", ")", "mock_platform", "=", "MockPlatform", "(", "async_setup_platform", "=", "setup_platform", ")", "mock_entity_platform", "(", "hass", ",", "f\"{DOMAIN}.{PLATFORM}\"", ",", "mock_platform", ")", "component", "=", "EntityComponent", "(", "_LOGGER", ",", "DOMAIN", ",", "hass", ")", "await", "component", ".", "async_setup", "(", "{", "DOMAIN", ":", "{", "\"platform\"", ":", "PLATFORM", ",", "\"name\"", ":", "\"xyz\"", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "component_setup", ".", "called", "assert", "f\"{DOMAIN}.{PLATFORM}\"", "in", "hass", ".", "config", ".", "components", "assert", "len", "(", "setup_called", ")", "==", "1", "await", "async_setup_reload_service", "(", "hass", ",", "PLATFORM", ",", "[", "DOMAIN", "]", ")", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"helpers/reload_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "PLATFORM", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "setup_called", ")", "==", "1", "assert", "len", "(", "async_reset_platform_called", ")", "==", "1" ]
[ 163, 0 ]
[ 213, 48 ]
python
en
['en', 'en', 'en']
True
test_async_integration_yaml_config
(hass)
Test loading yaml config for an integration.
Test loading yaml config for an integration.
async def test_async_integration_yaml_config(hass):
    """Test loading yaml config for an integration."""
    mock_integration(hass, MockModule(DOMAIN))

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        f"helpers/{DOMAIN}_configuration.yaml",
    )
    with patch.object(config, "YAML_CONFIG_FILE", yaml_path):
        processed_config = await async_integration_yaml_config(hass, DOMAIN)

    assert processed_config == {DOMAIN: [{"name": "one"}, {"name": "two"}]}
[ "async", "def", "test_async_integration_yaml_config", "(", "hass", ")", ":", "mock_integration", "(", "hass", ",", "MockModule", "(", "DOMAIN", ")", ")", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "f\"helpers/{DOMAIN}_configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "processed_config", "=", "await", "async_integration_yaml_config", "(", "hass", ",", "DOMAIN", ")", "assert", "processed_config", "==", "{", "DOMAIN", ":", "[", "{", "\"name\"", ":", "\"one\"", "}", ",", "{", "\"name\"", ":", "\"two\"", "}", "]", "}" ]
[ 216, 0 ]
[ 228, 75 ]
python
en
['en', 'en', 'en']
True
test_async_integration_missing_yaml_config
(hass)
Test loading missing yaml config for an integration.
Test loading missing yaml config for an integration.
async def test_async_integration_missing_yaml_config(hass):
    """Test loading missing yaml config for an integration."""
    mock_integration(hass, MockModule(DOMAIN))

    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "helpers/does_not_exist_configuration.yaml",
    )
    with pytest.raises(FileNotFoundError), patch.object(
        config, "YAML_CONFIG_FILE", yaml_path
    ):
        await async_integration_yaml_config(hass, DOMAIN)
[ "async", "def", "test_async_integration_missing_yaml_config", "(", "hass", ")", ":", "mock_integration", "(", "hass", ",", "MockModule", "(", "DOMAIN", ")", ")", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"helpers/does_not_exist_configuration.yaml\"", ",", ")", "with", "pytest", ".", "raises", "(", "FileNotFoundError", ")", ",", "patch", ".", "object", "(", "config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "async_integration_yaml_config", "(", "hass", ",", "DOMAIN", ")" ]
[ 231, 0 ]
[ 243, 57 ]
python
en
['en', 'en', 'en']
True
test_reproducing_states
(hass)
Test reproducing input_boolean states.
Test reproducing input_boolean states.
async def test_reproducing_states(hass):
    """Test reproducing input_boolean states."""
    assert await async_setup_component(
        hass,
        "input_boolean",
        {
            "input_boolean": {
                "initial_on": {"initial": True},
                "initial_off": {"initial": False},
            }
        },
    )

    await hass.helpers.state.async_reproduce_state(
        [
            State("input_boolean.initial_on", "off"),
            State("input_boolean.initial_off", "on"),
            # Should not raise
            State("input_boolean.non_existing", "on"),
        ],
    )

    assert hass.states.get("input_boolean.initial_off").state == "on"
    assert hass.states.get("input_boolean.initial_on").state == "off"

    await hass.helpers.state.async_reproduce_state(
        [
            # Test invalid state
            State("input_boolean.initial_on", "invalid_state"),
            # Set to state it already is.
            State("input_boolean.initial_off", "on"),
        ],
    )

    assert hass.states.get("input_boolean.initial_on").state == "off"
    assert hass.states.get("input_boolean.initial_off").state == "on"
[ "async", "def", "test_reproducing_states", "(", "hass", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "\"input_boolean\"", ",", "{", "\"input_boolean\"", ":", "{", "\"initial_on\"", ":", "{", "\"initial\"", ":", "True", "}", ",", "\"initial_off\"", ":", "{", "\"initial\"", ":", "False", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "helpers", ".", "state", ".", "async_reproduce_state", "(", "[", "State", "(", "\"input_boolean.initial_on\"", ",", "\"off\"", ")", ",", "State", "(", "\"input_boolean.initial_off\"", ",", "\"on\"", ")", ",", "# Should not raise", "State", "(", "\"input_boolean.non_existing\"", ",", "\"on\"", ")", ",", "]", ",", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"input_boolean.initial_off\"", ")", ".", "state", "==", "\"on\"", "assert", "hass", ".", "states", ".", "get", "(", "\"input_boolean.initial_on\"", ")", ".", "state", "==", "\"off\"", "await", "hass", ".", "helpers", ".", "state", ".", "async_reproduce_state", "(", "[", "# Test invalid state", "State", "(", "\"input_boolean.initial_on\"", ",", "\"invalid_state\"", ")", ",", "# Set to state it already is.", "State", "(", "\"input_boolean.initial_off\"", ",", "\"on\"", ")", ",", "]", ",", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"input_boolean.initial_on\"", ")", ".", "state", "==", "\"off\"", "assert", "hass", ".", "states", ".", "get", "(", "\"input_boolean.initial_off\"", ")", ".", "state", "==", "\"on\"" ]
[ 5, 0 ]
[ 38, 69 ]
python
en
['en', 'en', 'en']
True
async_get_code
(hass, aiohttp_client)
Return authorization code for link user tests.
Return authorization code for link user tests.
async def async_get_code(hass, aiohttp_client):
    """Return authorization code for link user tests."""
    config = [
        {
            "name": "Example",
            "type": "insecure_example",
            "users": [
                {"username": "test-user", "password": "test-pass", "name": "Test Name"}
            ],
        },
        {
            "name": "Example",
            "id": "2nd auth",
            "type": "insecure_example",
            "users": [
                {"username": "2nd-user", "password": "2nd-pass", "name": "2nd Name"}
            ],
        },
    ]
    client = await async_setup_auth(hass, aiohttp_client, config)
    user = await hass.auth.async_create_user(name="Hello")
    refresh_token = await hass.auth.async_create_refresh_token(user, CLIENT_ID)
    access_token = hass.auth.async_create_access_token(refresh_token)

    # Now authenticate with the 2nd flow
    resp = await client.post(
        "/auth/login_flow",
        json={
            "client_id": CLIENT_ID,
            "handler": ["insecure_example", "2nd auth"],
            "redirect_uri": CLIENT_REDIRECT_URI,
            "type": "link_user",
        },
    )
    assert resp.status == 200
    step = await resp.json()

    resp = await client.post(
        f"/auth/login_flow/{step['flow_id']}",
        json={"client_id": CLIENT_ID, "username": "2nd-user", "password": "2nd-pass"},
    )
    assert resp.status == 200
    step = await resp.json()

    return {
        "user": user,
        "code": step["result"],
        "client": client,
        "access_token": access_token,
    }
[ "async", "def", "async_get_code", "(", "hass", ",", "aiohttp_client", ")", ":", "config", "=", "[", "{", "\"name\"", ":", "\"Example\"", ",", "\"type\"", ":", "\"insecure_example\"", ",", "\"users\"", ":", "[", "{", "\"username\"", ":", "\"test-user\"", ",", "\"password\"", ":", "\"test-pass\"", ",", "\"name\"", ":", "\"Test Name\"", "}", "]", ",", "}", ",", "{", "\"name\"", ":", "\"Example\"", ",", "\"id\"", ":", "\"2nd auth\"", ",", "\"type\"", ":", "\"insecure_example\"", ",", "\"users\"", ":", "[", "{", "\"username\"", ":", "\"2nd-user\"", ",", "\"password\"", ":", "\"2nd-pass\"", ",", "\"name\"", ":", "\"2nd Name\"", "}", "]", ",", "}", ",", "]", "client", "=", "await", "async_setup_auth", "(", "hass", ",", "aiohttp_client", ",", "config", ")", "user", "=", "await", "hass", ".", "auth", ".", "async_create_user", "(", "name", "=", "\"Hello\"", ")", "refresh_token", "=", "await", "hass", ".", "auth", ".", "async_create_refresh_token", "(", "user", ",", "CLIENT_ID", ")", "access_token", "=", "hass", ".", "auth", ".", "async_create_access_token", "(", "refresh_token", ")", "# Now authenticate with the 2nd flow", "resp", "=", "await", "client", ".", "post", "(", "\"/auth/login_flow\"", ",", "json", "=", "{", "\"client_id\"", ":", "CLIENT_ID", ",", "\"handler\"", ":", "[", "\"insecure_example\"", ",", "\"2nd auth\"", "]", ",", "\"redirect_uri\"", ":", "CLIENT_REDIRECT_URI", ",", "\"type\"", ":", "\"link_user\"", ",", "}", ",", ")", "assert", "resp", ".", "status", "==", "200", "step", "=", "await", "resp", ".", "json", "(", ")", "resp", "=", "await", "client", ".", "post", "(", "f\"/auth/login_flow/{step['flow_id']}\"", ",", "json", "=", "{", "\"client_id\"", ":", "CLIENT_ID", ",", "\"username\"", ":", "\"2nd-user\"", ",", "\"password\"", ":", "\"2nd-pass\"", "}", ",", ")", "assert", "resp", ".", "status", "==", "200", "step", "=", "await", "resp", ".", "json", "(", ")", "return", "{", "\"user\"", ":", "user", ",", "\"code\"", ":", "step", "[", "\"result\"", "]", ",", "\"client\"", ":", "client", ",", "\"access_token\"", ":", "access_token", ",", "}" ]
[ 6, 0 ]
[ 56, 5 ]
python
en
['nb', 'en', 'en']
True
test_link_user
(hass, aiohttp_client)
Test linking a user to new credentials.
Test linking a user to new credentials.
async def test_link_user(hass, aiohttp_client):
    """Test linking a user to new credentials."""
    info = await async_get_code(hass, aiohttp_client)
    client = info["client"]
    code = info["code"]

    # Link user
    resp = await client.post(
        "/auth/link_user",
        json={"client_id": CLIENT_ID, "code": code},
        headers={"authorization": f"Bearer {info['access_token']}"},
    )

    assert resp.status == 200
    assert len(info["user"].credentials) == 1
[ "async", "def", "test_link_user", "(", "hass", ",", "aiohttp_client", ")", ":", "info", "=", "await", "async_get_code", "(", "hass", ",", "aiohttp_client", ")", "client", "=", "info", "[", "\"client\"", "]", "code", "=", "info", "[", "\"code\"", "]", "# Link user", "resp", "=", "await", "client", ".", "post", "(", "\"/auth/link_user\"", ",", "json", "=", "{", "\"client_id\"", ":", "CLIENT_ID", ",", "\"code\"", ":", "code", "}", ",", "headers", "=", "{", "\"authorization\"", ":", "f\"Bearer {info['access_token']}\"", "}", ",", ")", "assert", "resp", ".", "status", "==", "200", "assert", "len", "(", "info", "[", "\"user\"", "]", ".", "credentials", ")", "==", "1" ]
[ 59, 0 ]
[ 73, 45 ]
python
en
['en', 'en', 'en']
True
test_link_user_invalid_client_id
(hass, aiohttp_client)
Test linking a user to new credentials.
Test linking a user to new credentials.
async def test_link_user_invalid_client_id(hass, aiohttp_client):
    """Test linking a user to new credentials."""
    info = await async_get_code(hass, aiohttp_client)
    client = info["client"]
    code = info["code"]

    # Link user
    resp = await client.post(
        "/auth/link_user",
        json={"client_id": "invalid", "code": code},
        headers={"authorization": f"Bearer {info['access_token']}"},
    )

    assert resp.status == 400
    assert len(info["user"].credentials) == 0
[ "async", "def", "test_link_user_invalid_client_id", "(", "hass", ",", "aiohttp_client", ")", ":", "info", "=", "await", "async_get_code", "(", "hass", ",", "aiohttp_client", ")", "client", "=", "info", "[", "\"client\"", "]", "code", "=", "info", "[", "\"code\"", "]", "# Link user", "resp", "=", "await", "client", ".", "post", "(", "\"/auth/link_user\"", ",", "json", "=", "{", "\"client_id\"", ":", "\"invalid\"", ",", "\"code\"", ":", "code", "}", ",", "headers", "=", "{", "\"authorization\"", ":", "f\"Bearer {info['access_token']}\"", "}", ",", ")", "assert", "resp", ".", "status", "==", "400", "assert", "len", "(", "info", "[", "\"user\"", "]", ".", "credentials", ")", "==", "0" ]
[ 76, 0 ]
[ 90, 45 ]
python
en
['en', 'en', 'en']
True
test_link_user_invalid_code
(hass, aiohttp_client)
Test linking a user to new credentials.
Test linking a user to new credentials.
async def test_link_user_invalid_code(hass, aiohttp_client):
    """Test linking a user to new credentials."""
    info = await async_get_code(hass, aiohttp_client)
    client = info["client"]

    # Link user
    resp = await client.post(
        "/auth/link_user",
        json={"client_id": CLIENT_ID, "code": "invalid"},
        headers={"authorization": f"Bearer {info['access_token']}"},
    )

    assert resp.status == 400
    assert len(info["user"].credentials) == 0
[ "async", "def", "test_link_user_invalid_code", "(", "hass", ",", "aiohttp_client", ")", ":", "info", "=", "await", "async_get_code", "(", "hass", ",", "aiohttp_client", ")", "client", "=", "info", "[", "\"client\"", "]", "# Link user", "resp", "=", "await", "client", ".", "post", "(", "\"/auth/link_user\"", ",", "json", "=", "{", "\"client_id\"", ":", "CLIENT_ID", ",", "\"code\"", ":", "\"invalid\"", "}", ",", "headers", "=", "{", "\"authorization\"", ":", "f\"Bearer {info['access_token']}\"", "}", ",", ")", "assert", "resp", ".", "status", "==", "400", "assert", "len", "(", "info", "[", "\"user\"", "]", ".", "credentials", ")", "==", "0" ]
[ 93, 0 ]
[ 106, 45 ]
python
en
['en', 'en', 'en']
True
test_link_user_invalid_auth
(hass, aiohttp_client)
Test linking a user to new credentials.
Test linking a user to new credentials.
async def test_link_user_invalid_auth(hass, aiohttp_client):
    """Test linking a user to new credentials."""
    info = await async_get_code(hass, aiohttp_client)
    client = info["client"]
    code = info["code"]

    # Link user
    resp = await client.post(
        "/auth/link_user",
        json={"client_id": CLIENT_ID, "code": code},
        headers={"authorization": "Bearer invalid"},
    )

    assert resp.status == 401
    assert len(info["user"].credentials) == 0
[ "async", "def", "test_link_user_invalid_auth", "(", "hass", ",", "aiohttp_client", ")", ":", "info", "=", "await", "async_get_code", "(", "hass", ",", "aiohttp_client", ")", "client", "=", "info", "[", "\"client\"", "]", "code", "=", "info", "[", "\"code\"", "]", "# Link user", "resp", "=", "await", "client", ".", "post", "(", "\"/auth/link_user\"", ",", "json", "=", "{", "\"client_id\"", ":", "CLIENT_ID", ",", "\"code\"", ":", "code", "}", ",", "headers", "=", "{", "\"authorization\"", ":", "\"Bearer invalid\"", "}", ",", ")", "assert", "resp", ".", "status", "==", "401", "assert", "len", "(", "info", "[", "\"user\"", "]", ".", "credentials", ")", "==", "0" ]
[ 109, 0 ]
[ 123, 45 ]
python
en
['en', 'en', 'en']
True
test_form
(hass, requests_mock)
Test we get the form.
Test we get the form.
async def test_form(hass, requests_mock):
    """Test we get the form."""
    hass.config.latitude = TEST_LATITUDE_WAVERTREE
    hass.config.longitude = TEST_LONGITUDE_WAVERTREE

    # all metoffice test data encapsulated in here
    mock_json = json.loads(load_fixture("metoffice.json"))
    all_sites = json.dumps(mock_json["all_sites"])
    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)

    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] == {}

    with patch(
        "homeassistant.components.metoffice.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.metoffice.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"api_key": TEST_API_KEY}
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == TEST_SITE_NAME_WAVERTREE
    assert result2["data"] == {
        "api_key": TEST_API_KEY,
        "latitude": TEST_LATITUDE_WAVERTREE,
        "longitude": TEST_LONGITUDE_WAVERTREE,
        "name": TEST_SITE_NAME_WAVERTREE,
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_form", "(", "hass", ",", "requests_mock", ")", ":", "hass", ".", "config", ".", "latitude", "=", "TEST_LATITUDE_WAVERTREE", "hass", ".", "config", ".", "longitude", "=", "TEST_LONGITUDE_WAVERTREE", "# all metoffice test data encapsulated in here", "mock_json", "=", "json", ".", "loads", "(", "load_fixture", "(", "\"metoffice.json\"", ")", ")", "all_sites", "=", "json", ".", "dumps", "(", "mock_json", "[", "\"all_sites\"", "]", ")", "requests_mock", ".", "get", "(", "\"/public/data/val/wxfcs/all/json/sitelist/\"", ",", "text", "=", "all_sites", ")", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"homeassistant.components.metoffice.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.metoffice.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"api_key\"", ":", "TEST_API_KEY", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "TEST_SITE_NAME_WAVERTREE", "assert", "result2", "[", "\"data\"", "]", "==", "{", "\"api_key\"", ":", "TEST_API_KEY", ",", "\"latitude\"", ":", "TEST_LATITUDE_WAVERTREE", ",", "\"longitude\"", ":", "TEST_LONGITUDE_WAVERTREE", ",", "\"name\"", ":", "TEST_SITE_NAME_WAVERTREE", ",", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 18, 0 ]
[ 55, 48 ]
python
en
['en', 'en', 'en']
True
test_form_already_configured
(hass, requests_mock)
Test we handle duplicate entries.
Test we handle duplicate entries.
async def test_form_already_configured(hass, requests_mock):
    """Test we handle duplicate entries."""
    hass.config.latitude = TEST_LATITUDE_WAVERTREE
    hass.config.longitude = TEST_LONGITUDE_WAVERTREE

    # all metoffice test data encapsulated in here
    mock_json = json.loads(load_fixture("metoffice.json"))
    all_sites = json.dumps(mock_json["all_sites"])
    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
    requests_mock.get(
        "/public/data/val/wxfcs/all/json/354107?res=3hourly",
        text="",
    )

    MockConfigEntry(
        domain=DOMAIN,
        unique_id=f"{TEST_LATITUDE_WAVERTREE}_{TEST_LONGITUDE_WAVERTREE}",
        data=METOFFICE_CONFIG_WAVERTREE,
    ).add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": config_entries.SOURCE_USER},
        data=METOFFICE_CONFIG_WAVERTREE,
    )

    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"
[ "async", "def", "test_form_already_configured", "(", "hass", ",", "requests_mock", ")", ":", "hass", ".", "config", ".", "latitude", "=", "TEST_LATITUDE_WAVERTREE", "hass", ".", "config", ".", "longitude", "=", "TEST_LONGITUDE_WAVERTREE", "# all metoffice test data encapsulated in here", "mock_json", "=", "json", ".", "loads", "(", "load_fixture", "(", "\"metoffice.json\"", ")", ")", "all_sites", "=", "json", ".", "dumps", "(", "mock_json", "[", "\"all_sites\"", "]", ")", "requests_mock", ".", "get", "(", "\"/public/data/val/wxfcs/all/json/sitelist/\"", ",", "text", "=", "all_sites", ")", "requests_mock", ".", "get", "(", "\"/public/data/val/wxfcs/all/json/354107?res=3hourly\"", ",", "text", "=", "\"\"", ",", ")", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "f\"{TEST_LATITUDE_WAVERTREE}_{TEST_LONGITUDE_WAVERTREE}\"", ",", "data", "=", "METOFFICE_CONFIG_WAVERTREE", ",", ")", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ",", "data", "=", "METOFFICE_CONFIG_WAVERTREE", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"" ]
[ 58, 0 ]
[ 87, 51 ]
python
en
['fr', 'en', 'en']
True
test_form_cannot_connect
(hass, requests_mock)
Test we handle cannot connect error.
Test we handle cannot connect error.
async def test_form_cannot_connect(hass, requests_mock):
    """Test we handle cannot connect error."""
    hass.config.latitude = TEST_LATITUDE_WAVERTREE
    hass.config.longitude = TEST_LONGITUDE_WAVERTREE

    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="")

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    result2 = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {"api_key": TEST_API_KEY},
    )

    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "cannot_connect"}
[ "async", "def", "test_form_cannot_connect", "(", "hass", ",", "requests_mock", ")", ":", "hass", ".", "config", ".", "latitude", "=", "TEST_LATITUDE_WAVERTREE", "hass", ".", "config", ".", "longitude", "=", "TEST_LONGITUDE_WAVERTREE", "requests_mock", ".", "get", "(", "\"/public/data/val/wxfcs/all/json/sitelist/\"", ",", "text", "=", "\"\"", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"api_key\"", ":", "TEST_API_KEY", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"cannot_connect\"", "}" ]
[ 90, 0 ]
[ 107, 58 ]
python
en
['en', 'en', 'en']
True
test_form_unknown_error
(hass, mock_simple_manager_fail)
Test we handle unknown error.
Test we handle unknown error.
async def test_form_unknown_error(hass, mock_simple_manager_fail):
    """Test we handle unknown error."""
    mock_instance = mock_simple_manager_fail.return_value
    mock_instance.get_nearest_forecast_site.side_effect = ValueError

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    result2 = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {"api_key": TEST_API_KEY},
    )

    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "unknown"}
[ "async", "def", "test_form_unknown_error", "(", "hass", ",", "mock_simple_manager_fail", ")", ":", "mock_instance", "=", "mock_simple_manager_fail", ".", "return_value", "mock_instance", ".", "get_nearest_forecast_site", ".", "side_effect", "=", "ValueError", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"api_key\"", ":", "TEST_API_KEY", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"unknown\"", "}" ]
[ 110, 0 ]
[ 125, 51 ]
python
en
['en', 'de', 'en']
True
async_get_actions
(hass: HomeAssistant, device_id: str)
List device actions for Climate devices.
List device actions for Climate devices.
async def async_get_actions(hass: HomeAssistant, device_id: str) -> List[dict]:
    """List device actions for Climate devices."""
    registry = await entity_registry.async_get_registry(hass)
    actions = []

    # Get all the integrations entities for this device
    for entry in entity_registry.async_entries_for_device(registry, device_id):
        if entry.domain != DOMAIN:
            continue

        state = hass.states.get(entry.entity_id)

        # We need a state or else we can't populate the HVAC and preset modes.
        if state is None:
            continue

        actions.append(
            {
                CONF_DEVICE_ID: device_id,
                CONF_DOMAIN: DOMAIN,
                CONF_ENTITY_ID: entry.entity_id,
                CONF_TYPE: "set_hvac_mode",
            }
        )
        if state.attributes[ATTR_SUPPORTED_FEATURES] & const.SUPPORT_PRESET_MODE:
            actions.append(
                {
                    CONF_DEVICE_ID: device_id,
                    CONF_DOMAIN: DOMAIN,
                    CONF_ENTITY_ID: entry.entity_id,
                    CONF_TYPE: "set_preset_mode",
                }
            )

    return actions
[ "async", "def", "async_get_actions", "(", "hass", ":", "HomeAssistant", ",", "device_id", ":", "str", ")", "->", "List", "[", "dict", "]", ":", "registry", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "actions", "=", "[", "]", "# Get all the integrations entities for this device", "for", "entry", "in", "entity_registry", ".", "async_entries_for_device", "(", "registry", ",", "device_id", ")", ":", "if", "entry", ".", "domain", "!=", "DOMAIN", ":", "continue", "state", "=", "hass", ".", "states", ".", "get", "(", "entry", ".", "entity_id", ")", "# We need a state or else we can't populate the HVAC and preset modes.", "if", "state", "is", "None", ":", "continue", "actions", ".", "append", "(", "{", "CONF_DEVICE_ID", ":", "device_id", ",", "CONF_DOMAIN", ":", "DOMAIN", ",", "CONF_ENTITY_ID", ":", "entry", ".", "entity_id", ",", "CONF_TYPE", ":", "\"set_hvac_mode\"", ",", "}", ")", "if", "state", ".", "attributes", "[", "ATTR_SUPPORTED_FEATURES", "]", "&", "const", ".", "SUPPORT_PRESET_MODE", ":", "actions", ".", "append", "(", "{", "CONF_DEVICE_ID", ":", "device_id", ",", "CONF_DOMAIN", ":", "DOMAIN", ",", "CONF_ENTITY_ID", ":", "entry", ".", "entity_id", ",", "CONF_TYPE", ":", "\"set_preset_mode\"", ",", "}", ")", "return", "actions" ]
[ 40, 0 ]
[ 74, 18 ]
python
en
['fr', 'en', 'en']
True
async_call_action_from_config
( hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context] )
Execute a device action.
Execute a device action.
async def async_call_action_from_config(
    hass: HomeAssistant, config: dict, variables: dict, context: Optional[Context]
) -> None:
    """Execute a device action."""
    config = ACTION_SCHEMA(config)

    service_data = {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]}

    if config[CONF_TYPE] == "set_hvac_mode":
        service = const.SERVICE_SET_HVAC_MODE
        service_data[const.ATTR_HVAC_MODE] = config[const.ATTR_HVAC_MODE]
    elif config[CONF_TYPE] == "set_preset_mode":
        service = const.SERVICE_SET_PRESET_MODE
        service_data[const.ATTR_PRESET_MODE] = config[const.ATTR_PRESET_MODE]

    await hass.services.async_call(
        DOMAIN, service, service_data, blocking=True, context=context
    )
[ "async", "def", "async_call_action_from_config", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "dict", ",", "variables", ":", "dict", ",", "context", ":", "Optional", "[", "Context", "]", ")", "->", "None", ":", "config", "=", "ACTION_SCHEMA", "(", "config", ")", "service_data", "=", "{", "ATTR_ENTITY_ID", ":", "config", "[", "CONF_ENTITY_ID", "]", "}", "if", "config", "[", "CONF_TYPE", "]", "==", "\"set_hvac_mode\"", ":", "service", "=", "const", ".", "SERVICE_SET_HVAC_MODE", "service_data", "[", "const", ".", "ATTR_HVAC_MODE", "]", "=", "config", "[", "const", ".", "ATTR_HVAC_MODE", "]", "elif", "config", "[", "CONF_TYPE", "]", "==", "\"set_preset_mode\"", ":", "service", "=", "const", ".", "SERVICE_SET_PRESET_MODE", "service_data", "[", "const", ".", "ATTR_PRESET_MODE", "]", "=", "config", "[", "const", ".", "ATTR_PRESET_MODE", "]", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "service", ",", "service_data", ",", "blocking", "=", "True", ",", "context", "=", "context", ")" ]
[ 77, 0 ]
[ 94, 5 ]
python
en
['ro', 'en', 'en']
True
async_get_action_capabilities
(hass, config)
List action capabilities.
List action capabilities.
async def async_get_action_capabilities(hass, config):
    """List action capabilities."""
    state = hass.states.get(config[CONF_ENTITY_ID])
    action_type = config[CONF_TYPE]

    fields = {}

    if action_type == "set_hvac_mode":
        hvac_modes = state.attributes[const.ATTR_HVAC_MODES] if state else []
        fields[vol.Required(const.ATTR_HVAC_MODE)] = vol.In(hvac_modes)
    elif action_type == "set_preset_mode":
        if state:
            preset_modes = state.attributes.get(const.ATTR_PRESET_MODES, [])
        else:
            preset_modes = []
        fields[vol.Required(const.ATTR_PRESET_MODE)] = vol.In(preset_modes)

    return {"extra_fields": vol.Schema(fields)}
[ "async", "def", "async_get_action_capabilities", "(", "hass", ",", "config", ")", ":", "state", "=", "hass", ".", "states", ".", "get", "(", "config", "[", "CONF_ENTITY_ID", "]", ")", "action_type", "=", "config", "[", "CONF_TYPE", "]", "fields", "=", "{", "}", "if", "action_type", "==", "\"set_hvac_mode\"", ":", "hvac_modes", "=", "state", ".", "attributes", "[", "const", ".", "ATTR_HVAC_MODES", "]", "if", "state", "else", "[", "]", "fields", "[", "vol", ".", "Required", "(", "const", ".", "ATTR_HVAC_MODE", ")", "]", "=", "vol", ".", "In", "(", "hvac_modes", ")", "elif", "action_type", "==", "\"set_preset_mode\"", ":", "if", "state", ":", "preset_modes", "=", "state", ".", "attributes", ".", "get", "(", "const", ".", "ATTR_PRESET_MODES", ",", "[", "]", ")", "else", ":", "preset_modes", "=", "[", "]", "fields", "[", "vol", ".", "Required", "(", "const", ".", "ATTR_PRESET_MODE", ")", "]", "=", "vol", ".", "In", "(", "preset_modes", ")", "return", "{", "\"extra_fields\"", ":", "vol", ".", "Schema", "(", "fields", ")", "}" ]
[ 97, 0 ]
[ 114, 47 ]
python
en
['ro', 'ga', 'en']
False
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Perform the setup for Envisalink sensor devices.
Perform the setup for Envisalink sensor devices.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Perform the setup for Envisalink sensor devices."""
    configured_partitions = discovery_info["partitions"]

    devices = []
    for part_num in configured_partitions:
        device_config_data = PARTITION_SCHEMA(configured_partitions[part_num])
        device = EnvisalinkSensor(
            hass,
            device_config_data[CONF_PARTITIONNAME],
            part_num,
            hass.data[DATA_EVL].alarm_state["partition"][part_num],
            hass.data[DATA_EVL],
        )
        devices.append(device)

    async_add_entities(devices)
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "configured_partitions", "=", "discovery_info", "[", "\"partitions\"", "]", "devices", "=", "[", "]", "for", "part_num", "in", "configured_partitions", ":", "device_config_data", "=", "PARTITION_SCHEMA", "(", "configured_partitions", "[", "part_num", "]", ")", "device", "=", "EnvisalinkSensor", "(", "hass", ",", "device_config_data", "[", "CONF_PARTITIONNAME", "]", ",", "part_num", ",", "hass", ".", "data", "[", "DATA_EVL", "]", ".", "alarm_state", "[", "\"partition\"", "]", "[", "part_num", "]", ",", "hass", ".", "data", "[", "DATA_EVL", "]", ",", ")", "devices", ".", "append", "(", "device", ")", "async_add_entities", "(", "devices", ")" ]
[ 19, 0 ]
[ 36, 31 ]
python
en
['en', 'da', 'en']
True
EnvisalinkSensor.__init__
(self, hass, partition_name, partition_number, info, controller)
Initialize the sensor.
Initialize the sensor.
def __init__(self, hass, partition_name, partition_number, info, controller):
    """Initialize the sensor."""
    self._icon = "mdi:alarm"
    self._partition_number = partition_number

    _LOGGER.debug("Setting up sensor for partition: %s", partition_name)
    super().__init__(f"{partition_name} Keypad", info, controller)
[ "def", "__init__", "(", "self", ",", "hass", ",", "partition_name", ",", "partition_number", ",", "info", ",", "controller", ")", ":", "self", ".", "_icon", "=", "\"mdi:alarm\"", "self", ".", "_partition_number", "=", "partition_number", "_LOGGER", ".", "debug", "(", "\"Setting up sensor for partition: %s\"", ",", "partition_name", ")", "super", "(", ")", ".", "__init__", "(", "f\"{partition_name} Keypad\"", ",", "info", ",", "controller", ")" ]
[ 42, 4 ]
[ 48, 70 ]
python
en
['en', 'en', 'en']
True
EnvisalinkSensor.async_added_to_hass
(self)
Register callbacks.
Register callbacks.
async def async_added_to_hass(self):
    """Register callbacks."""
    async_dispatcher_connect(self.hass, SIGNAL_KEYPAD_UPDATE, self._update_callback)
    async_dispatcher_connect(
        self.hass, SIGNAL_PARTITION_UPDATE, self._update_callback
    )
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "SIGNAL_KEYPAD_UPDATE", ",", "self", ".", "_update_callback", ")", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "SIGNAL_PARTITION_UPDATE", ",", "self", ".", "_update_callback", ")" ]
[ 50, 4 ]
[ 55, 9 ]
python
en
['en', 'no', 'en']
False
EnvisalinkSensor.icon
(self)
Return the icon if any.
Return the icon if any.
def icon(self):
    """Return the icon if any."""
    return self._icon
[ "def", "icon", "(", "self", ")", ":", "return", "self", ".", "_icon" ]
[ 58, 4 ]
[ 60, 25 ]
python
en
['en', 'en', 'en']
True
EnvisalinkSensor.state
(self)
Return the overall state.
Return the overall state.
def state(self):
    """Return the overall state."""
    return self._info["status"]["alpha"]
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_info", "[", "\"status\"", "]", "[", "\"alpha\"", "]" ]
[ 63, 4 ]
[ 65, 44 ]
python
en
['en', 'en', 'en']
True
EnvisalinkSensor.device_state_attributes
(self)
Return the state attributes.
Return the state attributes.
def device_state_attributes(self):
    """Return the state attributes."""
    return self._info["status"]
[ "def", "device_state_attributes", "(", "self", ")", ":", "return", "self", ".", "_info", "[", "\"status\"", "]" ]
[ 68, 4 ]
[ 70, 35 ]
python
en
['en', 'en', 'en']
True
EnvisalinkSensor._update_callback
(self, partition)
Update the partition state in HA, if needed.
Update the partition state in HA, if needed.
def _update_callback(self, partition):
    """Update the partition state in HA, if needed."""
    if partition is None or int(partition) == self._partition_number:
        self.async_write_ha_state()
[ "def", "_update_callback", "(", "self", ",", "partition", ")", ":", "if", "partition", "is", "None", "or", "int", "(", "partition", ")", "==", "self", ".", "_partition_number", ":", "self", ".", "async_write_ha_state", "(", ")" ]
[ 73, 4 ]
[ 76, 39 ]
python
en
['en', 'en', 'en']
True
test_controlling_state_via_mqtt
(hass, mqtt_mock, setup_tasmota)
Test state update via MQTT.
Test state update via MQTT.
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 1
    mac = config["mac"]

    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()

    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Test normal state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"ON"}}'
    )
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_ON

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"OFF"}}'
    )
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF

    # Test periodic state update
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"ON"}')
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_ON

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"OFF"}')
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF

    # Test polled state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Switch1":"ON"}}'
    )
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_ON

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Switch1":"OFF"}}'
    )
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF
[ "async", "def", "test_controlling_state_via_mqtt", "(", "hass", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "1", "mac", "=", "config", "[", "\"mac\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "f\"{DEFAULT_PREFIX}/{mac}/config\"", ",", "json", ".", "dumps", "(", "config", ")", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "\"unavailable\"", "assert", "not", "state", ".", "attributes", ".", "get", "(", "ATTR_ASSUMED_STATE", ")", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/LWT\"", ",", "\"Online\"", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "assert", "not", "state", ".", "attributes", ".", "get", "(", "ATTR_ASSUMED_STATE", ")", "# Test normal state update", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Switch1\":{\"Action\":\"ON\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Switch1\":{\"Action\":\"OFF\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "# Test periodic state update", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/SENSOR\"", ",", "'{\"Switch1\":\"ON\"}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/SENSOR\"", ",", "'{\"Switch1\":\"OFF\"}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "# Test polled state update", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/STATUS10\"", ",", "'{\"StatusSNS\":{\"Switch1\":\"ON\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/STATUS10\"", ",", "'{\"StatusSNS\":{\"Switch1\":\"OFF\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 40, 0 ]
[ 95, 35 ]
python
en
['en', 'co', 'en']
True
test_controlling_state_via_mqtt_switchname
(hass, mqtt_mock, setup_tasmota)
Test state update via MQTT.
Test state update via MQTT.
async def test_controlling_state_via_mqtt_switchname(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 1
    config["swn"][0] = "Custom Name"
    mac = config["mac"]

    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()

    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == STATE_OFF
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Test normal state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Custom Name":{"Action":"ON"}}'
    )
    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == STATE_ON

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Custom Name":{"Action":"OFF"}}'
    )
    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == STATE_OFF

    # Test periodic state update
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Custom Name":"ON"}')
    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == STATE_ON

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Custom Name":"OFF"}')
    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == STATE_OFF

    # Test polled state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Custom Name":"ON"}}'
    )
    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == STATE_ON

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Custom Name":"OFF"}}'
    )
    state = hass.states.get("binary_sensor.custom_name")
    assert state.state == STATE_OFF
[ "async", "def", "test_controlling_state_via_mqtt_switchname", "(", "hass", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "1", "config", "[", "\"swn\"", "]", "[", "0", "]", "=", "\"Custom Name\"", "mac", "=", "config", "[", "\"mac\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "f\"{DEFAULT_PREFIX}/{mac}/config\"", ",", "json", ".", "dumps", "(", "config", ")", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "\"unavailable\"", "assert", "not", "state", ".", "attributes", ".", "get", "(", "ATTR_ASSUMED_STATE", ")", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/LWT\"", ",", "\"Online\"", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "assert", "not", "state", ".", "attributes", ".", "get", "(", "ATTR_ASSUMED_STATE", ")", "# Test normal state update", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Custom Name\":{\"Action\":\"ON\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Custom Name\":{\"Action\":\"OFF\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "# Test periodic state update", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/SENSOR\"", ",", "'{\"Custom Name\":\"ON\"}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/SENSOR\"", ",", "'{\"Custom Name\":\"OFF\"}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "# Test polled state update", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/STATUS10\"", ",", "'{\"StatusSNS\":{\"Custom Name\":\"ON\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/STATUS10\"", ",", "'{\"StatusSNS\":{\"Custom Name\":\"OFF\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.custom_name\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 98, 0 ]
[ 154, 35 ]
python
en
['en', 'co', 'en']
True
test_pushon_controlling_state_via_mqtt
(hass, mqtt_mock, setup_tasmota)
Test state update via MQTT.
Test state update via MQTT.
async def test_pushon_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 13
    mac = config["mac"]

    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()

    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Test normal state update
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"ON"}}'
    )
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_ON

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"OFF"}}'
    )
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF

    # Test periodic state update is ignored
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"ON"}')
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF

    # Test polled state update is ignored
    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/STATUS10", '{"StatusSNS":{"Switch1":"ON"}}'
    )
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF
[ "async", "def", "test_pushon_controlling_state_via_mqtt", "(", "hass", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "13", "mac", "=", "config", "[", "\"mac\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "f\"{DEFAULT_PREFIX}/{mac}/config\"", ",", "json", ".", "dumps", "(", "config", ")", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "\"unavailable\"", "assert", "not", "state", ".", "attributes", ".", "get", "(", "ATTR_ASSUMED_STATE", ")", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/LWT\"", ",", "\"Online\"", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "assert", "not", "state", ".", "attributes", ".", "get", "(", "ATTR_ASSUMED_STATE", ")", "# Test normal state update", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Switch1\":{\"Action\":\"ON\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Switch1\":{\"Action\":\"OFF\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "# Test periodic state update is ignored", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/SENSOR\"", ",", "'{\"Switch1\":\"ON\"}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "# Test polled state update is ignored", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/STATUS10\"", ",", "'{\"StatusSNS\":{\"Switch1\":\"ON\"}}'", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 157, 0 ]
[ 202, 35 ]
python
en
['en', 'co', 'en']
True
test_friendly_names
(hass, mqtt_mock, setup_tasmota)
Test state update via MQTT.
Test state update via MQTT.
async def test_friendly_names(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 1
    config["swc"][1] = 1
    config["swn"][1] = "Beer"
    mac = config["mac"]

    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()

    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == "unavailable"
    assert state.attributes.get("friendly_name") == "Tasmota binary_sensor 1"

    state = hass.states.get("binary_sensor.beer")
    assert state.state == "unavailable"
    assert state.attributes.get("friendly_name") == "Beer"
[ "async", "def", "test_friendly_names", "(", "hass", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "1", "config", "[", "\"swc\"", "]", "[", "1", "]", "=", "1", "config", "[", "\"swn\"", "]", "[", "1", "]", "=", "\"Beer\"", "mac", "=", "config", "[", "\"mac\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "f\"{DEFAULT_PREFIX}/{mac}/config\"", ",", "json", ".", "dumps", "(", "config", ")", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "\"unavailable\"", "assert", "state", ".", "attributes", ".", "get", "(", "\"friendly_name\"", ")", "==", "\"Tasmota binary_sensor 1\"", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.beer\"", ")", "assert", "state", ".", "state", "==", "\"unavailable\"", "assert", "state", ".", "attributes", ".", "get", "(", "\"friendly_name\"", ")", "==", "\"Beer\"" ]
[ 205, 0 ]
[ 226, 58 ]
python
en
['en', 'co', 'en']
True
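A rough sketch of how the entity IDs asserted above could be derived from friendly names ("Beer" becomes binary_sensor.beer). This slugify is an approximation for illustration only, not Home Assistant's actual implementation.

import re

def slugify(name: str) -> str:
    """Lowercase and collapse non-alphanumeric runs into underscores."""
    return re.sub(r"[^a-z0-9]+", "_", name.lower()).strip("_")

def entity_id(domain: str, name: str) -> str:
    return f"{domain}.{slugify(name)}"

assert entity_id("binary_sensor", "Beer") == "binary_sensor.beer"
assert (
    entity_id("binary_sensor", "Tasmota binary_sensor 1")
    == "binary_sensor.tasmota_binary_sensor_1"
)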
test_off_delay
(hass, mqtt_mock, setup_tasmota)
Test off_delay option.
Test off_delay option.
async def test_off_delay(hass, mqtt_mock, setup_tasmota):
    """Test off_delay option."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 13  # PUSHON: 1s off_delay
    mac = config["mac"]
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()

    events = []

    @ha.callback
    def callback(event):
        """Verify event got called."""
        events.append(event.data["new_state"].state)

    hass.bus.async_listen(EVENT_STATE_CHANGED, callback)

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    await hass.async_block_till_done()
    assert events == ["off"]

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"ON"}}'
    )
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_ON
    assert events == ["off", "on"]

    async_fire_mqtt_message(
        hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"ON"}}'
    )
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_ON
    assert events == ["off", "on", "on"]

    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
    await hass.async_block_till_done()
    state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
    assert state.state == STATE_OFF
    assert events == ["off", "on", "on", "off"]
[ "async", "def", "test_off_delay", "(", "hass", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "13", "# PUSHON: 1s off_delay", "mac", "=", "config", "[", "\"mac\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "f\"{DEFAULT_PREFIX}/{mac}/config\"", ",", "json", ".", "dumps", "(", "config", ")", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "events", "=", "[", "]", "@", "ha", ".", "callback", "def", "callback", "(", "event", ")", ":", "\"\"\"Verify event got called.\"\"\"", "events", ".", "append", "(", "event", ".", "data", "[", "\"new_state\"", "]", ".", "state", ")", "hass", ".", "bus", ".", "async_listen", "(", "EVENT_STATE_CHANGED", ",", "callback", ")", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/tele/LWT\"", ",", "\"Online\"", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "events", "==", "[", "\"off\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Switch1\":{\"Action\":\"ON\"}}'", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "assert", "events", "==", "[", "\"off\"", ",", "\"on\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "\"tasmota_49A3BC/stat/RESULT\"", ",", "'{\"Switch1\":{\"Action\":\"ON\"}}'", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_ON", "assert", "events", "==", "[", "\"off\"", ",", "\"on\"", ",", "\"on\"", "]", "async_fire_time_changed", "(", "hass", ",", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "1", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"binary_sensor.tasmota_binary_sensor_1\"", ")", "assert", "state", ".", "state", "==", "STATE_OFF", "assert", "events", "==", "[", "\"off\"", ",", "\"on\"", ",", "\"on\"", ",", "\"off\"", "]" ]
[ 229, 0 ]
[ 274, 47 ]
python
en
['en', 'en', 'en']
True
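A minimal sketch of the PUSHON off_delay semantics the test above verifies (a hypothetical class, not the integration's code): every ON message turns the sensor on and restarts a timer that flips it back to off after the delay, so repeated ON messages extend the on period.

import asyncio
from typing import Optional

class PushOnSensor:
    """Sensor that latches on and auto-resets after off_delay seconds."""

    def __init__(self, off_delay: float) -> None:
        self.state = "off"
        self._off_delay = off_delay
        self._timer: Optional[asyncio.TimerHandle] = None

    def on_message(self) -> None:
        self.state = "on"
        if self._timer is not None:
            self._timer.cancel()  # a repeated ON restarts the delay
        self._timer = asyncio.get_running_loop().call_later(
            self._off_delay, self._turn_off
        )

    def _turn_off(self) -> None:
        self.state = "off"

async def demo() -> None:
    sensor = PushOnSensor(off_delay=0.1)
    sensor.on_message()
    sensor.on_message()  # second ON while already on keeps it on
    assert sensor.state == "on"
    await asyncio.sleep(0.2)
    assert sensor.state == "off"

asyncio.run(demo())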
test_availability_when_connection_lost
( hass, mqtt_client_mock, mqtt_mock, setup_tasmota )
Test availability after MQTT disconnection.
Test availability after MQTT disconnection.
async def test_availability_when_connection_lost(
    hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
    """Test availability after MQTT disconnection."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 1
    config["swn"][0] = "Test"
    await help_test_availability_when_connection_lost(
        hass, mqtt_client_mock, mqtt_mock, binary_sensor.DOMAIN, config
    )
[ "async", "def", "test_availability_when_connection_lost", "(", "hass", ",", "mqtt_client_mock", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "1", "config", "[", "\"swn\"", "]", "[", "0", "]", "=", "\"Test\"", "await", "help_test_availability_when_connection_lost", "(", "hass", ",", "mqtt_client_mock", ",", "mqtt_mock", ",", "binary_sensor", ".", "DOMAIN", ",", "config", ")" ]
[ 277, 0 ]
[ 286, 5 ]
python
en
['en', 'en', 'en']
True
test_availability
(hass, mqtt_mock, setup_tasmota)
Test availability.
Test availability.
async def test_availability(hass, mqtt_mock, setup_tasmota):
    """Test availability."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 1
    config["swn"][0] = "Test"
    await help_test_availability(hass, mqtt_mock, binary_sensor.DOMAIN, config)
[ "async", "def", "test_availability", "(", "hass", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "1", "config", "[", "\"swn\"", "]", "[", "0", "]", "=", "\"Test\"", "await", "help_test_availability", "(", "hass", ",", "mqtt_mock", ",", "binary_sensor", ".", "DOMAIN", ",", "config", ")" ]
[ 289, 0 ]
[ 294, 79 ]
python
en
['fr', 'ga', 'en']
False
test_availability_discovery_update
(hass, mqtt_mock, setup_tasmota)
Test availability discovery update.
Test availability discovery update.
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
    """Test availability discovery update."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    config["swc"][0] = 1
    config["swn"][0] = "Test"
    await help_test_availability_discovery_update(
        hass, mqtt_mock, binary_sensor.DOMAIN, config
    )
[ "async", "def", "test_availability_discovery_update", "(", "hass", ",", "mqtt_mock", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "config", "[", "\"swc\"", "]", "[", "0", "]", "=", "1", "config", "[", "\"swn\"", "]", "[", "0", "]", "=", "\"Test\"", "await", "help_test_availability_discovery_update", "(", "hass", ",", "mqtt_mock", ",", "binary_sensor", ".", "DOMAIN", ",", "config", ")" ]
[ 297, 0 ]
[ 304, 5 ]
python
en
['en', 'en', 'en']
True
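A small sketch of the LWT-based availability these three availability tests share (illustrative only, not the integration's logic): entities stay "unavailable" until the device's tele/LWT topic reports "Online", and revert when it reports "Offline" or the broker connection is lost.

def availability(lwt_payload):
    """Map the last seen tele/LWT payload to an availability state."""
    return "available" if lwt_payload == "Online" else "unavailable"

assert availability(None) == "unavailable"       # no LWT message seen yet
assert availability("Online") == "available"     # device announced itself
assert availability("Offline") == "unavailable"  # will message fired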