Dataset columns (type and observed value range):

    identifier                       string, length 1 to 155
    parameters                       string, length 2 to 6.09k
    docstring                        string, length 11 to 63.4k
    docstring_summary                string, length 0 to 63.4k
    function                         string, length 29 to 99.8k
    function_tokens                  list
    start_point                      list
    end_point                        list
    language                         categorical, 1 distinct value
    docstring_language               string, length 2 to 7
    docstring_language_predictions   string, length 18 to 23
    is_langid_reliable               categorical, 2 distinct values
_make_causal_mask
(input_ids_shape: tf.TensorShape, past_key_values_length: int = 0)
Make causal mask used for uni-directional (causal) self-attention.
Make causal mask used for uni-directional (causal) self-attention.
def _make_causal_mask(input_ids_shape: tf.TensorShape, past_key_values_length: int = 0):
    """
    Make causal mask used for uni-directional (causal) self-attention.
    """
    bsz, tgt_len = input_ids_shape
    mask = tf.ones((tgt_len, tgt_len)) * LARGE_NEGATIVE
    mask_cond = tf.range(shape_list(mask)[-1])
    mask = tf.where(mask_cond < tf.reshape(mask_cond + 1, (shape_list(mask)[-1], 1)), 0.0, mask)

    if past_key_values_length > 0:
        mask = tf.concat([tf.zeros((tgt_len, past_key_values_length)), mask], axis=-1)

    return tf.tile(mask[None, None, :, :], (bsz, 1, 1, 1))
[ "def", "_make_causal_mask", "(", "input_ids_shape", ":", "tf", ".", "TensorShape", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "bsz", ",", "tgt_len", "=", "input_ids_shape", "mask", "=", "tf", ".", "ones", "(", "(", "tgt_len", ",", "tgt_len", ")", ")", "*", "LARGE_NEGATIVE", "mask_cond", "=", "tf", ".", "range", "(", "shape_list", "(", "mask", ")", "[", "-", "1", "]", ")", "mask", "=", "tf", ".", "where", "(", "mask_cond", "<", "tf", ".", "reshape", "(", "mask_cond", "+", "1", ",", "(", "shape_list", "(", "mask", ")", "[", "-", "1", "]", ",", "1", ")", ")", ",", "0.0", ",", "mask", ")", "if", "past_key_values_length", ">", "0", ":", "mask", "=", "tf", ".", "concat", "(", "[", "tf", ".", "zeros", "(", "(", "tgt_len", ",", "past_key_values_length", ")", ")", ",", "mask", "]", ",", "axis", "=", "-", "1", ")", "return", "tf", ".", "tile", "(", "mask", "[", "None", ",", "None", ",", ":", ",", ":", "]", ",", "(", "bsz", ",", "1", ",", "1", ",", "1", ")", ")" ]
[ 85, 0 ]
[ 98, 58 ]
python
en
['en', 'error', 'th']
False
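To see what _make_causal_mask produces, here is a minimal standalone sketch. LARGE_NEGATIVE is assumed to be a large negative constant such as -1e8 (the module defines its own value), and the shape_list helper is replaced with a static size:

import tensorflow as tf

LARGE_NEGATIVE = -1e8  # assumed stand-in for the module's constant

# Row i keeps (0.0) columns j <= i and blocks (LARGE_NEGATIVE) columns j > i,
# so after softmax a position cannot attend to the future.
tgt_len = 4
mask = tf.ones((tgt_len, tgt_len)) * LARGE_NEGATIVE
mask_cond = tf.range(tgt_len)
mask = tf.where(mask_cond < tf.reshape(mask_cond + 1, (tgt_len, 1)), 0.0, mask)
print(mask.numpy())  # lower triangle (incl. diagonal) is 0.0, upper is -1e8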
_expand_mask
(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0)
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0):
    """
    Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
    """
    src_len = shape_list(mask)[1]
    tgt_len = tgt_len if tgt_len is not None else src_len
    one_cst = tf.constant(1.0)
    mask = tf.cast(mask, dtype=one_cst.dtype)
    expanded_mask = tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1))

    return (one_cst - expanded_mask) * LARGE_NEGATIVE
[ "def", "_expand_mask", "(", "mask", ":", "tf", ".", "Tensor", ",", "tgt_len", ":", "Optional", "[", "int", "]", "=", "None", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "src_len", "=", "shape_list", "(", "mask", ")", "[", "1", "]", "tgt_len", "=", "tgt_len", "if", "tgt_len", "is", "not", "None", "else", "src_len", "one_cst", "=", "tf", ".", "constant", "(", "1.0", ")", "mask", "=", "tf", ".", "cast", "(", "mask", ",", "dtype", "=", "one_cst", ".", "dtype", ")", "expanded_mask", "=", "tf", ".", "tile", "(", "mask", "[", ":", ",", "None", ",", "None", ",", ":", "]", ",", "(", "1", ",", "1", ",", "tgt_len", ",", "1", ")", ")", "return", "(", "one_cst", "-", "expanded_mask", ")", "*", "LARGE_NEGATIVE" ]
[ 102, 0 ]
[ 112, 53 ]
python
en
['en', 'error', 'th']
False
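A small sketch of the expansion, again assuming the -1e8 stand-in for LARGE_NEGATIVE: a padding mask with ones for real tokens becomes an additive bias that suppresses padding after softmax.

import tensorflow as tf

LARGE_NEGATIVE = -1e8  # assumed stand-in for the module's constant

mask = tf.constant([[1.0, 1.0, 0.0]])                     # [bsz=1, src_len=3], last token is padding
expanded = tf.tile(mask[:, None, None, :], (1, 1, 2, 1))  # [bsz, 1, tgt_len=2, src_len]
additive = (1.0 - expanded) * LARGE_NEGATIVE
print(additive.numpy())  # 0.0 over real tokens, -1e8 over padding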
TFPegasusSinusoidalPositionalEmbedding.build
(self, input_shape: tf.TensorShape)
Build shared token embedding layer. Shared weights logic adapted from https://github.com/tensorflow/models/blob/a009f4fb9d2fc4949e32192a944688925ef78659/official/transformer/v2/embedding_layer.py#L24
Build shared token embedding layer. Shared weights logic adapted from https://github.com/tensorflow/models/blob/a009f4fb9d2fc4949e32192a944688925ef78659/official/transformer/v2/embedding_layer.py#L24
def build(self, input_shape: tf.TensorShape):
    """
    Build shared token embedding layer. Shared weights logic adapted from
    https://github.com/tensorflow/models/blob/a009f4fb9d2fc4949e32192a944688925ef78659/official/transformer/v2/embedding_layer.py#L24
    """
    weight = self._init_weight(self.num_positions, self.embedding_dim)

    self.weight = self.add_weight(
        name="embeddings",
        shape=[self.num_positions, self.embedding_dim],
    )
    weight = tf.cast(weight, dtype=self.weight.dtype)

    self.weight.assign(weight)

    super().build(input_shape)
[ "def", "build", "(", "self", ",", "input_shape", ":", "tf", ".", "TensorShape", ")", ":", "weight", "=", "self", ".", "_init_weight", "(", "self", ".", "num_positions", ",", "self", ".", "embedding_dim", ")", "self", ".", "weight", "=", "self", ".", "add_weight", "(", "name", "=", "\"embeddings\"", ",", "shape", "=", "[", "self", ".", "num_positions", ",", "self", ".", "embedding_dim", "]", ",", ")", "weight", "=", "tf", ".", "cast", "(", "weight", ",", "dtype", "=", "self", ".", "weight", ".", "dtype", ")", "self", ".", "weight", ".", "assign", "(", "weight", ")", "super", "(", ")", ".", "build", "(", "input_shape", ")" ]
[ 128, 4 ]
[ 144, 34 ]
python
en
['en', 'error', 'th']
False
TFPegasusSinusoidalPositionalEmbedding._init_weight
(n_pos: int, dim: int)
Identical to the XLM create_sinusoidal_embeddings except features are not interleaved. The cos features are in the 2nd half of the vector. [dim // 2:]
Identical to the XLM create_sinusoidal_embeddings except features are not interleaved. The cos features are in the 2nd half of the vector. [dim // 2:]
def _init_weight(n_pos: int, dim: int):
    """
    Identical to the XLM create_sinusoidal_embeddings except features are not interleaved. The cos features are in
    the 2nd half of the vector. [dim // 2:]
    """
    position_enc = np.array(
        [[pos / np.power(10000, 2 * (j // 2) / dim) for j in range(dim)] for pos in range(n_pos)]
    )
    # index 0 is all zero
    position_enc[:, 0 : dim // 2] = np.sin(position_enc[:, 0::2])
    position_enc[:, dim // 2 :] = np.cos(position_enc[:, 1::2])
    # convert to tensor
    table = tf.convert_to_tensor(position_enc)
    tf.stop_gradient(table)
    return table
[ "def", "_init_weight", "(", "n_pos", ":", "int", ",", "dim", ":", "int", ")", ":", "position_enc", "=", "np", ".", "array", "(", "[", "[", "pos", "/", "np", ".", "power", "(", "10000", ",", "2", "*", "(", "j", "//", "2", ")", "/", "dim", ")", "for", "j", "in", "range", "(", "dim", ")", "]", "for", "pos", "in", "range", "(", "n_pos", ")", "]", ")", "# index 0 is all zero", "position_enc", "[", ":", ",", "0", ":", "dim", "//", "2", "]", "=", "np", ".", "sin", "(", "position_enc", "[", ":", ",", "0", ":", ":", "2", "]", ")", "position_enc", "[", ":", ",", "dim", "//", "2", ":", "]", "=", "np", ".", "cos", "(", "position_enc", "[", ":", ",", "1", ":", ":", "2", "]", ")", "# convert to tensor", "table", "=", "tf", ".", "convert_to_tensor", "(", "position_enc", ")", "tf", ".", "stop_gradient", "(", "table", ")", "return", "table" ]
[ 147, 4 ]
[ 161, 20 ]
python
en
['en', 'error', 'th']
False
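The non-interleaved layout is easy to verify with a tiny table; this sketch repeats the arithmetic with illustrative sizes (n_pos=3, dim=4):

import numpy as np

n_pos, dim = 3, 4
enc = np.array(
    [[pos / np.power(10000, 2 * (j // 2) / dim) for j in range(dim)] for pos in range(n_pos)]
)
enc[:, 0 : dim // 2] = np.sin(enc[:, 0::2])  # sin features fill the first half
enc[:, dim // 2 :] = np.cos(enc[:, 1::2])    # cos features fill the second half
print(enc)  # row 0 (position 0) is [0, 0, 1, 1]: all-zero sin half, all-one cos half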
TFPegasusSinusoidalPositionalEmbedding.call
(self, input_shape: tf.TensorShape, past_key_values_length: int = 0)
Input is expected to be of size [bsz x seqlen].
Input is expected to be of size [bsz x seqlen].
def call(self, input_shape: tf.TensorShape, past_key_values_length: int = 0):
    """Input is expected to be of size [bsz x seqlen]."""
    bsz, seq_len = input_shape[:2]

    positions = tf.range(past_key_values_length, seq_len + past_key_values_length, delta=1, name="range")
    return tf.gather(self.weight, positions)
[ "def", "call", "(", "self", ",", "input_shape", ":", "tf", ".", "TensorShape", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "bsz", ",", "seq_len", "=", "input_shape", "[", ":", "2", "]", "positions", "=", "tf", ".", "range", "(", "past_key_values_length", ",", "seq_len", "+", "past_key_values_length", ",", "delta", "=", "1", ",", "name", "=", "\"range\"", ")", "return", "tf", ".", "gather", "(", "self", ".", "weight", ",", "positions", ")" ]
[ 163, 4 ]
[ 168, 48 ]
python
en
['en', 'en', 'en']
True
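The past_key_values_length offset is what keeps incremental decoding aligned with the same embedding table; a one-line sketch:

import tensorflow as tf

# With 3 cached tokens and 2 new ones, only positions 3 and 4 are gathered.
past_key_values_length, seq_len = 3, 2
positions = tf.range(past_key_values_length, seq_len + past_key_values_length, delta=1)
print(positions.numpy())  # [3 4]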
TFPegasusAttention.call
(self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, training=False)
Input shape: Batch x Time x Channel
Input shape: Batch x Time x Channel
def call(
    self,
    hidden_states: tf.Tensor,
    key_value_states: Optional[tf.Tensor] = None,
    past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None,
    attention_mask: Optional[tf.Tensor] = None,
    layer_head_mask: Optional[tf.Tensor] = None,
    training=False,
) -> Tuple[tf.Tensor, Optional[tf.Tensor]]:
    """Input shape: Batch x Time x Channel"""

    # if key_value_states are provided this layer is used as a cross-attention layer
    # for the decoder
    is_cross_attention = key_value_states is not None
    bsz, tgt_len, embed_dim = shape_list(hidden_states)

    # get query proj
    query_states = self.q_proj(hidden_states) * self.scaling
    # get key, value proj
    if is_cross_attention and past_key_value is not None:
        # reuse k,v, cross_attentions
        key_states = past_key_value[0]
        value_states = past_key_value[1]
    elif is_cross_attention:
        # cross_attentions
        key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
        value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
    elif past_key_value is not None:
        # reuse k, v, self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
        key_states = tf.concat([past_key_value[0], key_states], axis=2)
        value_states = tf.concat([past_key_value[1], value_states], axis=2)
    else:
        # self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)

    if self.is_decoder:
        # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states.
        # Further calls to cross_attention layer can then reuse all cross-attention
        # key/value_states (first "if" case)
        # if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of
        # all previous decoder key/value_states. Further calls to uni-directional self-attention
        # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
        # if encoder bi-directional self-attention `past_key_value` is always `None`
        past_key_value = (key_states, value_states)

    proj_shape = (bsz * self.num_heads, -1, self.head_dim)
    query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape)
    key_states = tf.reshape(key_states, proj_shape)
    value_states = tf.reshape(value_states, proj_shape)

    src_len = shape_list(key_states)[1]
    attn_weights = tf.matmul(query_states, key_states, transpose_b=True)

    # The tf.debugging asserts are not compliant with XLA then they
    # have to be disabled in other modes than eager.
    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(attn_weights),
            [bsz * self.num_heads, tgt_len, src_len],
            message=f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}",
        )

    if attention_mask is not None:
        # The tf.debugging asserts are not compliant with XLA then they
        # have to be disabled in other modes than eager.
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(attention_mask),
                [bsz, 1, tgt_len, src_len],
                message=f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}",
            )

        attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype)
        attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask
        attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))

    attn_weights = tf.nn.softmax(attn_weights, axis=-1)

    if layer_head_mask is not None:
        # The tf.debugging asserts are not compliant with XLA then they
        # have to be disabled in other modes than eager.
        if tf.executing_eagerly():
            tf.debugging.assert_equal(
                shape_list(layer_head_mask),
                [self.num_heads],
                message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}",
            )

        attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape(
            attn_weights, (bsz, self.num_heads, tgt_len, src_len)
        )
        attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))

    attn_probs = self.dropout(attn_weights, training=training)
    attn_output = tf.matmul(attn_probs, value_states)

    # The tf.debugging asserts are not compliant with XLA then they
    # have to be disabled in other modes than eager.
    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(attn_output),
            [bsz * self.num_heads, tgt_len, self.head_dim],
            message=f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}",
        )

    attn_output = tf.transpose(
        tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3)
    )
    attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim))

    attn_output = self.out_proj(attn_output)
    attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len))

    return attn_output, attn_weights, past_key_value
[ "def", "call", "(", "self", ",", "hidden_states", ":", "tf", ".", "Tensor", ",", "key_value_states", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "Tuple", "[", "tf", ".", "Tensor", "]", "]", "]", "=", "None", ",", "attention_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "training", "=", "False", ",", ")", "->", "Tuple", "[", "tf", ".", "Tensor", ",", "Optional", "[", "tf", ".", "Tensor", "]", "]", ":", "# if key_value_states are provided this layer is used as a cross-attention layer", "# for the decoder", "is_cross_attention", "=", "key_value_states", "is", "not", "None", "bsz", ",", "tgt_len", ",", "embed_dim", "=", "shape_list", "(", "hidden_states", ")", "# get query proj", "query_states", "=", "self", ".", "q_proj", "(", "hidden_states", ")", "*", "self", ".", "scaling", "# get key, value proj", "if", "is_cross_attention", "and", "past_key_value", "is", "not", "None", ":", "# reuse k,v, cross_attentions", "key_states", "=", "past_key_value", "[", "0", "]", "value_states", "=", "past_key_value", "[", "1", "]", "elif", "is_cross_attention", ":", "# cross_attentions", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "elif", "past_key_value", "is", "not", "None", ":", "# reuse k, v, self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "key_states", "=", "tf", ".", "concat", "(", "[", "past_key_value", "[", "0", "]", ",", "key_states", "]", ",", "axis", "=", "2", ")", "value_states", "=", "tf", ".", "concat", "(", "[", "past_key_value", "[", "1", "]", ",", "value_states", "]", ",", "axis", "=", "2", ")", "else", ":", "# self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "if", "self", ".", "is_decoder", ":", "# if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states.", "# Further calls to cross_attention layer can then reuse all cross-attention", "# key/value_states (first \"if\" case)", "# if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of", "# all previous decoder key/value_states. 
Further calls to uni-directional self-attention", "# can concat previous decoder key/value_states to current projected key/value_states (third \"elif\" case)", "# if encoder bi-directional self-attention `past_key_value` is always `None`", "past_key_value", "=", "(", "key_states", ",", "value_states", ")", "proj_shape", "=", "(", "bsz", "*", "self", ".", "num_heads", ",", "-", "1", ",", "self", ".", "head_dim", ")", "query_states", "=", "tf", ".", "reshape", "(", "self", ".", "_shape", "(", "query_states", ",", "tgt_len", ",", "bsz", ")", ",", "proj_shape", ")", "key_states", "=", "tf", ".", "reshape", "(", "key_states", ",", "proj_shape", ")", "value_states", "=", "tf", ".", "reshape", "(", "value_states", ",", "proj_shape", ")", "src_len", "=", "shape_list", "(", "key_states", ")", "[", "1", "]", "attn_weights", "=", "tf", ".", "matmul", "(", "query_states", ",", "key_states", ",", "transpose_b", "=", "True", ")", "# The tf.debugging asserts are not compliant with XLA then they", "# have to be disabled in other modes than eager.", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_weights", ")", ",", "[", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", "]", ",", "message", "=", "f\"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}\"", ",", ")", "if", "attention_mask", "is", "not", "None", ":", "# The tf.debugging asserts are not compliant with XLA then they", "# have to be disabled in other modes than eager.", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attention_mask", ")", ",", "[", "bsz", ",", "1", ",", "tgt_len", ",", "src_len", "]", ",", "message", "=", "f\"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}\"", ",", ")", "attention_mask", "=", "tf", ".", "cast", "(", "attention_mask", ",", "dtype", "=", "attn_weights", ".", "dtype", ")", "attn_weights", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "+", "attention_mask", "attn_weights", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "attn_weights", "=", "tf", ".", "nn", ".", "softmax", "(", "attn_weights", ",", "axis", "=", "-", "1", ")", "if", "layer_head_mask", "is", "not", "None", ":", "# The tf.debugging asserts are not compliant with XLA then they", "# have to be disabled in other modes than eager.", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "layer_head_mask", ")", ",", "[", "self", ".", "num_heads", "]", ",", "message", "=", "f\"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\"", ",", ")", "attn_weights", "=", "tf", ".", "reshape", "(", "layer_head_mask", ",", "(", "1", ",", "-", "1", ",", "1", ",", "1", ")", ")", "*", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "attn_weights", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "attn_probs", "=", "self", ".", "dropout", "(", "attn_weights", ",", "training", "=", "training", ")", "attn_output", "=", "tf", 
".", "matmul", "(", "attn_probs", ",", "value_states", ")", "# The tf.debugging asserts are not compliant with XLA then they", "# have to be disabled in other modes than eager.", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "attn_output", ")", ",", "[", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", "]", ",", "message", "=", "f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}\"", ",", ")", "attn_output", "=", "tf", ".", "transpose", "(", "tf", ".", "reshape", "(", "attn_output", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", ")", ")", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ")", "attn_output", "=", "tf", ".", "reshape", "(", "attn_output", ",", "(", "bsz", ",", "tgt_len", ",", "embed_dim", ")", ")", "attn_output", "=", "self", ".", "out_proj", "(", "attn_output", ")", "attn_weights", ":", "tf", ".", "Tensor", "=", "tf", ".", "reshape", "(", "attn_weights", ",", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", ")", "return", "attn_output", ",", "attn_weights", ",", "past_key_value" ]
[ 202, 4 ]
[ 318, 56 ]
python
en
['en', 'pl', 'en']
True
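The head folding above is easiest to follow with concrete shapes. This is an illustrative shape walkthrough with random tensors, not the Pegasus layer itself:

import tensorflow as tf

bsz, num_heads, tgt_len, src_len, head_dim = 2, 4, 5, 7, 8

q = tf.random.normal((bsz * num_heads, tgt_len, head_dim))
k = tf.random.normal((bsz * num_heads, src_len, head_dim))
v = tf.random.normal((bsz * num_heads, src_len, head_dim))

weights = tf.matmul(q, k, transpose_b=True)  # (bsz*heads, tgt_len, src_len)
probs = tf.nn.softmax(weights, axis=-1)
out = tf.matmul(probs, v)                    # (bsz*heads, tgt_len, head_dim)

# unfold the heads and restore (bsz, tgt_len, embed_dim)
out = tf.transpose(tf.reshape(out, (bsz, num_heads, tgt_len, head_dim)), (0, 2, 1, 3))
out = tf.reshape(out, (bsz, tgt_len, num_heads * head_dim))
print(out.shape)  # (2, 5, 32)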
TFPegasusEncoderLayer.call
(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False)
Args:
    hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
    attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
    layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(encoder_attention_heads,)`
def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False):
    """
    Args:
        hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
        attention_mask (:obj:`tf.Tensor`): attention mask of size
            `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size
            `(encoder_attention_heads,)`
    """
    residual = hidden_states
    hidden_states = self.self_attn_layer_norm(hidden_states)
    hidden_states, self_attn_weights, _ = self.self_attn(
        hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask
    )

    # The tf.debugging asserts are not compliant with XLA then they
    # have to be disabled in other modes than eager.
    if tf.executing_eagerly():
        tf.debugging.assert_equal(
            shape_list(hidden_states),
            shape_list(residual),
            message=f"Self attn modified the shape of query {shape_list(residual)} to {shape_list(hidden_states)}",
        )

    hidden_states = self.dropout(hidden_states, training=training)
    hidden_states = residual + hidden_states

    residual = hidden_states
    hidden_states = self.final_layer_norm(hidden_states)
    hidden_states = self.activation_fn(self.fc1(hidden_states))
    hidden_states = self.activation_dropout(hidden_states, training=training)
    hidden_states = self.fc2(hidden_states)
    hidden_states = self.dropout(hidden_states, training=training)
    hidden_states = residual + hidden_states

    return hidden_states, self_attn_weights
[ "def", "call", "(", "self", ",", "hidden_states", ":", "tf", ".", "Tensor", ",", "attention_mask", ":", "tf", ".", "Tensor", ",", "layer_head_mask", ":", "tf", ".", "Tensor", ",", "training", "=", "False", ")", ":", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "hidden_states", ",", "self_attn_weights", ",", "_", "=", "self", ".", "self_attn", "(", "hidden_states", "=", "hidden_states", ",", "attention_mask", "=", "attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ")", "# The tf.debugging asserts are not compliant with XLA then they", "# have to be disabled in other modes than eager.", "if", "tf", ".", "executing_eagerly", "(", ")", ":", "tf", ".", "debugging", ".", "assert_equal", "(", "shape_list", "(", "hidden_states", ")", ",", "shape_list", "(", "residual", ")", ",", "message", "=", "f\"Self attn modified the shape of query {shape_list(residual)} to {shape_list(hidden_states)}\"", ",", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "self", ".", "activation_dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "return", "hidden_states", ",", "self_attn_weights" ]
[ 337, 4 ]
[ 372, 47 ]
python
en
['en', 'error', 'th']
False
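Both the self-attention and feed-forward halves above follow the same pre-norm residual pattern; a generic sketch (the callables are hypothetical placeholders, not the Pegasus modules):

def pre_norm_block(x, norm, sublayer, dropout):
    """Normalize, apply the sublayer, then add the residual."""
    residual = x
    x = norm(x)        # pre-norm: LayerNorm runs *before* the sublayer
    x = sublayer(x)
    x = dropout(x)
    return residual + x

# e.g. with identity placeholders:
print(pre_norm_block(1.0, lambda x: x, lambda x: x * 2, lambda x: x))  # 3.0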
TFPegasusDecoderLayer.call
(self, hidden_states, attention_mask: Optional[tf.Tensor] = None, encoder_hidden_states: Optional[tf.Tensor] = None, encoder_attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, encoder_layer_head_mask: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[tf.Tensor]] = None, training=False)
Args:
    hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
    attention_mask (:obj:`tf.Tensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
    encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)`
    encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
    layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size `(decoder_attention_heads,)`
    encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size `(encoder_attention_heads,)`
    past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states
def call(
    self,
    hidden_states,
    attention_mask: Optional[tf.Tensor] = None,
    encoder_hidden_states: Optional[tf.Tensor] = None,
    encoder_attention_mask: Optional[tf.Tensor] = None,
    layer_head_mask: Optional[tf.Tensor] = None,
    encoder_layer_head_mask: Optional[tf.Tensor] = None,
    past_key_value: Optional[Tuple[tf.Tensor]] = None,
    training=False,
) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]:
    """
    Args:
        hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
        attention_mask (:obj:`tf.Tensor`): attention mask of size
            `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)`
        encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size
            `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size
            `(decoder_attention_heads,)`
        encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size
            `(encoder_attention_heads,)`
        past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states
    """
    residual = hidden_states
    hidden_states = self.self_attn_layer_norm(hidden_states)

    # Self Attention
    # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
    self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
    # add present self-attn cache to positions 1,2 of present_key_value tuple
    hidden_states, self_attn_weights, present_key_value = self.self_attn(
        hidden_states=hidden_states,
        past_key_value=self_attn_past_key_value,
        attention_mask=attention_mask,
        layer_head_mask=layer_head_mask,
    )
    hidden_states = self.dropout(hidden_states, training=training)
    hidden_states = residual + hidden_states

    # Cross-Attention Block
    cross_attn_present_key_value = None
    if encoder_hidden_states is not None:
        residual = hidden_states
        hidden_states = self.encoder_attn_layer_norm(hidden_states)

        # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple
        cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
        hidden_states, _, cross_attn_present_key_value = self.encoder_attn(
            hidden_states=hidden_states,
            key_value_states=encoder_hidden_states,
            attention_mask=encoder_attention_mask,
            layer_head_mask=encoder_layer_head_mask,
            past_key_value=cross_attn_past_key_value,
        )
        hidden_states = self.dropout(hidden_states, training=training)
        hidden_states = residual + hidden_states

        # add cross-attn to positions 3,4 of present_key_value tuple
        present_key_value = present_key_value + cross_attn_present_key_value

    # Fully Connected
    residual = hidden_states
    hidden_states = self.final_layer_norm(hidden_states)
    hidden_states = self.activation_fn(self.fc1(hidden_states))
    hidden_states = self.activation_dropout(hidden_states, training=training)
    hidden_states = self.fc2(hidden_states)
    hidden_states = self.dropout(hidden_states, training=training)
    hidden_states = residual + hidden_states

    return (
        hidden_states,
        self_attn_weights,
        present_key_value,
    )
[ "def", "call", "(", "self", ",", "hidden_states", ",", "attention_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "encoder_hidden_states", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "encoder_attention_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "encoder_layer_head_mask", ":", "Optional", "[", "tf", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "tf", ".", "Tensor", "]", "]", "=", "None", ",", "training", "=", "False", ",", ")", "->", "Tuple", "[", "tf", ".", "Tensor", ",", "tf", ".", "Tensor", ",", "Tuple", "[", "Tuple", "[", "tf", ".", "Tensor", "]", "]", "]", ":", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "# Self Attention", "# decoder uni-directional self-attention cached key/values tuple is at positions 1,2", "self_attn_past_key_value", "=", "past_key_value", "[", ":", "2", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "# add present self-attn cache to positions 1,2 of present_key_value tuple", "hidden_states", ",", "self_attn_weights", ",", "present_key_value", "=", "self", ".", "self_attn", "(", "hidden_states", "=", "hidden_states", ",", "past_key_value", "=", "self_attn_past_key_value", ",", "attention_mask", "=", "attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ",", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "# Cross-Attention Block", "cross_attn_present_key_value", "=", "None", "if", "encoder_hidden_states", "is", "not", "None", ":", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "encoder_attn_layer_norm", "(", "hidden_states", ")", "# cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple", "cross_attn_past_key_value", "=", "past_key_value", "[", "-", "2", ":", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "hidden_states", ",", "_", ",", "cross_attn_present_key_value", "=", "self", ".", "encoder_attn", "(", "hidden_states", "=", "hidden_states", ",", "key_value_states", "=", "encoder_hidden_states", ",", "attention_mask", "=", "encoder_attention_mask", ",", "layer_head_mask", "=", "encoder_layer_head_mask", ",", "past_key_value", "=", "cross_attn_past_key_value", ",", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "# add cross-attn to positions 3,4 of present_key_value tuple", "present_key_value", "=", "present_key_value", "+", "cross_attn_present_key_value", "# Fully Connected", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "self", ".", "activation_dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "self", ".", "dropout", "(", "hidden_states", ",", "training", "=", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "return", "(", "hidden_states", ",", "self_attn_weights", ",", "present_key_value", ",", ")" ]
[ 404, 4 ]
[ 479, 9 ]
python
en
['en', 'error', 'th']
False
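The per-layer cache implied by the slicing above is a flat 4-tuple; a sketch with hypothetical placeholder values standing in for tf.Tensors:

# positions 1-2: decoder self-attention cache, positions 3-4: cross-attention cache
self_k, self_v, cross_k, cross_v = "SK", "SV", "CK", "CV"  # stand-ins for tf.Tensors
past_key_value = (self_k, self_v, cross_k, cross_v)

self_attn_past_key_value = past_key_value[:2]    # ('SK', 'SV')
cross_attn_past_key_value = past_key_value[-2:]  # ('CK', 'CV')
print(self_attn_past_key_value, cross_attn_past_key_value)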
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the configured Numato USB GPIO switch ports.
Set up the configured Numato USB GPIO switch ports.
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the configured Numato USB GPIO switch ports."""
    if discovery_info is None:
        return

    api = hass.data[DOMAIN][DATA_API]
    switches = []
    devices = hass.data[DOMAIN][CONF_DEVICES]
    for device in [d for d in devices if CONF_SWITCHES in d]:
        device_id = device[CONF_ID]
        platform = device[CONF_SWITCHES]
        invert_logic = platform[CONF_INVERT_LOGIC]
        ports = platform[CONF_PORTS]
        for port, port_name in ports.items():
            try:
                api.setup_output(device_id, port)
                api.write_output(device_id, port, 1 if invert_logic else 0)
            except NumatoGpioError as err:
                _LOGGER.error(
                    "Failed to initialize switch '%s' on Numato device %s port %s: %s",
                    port_name,
                    device_id,
                    port,
                    err,
                )
                continue
            switches.append(
                NumatoGpioSwitch(
                    port_name,
                    device_id,
                    port,
                    invert_logic,
                    api,
                )
            )
    add_entities(switches, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "if", "discovery_info", "is", "None", ":", "return", "api", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "DATA_API", "]", "switches", "=", "[", "]", "devices", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "CONF_DEVICES", "]", "for", "device", "in", "[", "d", "for", "d", "in", "devices", "if", "CONF_SWITCHES", "in", "d", "]", ":", "device_id", "=", "device", "[", "CONF_ID", "]", "platform", "=", "device", "[", "CONF_SWITCHES", "]", "invert_logic", "=", "platform", "[", "CONF_INVERT_LOGIC", "]", "ports", "=", "platform", "[", "CONF_PORTS", "]", "for", "port", ",", "port_name", "in", "ports", ".", "items", "(", ")", ":", "try", ":", "api", ".", "setup_output", "(", "device_id", ",", "port", ")", "api", ".", "write_output", "(", "device_id", ",", "port", ",", "1", "if", "invert_logic", "else", "0", ")", "except", "NumatoGpioError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Failed to initialize switch '%s' on Numato device %s port %s: %s\"", ",", "port_name", ",", "device_id", ",", "port", ",", "err", ",", ")", "continue", "switches", ".", "append", "(", "NumatoGpioSwitch", "(", "port_name", ",", "device_id", ",", "port", ",", "invert_logic", ",", "api", ",", ")", ")", "add_entities", "(", "switches", ",", "True", ")" ]
[ 18, 0 ]
[ 53, 32 ]
python
en
['en', 'zu', 'en']
True
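For reference, the nested structure the loop above consumes can be sketched as a plain dict; the literal key names stand in for the CONF_* constants and the values are made up:

device = {
    "id": 0,                    # CONF_ID
    "switches": {               # CONF_SWITCHES
        "invert_logic": False,  # CONF_INVERT_LOGIC
        "ports": {              # CONF_PORTS: port number -> friendly name
            2: "Relay 2",
            3: "Relay 3",
        },
    },
}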
NumatoGpioSwitch.__init__
(self, name, device_id, port, invert_logic, api)
Initialize the port.
Initialize the port.
def __init__(self, name, device_id, port, invert_logic, api):
    """Initialize the port."""
    self._name = name or DEVICE_DEFAULT_NAME
    self._device_id = device_id
    self._port = port
    self._invert_logic = invert_logic
    self._state = False
    self._api = api
[ "def", "__init__", "(", "self", ",", "name", ",", "device_id", ",", "port", ",", "invert_logic", ",", "api", ")", ":", "self", ".", "_name", "=", "name", "or", "DEVICE_DEFAULT_NAME", "self", ".", "_device_id", "=", "device_id", "self", ".", "_port", "=", "port", "self", ".", "_invert_logic", "=", "invert_logic", "self", ".", "_state", "=", "False", "self", ".", "_api", "=", "api" ]
[ 59, 4 ]
[ 66, 23 ]
python
en
['en', 'en', 'en']
True
NumatoGpioSwitch.name
(self)
Return the name of the switch.
Return the name of the switch.
def name(self):
    """Return the name of the switch."""
    return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 69, 4 ]
[ 71, 25 ]
python
en
['en', 'en', 'en']
True
NumatoGpioSwitch.should_poll
(self)
No polling needed.
No polling needed.
def should_poll(self):
    """No polling needed."""
    return False
[ "def", "should_poll", "(", "self", ")", ":", "return", "False" ]
[ 74, 4 ]
[ 76, 20 ]
python
en
['en', 'en', 'en']
True
NumatoGpioSwitch.is_on
(self)
Return true if port is turned on.
Return true if port is turned on.
def is_on(self):
    """Return true if port is turned on."""
    return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 79, 4 ]
[ 81, 26 ]
python
en
['en', 'et', 'en']
True
NumatoGpioSwitch.turn_on
(self, **kwargs)
Turn the port on.
Turn the port on.
def turn_on(self, **kwargs):
    """Turn the port on."""
    try:
        self._api.write_output(
            self._device_id, self._port, 0 if self._invert_logic else 1
        )
        self._state = True
        self.schedule_update_ha_state()
    except NumatoGpioError as err:
        _LOGGER.error(
            "Failed to turn on Numato device %s port %s: %s",
            self._device_id,
            self._port,
            err,
        )
[ "def", "turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "try", ":", "self", ".", "_api", ".", "write_output", "(", "self", ".", "_device_id", ",", "self", ".", "_port", ",", "0", "if", "self", ".", "_invert_logic", "else", "1", ")", "self", ".", "_state", "=", "True", "self", ".", "schedule_update_ha_state", "(", ")", "except", "NumatoGpioError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Failed to turn on Numato device %s port %s: %s\"", ",", "self", ".", "_device_id", ",", "self", ".", "_port", ",", "err", ",", ")" ]
[ 83, 4 ]
[ 97, 13 ]
python
en
['en', 'en', 'en']
True
NumatoGpioSwitch.turn_off
(self, **kwargs)
Turn the port off.
Turn the port off.
def turn_off(self, **kwargs):
    """Turn the port off."""
    try:
        self._api.write_output(
            self._device_id, self._port, 1 if self._invert_logic else 0
        )
        self._state = False
        self.schedule_update_ha_state()
    except NumatoGpioError as err:
        _LOGGER.error(
            "Failed to turn off Numato device %s port %s: %s",
            self._device_id,
            self._port,
            err,
        )
[ "def", "turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "try", ":", "self", ".", "_api", ".", "write_output", "(", "self", ".", "_device_id", ",", "self", ".", "_port", ",", "1", "if", "self", ".", "_invert_logic", "else", "0", ")", "self", ".", "_state", "=", "False", "self", ".", "schedule_update_ha_state", "(", ")", "except", "NumatoGpioError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Failed to turn off Numato device %s port %s: %s\"", ",", "self", ".", "_device_id", ",", "self", ".", "_port", ",", "err", ",", ")" ]
[ 99, 4 ]
[ 113, 13 ]
python
en
['en', 'en', 'en']
True
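turn_on and turn_off are mirror images driven by _invert_logic; a standalone table of the GPIO levels they write:

for invert_logic in (False, True):
    on_level = 0 if invert_logic else 1   # level written by turn_on
    off_level = 1 if invert_logic else 0  # level written by turn_off
    print(f"invert_logic={invert_logic}: on -> {on_level}, off -> {off_level}")
# invert_logic=False: on writes 1, off writes 0
# invert_logic=True:  on writes 0, off writes 1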
test_record_service_invalid_file
(hass)
Test record service call with invalid file.
Test record service call with invalid file.
async def test_record_service_invalid_file(hass):
    """Test record service call with invalid file."""
    await async_setup_component(hass, "stream", {"stream": {}})
    data = {CONF_STREAM_SOURCE: "rtsp://my.video", CONF_FILENAME: "/my/invalid/path"}
    with pytest.raises(HomeAssistantError):
        await hass.services.async_call(DOMAIN, SERVICE_RECORD, data, blocking=True)
[ "async", "def", "test_record_service_invalid_file", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"stream\"", ",", "{", "\"stream\"", ":", "{", "}", "}", ")", "data", "=", "{", "CONF_STREAM_SOURCE", ":", "\"rtsp://my.video\"", ",", "CONF_FILENAME", ":", "\"/my/invalid/path\"", "}", "with", "pytest", ".", "raises", "(", "HomeAssistantError", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RECORD", ",", "data", ",", "blocking", "=", "True", ")" ]
[ 17, 0 ]
[ 22, 83 ]
python
en
['en', 'en', 'en']
True
test_record_service_init_stream
(hass)
Test record service call that initializes a stream.
Test record service call that initializes a stream.
async def test_record_service_init_stream(hass):
    """Test record service call that initializes a stream."""
    await async_setup_component(hass, "stream", {"stream": {}})
    data = {CONF_STREAM_SOURCE: "rtsp://my.video", CONF_FILENAME: "/my/invalid/path"}
    with patch("homeassistant.components.stream.Stream") as stream_mock, patch.object(
        hass.config, "is_allowed_path", return_value=True
    ):
        # Setup stubs
        stream_mock.return_value.outputs = {}

        # Call Service
        await hass.services.async_call(DOMAIN, SERVICE_RECORD, data, blocking=True)

        # Assert
        assert stream_mock.called
[ "async", "def", "test_record_service_init_stream", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"stream\"", ",", "{", "\"stream\"", ":", "{", "}", "}", ")", "data", "=", "{", "CONF_STREAM_SOURCE", ":", "\"rtsp://my.video\"", ",", "CONF_FILENAME", ":", "\"/my/invalid/path\"", "}", "with", "patch", "(", "\"homeassistant.components.stream.Stream\"", ")", "as", "stream_mock", ",", "patch", ".", "object", "(", "hass", ".", "config", ",", "\"is_allowed_path\"", ",", "return_value", "=", "True", ")", ":", "# Setup stubs", "stream_mock", ".", "return_value", ".", "outputs", "=", "{", "}", "# Call Service", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RECORD", ",", "data", ",", "blocking", "=", "True", ")", "# Assert", "assert", "stream_mock", ".", "called" ]
[ 25, 0 ]
[ 39, 33 ]
python
en
['en', 'en', 'en']
True
test_record_service_existing_record_session
(hass)
Test record service call with an existing recording session.
Test record service call with an existing recording session.
async def test_record_service_existing_record_session(hass):
    """Test record service call with an existing recording session."""
    await async_setup_component(hass, "stream", {"stream": {}})
    source = "rtsp://my.video"
    data = {CONF_STREAM_SOURCE: source, CONF_FILENAME: "/my/invalid/path"}

    # Setup stubs
    stream_mock = MagicMock()
    stream_mock.return_value.outputs = {"recorder": MagicMock()}
    hass.data[DOMAIN][ATTR_STREAMS][source] = stream_mock

    with patch.object(hass.config, "is_allowed_path", return_value=True), pytest.raises(
        HomeAssistantError
    ):
        # Call Service
        await hass.services.async_call(DOMAIN, SERVICE_RECORD, data, blocking=True)
[ "async", "def", "test_record_service_existing_record_session", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"stream\"", ",", "{", "\"stream\"", ":", "{", "}", "}", ")", "source", "=", "\"rtsp://my.video\"", "data", "=", "{", "CONF_STREAM_SOURCE", ":", "source", ",", "CONF_FILENAME", ":", "\"/my/invalid/path\"", "}", "# Setup stubs", "stream_mock", "=", "MagicMock", "(", ")", "stream_mock", ".", "return_value", ".", "outputs", "=", "{", "\"recorder\"", ":", "MagicMock", "(", ")", "}", "hass", ".", "data", "[", "DOMAIN", "]", "[", "ATTR_STREAMS", "]", "[", "source", "]", "=", "stream_mock", "with", "patch", ".", "object", "(", "hass", ".", "config", ",", "\"is_allowed_path\"", ",", "return_value", "=", "True", ")", ",", "pytest", ".", "raises", "(", "HomeAssistantError", ")", ":", "# Call Service", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RECORD", ",", "data", ",", "blocking", "=", "True", ")" ]
[ 42, 0 ]
[ 57, 83 ]
python
en
['en', 'en', 'en']
True
test_record_service_lookback
(hass)
Test record service call with a lookback period.
Test record service call with a lookback period.
async def test_record_service_lookback(hass):
    """Test record service call with a lookback period."""
    await async_setup_component(hass, "stream", {"stream": {}})
    data = {
        CONF_STREAM_SOURCE: "rtsp://my.video",
        CONF_FILENAME: "/my/invalid/path",
        CONF_LOOKBACK: 4,
    }

    with patch("homeassistant.components.stream.Stream") as stream_mock, patch.object(
        hass.config, "is_allowed_path", return_value=True
    ):
        # Setup stubs
        hls_mock = MagicMock()
        hls_mock.target_duration = 2
        hls_mock.recv = AsyncMock(return_value=None)
        stream_mock.return_value.outputs = {"hls": hls_mock}

        # Call Service
        await hass.services.async_call(DOMAIN, SERVICE_RECORD, data, blocking=True)

        assert stream_mock.called
        stream_mock.return_value.add_provider.assert_called_once_with("recorder")
        assert hls_mock.recv.called
[ "async", "def", "test_record_service_lookback", "(", "hass", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"stream\"", ",", "{", "\"stream\"", ":", "{", "}", "}", ")", "data", "=", "{", "CONF_STREAM_SOURCE", ":", "\"rtsp://my.video\"", ",", "CONF_FILENAME", ":", "\"/my/invalid/path\"", ",", "CONF_LOOKBACK", ":", "4", ",", "}", "with", "patch", "(", "\"homeassistant.components.stream.Stream\"", ")", "as", "stream_mock", ",", "patch", ".", "object", "(", "hass", ".", "config", ",", "\"is_allowed_path\"", ",", "return_value", "=", "True", ")", ":", "# Setup stubs", "hls_mock", "=", "MagicMock", "(", ")", "hls_mock", ".", "target_duration", "=", "2", "hls_mock", ".", "recv", "=", "AsyncMock", "(", "return_value", "=", "None", ")", "stream_mock", ".", "return_value", ".", "outputs", "=", "{", "\"hls\"", ":", "hls_mock", "}", "# Call Service", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RECORD", ",", "data", ",", "blocking", "=", "True", ")", "assert", "stream_mock", ".", "called", "stream_mock", ".", "return_value", ".", "add_provider", ".", "assert_called_once_with", "(", "\"recorder\"", ")", "assert", "hls_mock", ".", "recv", ".", "called" ]
[ 60, 0 ]
[ 83, 35 ]
python
en
['en', 'en', 'en']
True
connect_client
(hass, user_input)
Connect the HLK-SW16 client.
Connect the HLK-SW16 client.
async def connect_client(hass, user_input):
    """Connect the HLK-SW16 client."""
    client_aw = create_hlk_sw16_connection(
        host=user_input[CONF_HOST],
        port=user_input[CONF_PORT],
        loop=hass.loop,
        timeout=CONNECTION_TIMEOUT,
        reconnect_interval=DEFAULT_RECONNECT_INTERVAL,
        keep_alive_interval=DEFAULT_KEEP_ALIVE_INTERVAL,
    )
    return await asyncio.wait_for(client_aw, timeout=CONNECTION_TIMEOUT)
[ "async", "def", "connect_client", "(", "hass", ",", "user_input", ")", ":", "client_aw", "=", "create_hlk_sw16_connection", "(", "host", "=", "user_input", "[", "CONF_HOST", "]", ",", "port", "=", "user_input", "[", "CONF_PORT", "]", ",", "loop", "=", "hass", ".", "loop", ",", "timeout", "=", "CONNECTION_TIMEOUT", ",", "reconnect_interval", "=", "DEFAULT_RECONNECT_INTERVAL", ",", "keep_alive_interval", "=", "DEFAULT_KEEP_ALIVE_INTERVAL", ",", ")", "return", "await", "asyncio", ".", "wait_for", "(", "client_aw", ",", "timeout", "=", "CONNECTION_TIMEOUT", ")" ]
[ 27, 0 ]
[ 37, 72 ]
python
en
['en', 'fr', 'en']
True
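The asyncio.wait_for wrapper is what bounds the connection attempt; a self-contained sketch of the same pattern (slow_connect is a made-up stand-in for the device handshake):

import asyncio

async def slow_connect():
    await asyncio.sleep(10)  # stand-in for a device handshake that hangs

async def main():
    try:
        await asyncio.wait_for(slow_connect(), timeout=0.1)
    except asyncio.TimeoutError:
        print("connection timed out")  # the config flow maps this to CannotConnect

asyncio.run(main())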
validate_input
(hass: HomeAssistant, user_input)
Validate the user input allows us to connect.
Validate the user input allows us to connect.
async def validate_input(hass: HomeAssistant, user_input):
    """Validate the user input allows us to connect."""
    for entry in hass.config_entries.async_entries(DOMAIN):
        if (
            entry.data[CONF_HOST] == user_input[CONF_HOST]
            and entry.data[CONF_PORT] == user_input[CONF_PORT]
        ):
            raise AlreadyConfigured

    try:
        client = await connect_client(hass, user_input)
    except asyncio.TimeoutError as err:
        raise CannotConnect from err
    try:

        def disconnect_callback():
            if client.in_transaction:
                client.active_transaction.set_exception(CannotConnect)

        client.disconnect_callback = disconnect_callback
        await client.status()
    except CannotConnect:
        client.disconnect_callback = None
        client.stop()
        raise
    else:
        client.disconnect_callback = None
        client.stop()
[ "async", "def", "validate_input", "(", "hass", ":", "HomeAssistant", ",", "user_input", ")", ":", "for", "entry", "in", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", ":", "if", "(", "entry", ".", "data", "[", "CONF_HOST", "]", "==", "user_input", "[", "CONF_HOST", "]", "and", "entry", ".", "data", "[", "CONF_PORT", "]", "==", "user_input", "[", "CONF_PORT", "]", ")", ":", "raise", "AlreadyConfigured", "try", ":", "client", "=", "await", "connect_client", "(", "hass", ",", "user_input", ")", "except", "asyncio", ".", "TimeoutError", "as", "err", ":", "raise", "CannotConnect", "from", "err", "try", ":", "def", "disconnect_callback", "(", ")", ":", "if", "client", ".", "in_transaction", ":", "client", ".", "active_transaction", ".", "set_exception", "(", "CannotConnect", ")", "client", ".", "disconnect_callback", "=", "disconnect_callback", "await", "client", ".", "status", "(", ")", "except", "CannotConnect", ":", "client", ".", "disconnect_callback", "=", "None", "client", ".", "stop", "(", ")", "raise", "else", ":", "client", ".", "disconnect_callback", "=", "None", "client", ".", "stop", "(", ")" ]
[ 40, 0 ]
[ 67, 21 ]
python
en
['en', 'en', 'en']
True
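The try/except/else shape at the end of validate_input guarantees the probe connection is stopped on both paths while still propagating failures; a generic sketch with hypothetical stand-ins:

def probe():
    pass  # stand-in for client.status()

def cleanup():
    print("client stopped")  # stand-in for client.stop()

try:
    probe()
except RuntimeError:  # stand-in for CannotConnect
    cleanup()
    raise             # failure path: clean up, then re-raise
else:
    cleanup()         # success path: still release the connection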
SW16FlowHandler.async_step_import
(self, user_input)
Handle import.
Handle import.
async def async_step_import(self, user_input):
    """Handle import."""
    return await self.async_step_user(user_input)
[ "async", "def", "async_step_import", "(", "self", ",", "user_input", ")", ":", "return", "await", "self", ".", "async_step_user", "(", "user_input", ")" ]
[ 76, 4 ]
[ 78, 53 ]
python
en
['en', 'ja', 'en']
False
SW16FlowHandler.async_step_user
(self, user_input=None)
Handle the initial step.
Handle the initial step.
async def async_step_user(self, user_input=None):
    """Handle the initial step."""
    errors = {}
    if user_input is not None:
        try:
            await validate_input(self.hass, user_input)
            address = f"{user_input[CONF_HOST]}:{user_input[CONF_PORT]}"
            return self.async_create_entry(title=address, data=user_input)
        except AlreadyConfigured:
            errors["base"] = "already_configured"
        except CannotConnect:
            errors["base"] = "cannot_connect"

    return self.async_show_form(
        step_id="user", data_schema=DATA_SCHEMA, errors=errors
    )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "try", ":", "await", "validate_input", "(", "self", ".", "hass", ",", "user_input", ")", "address", "=", "f\"{user_input[CONF_HOST]}:{user_input[CONF_PORT]}\"", "return", "self", ".", "async_create_entry", "(", "title", "=", "address", ",", "data", "=", "user_input", ")", "except", "AlreadyConfigured", ":", "errors", "[", "\"base\"", "]", "=", "\"already_configured\"", "except", "CannotConnect", ":", "errors", "[", "\"base\"", "]", "=", "\"cannot_connect\"", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "DATA_SCHEMA", ",", "errors", "=", "errors", ")" ]
[ 80, 4 ]
[ 95, 9 ]
python
en
['en', 'en', 'en']
True
async_setup
(hass, config)
Initialize default configuration.
Initialize default configuration.
async def async_setup(hass, config):
    """Initialize default configuration."""
    if av is None:
        return True

    return await async_setup_component(hass, "stream", config)
[ "async", "def", "async_setup", "(", "hass", ",", "config", ")", ":", "if", "av", "is", "None", ":", "return", "True", "return", "await", "async_setup_component", "(", "hass", ",", "\"stream\"", ",", "config", ")" ]
[ 11, 0 ]
[ 16, 62 ]
python
fr
['fr', 'fr', 'en']
True
async_setup
(hass, config)
Old way of setting up deCONZ integrations.
Old way of setting up deCONZ integrations.
async def async_setup(hass, config):
    """Old way of setting up deCONZ integrations."""
    return True
[ "async", "def", "async_setup", "(", "hass", ",", "config", ")", ":", "return", "True" ]
[ 16, 0 ]
[ 18, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass, config_entry)
Set up a deCONZ bridge for a config entry. Load config, group, light and sensor data for server information. Start websocket for push notification of state changes from deCONZ.
Set up a deCONZ bridge for a config entry.
async def async_setup_entry(hass, config_entry):
    """Set up a deCONZ bridge for a config entry.

    Load config, group, light and sensor data for server information.
    Start websocket for push notification of state changes from deCONZ.
    """
    if DOMAIN not in hass.data:
        hass.data[DOMAIN] = {}

    if not config_entry.options:
        await async_update_master_gateway(hass, config_entry)

    gateway = DeconzGateway(hass, config_entry)

    if not await gateway.async_setup():
        return False

    # 0.104 introduced config entry unique id, this makes upgrading possible
    if config_entry.unique_id is None:

        new_data = _UNDEF
        if CONF_BRIDGE_ID in config_entry.data:
            new_data = dict(config_entry.data)
            new_data[CONF_GROUP_ID_BASE] = config_entry.data[CONF_BRIDGE_ID]

        hass.config_entries.async_update_entry(
            config_entry, unique_id=gateway.api.config.bridgeid, data=new_data
        )

    hass.data[DOMAIN][config_entry.unique_id] = gateway

    await gateway.async_update_device_registry()

    await async_setup_services(hass)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, gateway.shutdown)

    return True
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ")", ":", "if", "DOMAIN", "not", "in", "hass", ".", "data", ":", "hass", ".", "data", "[", "DOMAIN", "]", "=", "{", "}", "if", "not", "config_entry", ".", "options", ":", "await", "async_update_master_gateway", "(", "hass", ",", "config_entry", ")", "gateway", "=", "DeconzGateway", "(", "hass", ",", "config_entry", ")", "if", "not", "await", "gateway", ".", "async_setup", "(", ")", ":", "return", "False", "# 0.104 introduced config entry unique id, this makes upgrading possible", "if", "config_entry", ".", "unique_id", "is", "None", ":", "new_data", "=", "_UNDEF", "if", "CONF_BRIDGE_ID", "in", "config_entry", ".", "data", ":", "new_data", "=", "dict", "(", "config_entry", ".", "data", ")", "new_data", "[", "CONF_GROUP_ID_BASE", "]", "=", "config_entry", ".", "data", "[", "CONF_BRIDGE_ID", "]", "hass", ".", "config_entries", ".", "async_update_entry", "(", "config_entry", ",", "unique_id", "=", "gateway", ".", "api", ".", "config", ".", "bridgeid", ",", "data", "=", "new_data", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry", ".", "unique_id", "]", "=", "gateway", "await", "gateway", ".", "async_update_device_registry", "(", ")", "await", "async_setup_services", "(", "hass", ")", "hass", ".", "bus", ".", "async_listen_once", "(", "EVENT_HOMEASSISTANT_STOP", ",", "gateway", ".", "shutdown", ")", "return", "True" ]
[ 21, 0 ]
[ 58, 15 ]
python
en
['en', 'fr', 'en']
True
async_unload_entry
(hass, config_entry)
Unload deCONZ config entry.
Unload deCONZ config entry.
async def async_unload_entry(hass, config_entry):
    """Unload deCONZ config entry."""
    gateway = hass.data[DOMAIN].pop(config_entry.unique_id)

    if not hass.data[DOMAIN]:
        await async_unload_services(hass)

    elif gateway.master:
        await async_update_master_gateway(hass, config_entry)
        new_master_gateway = next(iter(hass.data[DOMAIN].values()))
        await async_update_master_gateway(hass, new_master_gateway.config_entry)

    return await gateway.async_reset()
[ "async", "def", "async_unload_entry", "(", "hass", ",", "config_entry", ")", ":", "gateway", "=", "hass", ".", "data", "[", "DOMAIN", "]", ".", "pop", "(", "config_entry", ".", "unique_id", ")", "if", "not", "hass", ".", "data", "[", "DOMAIN", "]", ":", "await", "async_unload_services", "(", "hass", ")", "elif", "gateway", ".", "master", ":", "await", "async_update_master_gateway", "(", "hass", ",", "config_entry", ")", "new_master_gateway", "=", "next", "(", "iter", "(", "hass", ".", "data", "[", "DOMAIN", "]", ".", "values", "(", ")", ")", ")", "await", "async_update_master_gateway", "(", "hass", ",", "new_master_gateway", ".", "config_entry", ")", "return", "await", "gateway", ".", "async_reset", "(", ")" ]
[ 61, 0 ]
[ 73, 38 ]
python
da
['da', 'es', 'pt']
False
async_update_master_gateway
(hass, config_entry)
Update master gateway boolean. Called by setup_entry and unload_entry. Makes sure there is always one master available.
Update master gateway boolean.
async def async_update_master_gateway(hass, config_entry): """Update master gateway boolean. Called by setup_entry and unload_entry. Makes sure there is always one master available. """ master = not get_master_gateway(hass) options = {**config_entry.options, CONF_MASTER_GATEWAY: master} hass.config_entries.async_update_entry(config_entry, options=options)
[ "async", "def", "async_update_master_gateway", "(", "hass", ",", "config_entry", ")", ":", "master", "=", "not", "get_master_gateway", "(", "hass", ")", "options", "=", "{", "*", "*", "config_entry", ".", "options", ",", "CONF_MASTER_GATEWAY", ":", "master", "}", "hass", ".", "config_entries", ".", "async_update_entry", "(", "config_entry", ",", "options", "=", "options", ")" ]
[ 76, 0 ]
[ 85, 73 ]
python
de
['de', 'de', 'en']
True
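The docstring above states an invariant rather than an algorithm: at any time exactly one gateway is flagged as master. A toy sketch of that invariant with invented stand-ins (FakeGateway, get_master), not the Home Assistant API:

class FakeGateway:
    """Invented stand-in for DeconzGateway; only the master flag matters."""

    def __init__(self, master=False):
        self.master = master


def get_master(gateways):
    """Return the first master gateway, or None (mirrors get_master_gateway)."""
    return next((gw for gw in gateways if gw.master), None)


gateways = []
for _ in range(3):
    # A new gateway becomes master iff no master exists yet.
    gateways.append(FakeGateway(master=get_master(gateways) is None))

assert [gw.master for gw in gateways] == [True, False, False]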
perturbation
(hyperparameter_type, value, resample_probablity, uv, ub, lv, lb, random_state)
Perturbation for hyperparameters Parameters ---------- hyperparameter_type : str type of hyperparameter value : list parameters for sampling hyperparameter resample_probability : float probability for resampling uv : float/int upper value after perturbation ub : float/int upper bound lv : float/int lower value after perturbation lb : float/int lower bound random_state : RandomState random state
Perturbation for hyperparameters
def perturbation(hyperparameter_type, value, resample_probablity, uv, ub, lv, lb, random_state): """ Perturbation for hyperparameters Parameters ---------- hyperparameter_type : str type of hyperparameter value : list parameters for sampling hyperparameter resample_probability : float probability for resampling uv : float/int upper value after perturbation ub : float/int upper bound lv : float/int lower value after perturbation lb : float/int lower bound random_state : RandomState random state """ if random.random() < resample_probablity: if hyperparameter_type == "choice": return value.index(nni.parameter_expressions.choice(value, random_state)) else: return getattr(nni.parameter_expressions, hyperparameter_type)(*(value + [random_state])) else: if random.random() > 0.5: return min(uv, ub) else: return max(lv, lb)
[ "def", "perturbation", "(", "hyperparameter_type", ",", "value", ",", "resample_probablity", ",", "uv", ",", "ub", ",", "lv", ",", "lb", ",", "random_state", ")", ":", "if", "random", ".", "random", "(", ")", "<", "resample_probablity", ":", "if", "hyperparameter_type", "==", "\"choice\"", ":", "return", "value", ".", "index", "(", "nni", ".", "parameter_expressions", ".", "choice", "(", "value", ",", "random_state", ")", ")", "else", ":", "return", "getattr", "(", "nni", ".", "parameter_expressions", ",", "hyperparameter_type", ")", "(", "*", "(", "value", "+", "[", "random_state", "]", ")", ")", "else", ":", "if", "random", ".", "random", "(", ")", ">", "0.5", ":", "return", "min", "(", "uv", ",", "ub", ")", "else", ":", "return", "max", "(", "lv", ",", "lb", ")" ]
[ 20, 0 ]
[ 52, 30 ]
python
en
['en', 'error', 'th']
False
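A self-contained toy rerun of the branch logic in perturbation. The real helper delegates resampling to nni.parameter_expressions; here that is replaced by a plain uniform draw, so toy_perturb and its body are illustrative assumptions, not NNI's API:

import random


def toy_perturb(resample_probability, uv, ub, lv, lb):
    """With some probability resample from scratch; otherwise step to the
    clipped upper or lower perturbed value, each with probability 0.5."""
    if random.random() < resample_probability:
        return random.uniform(lb, ub)  # stand-in for nni.parameter_expressions
    if random.random() > 0.5:
        return min(uv, ub)  # never exceed the upper bound
    return max(lv, lb)  # never fall below the lower bound


random.seed(0)
print(toy_perturb(resample_probability=0.25, uv=0.6, ub=1.0, lv=0.4, lb=0.0))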
exploit_and_explore
(bot_trial_info, top_trial_info, factor, resample_probability, epoch, search_space)
Replace checkpoint of bot_trial with top, and perturb hyperparameters Parameters ---------- bot_trial_info : TrialInfo bottom model whose parameters should be replaced top_trial_info : TrialInfo better model factor : float factor for perturbation resample_probability : float probability for resampling epoch : int step of PBTTuner search_space : dict search_space to keep perturbed hyperparameters in range
Replace checkpoint of bot_trial with top, and perturb hyperparameters
def exploit_and_explore(bot_trial_info, top_trial_info, factor, resample_probability, epoch, search_space): """ Replace checkpoint of bot_trial with top, and perturb hyperparameters Parameters ---------- bot_trial_info : TrialInfo bottom model whose parameters should be replaced top_trial_info : TrialInfo better model factor : float factor for perturbation resample_probability : float probability for resampling epoch : int step of PBTTuner search_space : dict search_space to keep perturbed hyperparameters in range """ bot_checkpoint_dir = bot_trial_info.checkpoint_dir top_hyper_parameters = top_trial_info.hyper_parameters hyper_parameters = copy.deepcopy(top_hyper_parameters) random_state = np.random.RandomState() hyper_parameters['load_checkpoint_dir'] = hyper_parameters['save_checkpoint_dir'] hyper_parameters['save_checkpoint_dir'] = os.path.join(bot_checkpoint_dir, str(epoch)) for key in hyper_parameters.keys(): hyper_parameter = hyper_parameters[key] if key == 'load_checkpoint_dir' or key == 'save_checkpoint_dir': continue elif search_space[key]["_type"] == "choice": choices = search_space[key]["_value"] ub, uv = len(choices) - 1, choices.index(hyper_parameter) + 1 lb, lv = 0, choices.index(hyper_parameter) - 1 elif search_space[key]["_type"] == "randint": lb, ub = search_space[key]["_value"][:2] ub -= 1 uv = hyper_parameter + 1 lv = hyper_parameter - 1 elif search_space[key]["_type"] == "uniform": lb, ub = search_space[key]["_value"][:2] perturb = (ub - lb) * factor uv = hyper_parameter + perturb lv = hyper_parameter - perturb elif search_space[key]["_type"] == "quniform": lb, ub, q = search_space[key]["_value"][:3] multi = round(hyper_parameter / q) uv = (multi + 1) * q lv = (multi - 1) * q elif search_space[key]["_type"] == "loguniform": lb, ub = search_space[key]["_value"][:2] perturb = (np.log(ub) - np.log(lb)) * factor uv = np.exp(min(np.log(hyper_parameter) + perturb, np.log(ub))) lv = np.exp(max(np.log(hyper_parameter) - perturb, np.log(lb))) elif search_space[key]["_type"] == "qloguniform": lb, ub, q = search_space[key]["_value"][:3] multi = round(hyper_parameter / q) uv = (multi + 1) * q lv = (multi - 1) * q elif search_space[key]["_type"] == "normal": sigma = search_space[key]["_value"][1] perturb = sigma * factor uv = ub = hyper_parameter + perturb lv = lb = hyper_parameter - perturb elif search_space[key]["_type"] == "qnormal": q = search_space[key]["_value"][2] uv = ub = hyper_parameter + q lv = lb = hyper_parameter - q elif search_space[key]["_type"] == "lognormal": sigma = search_space[key]["_value"][1] perturb = sigma * factor uv = ub = np.exp(np.log(hyper_parameter) + perturb) lv = lb = np.exp(np.log(hyper_parameter) - perturb) elif search_space[key]["_type"] == "qlognormal": q = search_space[key]["_value"][2] uv = ub = hyper_parameter + q lv, lb = hyper_parameter - q, 1E-10 else: logger.warning("Illegal type to perturb: %s", search_space[key]["_type"]) continue if search_space[key]["_type"] == "choice": idx = perturbation(search_space[key]["_type"], search_space[key]["_value"], resample_probability, uv, ub, lv, lb, random_state) hyper_parameters[key] = choices[idx] else: hyper_parameters[key] = perturbation(search_space[key]["_type"], search_space[key]["_value"], resample_probability, uv, ub, lv, lb, random_state) bot_trial_info.hyper_parameters = hyper_parameters bot_trial_info.clean_id()
[ "def", "exploit_and_explore", "(", "bot_trial_info", ",", "top_trial_info", ",", "factor", ",", "resample_probability", ",", "epoch", ",", "search_space", ")", ":", "bot_checkpoint_dir", "=", "bot_trial_info", ".", "checkpoint_dir", "top_hyper_parameters", "=", "top_trial_info", ".", "hyper_parameters", "hyper_parameters", "=", "copy", ".", "deepcopy", "(", "top_hyper_parameters", ")", "random_state", "=", "np", ".", "random", ".", "RandomState", "(", ")", "hyper_parameters", "[", "'load_checkpoint_dir'", "]", "=", "hyper_parameters", "[", "'save_checkpoint_dir'", "]", "hyper_parameters", "[", "'save_checkpoint_dir'", "]", "=", "os", ".", "path", ".", "join", "(", "bot_checkpoint_dir", ",", "str", "(", "epoch", ")", ")", "for", "key", "in", "hyper_parameters", ".", "keys", "(", ")", ":", "hyper_parameter", "=", "hyper_parameters", "[", "key", "]", "if", "key", "==", "'load_checkpoint_dir'", "or", "key", "==", "'save_checkpoint_dir'", ":", "continue", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"choice\"", ":", "choices", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "ub", ",", "uv", "=", "len", "(", "choices", ")", "-", "1", ",", "choices", ".", "index", "(", "hyper_parameter", ")", "+", "1", "lb", ",", "lv", "=", "0", ",", "choices", ".", "index", "(", "hyper_parameter", ")", "-", "1", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"randint\"", ":", "lb", ",", "ub", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", ":", "2", "]", "ub", "-=", "1", "uv", "=", "hyper_parameter", "+", "1", "lv", "=", "hyper_parameter", "-", "1", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"uniform\"", ":", "lb", ",", "ub", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", ":", "2", "]", "perturb", "=", "(", "ub", "-", "lb", ")", "*", "factor", "uv", "=", "hyper_parameter", "+", "perturb", "lv", "=", "hyper_parameter", "-", "perturb", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"quniform\"", ":", "lb", ",", "ub", ",", "q", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", ":", "3", "]", "multi", "=", "round", "(", "hyper_parameter", "/", "q", ")", "uv", "=", "(", "multi", "+", "1", ")", "*", "q", "lv", "=", "(", "multi", "-", "1", ")", "*", "q", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"loguniform\"", ":", "lb", ",", "ub", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", ":", "2", "]", "perturb", "=", "(", "np", ".", "log", "(", "ub", ")", "-", "np", ".", "log", "(", "lb", ")", ")", "*", "factor", "uv", "=", "np", ".", "exp", "(", "min", "(", "np", ".", "log", "(", "hyper_parameter", ")", "+", "perturb", ",", "np", ".", "log", "(", "ub", ")", ")", ")", "lv", "=", "np", ".", "exp", "(", "max", "(", "np", ".", "log", "(", "hyper_parameter", ")", "-", "perturb", ",", "np", ".", "log", "(", "lb", ")", ")", ")", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"qloguniform\"", ":", "lb", ",", "ub", ",", "q", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", ":", "3", "]", "multi", "=", "round", "(", "hyper_parameter", "/", "q", ")", "uv", "=", "(", "multi", "+", "1", ")", "*", "q", "lv", "=", "(", "multi", "-", "1", ")", "*", "q", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"normal\"", ":", "sigma", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", "1", "]", "perturb", "=", "sigma", "*", "factor", "uv", "=", 
"ub", "=", "hyper_parameter", "+", "perturb", "lv", "=", "lb", "=", "hyper_parameter", "-", "perturb", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"qnormal\"", ":", "q", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", "2", "]", "uv", "=", "ub", "=", "hyper_parameter", "+", "q", "lv", "=", "lb", "=", "hyper_parameter", "-", "q", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"lognormal\"", ":", "sigma", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", "1", "]", "perturb", "=", "sigma", "*", "factor", "uv", "=", "ub", "=", "np", ".", "exp", "(", "np", ".", "log", "(", "hyper_parameter", ")", "+", "perturb", ")", "lv", "=", "lb", "=", "np", ".", "exp", "(", "np", ".", "log", "(", "hyper_parameter", ")", "-", "perturb", ")", "elif", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"qlognormal\"", ":", "q", "=", "search_space", "[", "key", "]", "[", "\"_value\"", "]", "[", "2", "]", "uv", "=", "ub", "=", "hyper_parameter", "+", "q", "lv", ",", "lb", "=", "hyper_parameter", "-", "q", ",", "1E-10", "else", ":", "logger", ".", "warning", "(", "\"Illegal type to perturb: %s\"", ",", "search_space", "[", "key", "]", "[", "\"_type\"", "]", ")", "continue", "if", "search_space", "[", "key", "]", "[", "\"_type\"", "]", "==", "\"choice\"", ":", "idx", "=", "perturbation", "(", "search_space", "[", "key", "]", "[", "\"_type\"", "]", ",", "search_space", "[", "key", "]", "[", "\"_value\"", "]", ",", "resample_probability", ",", "uv", ",", "ub", ",", "lv", ",", "lb", ",", "random_state", ")", "hyper_parameters", "[", "key", "]", "=", "choices", "[", "idx", "]", "else", ":", "hyper_parameters", "[", "key", "]", "=", "perturbation", "(", "search_space", "[", "key", "]", "[", "\"_type\"", "]", ",", "search_space", "[", "key", "]", "[", "\"_value\"", "]", ",", "resample_probability", ",", "uv", ",", "ub", ",", "lv", ",", "lb", ",", "random_state", ")", "bot_trial_info", ".", "hyper_parameters", "=", "hyper_parameters", "bot_trial_info", ".", "clean_id", "(", ")" ]
[ 55, 0 ]
[ 143, 29 ]
python
en
['en', 'error', 'th']
False
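Which uv/ub/lv/lb get fed into perturbation depends on the parameter's type. A minimal numpy sketch of the two continuous cases handled above, with made-up bounds and current value:

import numpy as np

factor = 0.2
lb, ub, current = 0.001, 1.0, 0.05

# "uniform": step by a fixed fraction of the range (no clipping here;
# perturbation() clips against lb/ub when it picks a side)
perturb = (ub - lb) * factor
uv, lv = current + perturb, current - perturb

# "loguniform": step by a fixed fraction of the log-range, clipped in log space
log_step = (np.log(ub) - np.log(lb)) * factor
uv_log = np.exp(min(np.log(current) + log_step, np.log(ub)))
lv_log = np.exp(max(np.log(current) - log_step, np.log(lb)))

print(uv, lv, uv_log, lv_log)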
PBTTuner.__init__
(self, optimize_mode="maximize", all_checkpoint_dir=None, population_size=10, factor=0.2, resample_probability=0.25, fraction=0.2)
Initialization Parameters ---------- optimize_mode : str maximize or minimize all_checkpoint_dir : str directory to store training model checkpoint population_size : int number of trials for each epoch factor : float factor for perturbation resample_probability : float probability for resampling fraction : float fraction for selecting bottom and top trials
Initialization
def __init__(self, optimize_mode="maximize", all_checkpoint_dir=None, population_size=10, factor=0.2, resample_probability=0.25, fraction=0.2): """ Initialization Parameters ---------- optimize_mode : str maximize or minimize all_checkpoint_dir : str directory to store training model checkpoint population_size : int number of trials for each epoch factor : float factor for perturbation resample_probability : float probability for resampling fraction : float fraction for selecting bottom and top trials """ self.optimize_mode = OptimizeMode(optimize_mode) if all_checkpoint_dir is None: all_checkpoint_dir = os.getenv('NNI_CHECKPOINT_DIRECTORY') logger.info("Checkpoint dir is set to %s by default.", all_checkpoint_dir) self.all_checkpoint_dir = all_checkpoint_dir self.population_size = population_size self.factor = factor self.resample_probability = resample_probability self.fraction = fraction # defined in trial code #self.perturbation_interval = perturbation_interval self.population = None self.pos = -1 self.param_ids = [] self.running = {} self.finished = [] self.credit = 0 self.finished_trials = 0 self.epoch = 0 self.searchspace_json = None self.space = None self.send_trial_callback = None logger.info('PBT tuner initialization')
[ "def", "__init__", "(", "self", ",", "optimize_mode", "=", "\"maximize\"", ",", "all_checkpoint_dir", "=", "None", ",", "population_size", "=", "10", ",", "factor", "=", "0.2", ",", "resample_probability", "=", "0.25", ",", "fraction", "=", "0.2", ")", ":", "self", ".", "optimize_mode", "=", "OptimizeMode", "(", "optimize_mode", ")", "if", "all_checkpoint_dir", "is", "None", ":", "all_checkpoint_dir", "=", "os", ".", "getenv", "(", "'NNI_CHECKPOINT_DIRECTORY'", ")", "logger", ".", "info", "(", "\"Checkpoint dir is set to %s by default.\"", ",", "all_checkpoint_dir", ")", "self", ".", "all_checkpoint_dir", "=", "all_checkpoint_dir", "self", ".", "population_size", "=", "population_size", "self", ".", "factor", "=", "factor", "self", ".", "resample_probability", "=", "resample_probability", "self", ".", "fraction", "=", "fraction", "# defined in trial code", "#self.perturbation_interval = perturbation_interval", "self", ".", "population", "=", "None", "self", ".", "pos", "=", "-", "1", "self", ".", "param_ids", "=", "[", "]", "self", ".", "running", "=", "{", "}", "self", ".", "finished", "=", "[", "]", "self", ".", "credit", "=", "0", "self", ".", "finished_trials", "=", "0", "self", ".", "epoch", "=", "0", "self", ".", "searchspace_json", "=", "None", "self", ".", "space", "=", "None", "self", ".", "send_trial_callback", "=", "None", "logger", ".", "info", "(", "'PBT tuner initialization'", ")" ]
[ 172, 4 ]
[ 218, 47 ]
python
en
['en', 'error', 'th']
False
PBTTuner.update_search_space
(self, search_space)
Get search space Parameters ---------- search_space : dict Search space
Get search space
def update_search_space(self, search_space): """ Get search space Parameters ---------- search_space : dict Search space """ logger.info('Update search space %s', search_space) self.searchspace_json = search_space self.space = json2space(self.searchspace_json) self.random_state = np.random.RandomState() self.population = [] is_rand = dict() for item in self.space: is_rand[item] = True for i in range(self.population_size): hyper_parameters = json2parameter( self.searchspace_json, is_rand, self.random_state) hyper_parameters = split_index(hyper_parameters) checkpoint_dir = os.path.join(self.all_checkpoint_dir, str(i)) hyper_parameters['load_checkpoint_dir'] = os.path.join(checkpoint_dir, str(self.epoch)) hyper_parameters['save_checkpoint_dir'] = os.path.join(checkpoint_dir, str(self.epoch)) self.population.append(TrialInfo(checkpoint_dir=checkpoint_dir, hyper_parameters=hyper_parameters))
[ "def", "update_search_space", "(", "self", ",", "search_space", ")", ":", "logger", ".", "info", "(", "'Update search space %s'", ",", "search_space", ")", "self", ".", "searchspace_json", "=", "search_space", "self", ".", "space", "=", "json2space", "(", "self", ".", "searchspace_json", ")", "self", ".", "random_state", "=", "np", ".", "random", ".", "RandomState", "(", ")", "self", ".", "population", "=", "[", "]", "is_rand", "=", "dict", "(", ")", "for", "item", "in", "self", ".", "space", ":", "is_rand", "[", "item", "]", "=", "True", "for", "i", "in", "range", "(", "self", ".", "population_size", ")", ":", "hyper_parameters", "=", "json2parameter", "(", "self", ".", "searchspace_json", ",", "is_rand", ",", "self", ".", "random_state", ")", "hyper_parameters", "=", "split_index", "(", "hyper_parameters", ")", "checkpoint_dir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "all_checkpoint_dir", ",", "str", "(", "i", ")", ")", "hyper_parameters", "[", "'load_checkpoint_dir'", "]", "=", "os", ".", "path", ".", "join", "(", "checkpoint_dir", ",", "str", "(", "self", ".", "epoch", ")", ")", "hyper_parameters", "[", "'save_checkpoint_dir'", "]", "=", "os", ".", "path", ".", "join", "(", "checkpoint_dir", ",", "str", "(", "self", ".", "epoch", ")", ")", "self", ".", "population", ".", "append", "(", "TrialInfo", "(", "checkpoint_dir", "=", "checkpoint_dir", ",", "hyper_parameters", "=", "hyper_parameters", ")", ")" ]
[ 220, 4 ]
[ 247, 111 ]
python
en
['en', 'error', 'th']
False
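json2space, json2parameter and split_index are NNI internals, so this is a hedged stand-in: it samples an initial population from the same {"_type": ..., "_value": ...} search-space format and attaches per-member checkpoint paths the way the method above does (toy_sample and the /tmp/pbt root are invented):

import os

import numpy as np


def toy_sample(search_space, rng):
    """Draw one configuration; only two of NNI's types are sketched."""
    params = {}
    for name, spec in search_space.items():
        if spec["_type"] == "choice":
            params[name] = spec["_value"][rng.randint(len(spec["_value"]))]
        elif spec["_type"] == "uniform":
            low, high = spec["_value"][:2]
            params[name] = rng.uniform(low, high)
    return params


space = {"lr": {"_type": "uniform", "_value": [1e-4, 1e-1]},
         "opt": {"_type": "choice", "_value": ["sgd", "adam"]}}
rng = np.random.RandomState(0)
population = []
for i in range(4):
    hyper_parameters = toy_sample(space, rng)
    checkpoint_dir = os.path.join("/tmp/pbt", str(i))  # all_checkpoint_dir stand-in
    hyper_parameters["load_checkpoint_dir"] = os.path.join(checkpoint_dir, "0")
    hyper_parameters["save_checkpoint_dir"] = os.path.join(checkpoint_dir, "0")
    population.append(hyper_parameters)
print(population[0])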
PBTTuner.generate_multiple_parameters
(self, parameter_id_list, **kwargs)
Returns multiple sets of trial (hyper-)parameters as an iterable of serializable objects. Parameters ---------- parameter_id_list : list of int Unique identifiers for each set of requested hyper-parameters. These will later be used in :meth:`receive_trial_result`. **kwargs Used for send_trial_callback. Returns ------- list A list of newly generated configurations
Returns multiple sets of trial (hyper-)parameters as an iterable of serializable objects.
def generate_multiple_parameters(self, parameter_id_list, **kwargs): """ Returns multiple sets of trial (hyper-)parameters as an iterable of serializable objects. Parameters ---------- parameter_id_list : list of int Unique identifiers for each set of requested hyper-parameters. These will later be used in :meth:`receive_trial_result`. **kwargs Used for send_trial_callback. Returns ------- list A list of newly generated configurations """ result = [] self.send_trial_callback = kwargs['st_callback'] for parameter_id in parameter_id_list: had_exception = False try: logger.debug("generating param for %s", parameter_id) res = self.generate_parameters(parameter_id, **kwargs) except nni.NoMoreTrialError: had_exception = True if not had_exception: result.append(res) return result
[ "def", "generate_multiple_parameters", "(", "self", ",", "parameter_id_list", ",", "*", "*", "kwargs", ")", ":", "result", "=", "[", "]", "self", ".", "send_trial_callback", "=", "kwargs", "[", "'st_callback'", "]", "for", "parameter_id", "in", "parameter_id_list", ":", "had_exception", "=", "False", "try", ":", "logger", ".", "debug", "(", "\"generating param for %s\"", ",", "parameter_id", ")", "res", "=", "self", ".", "generate_parameters", "(", "parameter_id", ",", "*", "*", "kwargs", ")", "except", "nni", ".", "NoMoreTrialError", ":", "had_exception", "=", "True", "if", "not", "had_exception", ":", "result", ".", "append", "(", "res", ")", "return", "result" ]
[ 249, 4 ]
[ 277, 21 ]
python
en
['en', 'error', 'th']
False
PBTTuner.generate_parameters
(self, parameter_id, **kwargs)
Generate parameters. If no trial configuration is available for now, increase self.credit by 1 so that a config can be sent later. Parameters ---------- parameter_id : int Unique identifier for requested hyper-parameters. This will later be used in :meth:`receive_trial_result`. **kwargs Not used Returns ------- dict One newly generated configuration
Generate parameters. If no trial configuration is available for now, increase self.credit by 1 so that a config can be sent later.
def generate_parameters(self, parameter_id, **kwargs): """ Generate parameters. If no trial configuration is available for now, increase self.credit by 1 so that a config can be sent later. Parameters ---------- parameter_id : int Unique identifier for requested hyper-parameters. This will later be used in :meth:`receive_trial_result`. **kwargs Not used Returns ------- dict One newly generated configuration """ if self.pos == self.population_size - 1: logger.debug('Credit added by one in parameters request') self.credit += 1 self.param_ids.append(parameter_id) raise nni.NoMoreTrialError('No more parameters now.') self.pos += 1 trial_info = self.population[self.pos] trial_info.parameter_id = parameter_id self.running[parameter_id] = trial_info logger.info('Generate parameter : %s', trial_info.hyper_parameters) return trial_info.hyper_parameters
[ "def", "generate_parameters", "(", "self", ",", "parameter_id", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "pos", "==", "self", ".", "population_size", "-", "1", ":", "logger", ".", "debug", "(", "'Credit added by one in parameters request'", ")", "self", ".", "credit", "+=", "1", "self", ".", "param_ids", ".", "append", "(", "parameter_id", ")", "raise", "nni", ".", "NoMoreTrialError", "(", "'No more parameters now.'", ")", "self", ".", "pos", "+=", "1", "trial_info", "=", "self", ".", "population", "[", "self", ".", "pos", "]", "trial_info", ".", "parameter_id", "=", "parameter_id", "self", ".", "running", "[", "parameter_id", "]", "=", "trial_info", "logger", ".", "info", "(", "'Generate parameter : %s'", ",", "trial_info", ".", "hyper_parameters", ")", "return", "trial_info", ".", "hyper_parameters" ]
[ 279, 4 ]
[ 307, 42 ]
python
en
['en', 'error', 'th']
False
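A toy model of the pos/credit bookkeeping: once every population member has been handed out for the current epoch, further requests are remembered (param_ids, credit) and an exception is raised so the dispatcher retries later. NoMoreTrial is a local stand-in for nni.NoMoreTrialError:

class NoMoreTrial(Exception):
    pass


population = ["hp0", "hp1"]
pos, credit, param_ids = -1, 0, []


def generate(parameter_id):
    global pos, credit
    if pos == len(population) - 1:  # population exhausted for this epoch
        credit += 1
        param_ids.append(parameter_id)
        raise NoMoreTrial
    pos += 1
    return population[pos]


for pid in range(3):
    try:
        print(pid, generate(pid))
    except NoMoreTrial:
        print(pid, "deferred; credit =", credit)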
PBTTuner.receive_trial_result
(self, parameter_id, parameters, value, **kwargs)
Receive a trial's result. If the number of finished trials equals ``self.population_size``, start the next epoch to train the model. Parameters ---------- parameter_id : int Unique identifier of used hyper-parameters, same as in :meth:`generate_parameters`. parameters : dict Hyper-parameters generated by :meth:`generate_parameters`. value : dict Result from trial (the return value of :func:`nni.report_final_result`).
Receive a trial's result. If the number of finished trials equals ``self.population_size``, start the next epoch to train the model.
def receive_trial_result(self, parameter_id, parameters, value, **kwargs): """ Receive a trial's result. If the number of finished trials equals ``self.population_size``, start the next epoch to train the model. Parameters ---------- parameter_id : int Unique identifier of used hyper-parameters, same as in :meth:`generate_parameters`. parameters : dict Hyper-parameters generated by :meth:`generate_parameters`. value : dict Result from trial (the return value of :func:`nni.report_final_result`). """ logger.info('Get one trial result, id = %d, value = %s', parameter_id, value) value = extract_scalar_reward(value) trial_info = self.running.pop(parameter_id, None) trial_info.score = value self.finished.append(trial_info) self.finished_trials += 1 if self.finished_trials == self.population_size: self._proceed_next_epoch()
[ "def", "receive_trial_result", "(", "self", ",", "parameter_id", ",", "parameters", ",", "value", ",", "*", "*", "kwargs", ")", ":", "logger", ".", "info", "(", "'Get one trial result, id = %d, value = %s'", ",", "parameter_id", ",", "value", ")", "value", "=", "extract_scalar_reward", "(", "value", ")", "trial_info", "=", "self", ".", "running", ".", "pop", "(", "parameter_id", ",", "None", ")", "trial_info", ".", "score", "=", "value", "self", ".", "finished", ".", "append", "(", "trial_info", ")", "self", ".", "finished_trials", "+=", "1", "if", "self", ".", "finished_trials", "==", "self", ".", "population_size", ":", "self", ".", "_proceed_next_epoch", "(", ")" ]
[ 344, 4 ]
[ 365, 38 ]
python
en
['en', 'error', 'th']
False
PBTTuner.trial_end
(self, parameter_id, success, **kwargs)
Deal with trial failure Parameters ---------- parameter_id : int Unique identifier for hyper-parameters used by this trial. success : bool True if the trial successfully completed; False if failed or terminated. **kwargs Unstable parameters which should be ignored by normal users.
Deal with trial failure
def trial_end(self, parameter_id, success, **kwargs): """ Deal with trial failure Parameters ---------- parameter_id : int Unique identifier for hyper-parameters used by this trial. success : bool True if the trial successfully completed; False if failed or terminated. **kwargs Unstable parameters which should be ignored by normal users. """ if success: return if self.optimize_mode == OptimizeMode.Minimize: value = float('inf') else: value = float('-inf') trial_info = self.running.pop(parameter_id, None) trial_info.score = value self.finished.append(trial_info) self.finished_trials += 1 if self.finished_trials == self.population_size: self._proceed_next_epoch()
[ "def", "trial_end", "(", "self", ",", "parameter_id", ",", "success", ",", "*", "*", "kwargs", ")", ":", "if", "success", ":", "return", "if", "self", ".", "optimize_mode", "==", "OptimizeMode", ".", "Minimize", ":", "value", "=", "float", "(", "'inf'", ")", "else", ":", "value", "=", "float", "(", "'-inf'", ")", "trial_info", "=", "self", ".", "running", ".", "pop", "(", "parameter_id", ",", "None", ")", "trial_info", ".", "score", "=", "value", "self", ".", "finished", ".", "append", "(", "trial_info", ")", "self", ".", "finished_trials", "+=", "1", "if", "self", ".", "finished_trials", "==", "self", ".", "population_size", ":", "self", ".", "_proceed_next_epoch", "(", ")" ]
[ 367, 4 ]
[ 391, 38 ]
python
en
['en', 'error', 'th']
False
PBTTuner.import_data
(self, data)
Parameters ---------- data : json obj imported data records Returns ------- int the start epoch number after data imported, only used for unittest
Parameters ---------- data : json obj imported data records
def import_data(self, data): """ Parameters ---------- data : json obj imported data records Returns ------- int the start epoch number after data imported, only used for unittest """ if self.running: logger.warning("Do not support importing data in the middle of experiment") return # the following is for experiment resume _completed_num = 0 epoch_data_dict = {} for trial_info in data: logger.info("Process data record %s / %s", _completed_num, len(data)) _completed_num += 1 # simply validate data format _params = trial_info["parameter"] _value = trial_info['value'] # assign fake value for failed trials if not _value: logger.info("Useless trial data, value is %s, skip this trial data.", _value) _value = float('inf') if self.optimize_mode == OptimizeMode.Minimize else float('-inf') _value = extract_scalar_reward(_value) if 'save_checkpoint_dir' not in _params: logger.warning("Invalid data record: save_checkpoint_dir is missing, abandon data import.") return epoch_num = int(os.path.basename(_params['save_checkpoint_dir'])) if epoch_num not in epoch_data_dict: epoch_data_dict[epoch_num] = [] epoch_data_dict[epoch_num].append((_params, _value)) if not epoch_data_dict: logger.warning("No valid epochs, abandon data import.") return # figure out start epoch for resume max_epoch_num = max(epoch_data_dict, key=int) if len(epoch_data_dict[max_epoch_num]) < self.population_size: max_epoch_num -= 1 # If there is no a single complete round, no data to import, start from scratch if max_epoch_num < 0: logger.warning("No completed epoch, abandon data import.") return assert len(epoch_data_dict[max_epoch_num]) == self.population_size # check existence of trial save checkpoint dir for params, _ in epoch_data_dict[max_epoch_num]: if not os.path.isdir(params['save_checkpoint_dir']): logger.warning("save_checkpoint_dir %s does not exist, data will not be resumed", params['save_checkpoint_dir']) return # resume data self.epoch = max_epoch_num self.finished_trials = self.population_size for params, value in epoch_data_dict[max_epoch_num]: checkpoint_dir = os.path.dirname(params['save_checkpoint_dir']) self.finished.append(TrialInfo(checkpoint_dir=checkpoint_dir, hyper_parameters=params, score=value)) self._proceed_next_epoch() logger.info("Successfully import data to PBT tuner, total data: %d, imported data: %d.", len(data), self.population_size) logger.info("Start from epoch %d ...", self.epoch) return self.epoch
[ "def", "import_data", "(", "self", ",", "data", ")", ":", "if", "self", ".", "running", ":", "logger", ".", "warning", "(", "\"Do not support importing data in the middle of experiment\"", ")", "return", "# the following is for experiment resume", "_completed_num", "=", "0", "epoch_data_dict", "=", "{", "}", "for", "trial_info", "in", "data", ":", "logger", ".", "info", "(", "\"Process data record %s / %s\"", ",", "_completed_num", ",", "len", "(", "data", ")", ")", "_completed_num", "+=", "1", "# simply validate data format", "_params", "=", "trial_info", "[", "\"parameter\"", "]", "_value", "=", "trial_info", "[", "'value'", "]", "# assign fake value for failed trials", "if", "not", "_value", ":", "logger", ".", "info", "(", "\"Useless trial data, value is %s, skip this trial data.\"", ",", "_value", ")", "_value", "=", "float", "(", "'inf'", ")", "if", "self", ".", "optimize_mode", "==", "OptimizeMode", ".", "Minimize", "else", "float", "(", "'-inf'", ")", "_value", "=", "extract_scalar_reward", "(", "_value", ")", "if", "'save_checkpoint_dir'", "not", "in", "_params", ":", "logger", ".", "warning", "(", "\"Invalid data record: save_checkpoint_dir is missing, abandon data import.\"", ")", "return", "epoch_num", "=", "int", "(", "os", ".", "path", ".", "basename", "(", "_params", "[", "'save_checkpoint_dir'", "]", ")", ")", "if", "epoch_num", "not", "in", "epoch_data_dict", ":", "epoch_data_dict", "[", "epoch_num", "]", "=", "[", "]", "epoch_data_dict", "[", "epoch_num", "]", ".", "append", "(", "(", "_params", ",", "_value", ")", ")", "if", "not", "epoch_data_dict", ":", "logger", ".", "warning", "(", "\"No valid epochs, abandon data import.\"", ")", "return", "# figure out start epoch for resume", "max_epoch_num", "=", "max", "(", "epoch_data_dict", ",", "key", "=", "int", ")", "if", "len", "(", "epoch_data_dict", "[", "max_epoch_num", "]", ")", "<", "self", ".", "population_size", ":", "max_epoch_num", "-=", "1", "# If there is no a single complete round, no data to import, start from scratch", "if", "max_epoch_num", "<", "0", ":", "logger", ".", "warning", "(", "\"No completed epoch, abandon data import.\"", ")", "return", "assert", "len", "(", "epoch_data_dict", "[", "max_epoch_num", "]", ")", "==", "self", ".", "population_size", "# check existence of trial save checkpoint dir", "for", "params", ",", "_", "in", "epoch_data_dict", "[", "max_epoch_num", "]", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "params", "[", "'save_checkpoint_dir'", "]", ")", ":", "logger", ".", "warning", "(", "\"save_checkpoint_dir %s does not exist, data will not be resumed\"", ",", "params", "[", "'save_checkpoint_dir'", "]", ")", "return", "# resume data", "self", ".", "epoch", "=", "max_epoch_num", "self", ".", "finished_trials", "=", "self", ".", "population_size", "for", "params", ",", "value", "in", "epoch_data_dict", "[", "max_epoch_num", "]", ":", "checkpoint_dir", "=", "os", ".", "path", ".", "dirname", "(", "params", "[", "'save_checkpoint_dir'", "]", ")", "self", ".", "finished", ".", "append", "(", "TrialInfo", "(", "checkpoint_dir", "=", "checkpoint_dir", ",", "hyper_parameters", "=", "params", ",", "score", "=", "value", ")", ")", "self", ".", "_proceed_next_epoch", "(", ")", "logger", ".", "info", "(", "\"Successfully import data to PBT tuner, total data: %d, imported data: %d.\"", ",", "len", "(", "data", ")", ",", "self", ".", "population_size", ")", "logger", ".", "info", "(", "\"Start from epoch %d ...\"", ",", "self", ".", "epoch", ")", "return", "self", ".", "epoch" ]
[ 393, 4 ]
[ 455, 25 ]
python
en
['en', 'error', 'th']
False
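A sketch of the resume logic at the core of import_data: group records by the epoch number encoded in the basename of save_checkpoint_dir, then fall back to the newest epoch with a full population. Paths and values are invented:

import os

population_size = 2
records = [
    {"parameter": {"save_checkpoint_dir": "/ckpt/0/1"}, "value": 0.7},
    {"parameter": {"save_checkpoint_dir": "/ckpt/1/1"}, "value": 0.6},
    {"parameter": {"save_checkpoint_dir": "/ckpt/0/2"}, "value": 0.8},  # epoch 2 incomplete
]

by_epoch = {}
for record in records:
    epoch = int(os.path.basename(record["parameter"]["save_checkpoint_dir"]))
    by_epoch.setdefault(epoch, []).append(record)

start = max(by_epoch, key=int)
if len(by_epoch[start]) < population_size:
    start -= 1  # last complete epoch
print("resume from epoch", start)  # -> 1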
validate_input
(hass: core.HomeAssistant, data)
Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user.
Validate the user input allows us to connect.
async def validate_input(hass: core.HomeAssistant, data): """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. """ risco = RiscoAPI(data[CONF_USERNAME], data[CONF_PASSWORD], data[CONF_PIN]) try: await risco.login(async_get_clientsession(hass)) finally: await risco.close() return {"title": risco.site_name}
[ "async", "def", "validate_input", "(", "hass", ":", "core", ".", "HomeAssistant", ",", "data", ")", ":", "risco", "=", "RiscoAPI", "(", "data", "[", "CONF_USERNAME", "]", ",", "data", "[", "CONF_PASSWORD", "]", ",", "data", "[", "CONF_PIN", "]", ")", "try", ":", "await", "risco", ".", "login", "(", "async_get_clientsession", "(", "hass", ")", ")", "finally", ":", "await", "risco", ".", "close", "(", ")", "return", "{", "\"title\"", ":", "risco", ".", "site_name", "}" ]
[ 41, 0 ]
[ 53, 37 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_get_options_flow
(config_entry)
Define the config flow to handle options.
Define the config flow to handle options.
def async_get_options_flow(config_entry): """Define the config flow to handle options.""" return RiscoOptionsFlowHandler(config_entry)
[ "def", "async_get_options_flow", "(", "config_entry", ")", ":", "return", "RiscoOptionsFlowHandler", "(", "config_entry", ")" ]
[ 64, 4 ]
[ 66, 52 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_step_user
(self, user_input=None)
Handle the initial step.
Handle the initial step.
async def async_step_user(self, user_input=None): """Handle the initial step.""" errors = {} if user_input is not None: await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() try: info = await validate_input(self.hass, user_input) except CannotConnectError: errors["base"] = "cannot_connect" except UnauthorizedError: errors["base"] = "invalid_auth" except Exception: # pylint: disable=broad-except _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry(title=info["title"], data=user_input) return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "await", "self", ".", "async_set_unique_id", "(", "user_input", "[", "CONF_USERNAME", "]", ")", "self", ".", "_abort_if_unique_id_configured", "(", ")", "try", ":", "info", "=", "await", "validate_input", "(", "self", ".", "hass", ",", "user_input", ")", "except", "CannotConnectError", ":", "errors", "[", "\"base\"", "]", "=", "\"cannot_connect\"", "except", "UnauthorizedError", ":", "errors", "[", "\"base\"", "]", "=", "\"invalid_auth\"", "except", "Exception", ":", "# pylint: disable=broad-except", "_LOGGER", ".", "exception", "(", "\"Unexpected exception\"", ")", "errors", "[", "\"base\"", "]", "=", "\"unknown\"", "else", ":", "return", "self", ".", "async_create_entry", "(", "title", "=", "info", "[", "\"title\"", "]", ",", "data", "=", "user_input", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "DATA_SCHEMA", ",", "errors", "=", "errors", ")" ]
[ 68, 4 ]
[ 89, 9 ]
python
en
['en', 'en', 'en']
True
RiscoOptionsFlowHandler.__init__
(self, config_entry)
Initialize.
Initialize.
def __init__(self, config_entry): """Initialize.""" self.config_entry = config_entry self._data = {**DEFAULT_OPTIONS, **config_entry.options}
[ "def", "__init__", "(", "self", ",", "config_entry", ")", ":", "self", ".", "config_entry", "=", "config_entry", "self", ".", "_data", "=", "{", "*", "*", "DEFAULT_OPTIONS", ",", "*", "*", "config_entry", ".", "options", "}" ]
[ 95, 4 ]
[ 98, 64 ]
python
en
['en', 'en', 'it']
False
RiscoOptionsFlowHandler.async_step_init
(self, user_input=None)
Manage the options.
Manage the options.
async def async_step_init(self, user_input=None): """Manage the options.""" if user_input is not None: self._data = {**self._data, **user_input} return await self.async_step_risco_to_ha() return self.async_show_form(step_id="init", data_schema=self._options_schema())
[ "async", "def", "async_step_init", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "not", "None", ":", "self", ".", "_data", "=", "{", "*", "*", "self", ".", "_data", ",", "*", "*", "user_input", "}", "return", "await", "self", ".", "async_step_risco_to_ha", "(", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"init\"", ",", "data_schema", "=", "self", ".", "_options_schema", "(", ")", ")" ]
[ 116, 4 ]
[ 122, 87 ]
python
en
['en', 'en', 'en']
True
RiscoOptionsFlowHandler.async_step_risco_to_ha
(self, user_input=None)
Map Risco states to HA states.
Map Risco states to HA states.
async def async_step_risco_to_ha(self, user_input=None): """Map Risco states to HA states.""" if user_input is not None: self._data[CONF_RISCO_STATES_TO_HA] = user_input return await self.async_step_ha_to_risco() risco_to_ha = self._data[CONF_RISCO_STATES_TO_HA] options = vol.Schema( { vol.Required(risco_state, default=risco_to_ha[risco_state]): vol.In( HA_STATES ) for risco_state in RISCO_STATES } ) return self.async_show_form(step_id="risco_to_ha", data_schema=options)
[ "async", "def", "async_step_risco_to_ha", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "not", "None", ":", "self", ".", "_data", "[", "CONF_RISCO_STATES_TO_HA", "]", "=", "user_input", "return", "await", "self", ".", "async_step_ha_to_risco", "(", ")", "risco_to_ha", "=", "self", ".", "_data", "[", "CONF_RISCO_STATES_TO_HA", "]", "options", "=", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "risco_state", ",", "default", "=", "risco_to_ha", "[", "risco_state", "]", ")", ":", "vol", ".", "In", "(", "HA_STATES", ")", "for", "risco_state", "in", "RISCO_STATES", "}", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"risco_to_ha\"", ",", "data_schema", "=", "options", ")" ]
[ 124, 4 ]
[ 140, 79 ]
python
en
['en', 'en', 'en']
True
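The schema above is built with a dict comprehension over the Risco states; the same pattern as a standalone voluptuous sketch, with invented state names (assumes the voluptuous package is installed):

import voluptuous as vol

RISCO_STATES = ["arm", "partial_arm"]  # invented
HA_STATES = ["armed_away", "armed_home"]  # invented
risco_to_ha = {"arm": "armed_away", "partial_arm": "armed_home"}

schema = vol.Schema(
    {
        vol.Required(state, default=risco_to_ha[state]): vol.In(HA_STATES)
        for state in RISCO_STATES
    }
)
print(schema({"arm": "armed_home", "partial_arm": "armed_home"}))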
RiscoOptionsFlowHandler.async_step_ha_to_risco
(self, user_input=None)
Map HA states to Risco states.
Map HA states to Risco states.
async def async_step_ha_to_risco(self, user_input=None): """Map HA states to Risco states.""" if user_input is not None: self._data[CONF_HA_STATES_TO_RISCO] = user_input return self.async_create_entry(title="", data=self._data) options = {} risco_to_ha = self._data[CONF_RISCO_STATES_TO_HA] # we iterate over HA_STATES, instead of set(self._risco_to_ha.values()) # to ensure a consistent order for ha_state in HA_STATES: if ha_state not in risco_to_ha.values(): continue values = [ risco_state for risco_state in RISCO_STATES if risco_to_ha[risco_state] == ha_state ] current = self._data[CONF_HA_STATES_TO_RISCO].get(ha_state) if current not in values: current = values[0] options[vol.Required(ha_state, default=current)] = vol.In(values) return self.async_show_form( step_id="ha_to_risco", data_schema=vol.Schema(options) )
[ "async", "def", "async_step_ha_to_risco", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "not", "None", ":", "self", ".", "_data", "[", "CONF_HA_STATES_TO_RISCO", "]", "=", "user_input", "return", "self", ".", "async_create_entry", "(", "title", "=", "\"\"", ",", "data", "=", "self", ".", "_data", ")", "options", "=", "{", "}", "risco_to_ha", "=", "self", ".", "_data", "[", "CONF_RISCO_STATES_TO_HA", "]", "# we iterate over HA_STATES, instead of set(self._risco_to_ha.values())", "# to ensure a consistent order", "for", "ha_state", "in", "HA_STATES", ":", "if", "ha_state", "not", "in", "risco_to_ha", ".", "values", "(", ")", ":", "continue", "values", "=", "[", "risco_state", "for", "risco_state", "in", "RISCO_STATES", "if", "risco_to_ha", "[", "risco_state", "]", "==", "ha_state", "]", "current", "=", "self", ".", "_data", "[", "CONF_HA_STATES_TO_RISCO", "]", ".", "get", "(", "ha_state", ")", "if", "current", "not", "in", "values", ":", "current", "=", "values", "[", "0", "]", "options", "[", "vol", ".", "Required", "(", "ha_state", ",", "default", "=", "current", ")", "]", "=", "vol", ".", "In", "(", "values", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"ha_to_risco\"", ",", "data_schema", "=", "vol", ".", "Schema", "(", "options", ")", ")" ]
[ 142, 4 ]
[ 168, 9 ]
python
en
['en', 'en', 'en']
True
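The reverse step inverts a many-to-one mapping. A toy version of the selection logic: for each HA state, collect the Risco states currently mapped to it and keep the stored choice only while it is still valid (all names invented):

risco_to_ha = {"arm": "armed_away", "partial_arm": "armed_home", "group_b": "armed_home"}
HA_STATES = ["armed_away", "armed_home"]  # fixed order keeps the form stable
stored = {"armed_home": "partial_arm"}  # previously saved HA -> Risco choices

for ha_state in HA_STATES:
    values = [r for r, h in risco_to_ha.items() if h == ha_state]
    if not values:
        continue
    current = stored.get(ha_state)
    if current not in values:
        current = values[0]  # fall back to the first valid option
    print(ha_state, "->", current, "out of", values)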
async_setup
(hass, config: dict)
Set up the DSMR platform.
Set up the DSMR platform.
async def async_setup(hass, config: dict): """Set up the DSMR platform.""" return True
[ "async", "def", "async_setup", "(", "hass", ",", "config", ":", "dict", ")", ":", "return", "True" ]
[ 10, 0 ]
[ 12, 15 ]
python
en
['en', 'da', 'en']
True
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
Set up DSMR from a config entry.
Set up DSMR from a config entry.
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up DSMR from a config entry.""" hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = {} for platform in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, platform) ) listener = entry.add_update_listener(async_update_options) hass.data[DOMAIN][entry.entry_id][DATA_LISTENER] = listener return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "hass", ".", "data", ".", "setdefault", "(", "DOMAIN", ",", "{", "}", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "=", "{", "}", "for", "platform", "in", "PLATFORMS", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "entry", ",", "platform", ")", ")", "listener", "=", "entry", ".", "add_update_listener", "(", "async_update_options", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "[", "DATA_LISTENER", "]", "=", "listener", "return", "True" ]
[ 15, 0 ]
[ 28, 15 ]
python
en
['en', 'en', 'en']
True
async_unload_entry
(hass: HomeAssistant, entry: ConfigEntry)
Unload a config entry.
Unload a config entry.
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" task = hass.data[DOMAIN][entry.entry_id][DATA_TASK] listener = hass.data[DOMAIN][entry.entry_id][DATA_LISTENER] # Cancel the reconnect task task.cancel() try: await task except CancelledError: pass unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) ) if unload_ok: listener() hass.data[DOMAIN].pop(entry.entry_id) return unload_ok
[ "async", "def", "async_unload_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "task", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "[", "DATA_TASK", "]", "listener", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "[", "DATA_LISTENER", "]", "# Cancel the reconnect task", "task", ".", "cancel", "(", ")", "try", ":", "await", "task", "except", "CancelledError", ":", "pass", "unload_ok", "=", "all", "(", "await", "asyncio", ".", "gather", "(", "*", "[", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "entry", ",", "component", ")", "for", "component", "in", "PLATFORMS", "]", ")", ")", "if", "unload_ok", ":", "listener", "(", ")", "hass", ".", "data", "[", "DOMAIN", "]", ".", "pop", "(", "entry", ".", "entry_id", ")", "return", "unload_ok" ]
[ 31, 0 ]
[ 56, 20 ]
python
en
['en', 'es', 'en']
True
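The cancel-then-await idiom applied to the reconnect task, as a self-contained asyncio sketch (reconnect_forever is an invented placeholder for the DSMR reader task):

import asyncio


async def reconnect_forever():
    while True:
        await asyncio.sleep(3600)


async def main():
    task = asyncio.ensure_future(reconnect_forever())
    await asyncio.sleep(0)  # let the task start
    task.cancel()
    try:
        await task  # wait for the cancellation to finish cleanup
    except asyncio.CancelledError:
        pass
    print("task cancelled:", task.cancelled())


asyncio.run(main())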
async_update_options
(hass: HomeAssistant, config_entry: ConfigEntry)
Update options.
Update options.
async def async_update_options(hass: HomeAssistant, config_entry: ConfigEntry): """Update options.""" await hass.config_entries.async_reload(config_entry.entry_id)
[ "async", "def", "async_update_options", "(", "hass", ":", "HomeAssistant", ",", "config_entry", ":", "ConfigEntry", ")", ":", "await", "hass", ".", "config_entries", ".", "async_reload", "(", "config_entry", ".", "entry_id", ")" ]
[ 59, 0 ]
[ 61, 65 ]
python
en
['en', 'en', 'en']
False
rs
(hass)
Return CommandCover instance.
Return CommandCover instance.
def rs(hass): """Return CommandCover instance.""" return cmd_rs.CommandCover( hass, "foo", "command_open", "command_close", "command_stop", "command_state", None, 15, )
[ "def", "rs", "(", "hass", ")", ":", "return", "cmd_rs", ".", "CommandCover", "(", "hass", ",", "\"foo\"", ",", "\"command_open\"", ",", "\"command_close\"", ",", "\"command_stop\"", ",", "\"command_state\"", ",", "None", ",", "15", ",", ")" ]
[ 24, 0 ]
[ 35, 5 ]
python
en
['en', 'en', 'en']
True
test_should_poll_new
(rs)
Test the setting of polling.
Test the setting of polling.
def test_should_poll_new(rs): """Test the setting of polling.""" assert rs.should_poll is True rs._command_state = None assert rs.should_poll is False
[ "def", "test_should_poll_new", "(", "rs", ")", ":", "assert", "rs", ".", "should_poll", "is", "True", "rs", ".", "_command_state", "=", "None", "assert", "rs", ".", "should_poll", "is", "False" ]
[ 38, 0 ]
[ 42, 34 ]
python
en
['en', 'en', 'en']
True
test_query_state_value
(rs)
Test with state value.
Test with state value.
def test_query_state_value(rs): """Test with state value.""" with mock.patch("subprocess.check_output") as mock_run: mock_run.return_value = b" foo bar " result = rs._query_state_value("runme") assert "foo bar" == result assert mock_run.call_count == 1 assert mock_run.call_args == mock.call( "runme", shell=True, timeout=15 # nosec # shell by design )
[ "def", "test_query_state_value", "(", "rs", ")", ":", "with", "mock", ".", "patch", "(", "\"subprocess.check_output\"", ")", "as", "mock_run", ":", "mock_run", ".", "return_value", "=", "b\" foo bar \"", "result", "=", "rs", ".", "_query_state_value", "(", "\"runme\"", ")", "assert", "\"foo bar\"", "==", "result", "assert", "mock_run", ".", "call_count", "==", "1", "assert", "mock_run", ".", "call_args", "==", "mock", ".", "call", "(", "\"runme\"", ",", "shell", "=", "True", ",", "timeout", "=", "15", "# nosec # shell by design", ")" ]
[ 45, 0 ]
[ 54, 9 ]
python
en
['en', 'en', 'en']
True
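The same patching trick outside the pytest fixture, runnable as-is. Patching the dotted name "subprocess.check_output" works because the attribute is looked up on the module at call time:

import subprocess
from unittest import mock

with mock.patch("subprocess.check_output") as mock_run:
    mock_run.return_value = b" foo bar "
    out = subprocess.check_output("runme", shell=True, timeout=15)
    print(out.strip().decode())  # -> foo bar
    mock_run.assert_called_once_with("runme", shell=True, timeout=15)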
test_state_value
(hass)
Test with state value.
Test with state value.
async def test_state_value(hass): """Test with state value.""" with tempfile.TemporaryDirectory() as tempdirname: path = os.path.join(tempdirname, "cover_status") test_cover = { "command_state": f"cat {path}", "command_open": f"echo 1 > {path}", "command_close": f"echo 1 > {path}", "command_stop": f"echo 0 > {path}", "value_template": "{{ value }}", } assert ( await async_setup_component( hass, DOMAIN, {"cover": {"platform": "command_line", "covers": {"test": test_cover}}}, ) is True ) await hass.async_block_till_done() assert "unknown" == hass.states.get("cover.test").state await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True ) assert "open" == hass.states.get("cover.test").state await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True ) assert "open" == hass.states.get("cover.test").state await hass.services.async_call( DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: "cover.test"}, blocking=True ) assert "closed" == hass.states.get("cover.test").state
[ "async", "def", "test_state_value", "(", "hass", ")", ":", "with", "tempfile", ".", "TemporaryDirectory", "(", ")", "as", "tempdirname", ":", "path", "=", "os", ".", "path", ".", "join", "(", "tempdirname", ",", "\"cover_status\"", ")", "test_cover", "=", "{", "\"command_state\"", ":", "f\"cat {path}\"", ",", "\"command_open\"", ":", "f\"echo 1 > {path}\"", ",", "\"command_close\"", ":", "f\"echo 1 > {path}\"", ",", "\"command_stop\"", ":", "f\"echo 0 > {path}\"", ",", "\"value_template\"", ":", "\"{{ value }}\"", ",", "}", "assert", "(", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "\"cover\"", ":", "{", "\"platform\"", ":", "\"command_line\"", ",", "\"covers\"", ":", "{", "\"test\"", ":", "test_cover", "}", "}", "}", ",", ")", "is", "True", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "\"unknown\"", "==", "hass", ".", "states", ".", "get", "(", "\"cover.test\"", ")", ".", "state", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_OPEN_COVER", ",", "{", "ATTR_ENTITY_ID", ":", "\"cover.test\"", "}", ",", "blocking", "=", "True", ")", "assert", "\"open\"", "==", "hass", ".", "states", ".", "get", "(", "\"cover.test\"", ")", ".", "state", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_CLOSE_COVER", ",", "{", "ATTR_ENTITY_ID", ":", "\"cover.test\"", "}", ",", "blocking", "=", "True", ")", "assert", "\"open\"", "==", "hass", ".", "states", ".", "get", "(", "\"cover.test\"", ")", ".", "state", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_STOP_COVER", ",", "{", "ATTR_ENTITY_ID", ":", "\"cover.test\"", "}", ",", "blocking", "=", "True", ")", "assert", "\"closed\"", "==", "hass", ".", "states", ".", "get", "(", "\"cover.test\"", ")", ".", "state" ]
[ 57, 0 ]
[ 93, 62 ]
python
en
['en', 'en', 'en']
True
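What the shell commands wired into that test actually do, reduced to a standalone script (POSIX shell assumed for echo, cat, and the redirect):

import os
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as tempdirname:
    path = os.path.join(tempdirname, "cover_status")
    subprocess.call(f"echo 1 > {path}", shell=True)  # command_open / command_close
    state = subprocess.check_output(f"cat {path}", shell=True)  # command_state
    print(state.strip().decode())  # -> 1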
test_reload
(hass)
Verify we can reload command_line covers.
Verify we can reload command_line covers.
async def test_reload(hass): """Verify we can reload command_line covers.""" test_cover = { "command_state": "echo open", "value_template": "{{ value }}", } await async_setup_component( hass, DOMAIN, {"cover": {"platform": "command_line", "covers": {"test": test_cover}}}, ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 1 assert hass.states.get("cover.test").state yaml_path = path.join( _get_fixtures_base_path(), "fixtures", "command_line/configuration.yaml", ) with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): await hass.services.async_call( "command_line", SERVICE_RELOAD, {}, blocking=True, ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 1 assert hass.states.get("cover.test") is None assert hass.states.get("cover.from_yaml")
[ "async", "def", "test_reload", "(", "hass", ")", ":", "test_cover", "=", "{", "\"command_state\"", ":", "\"echo open\"", ",", "\"value_template\"", ":", "\"{{ value }}\"", ",", "}", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "\"cover\"", ":", "{", "\"platform\"", ":", "\"command_line\"", ",", "\"covers\"", ":", "{", "\"test\"", ":", "test_cover", "}", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "1", "assert", "hass", ".", "states", ".", "get", "(", "\"cover.test\"", ")", ".", "state", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"command_line/configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "hass_config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "\"command_line\"", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "1", "assert", "hass", ".", "states", ".", "get", "(", "\"cover.test\"", ")", "is", "None", "assert", "hass", ".", "states", ".", "get", "(", "\"cover.from_yaml\"", ")" ]
[ 96, 0 ]
[ 130, 45 ]
python
en
['en', 'en', 'en']
True
exit
(state: int = 0, msg: str = None)
Exit and write msg to sys.stderr
Exit and write msg to sys.stderr
def exit(state: int = 0, msg: str = None): """Exit and write msg to sys.stderr""" if msg is not None: sys.stderr.write(msg) sys.exit(state)
[ "def", "exit", "(", "state", ":", "int", "=", "0", ",", "msg", ":", "str", "=", "None", ")", ":", "if", "msg", "is", "not", "None", ":", "sys", ".", "stderr", ".", "write", "(", "msg", ")", "sys", ".", "exit", "(", "state", ")" ]
[ 6, 0 ]
[ 11, 19 ]
python
en
['en', 'en', 'en']
True
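Because sys.exit raises SystemExit, the helper can be exercised in-process. A small demo; the helper is restated so the snippet runs on its own:

import sys


def exit(state: int = 0, msg: str = None):
    """Exit and write msg to sys.stderr"""
    if msg is not None:
        sys.stderr.write(msg)
    sys.exit(state)


try:
    exit(2, "something went wrong\n")
except SystemExit as err:
    print("would have exited with status", err.code)  # -> 2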
async_setup_entry
(hass, config_entry, async_add_entities)
Set up switches for deCONZ component. Switches are based on the same device class as lights in deCONZ.
Set up switches for deCONZ component.
async def async_setup_entry(hass, config_entry, async_add_entities): """Set up switches for deCONZ component. Switches are based on the same device class as lights in deCONZ. """ gateway = get_gateway_from_config_entry(hass, config_entry) gateway.entities[DOMAIN] = set() @callback def async_add_switch(lights): """Add switch from deCONZ.""" entities = [] for light in lights: if ( light.type in POWER_PLUGS and light.uniqueid not in gateway.entities[DOMAIN] ): entities.append(DeconzPowerPlug(light, gateway)) elif ( light.type in SIRENS and light.uniqueid not in gateway.entities[DOMAIN] ): entities.append(DeconzSiren(light, gateway)) if entities: async_add_entities(entities) gateway.listeners.append( async_dispatcher_connect( hass, gateway.async_signal_new_device(NEW_LIGHT), async_add_switch ) ) async_add_switch(gateway.api.lights.values())
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "gateway", "=", "get_gateway_from_config_entry", "(", "hass", ",", "config_entry", ")", "gateway", ".", "entities", "[", "DOMAIN", "]", "=", "set", "(", ")", "@", "callback", "def", "async_add_switch", "(", "lights", ")", ":", "\"\"\"Add switch from deCONZ.\"\"\"", "entities", "=", "[", "]", "for", "light", "in", "lights", ":", "if", "(", "light", ".", "type", "in", "POWER_PLUGS", "and", "light", ".", "uniqueid", "not", "in", "gateway", ".", "entities", "[", "DOMAIN", "]", ")", ":", "entities", ".", "append", "(", "DeconzPowerPlug", "(", "light", ",", "gateway", ")", ")", "elif", "(", "light", ".", "type", "in", "SIRENS", "and", "light", ".", "uniqueid", "not", "in", "gateway", ".", "entities", "[", "DOMAIN", "]", ")", ":", "entities", ".", "append", "(", "DeconzSiren", "(", "light", ",", "gateway", ")", ")", "if", "entities", ":", "async_add_entities", "(", "entities", ")", "gateway", ".", "listeners", ".", "append", "(", "async_dispatcher_connect", "(", "hass", ",", "gateway", ".", "async_signal_new_device", "(", "NEW_LIGHT", ")", ",", "async_add_switch", ")", ")", "async_add_switch", "(", "gateway", ".", "api", ".", "lights", ".", "values", "(", ")", ")" ]
[ 10, 0 ]
[ 45, 49 ]
python
en
['en', 'en', 'en']
True
DeconzPowerPlug.is_on
(self)
Return true if switch is on.
Return true if switch is on.
def is_on(self): """Return true if switch is on.""" return self._device.state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_device", ".", "state" ]
[ 54, 4 ]
[ 56, 33 ]
python
en
['en', 'fy', 'en']
True
DeconzPowerPlug.async_turn_on
(self, **kwargs)
Turn on switch.
Turn on switch.
async def async_turn_on(self, **kwargs): """Turn on switch.""" data = {"on": True} await self._device.async_set_state(data)
[ "async", "def", "async_turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "\"on\"", ":", "True", "}", "await", "self", ".", "_device", ".", "async_set_state", "(", "data", ")" ]
[ 58, 4 ]
[ 61, 48 ]
python
en
['en', 'en', 'en']
True
DeconzPowerPlug.async_turn_off
(self, **kwargs)
Turn off switch.
Turn off switch.
async def async_turn_off(self, **kwargs): """Turn off switch.""" data = {"on": False} await self._device.async_set_state(data)
[ "async", "def", "async_turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "\"on\"", ":", "False", "}", "await", "self", ".", "_device", ".", "async_set_state", "(", "data", ")" ]
[ 63, 4 ]
[ 66, 48 ]
python
en
['en', 'en', 'en']
True
DeconzSiren.is_on
(self)
Return true if switch is on.
Return true if switch is on.
def is_on(self): """Return true if switch is on.""" return self._device.alert == "lselect"
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_device", ".", "alert", "==", "\"lselect\"" ]
[ 75, 4 ]
[ 77, 46 ]
python
en
['en', 'fy', 'en']
True
DeconzSiren.async_turn_on
(self, **kwargs)
Turn on switch.
Turn on switch.
async def async_turn_on(self, **kwargs): """Turn on switch.""" data = {"alert": "lselect"} await self._device.async_set_state(data)
[ "async", "def", "async_turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "\"alert\"", ":", "\"lselect\"", "}", "await", "self", ".", "_device", ".", "async_set_state", "(", "data", ")" ]
[ 79, 4 ]
[ 82, 48 ]
python
en
['en', 'en', 'en']
True
DeconzSiren.async_turn_off
(self, **kwargs)
Turn off switch.
Turn off switch.
async def async_turn_off(self, **kwargs): """Turn off switch.""" data = {"alert": "none"} await self._device.async_set_state(data)
[ "async", "def", "async_turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "\"alert\"", ":", "\"none\"", "}", "await", "self", ".", "_device", ".", "async_set_state", "(", "data", ")" ]
[ 84, 4 ]
[ 87, 48 ]
python
en
['en', 'en', 'en']
True
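The DeconzPowerPlug and DeconzSiren records above map on/off onto device state writes; the siren is the interesting case, since it has no plain on/off field and instead treats a continuous alert ("lselect") as "on" and "none" as "off". A minimal asyncio sketch of that mapping, with FakeSiren as a hypothetical stand-in for the deCONZ device object:

import asyncio

class FakeSiren:
    def __init__(self):
        self.alert = "none"

    async def async_set_state(self, data):
        # The real device object sends this over the network; here we
        # just record the new alert value locally.
        self.alert = data["alert"]

    @property
    def is_on(self):
        return self.alert == "lselect"

async def demo():
    siren = FakeSiren()
    await siren.async_set_state({"alert": "lselect"})  # turn_on
    assert siren.is_on
    await siren.async_set_state({"alert": "none"})  # turn_off
    assert not siren.is_on

asyncio.run(demo())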
test_show_form
(hass)
Test that the form is served with no input.
Test that the form is served with no input.
async def test_show_form(hass): """Test that the form is served with no input.""" flow = config_flow.AbodeFlowHandler() flow.hass = hass result = await flow.async_step_user(user_input=None) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user"
[ "async", "def", "test_show_form", "(", "hass", ")", ":", "flow", "=", "config_flow", ".", "AbodeFlowHandler", "(", ")", "flow", ".", "hass", "=", "hass", "result", "=", "await", "flow", ".", "async_step_user", "(", "user_input", "=", "None", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"user\"" ]
[ 13, 0 ]
[ 21, 38 ]
python
en
['en', 'en', 'en']
True
test_one_config_allowed
(hass)
Test that only one Abode configuration is allowed.
Test that only one Abode configuration is allowed.
async def test_one_config_allowed(hass): """Test that only one Abode configuration is allowed.""" flow = config_flow.AbodeFlowHandler() flow.hass = hass MockConfigEntry( domain="abode", data={CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"}, ).add_to_hass(hass) step_user_result = await flow.async_step_user() assert step_user_result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert step_user_result["reason"] == "single_instance_allowed" conf = { CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password", CONF_POLLING: False, } import_config_result = await flow.async_step_import(conf) assert import_config_result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert import_config_result["reason"] == "single_instance_allowed"
[ "async", "def", "test_one_config_allowed", "(", "hass", ")", ":", "flow", "=", "config_flow", ".", "AbodeFlowHandler", "(", ")", "flow", ".", "hass", "=", "hass", "MockConfigEntry", "(", "domain", "=", "\"abode\"", ",", "data", "=", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", "}", ",", ")", ".", "add_to_hass", "(", "hass", ")", "step_user_result", "=", "await", "flow", ".", "async_step_user", "(", ")", "assert", "step_user_result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "step_user_result", "[", "\"reason\"", "]", "==", "\"single_instance_allowed\"", "conf", "=", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "CONF_POLLING", ":", "False", ",", "}", "import_config_result", "=", "await", "flow", ".", "async_step_import", "(", "conf", ")", "assert", "import_config_result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "import_config_result", "[", "\"reason\"", "]", "==", "\"single_instance_allowed\"" ]
[ 24, 0 ]
[ 48, 70 ]
python
en
['en', 'en', 'en']
True
test_invalid_credentials
(hass)
Test that invalid credentials throw an error.
Test that invalid credentials throw an error.
async def test_invalid_credentials(hass): """Test that invalid credentials throw an error.""" conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"} flow = config_flow.AbodeFlowHandler() flow.hass = hass with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException((400, "auth error")), ): result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "invalid_auth"}
[ "async", "def", "test_invalid_credentials", "(", "hass", ")", ":", "conf", "=", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", "}", "flow", "=", "config_flow", ".", "AbodeFlowHandler", "(", ")", "flow", ".", "hass", "=", "hass", "with", "patch", "(", "\"homeassistant.components.abode.config_flow.Abode\"", ",", "side_effect", "=", "AbodeAuthenticationException", "(", "(", "400", ",", "\"auth error\"", ")", ")", ",", ")", ":", "result", "=", "await", "flow", ".", "async_step_user", "(", "user_input", "=", "conf", ")", "assert", "result", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"invalid_auth\"", "}" ]
[ 51, 0 ]
[ 63, 59 ]
python
en
['en', 'en', 'en']
True
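The Abode config-flow tests above share one pattern: patch the client class so that instantiating it raises an authentication exception, then assert that the flow maps the error code onto a form error. A dependency-free sketch of that pattern using unittest.mock; Client, AuthError, and try_login are hypothetical, not the real Abode API.

from unittest.mock import patch

class AuthError(Exception):
    def __init__(self, details):
        self.errcode, self.message = details

class Client:
    """Stands in for the real API client the flow instantiates."""

def try_login():
    try:
        Client()
    except AuthError as err:
        key = "invalid_auth" if err.errcode == 400 else "cannot_connect"
        return {"base": key}
    return {}

# Patching the class with an exception side_effect makes the call raise.
with patch(f"{__name__}.Client", side_effect=AuthError((400, "auth error"))):
    assert try_login() == {"base": "invalid_auth"}

with patch(f"{__name__}.Client", side_effect=AuthError((500, "connection error"))):
    assert try_login() == {"base": "cannot_connect"}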
test_connection_error
(hass)
Test that errors other than invalid credentials throw an error.
Test that errors other than invalid credentials throw an error.
async def test_connection_error(hass): """Test that errors other than invalid credentials throw an error.""" conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"} flow = config_flow.AbodeFlowHandler() flow.hass = hass with patch( "homeassistant.components.abode.config_flow.Abode", side_effect=AbodeAuthenticationException( (HTTP_INTERNAL_SERVER_ERROR, "connection error") ), ): result = await flow.async_step_user(user_input=conf) assert result["errors"] == {"base": "cannot_connect"}
[ "async", "def", "test_connection_error", "(", "hass", ")", ":", "conf", "=", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", "}", "flow", "=", "config_flow", ".", "AbodeFlowHandler", "(", ")", "flow", ".", "hass", "=", "hass", "with", "patch", "(", "\"homeassistant.components.abode.config_flow.Abode\"", ",", "side_effect", "=", "AbodeAuthenticationException", "(", "(", "HTTP_INTERNAL_SERVER_ERROR", ",", "\"connection error\"", ")", ")", ",", ")", ":", "result", "=", "await", "flow", ".", "async_step_user", "(", "user_input", "=", "conf", ")", "assert", "result", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"cannot_connect\"", "}" ]
[ 66, 0 ]
[ 80, 61 ]
python
en
['en', 'en', 'en']
True
test_step_import
(hass)
Test that the import step works.
Test that the import step works.
async def test_step_import(hass): """Test that the import step works.""" conf = { CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password", CONF_POLLING: False, } flow = config_flow.AbodeFlowHandler() flow.hass = hass with patch("homeassistant.components.abode.config_flow.Abode"): result = await flow.async_step_import(import_config=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY result = await flow.async_step_user(user_input=result["data"]) assert result["title"] == "[email protected]" assert result["data"] == { CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password", CONF_POLLING: False, }
[ "async", "def", "test_step_import", "(", "hass", ")", ":", "conf", "=", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "CONF_POLLING", ":", "False", ",", "}", "flow", "=", "config_flow", ".", "AbodeFlowHandler", "(", ")", "flow", ".", "hass", "=", "hass", "with", "patch", "(", "\"homeassistant.components.abode.config_flow.Abode\"", ")", ":", "result", "=", "await", "flow", ".", "async_step_import", "(", "import_config", "=", "conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "result", "=", "await", "flow", ".", "async_step_user", "(", "user_input", "=", "result", "[", "\"data\"", "]", ")", "assert", "result", "[", "\"title\"", "]", "==", "\"[email protected]\"", "assert", "result", "[", "\"data\"", "]", "==", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "CONF_POLLING", ":", "False", ",", "}" ]
[ 83, 0 ]
[ 103, 9 ]
python
en
['en', 'en', 'en']
True
test_step_user
(hass)
Test that the user step works.
Test that the user step works.
async def test_step_user(hass): """Test that the user step works.""" conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"} flow = config_flow.AbodeFlowHandler() flow.hass = hass with patch("homeassistant.components.abode.config_flow.Abode"): result = await flow.async_step_user(user_input=conf) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "[email protected]" assert result["data"] == { CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password", CONF_POLLING: False, }
[ "async", "def", "test_step_user", "(", "hass", ")", ":", "conf", "=", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", "}", "flow", "=", "config_flow", ".", "AbodeFlowHandler", "(", ")", "flow", ".", "hass", "=", "hass", "with", "patch", "(", "\"homeassistant.components.abode.config_flow.Abode\"", ")", ":", "result", "=", "await", "flow", ".", "async_step_user", "(", "user_input", "=", "conf", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "\"[email protected]\"", "assert", "result", "[", "\"data\"", "]", "==", "{", "CONF_USERNAME", ":", "\"[email protected]\"", ",", "CONF_PASSWORD", ":", "\"password\"", ",", "CONF_POLLING", ":", "False", ",", "}" ]
[ 106, 0 ]
[ 121, 9 ]
python
en
['en', 'en', 'en']
True
test_subscription_registry
(hass: HomeAssistant)
Test subscription registry polling.
Test subscription registry polling.
async def test_subscription_registry(hass: HomeAssistant) -> None: """Test subscription registry polling.""" subscription_registry = SubscriptionRegistry(hass) # pylint: disable=protected-access subscription_registry.poll_server_once = poll_server_once_mock = MagicMock() poll_server_once_mock.return_value = True await hass.async_add_executor_job(subscription_registry.start) async_fire_time_changed(hass, utcnow() + timedelta(seconds=1)) await hass.async_block_till_done() poll_server_once_mock.assert_called_once() # Last poll was successful and already scheduled the next poll for 1s in the future. # This will ensure that future poll will fail. poll_server_once_mock.return_value = False # Asserting future poll runs. poll_server_once_mock.reset_mock() async_fire_time_changed(hass, utcnow() + timedelta(seconds=2)) await hass.async_block_till_done() poll_server_once_mock.assert_called_once() # Asserting a future poll is delayed due to the failure set above. async_fire_time_changed(hass, utcnow() + timedelta(seconds=2)) poll_server_once_mock.reset_mock() poll_server_once_mock.assert_not_called() poll_server_once_mock.reset_mock() async_fire_time_changed(hass, utcnow() + timedelta(seconds=60)) await hass.async_block_till_done() poll_server_once_mock.assert_called_once() poll_server_once_mock.reset_mock() await hass.async_add_executor_job(subscription_registry.stop) # Assert no further polling is performed. async_fire_time_changed(hass, utcnow() + timedelta(seconds=65)) await hass.async_block_till_done() poll_server_once_mock.assert_not_called()
[ "async", "def", "test_subscription_registry", "(", "hass", ":", "HomeAssistant", ")", "->", "None", ":", "subscription_registry", "=", "SubscriptionRegistry", "(", "hass", ")", "# pylint: disable=protected-access", "subscription_registry", ".", "poll_server_once", "=", "poll_server_once_mock", "=", "MagicMock", "(", ")", "poll_server_once_mock", ".", "return_value", "=", "True", "await", "hass", ".", "async_add_executor_job", "(", "subscription_registry", ".", "start", ")", "async_fire_time_changed", "(", "hass", ",", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "1", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "poll_server_once_mock", ".", "assert_called_once", "(", ")", "# Last poll was successful and already scheduled the next poll for 1s in the future.", "# This will ensure that future poll will fail.", "poll_server_once_mock", ".", "return_value", "=", "False", "# Asserting future poll runs.", "poll_server_once_mock", ".", "reset_mock", "(", ")", "async_fire_time_changed", "(", "hass", ",", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "2", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "poll_server_once_mock", ".", "assert_called_once", "(", ")", "# Asserting a future poll is delayed due to the failure set above.", "async_fire_time_changed", "(", "hass", ",", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "2", ")", ")", "poll_server_once_mock", ".", "reset_mock", "(", ")", "poll_server_once_mock", ".", "assert_not_called", "(", ")", "poll_server_once_mock", ".", "reset_mock", "(", ")", "async_fire_time_changed", "(", "hass", ",", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "60", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "poll_server_once_mock", ".", "assert_called_once", "(", ")", "poll_server_once_mock", ".", "reset_mock", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "subscription_registry", ".", "stop", ")", "# Assert no further polling is performed.", "async_fire_time_changed", "(", "hass", ",", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "65", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "poll_server_once_mock", ".", "assert_not_called", "(", ")" ]
[ 11, 0 ]
[ 49, 45 ]
python
en
['en', 'en', 'en']
True
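The subscription-registry test above drives a poll loop purely by firing fake clock events: a quick reschedule after a successful poll, a long back-off after a failure, and no reschedule after stop(). The retry policy it exercises reduces to something like the function below; the 1s/60s delays are assumptions read off the test's timedeltas, not values taken from the real library.

def next_poll_delay(last_poll_ok, running, base=1, backoff=60):
    """Seconds until the next poll attempt, or None when stopped."""
    if not running:
        return None
    return base if last_poll_ok else backoff

assert next_poll_delay(True, running=True) == 1      # healthy: poll again soon
assert next_poll_delay(False, running=True) == 60    # failing: back off
assert next_poll_delay(True, running=False) is None  # stopped: no more polls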
async_setup_entry
(hass, config_entry, async_add_entities)
Set up AdvantageAir motion platform.
Set up AdvantageAir motion platform.
async def async_setup_entry(hass, config_entry, async_add_entities): """Set up AdvantageAir motion platform.""" instance = hass.data[ADVANTAGE_AIR_DOMAIN][config_entry.entry_id] entities = [] for ac_key, ac_device in instance["coordinator"].data["aircons"].items(): entities.append(AdvantageAirZoneFilter(instance, ac_key)) for zone_key, zone in ac_device["zones"].items(): # Only add motion sensor when motion is enabled if zone["motionConfig"] >= 2: entities.append(AdvantageAirZoneMotion(instance, ac_key, zone_key)) async_add_entities(entities)
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "instance", "=", "hass", ".", "data", "[", "ADVANTAGE_AIR_DOMAIN", "]", "[", "config_entry", ".", "entry_id", "]", "entities", "=", "[", "]", "for", "ac_key", ",", "ac_device", "in", "instance", "[", "\"coordinator\"", "]", ".", "data", "[", "\"aircons\"", "]", ".", "items", "(", ")", ":", "entities", ".", "append", "(", "AdvantageAirZoneFilter", "(", "instance", ",", "ac_key", ")", ")", "for", "zone_key", ",", "zone", "in", "ac_device", "[", "\"zones\"", "]", ".", "items", "(", ")", ":", "# Only add motion sensor when motion is enabled", "if", "zone", "[", "\"motionConfig\"", "]", ">=", "2", ":", "entities", ".", "append", "(", "AdvantageAirZoneMotion", "(", "instance", ",", "ac_key", ",", "zone_key", ")", ")", "async_add_entities", "(", "entities", ")" ]
[ 14, 0 ]
[ 26, 32 ]
python
en
['en', 'lv', 'en']
True
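A standalone sketch of the conditional entity creation in the record above: every aircon gets one filter sensor, but a zone only gets a motion sensor when its motionConfig level enables motion reporting. The data shape is inferred from the record, not taken from the real AdvantageAir payload.

def build_entities(aircons):
    entities = []
    for ac_key, ac_device in aircons.items():
        entities.append(("filter", ac_key))  # one filter sensor per aircon
        for zone_key, zone in ac_device["zones"].items():
            if zone["motionConfig"] >= 2:  # motion reporting enabled
                entities.append(("motion", ac_key, zone_key))
    return entities

aircons = {
    "ac1": {"zones": {"z01": {"motionConfig": 2}, "z02": {"motionConfig": 0}}},
}
assert build_entities(aircons) == [("filter", "ac1"), ("motion", "ac1", "z01")]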
AdvantageAirZoneFilter.name
(self)
Return the name.
Return the name.
def name(self): """Return the name.""" return f'{self._ac["name"]} Filter'
[ "def", "name", "(", "self", ")", ":", "return", "f'{self._ac[\"name\"]} Filter'" ]
[ 33, 4 ]
[ 35, 43 ]
python
en
['en', 'ig', 'en']
True
AdvantageAirZoneFilter.unique_id
(self)
Return a unique id.
Return a unique id.
def unique_id(self): """Return a unique id.""" return f'{self.coordinator.data["system"]["rid"]}-{self.ac_key}-filter'
[ "def", "unique_id", "(", "self", ")", ":", "return", "f'{self.coordinator.data[\"system\"][\"rid\"]}-{self.ac_key}-filter'" ]
[ 38, 4 ]
[ 40, 79 ]
python
ca
['fr', 'ca', 'en']
False
AdvantageAirZoneFilter.device_class
(self)
Return the device class of the filter.
Return the device class of the filter.
def device_class(self): """Return the device class of the filter.""" return DEVICE_CLASS_PROBLEM
[ "def", "device_class", "(", "self", ")", ":", "return", "DEVICE_CLASS_PROBLEM" ]
[ 43, 4 ]
[ 45, 35 ]
python
en
['en', 'en', 'en']
True
AdvantageAirZoneFilter.is_on
(self)
Return if filter needs cleaning.
Return if filter needs cleaning.
def is_on(self): """Return if filter needs cleaning.""" return self._ac["filterCleanStatus"]
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_ac", "[", "\"filterCleanStatus\"", "]" ]
[ 48, 4 ]
[ 50, 44 ]
python
en
['en', 'en', 'en']
True
AdvantageAirZoneMotion.name
(self)
Return the name.
Return the name.
def name(self): """Return the name.""" return f'{self._zone["name"]} Motion'
[ "def", "name", "(", "self", ")", ":", "return", "f'{self._zone[\"name\"]} Motion'" ]
[ 57, 4 ]
[ 59, 45 ]
python
en
['en', 'ig', 'en']
True
AdvantageAirZoneMotion.unique_id
(self)
Return a unique id.
Return a unique id.
def unique_id(self): """Return a unique id.""" return f'{self.coordinator.data["system"]["rid"]}-{self.ac_key}-{self.zone_key}-motion'
[ "def", "unique_id", "(", "self", ")", ":", "return", "f'{self.coordinator.data[\"system\"][\"rid\"]}-{self.ac_key}-{self.zone_key}-motion'" ]
[ 62, 4 ]
[ 64, 95 ]
python
ca
['fr', 'ca', 'en']
False
AdvantageAirZoneMotion.device_class
(self)
Return the device class of the motion sensor.
Return the device class of the motion sensor.
def device_class(self): """Return the device class of the motion sensor.""" return DEVICE_CLASS_MOTION
[ "def", "device_class", "(", "self", ")", ":", "return", "DEVICE_CLASS_MOTION" ]
[ 67, 4 ]
[ 69, 34 ]
python
en
['en', 'en', 'en']
True
AdvantageAirZoneMotion.is_on
(self)
Return if motion is detected.
Return if motion is detected.
def is_on(self): """Return if motion is detected.""" return self._zone["motion"]
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_zone", "[", "\"motion\"", "]" ]
[ 72, 4 ]
[ 74, 35 ]
python
en
['en', 'en', 'en']
True
FirmataEntity.__init__
(self, api)
Initialize the entity.
Initialize the entity.
def __init__(self, api): """Initialize the entity.""" self._api = api
[ "def", "__init__", "(", "self", ",", "api", ")", ":", "self", ".", "_api", "=", "api" ]
[ 13, 4 ]
[ 15, 23 ]
python
en
['en', 'en', 'en']
True
FirmataEntity.device_info
(self)
Return device info.
Return device info.
def device_info(self) -> dict: """Return device info.""" return { "connections": {}, "identifiers": {(DOMAIN, self._api.board.name)}, "manufacturer": FIRMATA_MANUFACTURER, "name": self._api.board.name, "sw_version": self._api.board.firmware_version, }
[ "def", "device_info", "(", "self", ")", "->", "dict", ":", "return", "{", "\"connections\"", ":", "{", "}", ",", "\"identifiers\"", ":", "{", "(", "DOMAIN", ",", "self", ".", "_api", ".", "board", ".", "name", ")", "}", ",", "\"manufacturer\"", ":", "FIRMATA_MANUFACTURER", ",", "\"name\"", ":", "self", ".", "_api", ".", "board", ".", "name", ",", "\"sw_version\"", ":", "self", ".", "_api", ".", "board", ".", "firmware_version", ",", "}" ]
[ 18, 4 ]
[ 26, 9 ]
python
en
['es', 'hr', 'en']
False
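The device_info record above groups all of a board's entities under one device by reporting an identifier tuple of (domain, board name). A minimal sketch of that mapping with hypothetical values:

DOMAIN = "firmata"
FIRMATA_MANUFACTURER = "Firmata"

def device_info(board_name, firmware_version):
    return {
        "connections": {},
        "identifiers": {(DOMAIN, board_name)},  # groups entities per board
        "manufacturer": FIRMATA_MANUFACTURER,
        "name": board_name,
        "sw_version": firmware_version,
    }

info = device_info("arduino_uno", "2.5.8")
assert (DOMAIN, "arduino_uno") in info["identifiers"]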
FirmataPinEntity.__init__
( self, api: Type[FirmataBoardPin], config_entry: ConfigEntry, name: str, pin: FirmataPinType, )
Initialize the pin entity.
Initialize the pin entity.
def __init__( self, api: Type[FirmataBoardPin], config_entry: ConfigEntry, name: str, pin: FirmataPinType, ): """Initialize the pin entity.""" super().__init__(api) self._name = name location = (config_entry.entry_id, "pin", pin) self._unique_id = "_".join(str(i) for i in location)
[ "def", "__init__", "(", "self", ",", "api", ":", "Type", "[", "FirmataBoardPin", "]", ",", "config_entry", ":", "ConfigEntry", ",", "name", ":", "str", ",", "pin", ":", "FirmataPinType", ",", ")", ":", "super", "(", ")", ".", "__init__", "(", "api", ")", "self", ".", "_name", "=", "name", "location", "=", "(", "config_entry", ".", "entry_id", ",", "\"pin\"", ",", "pin", ")", "self", ".", "_unique_id", "=", "\"_\"", ".", "join", "(", "str", "(", "i", ")", "for", "i", "in", "location", ")" ]
[ 32, 4 ]
[ 44, 60 ]
python
en
['en', 'en', 'en']
True
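The __init__ record above builds the pin entity's unique id by joining the config entry id, the literal "pin", and the pin descriptor with underscores. The same scheme in isolation; the entry id and pin values are made-up examples.

def make_unique_id(entry_id, pin):
    location = (entry_id, "pin", pin)
    return "_".join(str(i) for i in location)

assert make_unique_id("abc123", 13) == "abc123_pin_13"
assert make_unique_id("abc123", "A0") == "abc123_pin_A0"  # str() handles analog pins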
FirmataPinEntity.name
(self)
Get the name of the pin.
Get the name of the pin.
def name(self) -> str: """Get the name of the pin.""" return self._name
[ "def", "name", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_name" ]
[ 47, 4 ]
[ 49, 25 ]
python
en
['en', 'en', 'en']
True
FirmataPinEntity.should_poll
(self)
No polling needed.
No polling needed.
def should_poll(self) -> bool: """No polling needed.""" return False
[ "def", "should_poll", "(", "self", ")", "->", "bool", ":", "return", "False" ]
[ 52, 4 ]
[ 54, 20 ]
python
en
['en', 'en', 'en']
True
FirmataPinEntity.unique_id
(self)
Return a unique identifier for this device.
Return a unique identifier for this device.
def unique_id(self) -> str: """Return a unique identifier for this device.""" return self._unique_id
[ "def", "unique_id", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_unique_id" ]
[ 57, 4 ]
[ 59, 30 ]
python
en
['en', 'en', 'en']
True
test_cannot_connect
(hass)
Test connection error.
Test connection error.
async def test_cannot_connect(hass): """Test connection error.""" with patch( "homeassistant.components.risco.RiscoAPI.login", side_effect=CannotConnectError, ): config_entry = MockConfigEntry(domain=DOMAIN, data=TEST_CONFIG) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() registry = await hass.helpers.entity_registry.async_get_registry() assert not registry.async_is_registered(FIRST_ENTITY_ID) assert not registry.async_is_registered(SECOND_ENTITY_ID)
[ "async", "def", "test_cannot_connect", "(", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.risco.RiscoAPI.login\"", ",", "side_effect", "=", "CannotConnectError", ",", ")", ":", "config_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "data", "=", "TEST_CONFIG", ")", "config_entry", ".", "add_to_hass", "(", "hass", ")", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "config_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "registry", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "assert", "not", "registry", ".", "async_is_registered", "(", "FIRST_ENTITY_ID", ")", "assert", "not", "registry", ".", "async_is_registered", "(", "SECOND_ENTITY_ID", ")" ]
[ 16, 0 ]
[ 29, 65 ]
python
de
['eu', 'de', 'en']
False
test_unauthorized
(hass)
Test unauthorized error.
Test unauthorized error.
async def test_unauthorized(hass): """Test unauthorized error.""" with patch( "homeassistant.components.risco.RiscoAPI.login", side_effect=UnauthorizedError, ): config_entry = MockConfigEntry(domain=DOMAIN, data=TEST_CONFIG) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() registry = await hass.helpers.entity_registry.async_get_registry() assert not registry.async_is_registered(FIRST_ENTITY_ID) assert not registry.async_is_registered(SECOND_ENTITY_ID)
[ "async", "def", "test_unauthorized", "(", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.risco.RiscoAPI.login\"", ",", "side_effect", "=", "UnauthorizedError", ",", ")", ":", "config_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "data", "=", "TEST_CONFIG", ")", "config_entry", ".", "add_to_hass", "(", "hass", ")", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "config_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "registry", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "assert", "not", "registry", ".", "async_is_registered", "(", "FIRST_ENTITY_ID", ")", "assert", "not", "registry", ".", "async_is_registered", "(", "SECOND_ENTITY_ID", ")" ]
[ 32, 0 ]
[ 45, 65 ]
python
ca
['ca', 'de', 'it']
False
test_setup
(hass, two_zone_alarm)
Test entity setup.
Test entity setup.
async def test_setup(hass, two_zone_alarm): # noqa: F811 """Test entity setup.""" registry = await hass.helpers.entity_registry.async_get_registry() assert not registry.async_is_registered(FIRST_ENTITY_ID) assert not registry.async_is_registered(SECOND_ENTITY_ID) await setup_risco(hass) assert registry.async_is_registered(FIRST_ENTITY_ID) assert registry.async_is_registered(SECOND_ENTITY_ID) registry = await hass.helpers.device_registry.async_get_registry() device = registry.async_get_device({(DOMAIN, TEST_SITE_UUID + "_zone_0")}, {}) assert device is not None assert device.manufacturer == "Risco" device = registry.async_get_device({(DOMAIN, TEST_SITE_UUID + "_zone_1")}, {}) assert device is not None assert device.manufacturer == "Risco"
[ "async", "def", "test_setup", "(", "hass", ",", "two_zone_alarm", ")", ":", "# noqa: F811", "registry", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "assert", "not", "registry", ".", "async_is_registered", "(", "FIRST_ENTITY_ID", ")", "assert", "not", "registry", ".", "async_is_registered", "(", "SECOND_ENTITY_ID", ")", "await", "setup_risco", "(", "hass", ")", "assert", "registry", ".", "async_is_registered", "(", "FIRST_ENTITY_ID", ")", "assert", "registry", ".", "async_is_registered", "(", "SECOND_ENTITY_ID", ")", "registry", "=", "await", "hass", ".", "helpers", ".", "device_registry", ".", "async_get_registry", "(", ")", "device", "=", "registry", ".", "async_get_device", "(", "{", "(", "DOMAIN", ",", "TEST_SITE_UUID", "+", "\"_zone_0\"", ")", "}", ",", "{", "}", ")", "assert", "device", "is", "not", "None", "assert", "device", ".", "manufacturer", "==", "\"Risco\"", "device", "=", "registry", ".", "async_get_device", "(", "{", "(", "DOMAIN", ",", "TEST_SITE_UUID", "+", "\"_zone_1\"", ")", "}", ",", "{", "}", ")", "assert", "device", "is", "not", "None", "assert", "device", ".", "manufacturer", "==", "\"Risco\"" ]
[ 48, 0 ]
[ 67, 41 ]
python
en
['en', 'zu', 'en']
True
test_states
(hass, two_zone_alarm)
Test the various alarm states.
Test the various alarm states.
async def test_states(hass, two_zone_alarm): # noqa: F811 """Test the various alarm states.""" await setup_risco(hass) await _check_state(hass, two_zone_alarm, True, True, FIRST_ENTITY_ID, 0) await _check_state(hass, two_zone_alarm, True, False, FIRST_ENTITY_ID, 0) await _check_state(hass, two_zone_alarm, False, True, FIRST_ENTITY_ID, 0) await _check_state(hass, two_zone_alarm, False, False, FIRST_ENTITY_ID, 0) await _check_state(hass, two_zone_alarm, True, True, SECOND_ENTITY_ID, 1) await _check_state(hass, two_zone_alarm, True, False, SECOND_ENTITY_ID, 1) await _check_state(hass, two_zone_alarm, False, True, SECOND_ENTITY_ID, 1) await _check_state(hass, two_zone_alarm, False, False, SECOND_ENTITY_ID, 1)
[ "async", "def", "test_states", "(", "hass", ",", "two_zone_alarm", ")", ":", "# noqa: F811", "await", "setup_risco", "(", "hass", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "True", ",", "True", ",", "FIRST_ENTITY_ID", ",", "0", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "True", ",", "False", ",", "FIRST_ENTITY_ID", ",", "0", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "False", ",", "True", ",", "FIRST_ENTITY_ID", ",", "0", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "False", ",", "False", ",", "FIRST_ENTITY_ID", ",", "0", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "True", ",", "True", ",", "SECOND_ENTITY_ID", ",", "1", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "True", ",", "False", ",", "SECOND_ENTITY_ID", ",", "1", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "False", ",", "True", ",", "SECOND_ENTITY_ID", ",", "1", ")", "await", "_check_state", "(", "hass", ",", "two_zone_alarm", ",", "False", ",", "False", ",", "SECOND_ENTITY_ID", ",", "1", ")" ]
[ 89, 0 ]
[ 100, 79 ]
python
en
['en', 'el-Latn', 'en']
True
test_bypass
(hass, two_zone_alarm)
Test bypassing a zone.
Test bypassing a zone.
async def test_bypass(hass, two_zone_alarm): # noqa: F811 """Test bypassing a zone.""" await setup_risco(hass) with patch("homeassistant.components.risco.RiscoAPI.bypass_zone") as mock: data = {"entity_id": FIRST_ENTITY_ID} await hass.services.async_call( DOMAIN, "bypass_zone", service_data=data, blocking=True ) mock.assert_awaited_once_with(0, True)
[ "async", "def", "test_bypass", "(", "hass", ",", "two_zone_alarm", ")", ":", "# noqa: F811", "await", "setup_risco", "(", "hass", ")", "with", "patch", "(", "\"homeassistant.components.risco.RiscoAPI.bypass_zone\"", ")", "as", "mock", ":", "data", "=", "{", "\"entity_id\"", ":", "FIRST_ENTITY_ID", "}", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "\"bypass_zone\"", ",", "service_data", "=", "data", ",", "blocking", "=", "True", ")", "mock", ".", "assert_awaited_once_with", "(", "0", ",", "True", ")" ]
[ 103, 0 ]
[ 113, 46 ]
python
en
['en', 'en', 'en']
True
test_unbypass
(hass, two_zone_alarm)
Test unbypassing a zone.
Test unbypassing a zone.
async def test_unbypass(hass, two_zone_alarm): # noqa: F811 """Test unbypassing a zone.""" await setup_risco(hass) with patch("homeassistant.components.risco.RiscoAPI.bypass_zone") as mock: data = {"entity_id": FIRST_ENTITY_ID} await hass.services.async_call( DOMAIN, "unbypass_zone", service_data=data, blocking=True ) mock.assert_awaited_once_with(0, False)
[ "async", "def", "test_unbypass", "(", "hass", ",", "two_zone_alarm", ")", ":", "# noqa: F811", "await", "setup_risco", "(", "hass", ")", "with", "patch", "(", "\"homeassistant.components.risco.RiscoAPI.bypass_zone\"", ")", "as", "mock", ":", "data", "=", "{", "\"entity_id\"", ":", "FIRST_ENTITY_ID", "}", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "\"unbypass_zone\"", ",", "service_data", "=", "data", ",", "blocking", "=", "True", ")", "mock", ".", "assert_awaited_once_with", "(", "0", ",", "False", ")" ]
[ 116, 0 ]
[ 126, 47 ]
python
en
['en', 'en', 'it']
True
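The bypass/unbypass tests above call a service and then assert that the patched API coroutine was awaited exactly once with the zone id and a boolean flag. A dependency-free sketch of that assertion style using AsyncMock; the service function is hypothetical.

import asyncio
from unittest.mock import AsyncMock

async def bypass_zone_service(api, zone_id, bypass):
    # Delegates to the API client, as the real service handler would.
    await api.bypass_zone(zone_id, bypass)

async def demo():
    api = AsyncMock()
    await bypass_zone_service(api, 0, True)
    api.bypass_zone.assert_awaited_once_with(0, True)

asyncio.run(demo())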
async_setup_entry
( hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities )
Set up discovered sensors.
Set up discovered sensors.
async def async_setup_entry( hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities ) -> None: """Set up discovered sensors.""" devs = [] for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]: devs.append(HassAqualinkSensor(dev)) async_add_entities(devs, True)
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistantType", ",", "config_entry", ":", "ConfigEntry", ",", "async_add_entities", ")", "->", "None", ":", "devs", "=", "[", "]", "for", "dev", "in", "hass", ".", "data", "[", "AQUALINK_DOMAIN", "]", "[", "DOMAIN", "]", ":", "devs", ".", "append", "(", "HassAqualinkSensor", "(", "dev", ")", ")", "async_add_entities", "(", "devs", ",", "True", ")" ]
[ 14, 0 ]
[ 21, 34 ]
python
en
['en', 'da', 'en']
True
HassAqualinkSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self) -> str: """Return the name of the sensor.""" return self.dev.label
[ "def", "name", "(", "self", ")", "->", "str", ":", "return", "self", ".", "dev", ".", "label" ]
[ 28, 4 ]
[ 30, 29 ]
python
en
['en', 'mi', 'en']
True
HassAqualinkSensor.unit_of_measurement
(self)
Return the measurement unit for the sensor.
Return the measurement unit for the sensor.
def unit_of_measurement(self) -> Optional[str]: """Return the measurement unit for the sensor.""" if self.dev.name.endswith("_temp"): if self.dev.system.temp_unit == "F": return TEMP_FAHRENHEIT return TEMP_CELSIUS return None
[ "def", "unit_of_measurement", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "if", "self", ".", "dev", ".", "name", ".", "endswith", "(", "\"_temp\"", ")", ":", "if", "self", ".", "dev", ".", "system", ".", "temp_unit", "==", "\"F\"", ":", "return", "TEMP_FAHRENHEIT", "return", "TEMP_CELSIUS", "return", "None" ]
[ 33, 4 ]
[ 39, 19 ]
python
en
['en', 'sq', 'en']
True
HassAqualinkSensor.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self) -> Optional[str]: """Return the state of the sensor.""" if self.dev.state == "": return None try: state = int(self.dev.state) except ValueError: state = float(self.dev.state) return state
[ "def", "state", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "if", "self", ".", "dev", ".", "state", "==", "\"\"", ":", "return", "None", "try", ":", "state", "=", "int", "(", "self", ".", "dev", ".", "state", ")", "except", "ValueError", ":", "state", "=", "float", "(", "self", ".", "dev", ".", "state", ")", "return", "state" ]
[ 42, 4 ]
[ 51, 20 ]
python
en
['en', 'en', 'en']
True
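The state record above parses the raw device reading with an int-first, float-fallback strategy, so "72" stays an integer, "72.5" becomes a float, and an empty reading maps to None. The same logic as a standalone function:

def parse_state(raw):
    if raw == "":
        return None
    try:
        return int(raw)
    except ValueError:
        return float(raw)  # non-integer readings fall back to float

assert parse_state("") is None
assert parse_state("72") == 72 and isinstance(parse_state("72"), int)
assert parse_state("72.5") == 72.5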
HassAqualinkSensor.device_class
(self)
Return the class of the sensor.
Return the class of the sensor.
def device_class(self) -> Optional[str]: """Return the class of the sensor.""" if self.dev.name.endswith("_temp"): return DEVICE_CLASS_TEMPERATURE return None
[ "def", "device_class", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "if", "self", ".", "dev", ".", "name", ".", "endswith", "(", "\"_temp\"", ")", ":", "return", "DEVICE_CLASS_TEMPERATURE", "return", "None" ]
[ 54, 4 ]
[ 58, 19 ]
python
en
['en', 'pt', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the myStrom light integration.
Set up the myStrom light integration.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the myStrom light integration.""" host = config.get(CONF_HOST) mac = config.get(CONF_MAC) name = config.get(CONF_NAME) bulb = MyStromBulb(host, mac) try: await bulb.get_state() if bulb.bulb_type != "rgblamp": _LOGGER.error("Device %s (%s) is not a myStrom bulb", host, mac) return except MyStromConnectionError as err: _LOGGER.warning("No route to myStrom bulb: %s", host) raise PlatformNotReady() from err async_add_entities([MyStromLight(bulb, name, mac)], True)
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "host", "=", "config", ".", "get", "(", "CONF_HOST", ")", "mac", "=", "config", ".", "get", "(", "CONF_MAC", ")", "name", "=", "config", ".", "get", "(", "CONF_NAME", ")", "bulb", "=", "MyStromBulb", "(", "host", ",", "mac", ")", "try", ":", "await", "bulb", ".", "get_state", "(", ")", "if", "bulb", ".", "bulb_type", "!=", "\"rgblamp\"", ":", "_LOGGER", ".", "error", "(", "\"Device %s (%s) is not a myStrom bulb\"", ",", "host", ",", "mac", ")", "return", "except", "MyStromConnectionError", "as", "err", ":", "_LOGGER", ".", "warning", "(", "\"No route to myStrom bulb: %s\"", ",", "host", ")", "raise", "PlatformNotReady", "(", ")", "from", "err", "async_add_entities", "(", "[", "MyStromLight", "(", "bulb", ",", "name", ",", "mac", ")", "]", ",", "True", ")" ]
[ 42, 0 ]
[ 58, 61 ]
python
en
['en', 'sv', 'en']
True
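The myStrom setup record above probes the bulb once at setup time, converts a connection failure into a "retry later" signal, and silently skips devices of the wrong type. A dependency-free sketch of that probe-and-classify step; the exception classes and FakeBulb are hypothetical stand-ins, and the real PlatformNotReady lives in homeassistant.exceptions.

class ConnectionFailed(Exception):
    """Stand-in for the library's connection error."""

class PlatformNotReady(Exception):
    """Stand-in: tells the caller to retry setup later."""

def probe(bulb):
    try:
        bulb.get_state()
    except ConnectionFailed as err:
        raise PlatformNotReady() from err  # transient: ask for a retry
    if bulb.bulb_type != "rgblamp":
        return None  # wrong device type: set up nothing
    return bulb

class FakeBulb:
    bulb_type = "rgblamp"

    def get_state(self):
        pass  # a real bulb would fetch state over HTTP here

assert probe(FakeBulb()) is not None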
MyStromLight.__init__
(self, bulb, name, mac)
Initialize the light.
Initialize the light.
def __init__(self, bulb, name, mac): """Initialize the light.""" self._bulb = bulb self._name = name self._state = None self._available = False self._brightness = 0 self._color_h = 0 self._color_s = 0 self._mac = mac
[ "def", "__init__", "(", "self", ",", "bulb", ",", "name", ",", "mac", ")", ":", "self", ".", "_bulb", "=", "bulb", "self", ".", "_name", "=", "name", "self", ".", "_state", "=", "None", "self", ".", "_available", "=", "False", "self", ".", "_brightness", "=", "0", "self", ".", "_color_h", "=", "0", "self", ".", "_color_s", "=", "0", "self", ".", "_mac", "=", "mac" ]
[ 64, 4 ]
[ 73, 23 ]
python
en
['en', 'en', 'en']
True
MyStromLight.name
(self)
Return the display name of this light.
Return the display name of this light.
def name(self): """Return the display name of this light.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 76, 4 ]
[ 78, 25 ]
python
en
['en', 'en', 'en']
True
MyStromLight.unique_id
(self)
Return a unique ID.
Return a unique ID.
def unique_id(self): """Return a unique ID.""" return self._mac
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_mac" ]
[ 81, 4 ]
[ 83, 24 ]
python
ca
['fr', 'ca', 'en']
False
MyStromLight.supported_features
(self)
Flag supported features.
Flag supported features.
def supported_features(self): """Flag supported features.""" return SUPPORT_MYSTROM
[ "def", "supported_features", "(", "self", ")", ":", "return", "SUPPORT_MYSTROM" ]
[ 86, 4 ]
[ 88, 30 ]
python
en
['da', 'en', 'en']
True
MyStromLight.brightness
(self)
Return the brightness of the light.
Return the brightness of the light.
def brightness(self): """Return the brightness of the light.""" return self._brightness
[ "def", "brightness", "(", "self", ")", ":", "return", "self", ".", "_brightness" ]
[ 91, 4 ]
[ 93, 31 ]
python
en
['en', 'no', 'en']
True