Dataset columns (one row per extracted function):
- identifier: string, length 1 to 155
- parameters: string, length 2 to 6.09k
- docstring: string, length 11 to 63.4k
- docstring_summary: string, length 0 to 63.4k
- function: string, length 29 to 99.8k
- function_tokens: sequence
- start_point: sequence
- end_point: sequence
- language: string class, 1 distinct value
- docstring_language: string, length 2 to 7
- docstring_language_predictions: string, length 18 to 23
- is_langid_reliable: string class, 2 distinct values
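The columns above describe one extracted function per row. As a minimal sketch, assuming the dataset were published on the Hugging Face Hub, it could be loaded and inspected with the `datasets` library; the dataset id below is a hypothetical placeholder, not the real one.

```python
# Minimal sketch: load the dataset and inspect one row.
# "your-org/your-dataset" is a hypothetical placeholder, not the real dataset id.
from datasets import load_dataset

ds = load_dataset("your-org/your-dataset", split="train")

row = ds[0]
print(row["identifier"])         # e.g. "CloudPreferences._load_cloud_user"
print(row["parameters"])         # e.g. "(self)"
print(row["docstring_summary"])  # short form of the docstring
print(row["start_point"], row["end_point"])  # [line, column] positions in the source file
print(row["language"], row["docstring_language"], row["is_langid_reliable"])
```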
CloudPreferences._load_cloud_user
(self)
Load cloud user if available.
Load cloud user if available.
async def _load_cloud_user(self) -> Optional[User]: """Load cloud user if available.""" user_id = self._prefs.get(PREF_CLOUD_USER) if user_id is None: return None # Fetch the user. It can happen that the user no longer exists if # an image was restored without restoring the cloud prefs. return await self._hass.auth.async_get_user(user_id)
[ "async", "def", "_load_cloud_user", "(", "self", ")", "->", "Optional", "[", "User", "]", ":", "user_id", "=", "self", ".", "_prefs", ".", "get", "(", "PREF_CLOUD_USER", ")", "if", "user_id", "is", "None", ":", "return", "None", "# Fetch the user. It can happen that the user no longer exists if", "# an image was restored without restoring the cloud prefs.", "return", "await", "self", ".", "_hass", ".", "auth", ".", "async_get_user", "(", "user_id", ")" ]
[ 294, 4 ]
[ 303, 60 ]
python
en
['en', 'en', 'en']
True
CloudPreferences._has_local_trusted_network
(self)
Return if we allow localhost to bypass auth.
Return if we allow localhost to bypass auth.
def _has_local_trusted_network(self) -> bool: """Return if we allow localhost to bypass auth.""" local4 = ip_address("127.0.0.1") local6 = ip_address("::1") for prv in self._hass.auth.auth_providers: if prv.type != "trusted_networks": continue for network in prv.trusted_networks: if local4 in network or local6 in network: return True return False
[ "def", "_has_local_trusted_network", "(", "self", ")", "->", "bool", ":", "local4", "=", "ip_address", "(", "\"127.0.0.1\"", ")", "local6", "=", "ip_address", "(", "\"::1\"", ")", "for", "prv", "in", "self", ".", "_hass", ".", "auth", ".", "auth_providers", ":", "if", "prv", ".", "type", "!=", "\"trusted_networks\"", ":", "continue", "for", "network", "in", "prv", ".", "trusted_networks", ":", "if", "local4", "in", "network", "or", "local6", "in", "network", ":", "return", "True", "return", "False" ]
[ 306, 4 ]
[ 319, 20 ]
python
en
['en', 'en', 'en']
True
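The `_has_local_trusted_network` row above relies on the standard-library `ipaddress` module, where the `in` operator tests whether an address falls inside a network. Below is a small self-contained sketch of that idiom; the trusted networks listed are arbitrary examples, not Home Assistant defaults.

```python
# Sketch of the ipaddress containment idiom used by _has_local_trusted_network.
# The trusted networks below are arbitrary examples for illustration.
from ipaddress import ip_address, ip_network

local4 = ip_address("127.0.0.1")
local6 = ip_address("::1")
trusted_networks = [ip_network("127.0.0.0/8"), ip_network("192.168.1.0/24")]

# True if either loopback address falls inside any trusted network.
allows_localhost = any(local4 in net or local6 in net for net in trusted_networks)
print(allows_localhost)  # True, because 127.0.0.1 is in 127.0.0.0/8
```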
CloudPreferences._has_local_trusted_proxies
(self)
Return if we allow localhost to be a proxy and use its data.
Return if we allow localhost to be a proxy and use its data.
def _has_local_trusted_proxies(self) -> bool: """Return if we allow localhost to be a proxy and use its data.""" if not hasattr(self._hass, "http"): return False local4 = ip_address("127.0.0.1") local6 = ip_address("::1") if any( local4 in nwk or local6 in nwk for nwk in self._hass.http.trusted_proxies ): return True return False
[ "def", "_has_local_trusted_proxies", "(", "self", ")", "->", "bool", ":", "if", "not", "hasattr", "(", "self", ".", "_hass", ",", "\"http\"", ")", ":", "return", "False", "local4", "=", "ip_address", "(", "\"127.0.0.1\"", ")", "local6", "=", "ip_address", "(", "\"::1\"", ")", "if", "any", "(", "local4", "in", "nwk", "or", "local6", "in", "nwk", "for", "nwk", "in", "self", ".", "_hass", ".", "http", ".", "trusted_proxies", ")", ":", "return", "True", "return", "False" ]
[ 322, 4 ]
[ 335, 20 ]
python
en
['en', 'en', 'en']
True
CloudPreferences._save_prefs
(self, prefs)
Save preferences to disk.
Save preferences to disk.
async def _save_prefs(self, prefs): """Save preferences to disk.""" self._prefs = prefs await self._store.async_save(self._prefs) for listener in self._listeners: self._hass.async_create_task(async_create_catching_coro(listener(self)))
[ "async", "def", "_save_prefs", "(", "self", ",", "prefs", ")", ":", "self", ".", "_prefs", "=", "prefs", "await", "self", ".", "_store", ".", "async_save", "(", "self", ".", "_prefs", ")", "for", "listener", "in", "self", ".", "_listeners", ":", "self", ".", "_hass", ".", "async_create_task", "(", "async_create_catching_coro", "(", "listener", "(", "self", ")", ")", ")" ]
[ 337, 4 ]
[ 343, 84 ]
python
en
['en', 'en', 'en']
True
CloudPreferences._empty_config
(self, username)
Return an empty config.
Return an empty config.
def _empty_config(self, username): """Return an empty config.""" return { PREF_ALEXA_DEFAULT_EXPOSE: DEFAULT_EXPOSED_DOMAINS, PREF_ALEXA_ENTITY_CONFIGS: {}, PREF_CLOUD_USER: None, PREF_CLOUDHOOKS: {}, PREF_ENABLE_ALEXA: True, PREF_ENABLE_GOOGLE: True, PREF_ENABLE_REMOTE: False, PREF_GOOGLE_DEFAULT_EXPOSE: DEFAULT_EXPOSED_DOMAINS, PREF_GOOGLE_ENTITY_CONFIGS: {}, PREF_GOOGLE_LOCAL_WEBHOOK_ID: self._hass.components.webhook.async_generate_id(), PREF_GOOGLE_SECURE_DEVICES_PIN: None, PREF_USERNAME: username, }
[ "def", "_empty_config", "(", "self", ",", "username", ")", ":", "return", "{", "PREF_ALEXA_DEFAULT_EXPOSE", ":", "DEFAULT_EXPOSED_DOMAINS", ",", "PREF_ALEXA_ENTITY_CONFIGS", ":", "{", "}", ",", "PREF_CLOUD_USER", ":", "None", ",", "PREF_CLOUDHOOKS", ":", "{", "}", ",", "PREF_ENABLE_ALEXA", ":", "True", ",", "PREF_ENABLE_GOOGLE", ":", "True", ",", "PREF_ENABLE_REMOTE", ":", "False", ",", "PREF_GOOGLE_DEFAULT_EXPOSE", ":", "DEFAULT_EXPOSED_DOMAINS", ",", "PREF_GOOGLE_ENTITY_CONFIGS", ":", "{", "}", ",", "PREF_GOOGLE_LOCAL_WEBHOOK_ID", ":", "self", ".", "_hass", ".", "components", ".", "webhook", ".", "async_generate_id", "(", ")", ",", "PREF_GOOGLE_SECURE_DEVICES_PIN", ":", "None", ",", "PREF_USERNAME", ":", "username", ",", "}" ]
[ 346, 4 ]
[ 361, 9 ]
python
en
['en', 'cy', 'en']
True
test_async_setup_entry
(hass)
Test a successful setup entry.
Test a successful setup entry.
async def test_async_setup_entry(hass): """Test a successful setup entry.""" await init_integration(hass) state = hass.states.get("weather.home") assert state is not None assert state.state != STATE_UNAVAILABLE assert state.state == "sunny"
[ "async", "def", "test_async_setup_entry", "(", "hass", ")", ":", "await", "init_integration", "(", "hass", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"weather.home\"", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "state", "!=", "STATE_UNAVAILABLE", "assert", "state", ".", "state", "==", "\"sunny\"" ]
[ 14, 0 ]
[ 21, 33 ]
python
en
['en', 'en', 'en']
True
test_config_not_ready
(hass)
Test for setup failure if connection to AccuWeather is missing.
Test for setup failure if connection to AccuWeather is missing.
async def test_config_not_ready(hass): """Test for setup failure if connection to AccuWeather is missing.""" entry = MockConfigEntry( domain=DOMAIN, title="Home", unique_id="0123456", data={ "api_key": "32-character-string-1234567890qw", "latitude": 55.55, "longitude": 122.12, "name": "Home", }, ) with patch( "homeassistant.components.accuweather.AccuWeather._async_get_data", side_effect=ConnectionError(), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) assert entry.state == ENTRY_STATE_SETUP_RETRY
[ "async", "def", "test_config_not_ready", "(", "hass", ")", ":", "entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "title", "=", "\"Home\"", ",", "unique_id", "=", "\"0123456\"", ",", "data", "=", "{", "\"api_key\"", ":", "\"32-character-string-1234567890qw\"", ",", "\"latitude\"", ":", "55.55", ",", "\"longitude\"", ":", "122.12", ",", "\"name\"", ":", "\"Home\"", ",", "}", ",", ")", "with", "patch", "(", "\"homeassistant.components.accuweather.AccuWeather._async_get_data\"", ",", "side_effect", "=", "ConnectionError", "(", ")", ",", ")", ":", "entry", ".", "add_to_hass", "(", "hass", ")", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "entry", ".", "entry_id", ")", "assert", "entry", ".", "state", "==", "ENTRY_STATE_SETUP_RETRY" ]
[ 24, 0 ]
[ 44, 53 ]
python
en
['en', 'en', 'en']
True
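The `test_config_not_ready` row above forces the integration's data fetch to fail by patching it with `side_effect=ConnectionError()`. A minimal sketch of that mocking pattern in isolation; the mock name here is made up.

```python
# Sketch of the side_effect pattern used in the test above:
# a mock configured with side_effect raises instead of returning a value.
from unittest.mock import Mock

fake_fetch = Mock(side_effect=ConnectionError())
try:
    fake_fetch()
except ConnectionError:
    print("connection failed, so the config entry would go to setup-retry")
```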
test_unload_entry
(hass)
Test successful unload of entry.
Test successful unload of entry.
async def test_unload_entry(hass): """Test successful unload of entry.""" entry = await init_integration(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert entry.state == ENTRY_STATE_LOADED assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() assert entry.state == ENTRY_STATE_NOT_LOADED assert not hass.data.get(DOMAIN)
[ "async", "def", "test_unload_entry", "(", "hass", ")", ":", "entry", "=", "await", "init_integration", "(", "hass", ")", "assert", "len", "(", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", ")", "==", "1", "assert", "entry", ".", "state", "==", "ENTRY_STATE_LOADED", "assert", "await", "hass", ".", "config_entries", ".", "async_unload", "(", "entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "entry", ".", "state", "==", "ENTRY_STATE_NOT_LOADED", "assert", "not", "hass", ".", "data", ".", "get", "(", "DOMAIN", ")" ]
[ 47, 0 ]
[ 58, 36 ]
python
en
['en', 'en', 'en']
True
sample_logits
(embedding, bias, labels, inputs, sampler)
embedding: an nn.Embedding layer bias: [n_vocab] labels: [b1, b2] inputs: [b1, b2, n_emb] sampler: you may use a LogUniformSampler Return logits: [b1, b2, 1 + n_sample]
embedding: an nn.Embedding layer bias: [n_vocab] labels: [b1, b2] inputs: [b1, b2, n_emb] sampler: you may use a LogUniformSampler Return logits: [b1, b2, 1 + n_sample]
def sample_logits(embedding, bias, labels, inputs, sampler): """ embedding: an nn.Embedding layer bias: [n_vocab] labels: [b1, b2] inputs: [b1, b2, n_emb] sampler: you may use a LogUniformSampler Return logits: [b1, b2, 1 + n_sample] """ true_log_probs, samp_log_probs, neg_samples = sampler.sample(labels) n_sample = neg_samples.size(0) b1, b2 = labels.size(0), labels.size(1) all_ids = torch.cat([labels.view(-1), neg_samples]) all_w = embedding(all_ids) true_w = all_w[: -n_sample].view(b1, b2, -1) sample_w = all_w[- n_sample:].view(n_sample, -1) all_b = bias[all_ids] true_b = all_b[: -n_sample].view(b1, b2) sample_b = all_b[- n_sample:] hit = (labels[:, :, None] == neg_samples).detach() true_logits = torch.einsum('ijk,ijk->ij', [true_w, inputs]) + true_b - true_log_probs sample_logits = torch.einsum('lk,ijk->ijl', [sample_w, inputs]) + sample_b - samp_log_probs sample_logits.masked_fill_(hit, -1e30) logits = torch.cat([true_logits[:, :, None], sample_logits], -1) return logits
[ "def", "sample_logits", "(", "embedding", ",", "bias", ",", "labels", ",", "inputs", ",", "sampler", ")", ":", "true_log_probs", ",", "samp_log_probs", ",", "neg_samples", "=", "sampler", ".", "sample", "(", "labels", ")", "n_sample", "=", "neg_samples", ".", "size", "(", "0", ")", "b1", ",", "b2", "=", "labels", ".", "size", "(", "0", ")", ",", "labels", ".", "size", "(", "1", ")", "all_ids", "=", "torch", ".", "cat", "(", "[", "labels", ".", "view", "(", "-", "1", ")", ",", "neg_samples", "]", ")", "all_w", "=", "embedding", "(", "all_ids", ")", "true_w", "=", "all_w", "[", ":", "-", "n_sample", "]", ".", "view", "(", "b1", ",", "b2", ",", "-", "1", ")", "sample_w", "=", "all_w", "[", "-", "n_sample", ":", "]", ".", "view", "(", "n_sample", ",", "-", "1", ")", "all_b", "=", "bias", "[", "all_ids", "]", "true_b", "=", "all_b", "[", ":", "-", "n_sample", "]", ".", "view", "(", "b1", ",", "b2", ")", "sample_b", "=", "all_b", "[", "-", "n_sample", ":", "]", "hit", "=", "(", "labels", "[", ":", ",", ":", ",", "None", "]", "==", "neg_samples", ")", ".", "detach", "(", ")", "true_logits", "=", "torch", ".", "einsum", "(", "'ijk,ijk->ij'", ",", "[", "true_w", ",", "inputs", "]", ")", "+", "true_b", "-", "true_log_probs", "sample_logits", "=", "torch", ".", "einsum", "(", "'lk,ijk->ijl'", ",", "[", "sample_w", ",", "inputs", "]", ")", "+", "sample_b", "-", "samp_log_probs", "sample_logits", ".", "masked_fill_", "(", "hit", ",", "-", "1e30", ")", "logits", "=", "torch", ".", "cat", "(", "[", "true_logits", "[", ":", ",", ":", ",", "None", "]", ",", "sample_logits", "]", ",", "-", "1", ")", "return", "logits" ]
[ 301, 0 ]
[ 332, 17 ]
python
en
['en', 'error', 'th']
False
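The `sample_logits` row above builds negative-sampling (sampled softmax) logits: true and sampled class embeddings are scored against the inputs, corrected by subtracting the sampler's log-probabilities, and negatives that collide with the true label are masked out. Below is a simplified, self-contained sketch of the same construction with made-up shapes and stand-in sampler outputs; it is not the exact code above.

```python
# Simplified sketch of the sampled-softmax logit construction shown above.
# Shapes and the stand-in "sampler" outputs are made up for illustration.
import torch
import torch.nn as nn

b1, b2, n_emb, n_vocab, n_sample = 2, 3, 8, 100, 5
embedding = nn.Embedding(n_vocab, n_emb)
bias = torch.zeros(n_vocab)
labels = torch.randint(0, n_vocab, (b1, b2))
inputs = torch.randn(b1, b2, n_emb)

# Stand-ins for what a LogUniformSampler would return.
neg_samples = torch.randint(0, n_vocab, (n_sample,))
true_log_probs = torch.full((b1, b2), -4.0)
samp_log_probs = torch.full((n_sample,), -4.0)

true_w = embedding(labels)          # [b1, b2, n_emb]
sample_w = embedding(neg_samples)   # [n_sample, n_emb]
true_logits = torch.einsum("ijk,ijk->ij", true_w, inputs) + bias[labels] - true_log_probs
sample_logits = torch.einsum("lk,ijk->ijl", sample_w, inputs) + bias[neg_samples] - samp_log_probs

# Mask negatives that collide with the true label so they cannot win.
hit = labels[:, :, None] == neg_samples   # [b1, b2, n_sample]
sample_logits = sample_logits.masked_fill(hit, -1e30)

logits = torch.cat([true_logits[:, :, None], sample_logits], dim=-1)
print(logits.shape)  # torch.Size([2, 3, 6]) -> [b1, b2, 1 + n_sample]
```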
ProjectedAdaptiveLogSoftmax.forward
(self, hidden, target=None, keep_order=False)
Params: hidden :: [len*bsz x d_proj] target :: [len*bsz] Return: if target is None: out :: [len*bsz] Negative log likelihood else: out :: [len*bsz x n_tokens] log probabilities of tokens over the vocabulary We could replace this implementation by the native PyTorch one if their's had an option to set bias on all clusters in the native one. here: https://github.com/pytorch/pytorch/blob/dbe6a7a9ff1a364a8706bf5df58a1ca96d2fd9da/torch/nn/modules/adaptive.py#L138
Params: hidden :: [len*bsz x d_proj] target :: [len*bsz] Return: if target is None: out :: [len*bsz] Negative log likelihood else: out :: [len*bsz x n_tokens] log probabilities of tokens over the vocabulary We could replace this implementation by the native PyTorch one if their's had an option to set bias on all clusters in the native one. here: https://github.com/pytorch/pytorch/blob/dbe6a7a9ff1a364a8706bf5df58a1ca96d2fd9da/torch/nn/modules/adaptive.py#L138
def forward(self, hidden, target=None, keep_order=False): ''' Params: hidden :: [len*bsz x d_proj] target :: [len*bsz] Return: if target is None: out :: [len*bsz] Negative log likelihood else: out :: [len*bsz x n_tokens] log probabilities of tokens over the vocabulary We could replace this implementation by the native PyTorch one if their's had an option to set bias on all clusters in the native one. here: https://github.com/pytorch/pytorch/blob/dbe6a7a9ff1a364a8706bf5df58a1ca96d2fd9da/torch/nn/modules/adaptive.py#L138 ''' if target is not None: target = target.view(-1) if hidden.size(0) != target.size(0): raise RuntimeError('Input and target should have the same size ' 'in the batch dimension.') if self.n_clusters == 0: logit = self._compute_logit(hidden, self.out_layers[0].weight, self.out_layers[0].bias, self.out_projs[0]) if target is not None: output = -F.log_softmax(logit, dim=-1) \ .gather(1, target.unsqueeze(1)).squeeze(1) else: output = F.log_softmax(logit, dim=-1) else: # construct weights and biases weights, biases = [], [] for i in range(len(self.cutoffs)): if self.div_val == 1: l_idx, r_idx = self.cutoff_ends[i], self.cutoff_ends[i + 1] weight_i = self.out_layers[0].weight[l_idx:r_idx] bias_i = self.out_layers[0].bias[l_idx:r_idx] else: weight_i = self.out_layers[i].weight bias_i = self.out_layers[i].bias if i == 0: weight_i = torch.cat( [weight_i, self.cluster_weight], dim=0) bias_i = torch.cat( [bias_i, self.cluster_bias], dim=0) weights.append(weight_i) biases.append(bias_i) head_weight, head_bias, head_proj = weights[0], biases[0], self.out_projs[0] head_logit = self._compute_logit(hidden, head_weight, head_bias, head_proj) head_logprob = F.log_softmax(head_logit, dim=1) if target is None: out = hidden.new_empty((head_logit.size(0), self.n_token)) else: out = torch.zeros_like(target, dtype=hidden.dtype, device=hidden.device) offset = 0 cutoff_values = [0] + self.cutoffs for i in range(len(cutoff_values) - 1): l_idx, r_idx = cutoff_values[i], cutoff_values[i + 1] if target is not None: mask_i = (target >= l_idx) & (target < r_idx) indices_i = mask_i.nonzero().squeeze() if indices_i.numel() == 0: continue target_i = target.index_select(0, indices_i) - l_idx head_logprob_i = head_logprob.index_select(0, indices_i) hidden_i = hidden.index_select(0, indices_i) else: hidden_i = hidden if i == 0: if target is not None: logprob_i = head_logprob_i.gather(1, target_i[:, None]).squeeze(1) else: out[:, :self.cutoffs[0]] = head_logprob[:, :self.cutoffs[0]] else: weight_i, bias_i, proj_i = weights[i], biases[i], self.out_projs[i] tail_logit_i = self._compute_logit(hidden_i, weight_i, bias_i, proj_i) tail_logprob_i = F.log_softmax(tail_logit_i, dim=1) cluster_prob_idx = self.cutoffs[0] + i - 1 # No probability for the head cluster if target is not None: logprob_i = head_logprob_i[:, cluster_prob_idx] \ + tail_logprob_i.gather(1, target_i[:, None]).squeeze(1) else: logprob_i = head_logprob[:, cluster_prob_idx, None] + tail_logprob_i out[:, l_idx:r_idx] = logprob_i if target is not None: if (hasattr(self, 'keep_order') and self.keep_order) or keep_order: out.index_copy_(0, indices_i, -logprob_i) else: out[offset:offset+logprob_i.size(0)].copy_(-logprob_i) offset += logprob_i.size(0) return out
[ "def", "forward", "(", "self", ",", "hidden", ",", "target", "=", "None", ",", "keep_order", "=", "False", ")", ":", "if", "target", "is", "not", "None", ":", "target", "=", "target", ".", "view", "(", "-", "1", ")", "if", "hidden", ".", "size", "(", "0", ")", "!=", "target", ".", "size", "(", "0", ")", ":", "raise", "RuntimeError", "(", "'Input and target should have the same size '", "'in the batch dimension.'", ")", "if", "self", ".", "n_clusters", "==", "0", ":", "logit", "=", "self", ".", "_compute_logit", "(", "hidden", ",", "self", ".", "out_layers", "[", "0", "]", ".", "weight", ",", "self", ".", "out_layers", "[", "0", "]", ".", "bias", ",", "self", ".", "out_projs", "[", "0", "]", ")", "if", "target", "is", "not", "None", ":", "output", "=", "-", "F", ".", "log_softmax", "(", "logit", ",", "dim", "=", "-", "1", ")", ".", "gather", "(", "1", ",", "target", ".", "unsqueeze", "(", "1", ")", ")", ".", "squeeze", "(", "1", ")", "else", ":", "output", "=", "F", ".", "log_softmax", "(", "logit", ",", "dim", "=", "-", "1", ")", "else", ":", "# construct weights and biases", "weights", ",", "biases", "=", "[", "]", ",", "[", "]", "for", "i", "in", "range", "(", "len", "(", "self", ".", "cutoffs", ")", ")", ":", "if", "self", ".", "div_val", "==", "1", ":", "l_idx", ",", "r_idx", "=", "self", ".", "cutoff_ends", "[", "i", "]", ",", "self", ".", "cutoff_ends", "[", "i", "+", "1", "]", "weight_i", "=", "self", ".", "out_layers", "[", "0", "]", ".", "weight", "[", "l_idx", ":", "r_idx", "]", "bias_i", "=", "self", ".", "out_layers", "[", "0", "]", ".", "bias", "[", "l_idx", ":", "r_idx", "]", "else", ":", "weight_i", "=", "self", ".", "out_layers", "[", "i", "]", ".", "weight", "bias_i", "=", "self", ".", "out_layers", "[", "i", "]", ".", "bias", "if", "i", "==", "0", ":", "weight_i", "=", "torch", ".", "cat", "(", "[", "weight_i", ",", "self", ".", "cluster_weight", "]", ",", "dim", "=", "0", ")", "bias_i", "=", "torch", ".", "cat", "(", "[", "bias_i", ",", "self", ".", "cluster_bias", "]", ",", "dim", "=", "0", ")", "weights", ".", "append", "(", "weight_i", ")", "biases", ".", "append", "(", "bias_i", ")", "head_weight", ",", "head_bias", ",", "head_proj", "=", "weights", "[", "0", "]", ",", "biases", "[", "0", "]", ",", "self", ".", "out_projs", "[", "0", "]", "head_logit", "=", "self", ".", "_compute_logit", "(", "hidden", ",", "head_weight", ",", "head_bias", ",", "head_proj", ")", "head_logprob", "=", "F", ".", "log_softmax", "(", "head_logit", ",", "dim", "=", "1", ")", "if", "target", "is", "None", ":", "out", "=", "hidden", ".", "new_empty", "(", "(", "head_logit", ".", "size", "(", "0", ")", ",", "self", ".", "n_token", ")", ")", "else", ":", "out", "=", "torch", ".", "zeros_like", "(", "target", ",", "dtype", "=", "hidden", ".", "dtype", ",", "device", "=", "hidden", ".", "device", ")", "offset", "=", "0", "cutoff_values", "=", "[", "0", "]", "+", "self", ".", "cutoffs", "for", "i", "in", "range", "(", "len", "(", "cutoff_values", ")", "-", "1", ")", ":", "l_idx", ",", "r_idx", "=", "cutoff_values", "[", "i", "]", ",", "cutoff_values", "[", "i", "+", "1", "]", "if", "target", "is", "not", "None", ":", "mask_i", "=", "(", "target", ">=", "l_idx", ")", "&", "(", "target", "<", "r_idx", ")", "indices_i", "=", "mask_i", ".", "nonzero", "(", ")", ".", "squeeze", "(", ")", "if", "indices_i", ".", "numel", "(", ")", "==", "0", ":", "continue", "target_i", "=", "target", ".", "index_select", "(", "0", ",", "indices_i", ")", "-", "l_idx", "head_logprob_i", "=", "head_logprob", ".", 
"index_select", "(", "0", ",", "indices_i", ")", "hidden_i", "=", "hidden", ".", "index_select", "(", "0", ",", "indices_i", ")", "else", ":", "hidden_i", "=", "hidden", "if", "i", "==", "0", ":", "if", "target", "is", "not", "None", ":", "logprob_i", "=", "head_logprob_i", ".", "gather", "(", "1", ",", "target_i", "[", ":", ",", "None", "]", ")", ".", "squeeze", "(", "1", ")", "else", ":", "out", "[", ":", ",", ":", "self", ".", "cutoffs", "[", "0", "]", "]", "=", "head_logprob", "[", ":", ",", ":", "self", ".", "cutoffs", "[", "0", "]", "]", "else", ":", "weight_i", ",", "bias_i", ",", "proj_i", "=", "weights", "[", "i", "]", ",", "biases", "[", "i", "]", ",", "self", ".", "out_projs", "[", "i", "]", "tail_logit_i", "=", "self", ".", "_compute_logit", "(", "hidden_i", ",", "weight_i", ",", "bias_i", ",", "proj_i", ")", "tail_logprob_i", "=", "F", ".", "log_softmax", "(", "tail_logit_i", ",", "dim", "=", "1", ")", "cluster_prob_idx", "=", "self", ".", "cutoffs", "[", "0", "]", "+", "i", "-", "1", "# No probability for the head cluster", "if", "target", "is", "not", "None", ":", "logprob_i", "=", "head_logprob_i", "[", ":", ",", "cluster_prob_idx", "]", "+", "tail_logprob_i", ".", "gather", "(", "1", ",", "target_i", "[", ":", ",", "None", "]", ")", ".", "squeeze", "(", "1", ")", "else", ":", "logprob_i", "=", "head_logprob", "[", ":", ",", "cluster_prob_idx", ",", "None", "]", "+", "tail_logprob_i", "out", "[", ":", ",", "l_idx", ":", "r_idx", "]", "=", "logprob_i", "if", "target", "is", "not", "None", ":", "if", "(", "hasattr", "(", "self", ",", "'keep_order'", ")", "and", "self", ".", "keep_order", ")", "or", "keep_order", ":", "out", ".", "index_copy_", "(", "0", ",", "indices_i", ",", "-", "logprob_i", ")", "else", ":", "out", "[", "offset", ":", "offset", "+", "logprob_i", ".", "size", "(", "0", ")", "]", ".", "copy_", "(", "-", "logprob_i", ")", "offset", "+=", "logprob_i", ".", "size", "(", "0", ")", "return", "out" ]
[ 91, 4 ]
[ 194, 18 ]
python
en
['en', 'error', 'th']
False
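The docstring above notes that the native PyTorch adaptive softmax could replace this implementation if it allowed a bias on every cluster. For orientation, here is a small sketch of that native module, `torch.nn.AdaptiveLogSoftmaxWithLoss`, with arbitrary sizes; it mirrors the two branches of `forward` (loss with a target, full log-probabilities without).

```python
# Sketch of the native PyTorch counterpart mentioned in the docstring above.
# Sizes are arbitrary; the native module does not expose a per-cluster bias option.
import torch
import torch.nn as nn

d_proj, n_tokens = 32, 1000
crit = nn.AdaptiveLogSoftmaxWithLoss(d_proj, n_tokens, cutoffs=[100, 500], div_value=4.0)

hidden = torch.randn(8, d_proj)           # [len*bsz, d_proj]
target = torch.randint(0, n_tokens, (8,))

out = crit(hidden, target)                # named tuple (output, loss)
print(out.output.shape, out.loss.item())  # per-example target log-probabilities, mean NLL loss
log_probs = crit.log_prob(hidden)         # [len*bsz, n_tokens], like the target-is-None branch
print(log_probs.shape)
```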
ProjectedAdaptiveLogSoftmax.log_prob
(self, hidden)
r""" Computes log probabilities for all :math:`n\_classes` From: https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/adaptive.py Args: hidden (Tensor): a minibatch of examples Returns: log-probabilities of for each class :math:`c` in range :math:`0 <= c <= n\_classes`, where :math:`n\_classes` is a parameter passed to ``AdaptiveLogSoftmaxWithLoss`` constructor. Shape: - Input: :math:`(N, in\_features)` - Output: :math:`(N, n\_classes)`
r""" Computes log probabilities for all :math:`n\_classes` From: https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/adaptive.py Args: hidden (Tensor): a minibatch of examples Returns: log-probabilities of for each class :math:`c` in range :math:`0 <= c <= n\_classes`, where :math:`n\_classes` is a parameter passed to ``AdaptiveLogSoftmaxWithLoss`` constructor. Shape: - Input: :math:`(N, in\_features)` - Output: :math:`(N, n\_classes)`
def log_prob(self, hidden): r""" Computes log probabilities for all :math:`n\_classes` From: https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/adaptive.py Args: hidden (Tensor): a minibatch of examples Returns: log-probabilities of for each class :math:`c` in range :math:`0 <= c <= n\_classes`, where :math:`n\_classes` is a parameter passed to ``AdaptiveLogSoftmaxWithLoss`` constructor. Shape: - Input: :math:`(N, in\_features)` - Output: :math:`(N, n\_classes)` """ if self.n_clusters == 0: logit = self._compute_logit(hidden, self.out_layers[0].weight, self.out_layers[0].bias, self.out_projs[0]) return F.log_softmax(logit, dim=-1) else: # construct weights and biases weights, biases = [], [] for i in range(len(self.cutoffs)): if self.div_val == 1: l_idx, r_idx = self.cutoff_ends[i], self.cutoff_ends[i + 1] weight_i = self.out_layers[0].weight[l_idx:r_idx] bias_i = self.out_layers[0].bias[l_idx:r_idx] else: weight_i = self.out_layers[i].weight bias_i = self.out_layers[i].bias if i == 0: weight_i = torch.cat( [weight_i, self.cluster_weight], dim=0) bias_i = torch.cat( [bias_i, self.cluster_bias], dim=0) weights.append(weight_i) biases.append(bias_i) head_weight, head_bias, head_proj = weights[0], biases[0], self.out_projs[0] head_logit = self._compute_logit(hidden, head_weight, head_bias, head_proj) out = hidden.new_empty((head_logit.size(0), self.n_token)) head_logprob = F.log_softmax(head_logit, dim=1) cutoff_values = [0] + self.cutoffs for i in range(len(cutoff_values) - 1): start_idx, stop_idx = cutoff_values[i], cutoff_values[i + 1] if i == 0: out[:, :self.cutoffs[0]] = head_logprob[:, :self.cutoffs[0]] else: weight_i, bias_i, proj_i = weights[i], biases[i], self.out_projs[i] tail_logit_i = self._compute_logit(hidden, weight_i, bias_i, proj_i) tail_logprob_i = F.log_softmax(tail_logit_i, dim=1) logprob_i = head_logprob[:, -i] + tail_logprob_i out[:, start_idx, stop_idx] = logprob_i return out
[ "def", "log_prob", "(", "self", ",", "hidden", ")", ":", "if", "self", ".", "n_clusters", "==", "0", ":", "logit", "=", "self", ".", "_compute_logit", "(", "hidden", ",", "self", ".", "out_layers", "[", "0", "]", ".", "weight", ",", "self", ".", "out_layers", "[", "0", "]", ".", "bias", ",", "self", ".", "out_projs", "[", "0", "]", ")", "return", "F", ".", "log_softmax", "(", "logit", ",", "dim", "=", "-", "1", ")", "else", ":", "# construct weights and biases", "weights", ",", "biases", "=", "[", "]", ",", "[", "]", "for", "i", "in", "range", "(", "len", "(", "self", ".", "cutoffs", ")", ")", ":", "if", "self", ".", "div_val", "==", "1", ":", "l_idx", ",", "r_idx", "=", "self", ".", "cutoff_ends", "[", "i", "]", ",", "self", ".", "cutoff_ends", "[", "i", "+", "1", "]", "weight_i", "=", "self", ".", "out_layers", "[", "0", "]", ".", "weight", "[", "l_idx", ":", "r_idx", "]", "bias_i", "=", "self", ".", "out_layers", "[", "0", "]", ".", "bias", "[", "l_idx", ":", "r_idx", "]", "else", ":", "weight_i", "=", "self", ".", "out_layers", "[", "i", "]", ".", "weight", "bias_i", "=", "self", ".", "out_layers", "[", "i", "]", ".", "bias", "if", "i", "==", "0", ":", "weight_i", "=", "torch", ".", "cat", "(", "[", "weight_i", ",", "self", ".", "cluster_weight", "]", ",", "dim", "=", "0", ")", "bias_i", "=", "torch", ".", "cat", "(", "[", "bias_i", ",", "self", ".", "cluster_bias", "]", ",", "dim", "=", "0", ")", "weights", ".", "append", "(", "weight_i", ")", "biases", ".", "append", "(", "bias_i", ")", "head_weight", ",", "head_bias", ",", "head_proj", "=", "weights", "[", "0", "]", ",", "biases", "[", "0", "]", ",", "self", ".", "out_projs", "[", "0", "]", "head_logit", "=", "self", ".", "_compute_logit", "(", "hidden", ",", "head_weight", ",", "head_bias", ",", "head_proj", ")", "out", "=", "hidden", ".", "new_empty", "(", "(", "head_logit", ".", "size", "(", "0", ")", ",", "self", ".", "n_token", ")", ")", "head_logprob", "=", "F", ".", "log_softmax", "(", "head_logit", ",", "dim", "=", "1", ")", "cutoff_values", "=", "[", "0", "]", "+", "self", ".", "cutoffs", "for", "i", "in", "range", "(", "len", "(", "cutoff_values", ")", "-", "1", ")", ":", "start_idx", ",", "stop_idx", "=", "cutoff_values", "[", "i", "]", ",", "cutoff_values", "[", "i", "+", "1", "]", "if", "i", "==", "0", ":", "out", "[", ":", ",", ":", "self", ".", "cutoffs", "[", "0", "]", "]", "=", "head_logprob", "[", ":", ",", ":", "self", ".", "cutoffs", "[", "0", "]", "]", "else", ":", "weight_i", ",", "bias_i", ",", "proj_i", "=", "weights", "[", "i", "]", ",", "biases", "[", "i", "]", ",", "self", ".", "out_projs", "[", "i", "]", "tail_logit_i", "=", "self", ".", "_compute_logit", "(", "hidden", ",", "weight_i", ",", "bias_i", ",", "proj_i", ")", "tail_logprob_i", "=", "F", ".", "log_softmax", "(", "tail_logit_i", ",", "dim", "=", "1", ")", "logprob_i", "=", "head_logprob", "[", ":", ",", "-", "i", "]", "+", "tail_logprob_i", "out", "[", ":", ",", "start_idx", ",", "stop_idx", "]", "=", "logprob_i", "return", "out" ]
[ 197, 4 ]
[ 256, 22 ]
python
en
['en', 'en', 'en']
True
LogUniformSampler.__init__
(self, range_max, n_sample)
Reference : https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/candidate_sampling_ops.py `P(class) = (log(class + 2) - log(class + 1)) / log(range_max + 1)` expected count can be approximated by 1 - (1 - p)^n and we use a numerically stable version -expm1(num_tries * log1p(-p)) Our implementation fixes num_tries at 2 * n_sample, and the actual #samples will vary from run to run
Reference : https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/candidate_sampling_ops.py `P(class) = (log(class + 2) - log(class + 1)) / log(range_max + 1)`
def __init__(self, range_max, n_sample): """ Reference : https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/candidate_sampling_ops.py `P(class) = (log(class + 2) - log(class + 1)) / log(range_max + 1)` expected count can be approximated by 1 - (1 - p)^n and we use a numerically stable version -expm1(num_tries * log1p(-p)) Our implementation fixes num_tries at 2 * n_sample, and the actual #samples will vary from run to run """ with torch.no_grad(): self.range_max = range_max log_indices = torch.arange(1., range_max+2., 1.).log_() self.dist = (log_indices[1:] - log_indices[:-1]) / log_indices[-1] # print('P', self.dist.numpy().tolist()[-30:]) self.log_q = (- (-self.dist.double().log1p_() * 2 * n_sample).expm1_()).log_().float() self.n_sample = n_sample
[ "def", "__init__", "(", "self", ",", "range_max", ",", "n_sample", ")", ":", "with", "torch", ".", "no_grad", "(", ")", ":", "self", ".", "range_max", "=", "range_max", "log_indices", "=", "torch", ".", "arange", "(", "1.", ",", "range_max", "+", "2.", ",", "1.", ")", ".", "log_", "(", ")", "self", ".", "dist", "=", "(", "log_indices", "[", "1", ":", "]", "-", "log_indices", "[", ":", "-", "1", "]", ")", "/", "log_indices", "[", "-", "1", "]", "# print('P', self.dist.numpy().tolist()[-30:])", "self", ".", "log_q", "=", "(", "-", "(", "-", "self", ".", "dist", ".", "double", "(", ")", ".", "log1p_", "(", ")", "*", "2", "*", "n_sample", ")", ".", "expm1_", "(", ")", ")", ".", "log_", "(", ")", ".", "float", "(", ")", "self", ".", "n_sample", "=", "n_sample" ]
[ 260, 4 ]
[ 278, 32 ]
python
en
['en', 'error', 'th']
False
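The docstring above quotes the log-uniform sampling probability and the numerically stable expected-count form. A short numeric check of both formulas, with arbitrary sizes:

```python
# Numeric check of the formulas quoted in the docstring above; sizes are arbitrary.
import torch

range_max, n_sample = 1000, 16
num_tries = 2 * n_sample

log_indices = torch.arange(1., range_max + 2., 1.).log_()
# P(class) = (log(class + 2) - log(class + 1)) / log(range_max + 1)
dist = (log_indices[1:] - log_indices[:-1]) / log_indices[-1]
print(dist.sum())  # ~1.0: the telescoping sum collapses to log(range_max + 1) / log(range_max + 1)

# Expected count of each class among num_tries draws: 1 - (1 - p)^n,
# computed in the stable form -expm1(n * log1p(-p)).
stable = -torch.expm1(num_tries * torch.log1p(-dist.double()))
direct = 1.0 - (1.0 - dist.double()) ** num_tries
print(torch.allclose(stable, direct))  # True
```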
LogUniformSampler.sample
(self, labels)
labels: [b1, b2] Return true_log_probs: [b1, b2] samp_log_probs: [n_sample] neg_samples: [n_sample]
labels: [b1, b2] Return true_log_probs: [b1, b2] samp_log_probs: [n_sample] neg_samples: [n_sample]
def sample(self, labels): """ labels: [b1, b2] Return true_log_probs: [b1, b2] samp_log_probs: [n_sample] neg_samples: [n_sample] """ # neg_samples = torch.empty(0).long() n_sample = self.n_sample n_tries = 2 * n_sample with torch.no_grad(): neg_samples = torch.multinomial(self.dist, n_tries, replacement=True).unique() device = labels.device neg_samples = neg_samples.to(device) true_log_probs = self.log_q[labels].to(device) samp_log_probs = self.log_q[neg_samples].to(device) return true_log_probs, samp_log_probs, neg_samples
[ "def", "sample", "(", "self", ",", "labels", ")", ":", "# neg_samples = torch.empty(0).long()", "n_sample", "=", "self", ".", "n_sample", "n_tries", "=", "2", "*", "n_sample", "with", "torch", ".", "no_grad", "(", ")", ":", "neg_samples", "=", "torch", ".", "multinomial", "(", "self", ".", "dist", ",", "n_tries", ",", "replacement", "=", "True", ")", ".", "unique", "(", ")", "device", "=", "labels", ".", "device", "neg_samples", "=", "neg_samples", ".", "to", "(", "device", ")", "true_log_probs", "=", "self", ".", "log_q", "[", "labels", "]", ".", "to", "(", "device", ")", "samp_log_probs", "=", "self", ".", "log_q", "[", "neg_samples", "]", ".", "to", "(", "device", ")", "return", "true_log_probs", ",", "samp_log_probs", ",", "neg_samples" ]
[ 280, 4 ]
[ 299, 62 ]
python
en
['en', 'error', 'th']
False
FlaxAutoModel.from_config
(cls, config)
r""" Instantiates one of the base model classes of the library from a configuration. Args: config (:class:`~transformers.PretrainedConfig`): The model class to instantiate is selected based on the configuration class: - isInstance of `roberta` configuration class: :class:`~transformers.FlaxRobertaModel` (RoBERTa model) - isInstance of `bert` configuration class: :class:`~transformers.FlaxBertModel` (Bert model Examples:: config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from huggingface.co and cache. model = FlaxAutoModel.from_config(config) # E.g. model was saved using `save_pretrained('./test/saved_model/')`
r""" Instantiates one of the base model classes of the library from a configuration.
def from_config(cls, config): r""" Instantiates one of the base model classes of the library from a configuration. Args: config (:class:`~transformers.PretrainedConfig`): The model class to instantiate is selected based on the configuration class: - isInstance of `roberta` configuration class: :class:`~transformers.FlaxRobertaModel` (RoBERTa model) - isInstance of `bert` configuration class: :class:`~transformers.FlaxBertModel` (Bert model Examples:: config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from huggingface.co and cache. model = FlaxAutoModel.from_config(config) # E.g. model was saved using `save_pretrained('./test/saved_model/')` """ for config_class, model_class in FLAX_MODEL_MAPPING.items(): if isinstance(config, config_class): return model_class(config) raise ValueError( f"Unrecognized configuration class {config.__class__} " f"for this kind of FlaxAutoModel: {cls.__name__}.\n" f"Model type should be one of {', '.join(c.__name__ for c in FLAX_MODEL_MAPPING.keys())}." )
[ "def", "from_config", "(", "cls", ",", "config", ")", ":", "for", "config_class", ",", "model_class", "in", "FLAX_MODEL_MAPPING", ".", "items", "(", ")", ":", "if", "isinstance", "(", "config", ",", "config_class", ")", ":", "return", "model_class", "(", "config", ")", "raise", "ValueError", "(", "f\"Unrecognized configuration class {config.__class__} \"", "f\"for this kind of FlaxAutoModel: {cls.__name__}.\\n\"", "f\"Model type should be one of {', '.join(c.__name__ for c in FLAX_MODEL_MAPPING.keys())}.\"", ")" ]
[ 54, 4 ]
[ 79, 9 ]
python
cy
['en', 'cy', 'hi']
False
FlaxAutoModel.from_pretrained
(cls, pretrained_model_name_or_path, *model_args, **kwargs)
r""" Instantiates one of the base model classes of the library from a pre-trained model configuration. The `from_pretrained()` method takes care of returning the correct model class instance based on the `model_type` property of the config object, or when it's missing, falling back to using pattern matching on the `pretrained_model_name_or_path` string. The base model class to instantiate is selected as the first pattern matching in the `pretrained_model_name_or_path` string (in the following order): - contains `roberta`: :class:`~transformers.FlaxRobertaModel` (RoBERTa model) - contains `bert`: :class:`~transformers.FlaxBertModel` (Bert model) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) To train the model, you should first set it back in training mode with `model.train()` Args: pretrained_model_name_or_path: either: - a string, the `model id` of a pretrained model hosted inside a model repo on huggingface.co. Valid model ids can be located at the root-level, like ``bert-base-uncased``, or namespaced under a user or organization name, like ``dbmdz/bert-base-german-cased``. - a path to a `directory` containing model weights saved using :func:`~transformers.FlaxPreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/``. - a path or url to a `pytorch index checkpoint file` (e.g. `./pt_model/pytorch_model.bin`). In this case, ``from_pt`` should be set to True and a configuration object should be provided as ``config`` argument. model_args: (`optional`) Sequence of positional arguments: All remaining positional arguments will be passed to the underlying model's ``__init__`` method config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an automatically loaded configuration. Configuration can be automatically loaded when: - the model is a model provided by the library (loaded with the ``shortcut-name`` string of a pretrained model), or - the model was saved using :func:`~transformers.FlaxPreTrainedModel.save_pretrained` and is reloaded by supplying the save directory. - the model is loaded by supplying a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory. cache_dir: (`optional`) string: Path to a directory in which a downloaded pre-trained model configuration should be cached if the standard cache should not be used. force_download: (`optional`) boolean, default False: Force to (re-)download the model weights and configuration files and override the cached versions if they exists. resume_download: (`optional`) boolean, default False: Do not delete incompletely received file. Attempt to resume the download if such a file exists. proxies: (`optional`) dict, default None: A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return a dictionary containing missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments: These arguments will be passed to the configuration and the model. Examples:: model = FlaxAutoModel.from_pretrained('bert-base-uncased') # Download model and configuration from huggingface.co and cache. model = FlaxAutoModel.from_pretrained('./test/bert_model/') # E.g. 
model was saved using `save_pretrained('./test/saved_model/')` assert model.config.output_attention == True
r""" Instantiates one of the base model classes of the library from a pre-trained model configuration.
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): r""" Instantiates one of the base model classes of the library from a pre-trained model configuration. The `from_pretrained()` method takes care of returning the correct model class instance based on the `model_type` property of the config object, or when it's missing, falling back to using pattern matching on the `pretrained_model_name_or_path` string. The base model class to instantiate is selected as the first pattern matching in the `pretrained_model_name_or_path` string (in the following order): - contains `roberta`: :class:`~transformers.FlaxRobertaModel` (RoBERTa model) - contains `bert`: :class:`~transformers.FlaxBertModel` (Bert model) The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) To train the model, you should first set it back in training mode with `model.train()` Args: pretrained_model_name_or_path: either: - a string, the `model id` of a pretrained model hosted inside a model repo on huggingface.co. Valid model ids can be located at the root-level, like ``bert-base-uncased``, or namespaced under a user or organization name, like ``dbmdz/bert-base-german-cased``. - a path to a `directory` containing model weights saved using :func:`~transformers.FlaxPreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/``. - a path or url to a `pytorch index checkpoint file` (e.g. `./pt_model/pytorch_model.bin`). In this case, ``from_pt`` should be set to True and a configuration object should be provided as ``config`` argument. model_args: (`optional`) Sequence of positional arguments: All remaining positional arguments will be passed to the underlying model's ``__init__`` method config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an automatically loaded configuration. Configuration can be automatically loaded when: - the model is a model provided by the library (loaded with the ``shortcut-name`` string of a pretrained model), or - the model was saved using :func:`~transformers.FlaxPreTrainedModel.save_pretrained` and is reloaded by supplying the save directory. - the model is loaded by supplying a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory. cache_dir: (`optional`) string: Path to a directory in which a downloaded pre-trained model configuration should be cached if the standard cache should not be used. force_download: (`optional`) boolean, default False: Force to (re-)download the model weights and configuration files and override the cached versions if they exists. resume_download: (`optional`) boolean, default False: Do not delete incompletely received file. Attempt to resume the download if such a file exists. proxies: (`optional`) dict, default None: A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return a dictionary containing missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments: These arguments will be passed to the configuration and the model. Examples:: model = FlaxAutoModel.from_pretrained('bert-base-uncased') # Download model and configuration from huggingface.co and cache. 
model = FlaxAutoModel.from_pretrained('./test/bert_model/') # E.g. model was saved using `save_pretrained('./test/saved_model/')` assert model.config.output_attention == True """ config = kwargs.pop("config", None) if not isinstance(config, PretrainedConfig): config = AutoConfig.from_pretrained(pretrained_model_name_or_path, **kwargs) for config_class, model_class in FLAX_MODEL_MAPPING.items(): if isinstance(config, config_class): return model_class.from_pretrained(pretrained_model_name_or_path, *model_args, config=config, **kwargs) raise ValueError( f"Unrecognized configuration class {config.__class__} " f"for this kind of FlaxAutoModel: {cls.__name__}.\n" f"Model type should be one of {', '.join(c.__name__ for c in FLAX_MODEL_MAPPING.keys())}" )
[ "def", "from_pretrained", "(", "cls", ",", "pretrained_model_name_or_path", ",", "*", "model_args", ",", "*", "*", "kwargs", ")", ":", "config", "=", "kwargs", ".", "pop", "(", "\"config\"", ",", "None", ")", "if", "not", "isinstance", "(", "config", ",", "PretrainedConfig", ")", ":", "config", "=", "AutoConfig", ".", "from_pretrained", "(", "pretrained_model_name_or_path", ",", "*", "*", "kwargs", ")", "for", "config_class", ",", "model_class", "in", "FLAX_MODEL_MAPPING", ".", "items", "(", ")", ":", "if", "isinstance", "(", "config", ",", "config_class", ")", ":", "return", "model_class", ".", "from_pretrained", "(", "pretrained_model_name_or_path", ",", "*", "model_args", ",", "config", "=", "config", ",", "*", "*", "kwargs", ")", "raise", "ValueError", "(", "f\"Unrecognized configuration class {config.__class__} \"", "f\"for this kind of FlaxAutoModel: {cls.__name__}.\\n\"", "f\"Model type should be one of {', '.join(c.__name__ for c in FLAX_MODEL_MAPPING.keys())}\"", ")" ]
[ 82, 4 ]
[ 165, 9 ]
python
cy
['en', 'cy', 'hi']
False
TestCommandSensorSensor.setUp
(self)
Set up things to be run when tests are started.
Set up things to be run when tests are started.
def setUp(self): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.addCleanup(self.hass.stop)
[ "def", "setUp", "(", "self", ")", ":", "self", ".", "hass", "=", "get_test_home_assistant", "(", ")", "self", ".", "addCleanup", "(", "self", ".", "hass", ".", "stop", ")" ]
[ 13, 4 ]
[ 16, 39 ]
python
en
['en', 'en', 'en']
True
TestCommandSensorSensor.update_side_effect
(self, data)
Side effect function for mocking CommandSensorData.update().
Side effect function for mocking CommandSensorData.update().
def update_side_effect(self, data): """Side effect function for mocking CommandSensorData.update().""" self.commandline.data = data
[ "def", "update_side_effect", "(", "self", ",", "data", ")", ":", "self", ".", "commandline", ".", "data", "=", "data" ]
[ 18, 4 ]
[ 20, 36 ]
python
en
['en', 'da', 'en']
True
TestCommandSensorSensor.test_setup
(self)
Test sensor setup.
Test sensor setup.
def test_setup(self): """Test sensor setup.""" config = { "name": "Test", "unit_of_measurement": "in", "command": "echo 5", "command_timeout": 15, } devices = [] def add_dev_callback(devs, update): """Add callback to add devices.""" for dev in devs: devices.append(dev) command_line.setup_platform(self.hass, config, add_dev_callback) assert len(devices) == 1 entity = devices[0] entity.update() assert entity.name == "Test" assert entity.unit_of_measurement == "in" assert entity.state == "5"
[ "def", "test_setup", "(", "self", ")", ":", "config", "=", "{", "\"name\"", ":", "\"Test\"", ",", "\"unit_of_measurement\"", ":", "\"in\"", ",", "\"command\"", ":", "\"echo 5\"", ",", "\"command_timeout\"", ":", "15", ",", "}", "devices", "=", "[", "]", "def", "add_dev_callback", "(", "devs", ",", "update", ")", ":", "\"\"\"Add callback to add devices.\"\"\"", "for", "dev", "in", "devs", ":", "devices", ".", "append", "(", "dev", ")", "command_line", ".", "setup_platform", "(", "self", ".", "hass", ",", "config", ",", "add_dev_callback", ")", "assert", "len", "(", "devices", ")", "==", "1", "entity", "=", "devices", "[", "0", "]", "entity", ".", "update", "(", ")", "assert", "entity", ".", "name", "==", "\"Test\"", "assert", "entity", ".", "unit_of_measurement", "==", "\"in\"", "assert", "entity", ".", "state", "==", "\"5\"" ]
[ 22, 4 ]
[ 44, 34 ]
python
en
['en', 'bs', 'en']
True
TestCommandSensorSensor.test_template
(self)
Test command sensor with template.
Test command sensor with template.
def test_template(self): """Test command sensor with template.""" data = command_line.CommandSensorData(self.hass, "echo 50", 15) entity = command_line.CommandSensor( self.hass, data, "test", "in", Template("{{ value | multiply(0.1) }}", self.hass), [], ) entity.update() assert float(entity.state) == 5
[ "def", "test_template", "(", "self", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "\"echo 50\"", ",", "15", ")", "entity", "=", "command_line", ".", "CommandSensor", "(", "self", ".", "hass", ",", "data", ",", "\"test\"", ",", "\"in\"", ",", "Template", "(", "\"{{ value | multiply(0.1) }}\"", ",", "self", ".", "hass", ")", ",", "[", "]", ",", ")", "entity", ".", "update", "(", ")", "assert", "float", "(", "entity", ".", "state", ")", "==", "5" ]
[ 46, 4 ]
[ 60, 39 ]
python
en
['en', 'en', 'en']
True
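The `test_template` row above pipes the command output "50" through the template `{{ value | multiply(0.1) }}`. `multiply` is a Home Assistant template filter rather than a built-in Jinja one, so the plain-Jinja sketch below registers an equivalent filter by hand purely to illustrate the arithmetic.

```python
# Sketch of what the template in the test above does to the command output.
# Plain Jinja2 has no "multiply" filter; an equivalent one is registered here by hand.
from jinja2 import Environment

env = Environment()
env.filters["multiply"] = lambda value, factor: float(value) * factor

rendered = env.from_string("{{ value | multiply(0.1) }}").render(value="50")
print(rendered)  # "5.0" -> the sensor state compares equal to 5 as a float
```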
TestCommandSensorSensor.test_template_render
(self)
Ensure command with templates get rendered properly.
Ensure command with templates get rendered properly.
def test_template_render(self): """Ensure command with templates get rendered properly.""" self.hass.states.set("sensor.test_state", "Works") data = command_line.CommandSensorData( self.hass, "echo {{ states.sensor.test_state.state }}", 15 ) data.update() assert data.value == "Works"
[ "def", "test_template_render", "(", "self", ")", ":", "self", ".", "hass", ".", "states", ".", "set", "(", "\"sensor.test_state\"", ",", "\"Works\"", ")", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "\"echo {{ states.sensor.test_state.state }}\"", ",", "15", ")", "data", ".", "update", "(", ")", "assert", "data", ".", "value", "==", "\"Works\"" ]
[ 62, 4 ]
[ 70, 36 ]
python
en
['en', 'en', 'en']
True
TestCommandSensorSensor.test_template_render_with_quote
(self)
Ensure command with templates and quotes get rendered properly.
Ensure command with templates and quotes get rendered properly.
def test_template_render_with_quote(self): """Ensure command with templates and quotes get rendered properly.""" self.hass.states.set("sensor.test_state", "Works 2") with patch( "homeassistant.components.command_line.subprocess.check_output", return_value=b"Works\n", ) as check_output: data = command_line.CommandSensorData( self.hass, 'echo "{{ states.sensor.test_state.state }}" "3 4"', 15, ) data.update() assert data.value == "Works" check_output.assert_called_once_with( 'echo "Works 2" "3 4"', shell=True, timeout=15 # nosec # shell by design )
[ "def", "test_template_render_with_quote", "(", "self", ")", ":", "self", ".", "hass", ".", "states", ".", "set", "(", "\"sensor.test_state\"", ",", "\"Works 2\"", ")", "with", "patch", "(", "\"homeassistant.components.command_line.subprocess.check_output\"", ",", "return_value", "=", "b\"Works\\n\"", ",", ")", "as", "check_output", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "'echo \"{{ states.sensor.test_state.state }}\" \"3 4\"'", ",", "15", ",", ")", "data", ".", "update", "(", ")", "assert", "data", ".", "value", "==", "\"Works\"", "check_output", ".", "assert_called_once_with", "(", "'echo \"Works 2\" \"3 4\"'", ",", "shell", "=", "True", ",", "timeout", "=", "15", "# nosec # shell by design", ")" ]
[ 72, 4 ]
[ 89, 9 ]
python
en
['en', 'en', 'en']
True
TestCommandSensorSensor.test_bad_command
(self)
Test bad command.
Test bad command.
def test_bad_command(self): """Test bad command.""" data = command_line.CommandSensorData(self.hass, "asdfasdf", 15) data.update() assert data.value is None
[ "def", "test_bad_command", "(", "self", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "\"asdfasdf\"", ",", "15", ")", "data", ".", "update", "(", ")", "assert", "data", ".", "value", "is", "None" ]
[ 91, 4 ]
[ 96, 33 ]
python
en
['en', 'bg', 'en']
True
TestCommandSensorSensor.test_update_with_json_attrs
(self)
Test attributes get extracted from a JSON result.
Test attributes get extracted from a JSON result.
def test_update_with_json_attrs(self): """Test attributes get extracted from a JSON result.""" data = command_line.CommandSensorData( self.hass, ( 'echo { \\"key\\": \\"some_json_value\\", \\"another_key\\":\ \\"another_json_value\\", \\"key_three\\": \\"value_three\\" }' ), 15, ) self.sensor = command_line.CommandSensor( self.hass, data, "test", None, None, ["key", "another_key", "key_three"] ) self.sensor.update() assert self.sensor.device_state_attributes["key"] == "some_json_value" assert ( self.sensor.device_state_attributes["another_key"] == "another_json_value" ) assert self.sensor.device_state_attributes["key_three"] == "value_three"
[ "def", "test_update_with_json_attrs", "(", "self", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "(", "'echo { \\\\\"key\\\\\": \\\\\"some_json_value\\\\\", \\\\\"another_key\\\\\":\\\n \\\\\"another_json_value\\\\\", \\\\\"key_three\\\\\": \\\\\"value_three\\\\\" }'", ")", ",", "15", ",", ")", "self", ".", "sensor", "=", "command_line", ".", "CommandSensor", "(", "self", ".", "hass", ",", "data", ",", "\"test\"", ",", "None", ",", "None", ",", "[", "\"key\"", ",", "\"another_key\"", ",", "\"key_three\"", "]", ")", "self", ".", "sensor", ".", "update", "(", ")", "assert", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"key\"", "]", "==", "\"some_json_value\"", "assert", "(", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"another_key\"", "]", "==", "\"another_json_value\"", ")", "assert", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"key_three\"", "]", "==", "\"value_three\"" ]
[ 98, 4 ]
[ 117, 80 ]
python
en
['en', 'en', 'en']
True
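The `test_update_with_json_attrs` row above, together with the `*_no_data`, `*_not_dict`, `*_bad_JSON`, `*_missing` and `*_unnecessary` variants that follow, exercises how the command_line sensor turns JSON command output into a filtered set of state attributes. A standalone sketch of that parse-and-filter step, with illustrative names rather than the component's real internals:

```python
# Standalone sketch of the JSON-attribute extraction the tests above exercise.
# Variable names are illustrative; the real logic lives in the command_line sensor.
import json

command_output = '{"key": "some_json_value", "another_key": "another_json_value", "key_three": "value_three"}'
json_attributes = ["key", "another_key", "key_three", "special_key"]

attributes = {}
try:
    parsed = json.loads(command_output)
    if isinstance(parsed, dict):
        # Keep only the requested keys; missing ones (like "special_key") are simply absent.
        attributes = {k: v for k, v in parsed.items() if k in json_attributes}
    else:
        print("JSON result was not a dictionary")  # the *_not_dict test case
except ValueError:
    print("Unable to parse output as JSON")        # the *_bad_JSON test case

print(attributes)
```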
TestCommandSensorSensor.test_update_with_json_attrs_no_data
(self, mock_logger)
Test attributes when no JSON result fetched.
Test attributes when no JSON result fetched.
def test_update_with_json_attrs_no_data(self, mock_logger): """Test attributes when no JSON result fetched.""" data = command_line.CommandSensorData(self.hass, "echo ", 15) self.sensor = command_line.CommandSensor( self.hass, data, "test", None, None, ["key"] ) self.sensor.update() assert {} == self.sensor.device_state_attributes assert mock_logger.warning.called
[ "def", "test_update_with_json_attrs_no_data", "(", "self", ",", "mock_logger", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "\"echo \"", ",", "15", ")", "self", ".", "sensor", "=", "command_line", ".", "CommandSensor", "(", "self", ".", "hass", ",", "data", ",", "\"test\"", ",", "None", ",", "None", ",", "[", "\"key\"", "]", ")", "self", ".", "sensor", ".", "update", "(", ")", "assert", "{", "}", "==", "self", ".", "sensor", ".", "device_state_attributes", "assert", "mock_logger", ".", "warning", ".", "called" ]
[ 120, 4 ]
[ 128, 41 ]
python
en
['en', 'en', 'en']
True
TestCommandSensorSensor.test_update_with_json_attrs_not_dict
(self, mock_logger)
Test attributes get extracted from a JSON result.
Test attributes get extracted from a JSON result.
def test_update_with_json_attrs_not_dict(self, mock_logger): """Test attributes get extracted from a JSON result.""" data = command_line.CommandSensorData(self.hass, "echo [1, 2, 3]", 15) self.sensor = command_line.CommandSensor( self.hass, data, "test", None, None, ["key"] ) self.sensor.update() assert {} == self.sensor.device_state_attributes assert mock_logger.warning.called
[ "def", "test_update_with_json_attrs_not_dict", "(", "self", ",", "mock_logger", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "\"echo [1, 2, 3]\"", ",", "15", ")", "self", ".", "sensor", "=", "command_line", ".", "CommandSensor", "(", "self", ".", "hass", ",", "data", ",", "\"test\"", ",", "None", ",", "None", ",", "[", "\"key\"", "]", ")", "self", ".", "sensor", ".", "update", "(", ")", "assert", "{", "}", "==", "self", ".", "sensor", ".", "device_state_attributes", "assert", "mock_logger", ".", "warning", ".", "called" ]
[ 131, 4 ]
[ 139, 41 ]
python
en
['en', 'en', 'en']
True
TestCommandSensorSensor.test_update_with_json_attrs_bad_JSON
(self, mock_logger)
Test attributes get extracted from a JSON result.
Test attributes get extracted from a JSON result.
def test_update_with_json_attrs_bad_JSON(self, mock_logger): """Test attributes get extracted from a JSON result.""" data = command_line.CommandSensorData( self.hass, "echo This is text rather than JSON data.", 15 ) self.sensor = command_line.CommandSensor( self.hass, data, "test", None, None, ["key"] ) self.sensor.update() assert {} == self.sensor.device_state_attributes assert mock_logger.warning.called
[ "def", "test_update_with_json_attrs_bad_JSON", "(", "self", ",", "mock_logger", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "\"echo This is text rather than JSON data.\"", ",", "15", ")", "self", ".", "sensor", "=", "command_line", ".", "CommandSensor", "(", "self", ".", "hass", ",", "data", ",", "\"test\"", ",", "None", ",", "None", ",", "[", "\"key\"", "]", ")", "self", ".", "sensor", ".", "update", "(", ")", "assert", "{", "}", "==", "self", ".", "sensor", ".", "device_state_attributes", "assert", "mock_logger", ".", "warning", ".", "called" ]
[ 142, 4 ]
[ 152, 41 ]
python
en
['en', 'en', 'en']
True
TestCommandSensorSensor.test_update_with_missing_json_attrs
(self)
Test attributes get extracted from a JSON result.
Test attributes get extracted from a JSON result.
def test_update_with_missing_json_attrs(self): """Test attributes get extracted from a JSON result.""" data = command_line.CommandSensorData( self.hass, ( 'echo { \\"key\\": \\"some_json_value\\", \\"another_key\\":\ \\"another_json_value\\", \\"key_three\\": \\"value_three\\" }' ), 15, ) self.sensor = command_line.CommandSensor( self.hass, data, "test", None, None, ["key", "another_key", "key_three", "special_key"], ) self.sensor.update() assert self.sensor.device_state_attributes["key"] == "some_json_value" assert ( self.sensor.device_state_attributes["another_key"] == "another_json_value" ) assert self.sensor.device_state_attributes["key_three"] == "value_three" assert "special_key" not in self.sensor.device_state_attributes
[ "def", "test_update_with_missing_json_attrs", "(", "self", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "(", "'echo { \\\\\"key\\\\\": \\\\\"some_json_value\\\\\", \\\\\"another_key\\\\\":\\\n \\\\\"another_json_value\\\\\", \\\\\"key_three\\\\\": \\\\\"value_three\\\\\" }'", ")", ",", "15", ",", ")", "self", ".", "sensor", "=", "command_line", ".", "CommandSensor", "(", "self", ".", "hass", ",", "data", ",", "\"test\"", ",", "None", ",", "None", ",", "[", "\"key\"", ",", "\"another_key\"", ",", "\"key_three\"", ",", "\"special_key\"", "]", ",", ")", "self", ".", "sensor", ".", "update", "(", ")", "assert", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"key\"", "]", "==", "\"some_json_value\"", "assert", "(", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"another_key\"", "]", "==", "\"another_json_value\"", ")", "assert", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"key_three\"", "]", "==", "\"value_three\"", "assert", "\"special_key\"", "not", "in", "self", ".", "sensor", ".", "device_state_attributes" ]
[ 154, 4 ]
[ 179, 71 ]
python
en
['en', 'en', 'en']
True
TestCommandSensorSensor.test_update_with_unnecessary_json_attrs
(self)
Test attributes get extracted from a JSON result.
Test attributes get extracted from a JSON result.
def test_update_with_unnecessary_json_attrs(self): """Test attributes get extracted from a JSON result.""" data = command_line.CommandSensorData( self.hass, ( 'echo { \\"key\\": \\"some_json_value\\", \\"another_key\\":\ \\"another_json_value\\", \\"key_three\\": \\"value_three\\" }' ), 15, ) self.sensor = command_line.CommandSensor( self.hass, data, "test", None, None, ["key", "another_key"] ) self.sensor.update() assert self.sensor.device_state_attributes["key"] == "some_json_value" assert ( self.sensor.device_state_attributes["another_key"] == "another_json_value" ) assert "key_three" not in self.sensor.device_state_attributes
[ "def", "test_update_with_unnecessary_json_attrs", "(", "self", ")", ":", "data", "=", "command_line", ".", "CommandSensorData", "(", "self", ".", "hass", ",", "(", "'echo { \\\\\"key\\\\\": \\\\\"some_json_value\\\\\", \\\\\"another_key\\\\\":\\\n \\\\\"another_json_value\\\\\", \\\\\"key_three\\\\\": \\\\\"value_three\\\\\" }'", ")", ",", "15", ",", ")", "self", ".", "sensor", "=", "command_line", ".", "CommandSensor", "(", "self", ".", "hass", ",", "data", ",", "\"test\"", ",", "None", ",", "None", ",", "[", "\"key\"", ",", "\"another_key\"", "]", ")", "self", ".", "sensor", ".", "update", "(", ")", "assert", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"key\"", "]", "==", "\"some_json_value\"", "assert", "(", "self", ".", "sensor", ".", "device_state_attributes", "[", "\"another_key\"", "]", "==", "\"another_json_value\"", ")", "assert", "\"key_three\"", "not", "in", "self", ".", "sensor", ".", "device_state_attributes" ]
[ 181, 4 ]
[ 200, 69 ]
python
en
['en', 'en', 'en']
True
mock_controller_assert
()
Mock the velbus controller with an assert.
Mock the velbus controller with an assert.
def mock_controller_assert(): """Mock the velbus controller with an assert.""" with patch("velbus.Controller", side_effect=Exception()): yield
[ "def", "mock_controller_assert", "(", ")", ":", "with", "patch", "(", "\"velbus.Controller\"", ",", "side_effect", "=", "Exception", "(", ")", ")", ":", "yield" ]
[ 15, 0 ]
[ 18, 13 ]
python
en
['en', 'en', 'en']
True
mock_controller
()
Mock a successful velbus controller.
Mock a successful velbus controller.
def mock_controller(): """Mock a successful velbus controller.""" controller = Mock() with patch("velbus.Controller", return_value=controller): yield controller
[ "def", "mock_controller", "(", ")", ":", "controller", "=", "Mock", "(", ")", "with", "patch", "(", "\"velbus.Controller\"", ",", "return_value", "=", "controller", ")", ":", "yield", "controller" ]
[ 22, 0 ]
[ 26, 24 ]
python
cs
['cs', 'ro', 'en']
False
init_config_flow
(hass)
Init a configuration flow.
Init a configuration flow.
def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.VelbusConfigFlow() flow.hass = hass return flow
[ "def", "init_config_flow", "(", "hass", ")", ":", "flow", "=", "config_flow", ".", "VelbusConfigFlow", "(", ")", "flow", ".", "hass", "=", "hass", "return", "flow" ]
[ 29, 0 ]
[ 33, 15 ]
python
en
['es', 'fr', 'en']
False
test_user
(hass, controller)
Test user config.
Test user config.
async def test_user(hass, controller): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" result = await flow.async_step_user( {CONF_NAME: "Velbus Test Serial", CONF_PORT: PORT_SERIAL} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "velbus_test_serial" assert result["data"][CONF_PORT] == PORT_SERIAL result = await flow.async_step_user( {CONF_NAME: "Velbus Test TCP", CONF_PORT: PORT_TCP} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "velbus_test_tcp" assert result["data"][CONF_PORT] == PORT_TCP
[ "async", "def", "test_user", "(", "hass", ",", "controller", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "result", "=", "await", "flow", ".", "async_step_user", "(", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"user\"", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_NAME", ":", "\"Velbus Test Serial\"", ",", "CONF_PORT", ":", "PORT_SERIAL", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "\"velbus_test_serial\"", "assert", "result", "[", "\"data\"", "]", "[", "CONF_PORT", "]", "==", "PORT_SERIAL", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_NAME", ":", "\"Velbus Test TCP\"", ",", "CONF_PORT", ":", "PORT_TCP", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "\"velbus_test_tcp\"", "assert", "result", "[", "\"data\"", "]", "[", "CONF_PORT", "]", "==", "PORT_TCP" ]
[ 36, 0 ]
[ 56, 48 ]
python
en
['en', 'da', 'en']
True
test_user_fail
(hass, controller_assert)
Test user config.
Test user config.
async def test_user_fail(hass, controller_assert): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user( {CONF_NAME: "Velbus Test Serial", CONF_PORT: PORT_SERIAL} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_PORT: "cannot_connect"} result = await flow.async_step_user( {CONF_NAME: "Velbus Test TCP", CONF_PORT: PORT_TCP} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_PORT: "cannot_connect"}
[ "async", "def", "test_user_fail", "(", "hass", ",", "controller_assert", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_NAME", ":", "\"Velbus Test Serial\"", ",", "CONF_PORT", ":", "PORT_SERIAL", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "CONF_PORT", ":", "\"cannot_connect\"", "}", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_NAME", ":", "\"Velbus Test TCP\"", ",", "CONF_PORT", ":", "PORT_TCP", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "CONF_PORT", ":", "\"cannot_connect\"", "}" ]
[ 59, 0 ]
[ 73, 60 ]
python
en
['en', 'da', 'en']
True
test_import
(hass, controller)
Test import step.
Test import step.
async def test_import(hass, controller): """Test import step.""" flow = init_config_flow(hass) result = await flow.async_step_import({CONF_PORT: PORT_TCP}) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "velbus_import"
[ "async", "def", "test_import", "(", "hass", ",", "controller", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "result", "=", "await", "flow", ".", "async_step_import", "(", "{", "CONF_PORT", ":", "PORT_TCP", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "\"velbus_import\"" ]
[ 76, 0 ]
[ 82, 45 ]
python
de
['de', 'sd', 'en']
False
test_abort_if_already_setup
(hass)
Test we abort if Velbus is already setup.
Test we abort if Velbus is already setup.
async def test_abort_if_already_setup(hass): """Test we abort if Velbus is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="velbus", data={CONF_PORT: PORT_TCP, CONF_NAME: "velbus home"} ).add_to_hass(hass) result = await flow.async_step_import( {CONF_PORT: PORT_TCP, CONF_NAME: "velbus import test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" result = await flow.async_step_user( {CONF_PORT: PORT_TCP, CONF_NAME: "velbus import test"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {"port": "already_configured"}
[ "async", "def", "test_abort_if_already_setup", "(", "hass", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "MockConfigEntry", "(", "domain", "=", "\"velbus\"", ",", "data", "=", "{", "CONF_PORT", ":", "PORT_TCP", ",", "CONF_NAME", ":", "\"velbus home\"", "}", ")", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "flow", ".", "async_step_import", "(", "{", "CONF_PORT", ":", "PORT_TCP", ",", "CONF_NAME", ":", "\"velbus import test\"", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_PORT", ":", "PORT_TCP", ",", "CONF_NAME", ":", "\"velbus import test\"", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "\"port\"", ":", "\"already_configured\"", "}" ]
[ 85, 0 ]
[ 102, 61 ]
python
en
['en', 'de', 'en']
True
get_scanner
(hass, config)
Validate the configuration and return a Nmap scanner.
Validate the configuration and return a Nmap scanner.
def get_scanner(hass, config): """Validate the configuration and return a Nmap scanner.""" return NmapDeviceScanner(config[DOMAIN])
[ "def", "get_scanner", "(", "hass", ",", "config", ")", ":", "return", "NmapDeviceScanner", "(", "config", "[", "DOMAIN", "]", ")" ]
[ 37, 0 ]
[ 39, 44 ]
python
en
['en', 'en', 'en']
True
NmapDeviceScanner.__init__
(self, config)
Initialize the scanner.
Initialize the scanner.
def __init__(self, config): """Initialize the scanner.""" self.last_results = [] self.hosts = config[CONF_HOSTS] self.exclude = config[CONF_EXCLUDE] minutes = config[CONF_HOME_INTERVAL] self._options = config[CONF_OPTIONS] self.home_interval = timedelta(minutes=minutes) _LOGGER.debug("Scanner initialized")
[ "def", "__init__", "(", "self", ",", "config", ")", ":", "self", ".", "last_results", "=", "[", "]", "self", ".", "hosts", "=", "config", "[", "CONF_HOSTS", "]", "self", ".", "exclude", "=", "config", "[", "CONF_EXCLUDE", "]", "minutes", "=", "config", "[", "CONF_HOME_INTERVAL", "]", "self", ".", "_options", "=", "config", "[", "CONF_OPTIONS", "]", "self", ".", "home_interval", "=", "timedelta", "(", "minutes", "=", "minutes", ")", "_LOGGER", ".", "debug", "(", "\"Scanner initialized\"", ")" ]
[ 50, 4 ]
[ 60, 44 ]
python
en
['en', 'en', 'en']
True
NmapDeviceScanner.scan_devices
(self)
Scan for new devices and return a list with found device IDs.
Scan for new devices and return a list with found device IDs.
def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self._update_info() _LOGGER.debug("Nmap last results %s", self.last_results) return [device.mac for device in self.last_results]
[ "def", "scan_devices", "(", "self", ")", ":", "self", ".", "_update_info", "(", ")", "_LOGGER", ".", "debug", "(", "\"Nmap last results %s\"", ",", "self", ".", "last_results", ")", "return", "[", "device", ".", "mac", "for", "device", "in", "self", ".", "last_results", "]" ]
[ 62, 4 ]
[ 68, 59 ]
python
en
['en', 'en', 'en']
True
NmapDeviceScanner.get_device_name
(self, device)
Return the name of the given device or None if we don't know.
Return the name of the given device or None if we don't know.
def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" filter_named = [ result.name for result in self.last_results if result.mac == device ] if filter_named: return filter_named[0] return None
[ "def", "get_device_name", "(", "self", ",", "device", ")", ":", "filter_named", "=", "[", "result", ".", "name", "for", "result", "in", "self", ".", "last_results", "if", "result", ".", "mac", "==", "device", "]", "if", "filter_named", ":", "return", "filter_named", "[", "0", "]", "return", "None" ]
[ 70, 4 ]
[ 78, 19 ]
python
en
['en', 'en', 'en']
True
NmapDeviceScanner.get_extra_attributes
(self, device)
Return the IP of the given device.
Return the IP of the given device.
def get_extra_attributes(self, device): """Return the IP of the given device.""" filter_ip = next( (result.ip for result in self.last_results if result.mac == device), None ) return {"ip": filter_ip}
[ "def", "get_extra_attributes", "(", "self", ",", "device", ")", ":", "filter_ip", "=", "next", "(", "(", "result", ".", "ip", "for", "result", "in", "self", ".", "last_results", "if", "result", ".", "mac", "==", "device", ")", ",", "None", ")", "return", "{", "\"ip\"", ":", "filter_ip", "}" ]
[ 80, 4 ]
[ 85, 32 ]
python
en
['en', 'en', 'en']
True
NmapDeviceScanner._update_info
(self)
Scan the network for devices. Returns boolean if scanning successful.
Scan the network for devices.
def _update_info(self): """Scan the network for devices. Returns boolean if scanning successful. """ _LOGGER.debug("Scanning...") scanner = PortScanner() options = self._options if self.home_interval: boundary = dt_util.now() - self.home_interval last_results = [ device for device in self.last_results if device.last_update > boundary ] if last_results: exclude_hosts = self.exclude + [device.ip for device in last_results] else: exclude_hosts = self.exclude else: last_results = [] exclude_hosts = self.exclude if exclude_hosts: options += f" --exclude {','.join(exclude_hosts)}" try: result = scanner.scan(hosts=" ".join(self.hosts), arguments=options) except PortScannerError: return False now = dt_util.now() for ipv4, info in result["scan"].items(): if info["status"]["state"] != "up": continue name = info["hostnames"][0]["name"] if info["hostnames"] else ipv4 # Mac address only returned if nmap ran as root mac = info["addresses"].get("mac") or get_mac_address(ip=ipv4) if mac is None: _LOGGER.info("No MAC address found for %s", ipv4) continue last_results.append(Device(mac.upper(), name, ipv4, now)) self.last_results = last_results _LOGGER.debug("nmap scan successful") return True
[ "def", "_update_info", "(", "self", ")", ":", "_LOGGER", ".", "debug", "(", "\"Scanning...\"", ")", "scanner", "=", "PortScanner", "(", ")", "options", "=", "self", ".", "_options", "if", "self", ".", "home_interval", ":", "boundary", "=", "dt_util", ".", "now", "(", ")", "-", "self", ".", "home_interval", "last_results", "=", "[", "device", "for", "device", "in", "self", ".", "last_results", "if", "device", ".", "last_update", ">", "boundary", "]", "if", "last_results", ":", "exclude_hosts", "=", "self", ".", "exclude", "+", "[", "device", ".", "ip", "for", "device", "in", "last_results", "]", "else", ":", "exclude_hosts", "=", "self", ".", "exclude", "else", ":", "last_results", "=", "[", "]", "exclude_hosts", "=", "self", ".", "exclude", "if", "exclude_hosts", ":", "options", "+=", "f\" --exclude {','.join(exclude_hosts)}\"", "try", ":", "result", "=", "scanner", ".", "scan", "(", "hosts", "=", "\" \"", ".", "join", "(", "self", ".", "hosts", ")", ",", "arguments", "=", "options", ")", "except", "PortScannerError", ":", "return", "False", "now", "=", "dt_util", ".", "now", "(", ")", "for", "ipv4", ",", "info", "in", "result", "[", "\"scan\"", "]", ".", "items", "(", ")", ":", "if", "info", "[", "\"status\"", "]", "[", "\"state\"", "]", "!=", "\"up\"", ":", "continue", "name", "=", "info", "[", "\"hostnames\"", "]", "[", "0", "]", "[", "\"name\"", "]", "if", "info", "[", "\"hostnames\"", "]", "else", "ipv4", "# Mac address only returned if nmap ran as root", "mac", "=", "info", "[", "\"addresses\"", "]", ".", "get", "(", "\"mac\"", ")", "or", "get_mac_address", "(", "ip", "=", "ipv4", ")", "if", "mac", "is", "None", ":", "_LOGGER", ".", "info", "(", "\"No MAC address found for %s\"", ",", "ipv4", ")", "continue", "last_results", ".", "append", "(", "Device", "(", "mac", ".", "upper", "(", ")", ",", "name", ",", "ipv4", ",", "now", ")", ")", "self", ".", "last_results", "=", "last_results", "_LOGGER", ".", "debug", "(", "\"nmap scan successful\"", ")", "return", "True" ]
[ 87, 4 ]
[ 133, 19 ]
python
en
['en', 'en', 'en']
True
async_setup
(hass, config)
Set up the iFrame frontend panels.
Set up the iFrame frontend panels.
async def async_setup(hass, config): """Set up the iFrame frontend panels.""" for url_path, info in config[DOMAIN].items(): hass.components.frontend.async_register_built_in_panel( "iframe", info.get(CONF_TITLE), info.get(CONF_ICON), url_path, {"url": info[CONF_URL]}, require_admin=info[CONF_REQUIRE_ADMIN], ) return True
[ "async", "def", "async_setup", "(", "hass", ",", "config", ")", ":", "for", "url_path", ",", "info", "in", "config", "[", "DOMAIN", "]", ".", "items", "(", ")", ":", "hass", ".", "components", ".", "frontend", ".", "async_register_built_in_panel", "(", "\"iframe\"", ",", "info", ".", "get", "(", "CONF_TITLE", ")", ",", "info", ".", "get", "(", "CONF_ICON", ")", ",", "url_path", ",", "{", "\"url\"", ":", "info", "[", "CONF_URL", "]", "}", ",", "require_admin", "=", "info", "[", "CONF_REQUIRE_ADMIN", "]", ",", ")", "return", "True" ]
[ 37, 0 ]
[ 49, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass, config_entry, async_add_entities)
Set up Tasmota switch dynamically through discovery.
Set up Tasmota switch dynamically through discovery.
async def async_setup_entry(hass, config_entry, async_add_entities): """Set up Tasmota switch dynamically through discovery.""" @callback def async_discover(tasmota_entity, discovery_hash): """Discover and add a Tasmota switch.""" async_add_entities( [ TasmotaSwitch( tasmota_entity=tasmota_entity, discovery_hash=discovery_hash ) ] ) hass.data[ DATA_REMOVE_DISCOVER_COMPONENT.format(switch.DOMAIN) ] = async_dispatcher_connect( hass, TASMOTA_DISCOVERY_ENTITY_NEW.format(switch.DOMAIN, TASMOTA_DOMAIN), async_discover, )
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "@", "callback", "def", "async_discover", "(", "tasmota_entity", ",", "discovery_hash", ")", ":", "\"\"\"Discover and add a Tasmota switch.\"\"\"", "async_add_entities", "(", "[", "TasmotaSwitch", "(", "tasmota_entity", "=", "tasmota_entity", ",", "discovery_hash", "=", "discovery_hash", ")", "]", ")", "hass", ".", "data", "[", "DATA_REMOVE_DISCOVER_COMPONENT", ".", "format", "(", "switch", ".", "DOMAIN", ")", "]", "=", "async_dispatcher_connect", "(", "hass", ",", "TASMOTA_DISCOVERY_ENTITY_NEW", ".", "format", "(", "switch", ".", "DOMAIN", ",", "TASMOTA_DOMAIN", ")", ",", "async_discover", ",", ")" ]
[ 12, 0 ]
[ 32, 5 ]
python
en
['en', 'en', 'en']
True
TasmotaSwitch.__init__
(self, **kwds)
Initialize the Tasmota switch.
Initialize the Tasmota switch.
def __init__(self, **kwds): """Initialize the Tasmota switch.""" self._state = False super().__init__( discovery_update=self.discovery_update, **kwds, )
[ "def", "__init__", "(", "self", ",", "*", "*", "kwds", ")", ":", "self", ".", "_state", "=", "False", "super", "(", ")", ".", "__init__", "(", "discovery_update", "=", "self", ".", "discovery_update", ",", "*", "*", "kwds", ",", ")" ]
[ 42, 4 ]
[ 49, 9 ]
python
en
['en', 'pl', 'en']
True
TasmotaSwitch.is_on
(self)
Return true if device is on.
Return true if device is on.
def is_on(self): """Return true if device is on.""" return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 52, 4 ]
[ 54, 26 ]
python
en
['en', 'fy', 'en']
True
TasmotaSwitch.async_turn_on
(self, **kwargs)
Turn the device on.
Turn the device on.
async def async_turn_on(self, **kwargs): """Turn the device on.""" self._tasmota_entity.set_state(True)
[ "async", "def", "async_turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_tasmota_entity", ".", "set_state", "(", "True", ")" ]
[ 56, 4 ]
[ 58, 44 ]
python
en
['en', 'en', 'en']
True
TasmotaSwitch.async_turn_off
(self, **kwargs)
Turn the device off.
Turn the device off.
async def async_turn_off(self, **kwargs): """Turn the device off.""" self._tasmota_entity.set_state(False)
[ "async", "def", "async_turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_tasmota_entity", ".", "set_state", "(", "False", ")" ]
[ 60, 4 ]
[ 62, 45 ]
python
en
['en', 'en', 'en']
True
RoombaVacuum.device_state_attributes
(self)
Return the state attributes of the device.
Return the state attributes of the device.
def device_state_attributes(self): """Return the state attributes of the device.""" state_attrs = super().device_state_attributes # Get bin state bin_raw_state = self.vacuum_state.get("bin", {}) bin_state = {} if bin_raw_state.get("present") is not None: bin_state[ATTR_BIN_PRESENT] = bin_raw_state.get("present") if bin_raw_state.get("full") is not None: bin_state[ATTR_BIN_FULL] = bin_raw_state.get("full") state_attrs.update(bin_state) return state_attrs
[ "def", "device_state_attributes", "(", "self", ")", ":", "state_attrs", "=", "super", "(", ")", ".", "device_state_attributes", "# Get bin state", "bin_raw_state", "=", "self", ".", "vacuum_state", ".", "get", "(", "\"bin\"", ",", "{", "}", ")", "bin_state", "=", "{", "}", "if", "bin_raw_state", ".", "get", "(", "\"present\"", ")", "is", "not", "None", ":", "bin_state", "[", "ATTR_BIN_PRESENT", "]", "=", "bin_raw_state", ".", "get", "(", "\"present\"", ")", "if", "bin_raw_state", ".", "get", "(", "\"full\"", ")", "is", "not", "None", ":", "bin_state", "[", "ATTR_BIN_FULL", "]", "=", "bin_raw_state", ".", "get", "(", "\"full\"", ")", "state_attrs", ".", "update", "(", "bin_state", ")", "return", "state_attrs" ]
[ 25, 4 ]
[ 38, 26 ]
python
en
['en', 'en', 'en']
True
RoombaVacuumCarpetBoost.supported_features
(self)
Flag vacuum cleaner robot features that are supported.
Flag vacuum cleaner robot features that are supported.
def supported_features(self): """Flag vacuum cleaner robot features that are supported.""" return SUPPORT_ROOMBA_CARPET_BOOST
[ "def", "supported_features", "(", "self", ")", ":", "return", "SUPPORT_ROOMBA_CARPET_BOOST" ]
[ 45, 4 ]
[ 47, 42 ]
python
en
['en', 'en', 'en']
True
RoombaVacuumCarpetBoost.fan_speed
(self)
Return the fan speed of the vacuum cleaner.
Return the fan speed of the vacuum cleaner.
def fan_speed(self): """Return the fan speed of the vacuum cleaner.""" fan_speed = None carpet_boost = self.vacuum_state.get("carpetBoost") high_perf = self.vacuum_state.get("vacHigh") if carpet_boost is not None and high_perf is not None: if carpet_boost: fan_speed = FAN_SPEED_AUTOMATIC elif high_perf: fan_speed = FAN_SPEED_PERFORMANCE else: # carpet_boost and high_perf are False fan_speed = FAN_SPEED_ECO return fan_speed
[ "def", "fan_speed", "(", "self", ")", ":", "fan_speed", "=", "None", "carpet_boost", "=", "self", ".", "vacuum_state", ".", "get", "(", "\"carpetBoost\"", ")", "high_perf", "=", "self", ".", "vacuum_state", ".", "get", "(", "\"vacHigh\"", ")", "if", "carpet_boost", "is", "not", "None", "and", "high_perf", "is", "not", "None", ":", "if", "carpet_boost", ":", "fan_speed", "=", "FAN_SPEED_AUTOMATIC", "elif", "high_perf", ":", "fan_speed", "=", "FAN_SPEED_PERFORMANCE", "else", ":", "# carpet_boost and high_perf are False", "fan_speed", "=", "FAN_SPEED_ECO", "return", "fan_speed" ]
[ 50, 4 ]
[ 62, 24 ]
python
en
['en', 'en', 'en']
True
RoombaVacuumCarpetBoost.fan_speed_list
(self)
Get the list of available fan speed steps of the vacuum cleaner.
Get the list of available fan speed steps of the vacuum cleaner.
def fan_speed_list(self): """Get the list of available fan speed steps of the vacuum cleaner.""" return FAN_SPEEDS
[ "def", "fan_speed_list", "(", "self", ")", ":", "return", "FAN_SPEEDS" ]
[ 65, 4 ]
[ 67, 25 ]
python
en
['en', 'en', 'en']
True
RoombaVacuumCarpetBoost.async_set_fan_speed
(self, fan_speed, **kwargs)
Set fan speed.
Set fan speed.
async def async_set_fan_speed(self, fan_speed, **kwargs): """Set fan speed.""" if fan_speed.capitalize() in FAN_SPEEDS: fan_speed = fan_speed.capitalize() _LOGGER.debug("Set fan speed to: %s", fan_speed) high_perf = None carpet_boost = None if fan_speed == FAN_SPEED_AUTOMATIC: high_perf = False carpet_boost = True elif fan_speed == FAN_SPEED_ECO: high_perf = False carpet_boost = False elif fan_speed == FAN_SPEED_PERFORMANCE: high_perf = True carpet_boost = False else: _LOGGER.error("No such fan speed available: %s", fan_speed) return # The set_preference method does only accept string values await self.hass.async_add_executor_job( self.vacuum.set_preference, "carpetBoost", str(carpet_boost) ) await self.hass.async_add_executor_job( self.vacuum.set_preference, "vacHigh", str(high_perf) )
[ "async", "def", "async_set_fan_speed", "(", "self", ",", "fan_speed", ",", "*", "*", "kwargs", ")", ":", "if", "fan_speed", ".", "capitalize", "(", ")", "in", "FAN_SPEEDS", ":", "fan_speed", "=", "fan_speed", ".", "capitalize", "(", ")", "_LOGGER", ".", "debug", "(", "\"Set fan speed to: %s\"", ",", "fan_speed", ")", "high_perf", "=", "None", "carpet_boost", "=", "None", "if", "fan_speed", "==", "FAN_SPEED_AUTOMATIC", ":", "high_perf", "=", "False", "carpet_boost", "=", "True", "elif", "fan_speed", "==", "FAN_SPEED_ECO", ":", "high_perf", "=", "False", "carpet_boost", "=", "False", "elif", "fan_speed", "==", "FAN_SPEED_PERFORMANCE", ":", "high_perf", "=", "True", "carpet_boost", "=", "False", "else", ":", "_LOGGER", ".", "error", "(", "\"No such fan speed available: %s\"", ",", "fan_speed", ")", "return", "# The set_preference method does only accept string values", "await", "self", ".", "hass", ".", "async_add_executor_job", "(", "self", ".", "vacuum", ".", "set_preference", ",", "\"carpetBoost\"", ",", "str", "(", "carpet_boost", ")", ")", "await", "self", ".", "hass", ".", "async_add_executor_job", "(", "self", ".", "vacuum", ".", "set_preference", ",", "\"vacHigh\"", ",", "str", "(", "high_perf", ")", ")" ]
[ 69, 4 ]
[ 94, 9 ]
python
fy
['sv', 'fy', 'ur']
False
test_reload
(hass)
Verify we can reload filter sensors.
Verify we can reload filter sensors.
async def test_reload(hass): """Verify we can reload filter sensors.""" await hass.async_add_executor_job( init_recorder_component, hass ) # force in memory db hass.states.async_set("sensor.test_monitored", 12345) await async_setup_component( hass, "sensor", { "sensor": { "platform": "filter", "name": "test", "entity_id": "sensor.test_monitored", "filters": [ {"filter": "outlier", "window_size": 10, "radius": 4.0}, {"filter": "lowpass", "time_constant": 10, "precision": 2}, {"filter": "throttle", "window_size": 1}, ], } }, ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() assert len(hass.states.async_all()) == 2 assert hass.states.get("sensor.test") yaml_path = path.join( _get_fixtures_base_path(), "fixtures", "filter/configuration.yaml", ) with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path): await hass.services.async_call( DOMAIN, SERVICE_RELOAD, {}, blocking=True, ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 2 assert hass.states.get("sensor.test") is None assert hass.states.get("sensor.filtered_realistic_humidity")
[ "async", "def", "test_reload", "(", "hass", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "# force in memory db", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test_monitored\"", ",", "12345", ")", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "\"filter\"", ",", "\"name\"", ":", "\"test\"", ",", "\"entity_id\"", ":", "\"sensor.test_monitored\"", ",", "\"filters\"", ":", "[", "{", "\"filter\"", ":", "\"outlier\"", ",", "\"window_size\"", ":", "10", ",", "\"radius\"", ":", "4.0", "}", ",", "{", "\"filter\"", ":", "\"lowpass\"", ",", "\"time_constant\"", ":", "10", ",", "\"precision\"", ":", "2", "}", ",", "{", "\"filter\"", ":", "\"throttle\"", ",", "\"window_size\"", ":", "1", "}", ",", "]", ",", "}", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "2", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.test\"", ")", "yaml_path", "=", "path", ".", "join", "(", "_get_fixtures_base_path", "(", ")", ",", "\"fixtures\"", ",", "\"filter/configuration.yaml\"", ",", ")", "with", "patch", ".", "object", "(", "hass_config", ",", "\"YAML_CONFIG_FILE\"", ",", "yaml_path", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_RELOAD", ",", "{", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "hass", ".", "states", ".", "async_all", "(", ")", ")", "==", "2", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.test\"", ")", "is", "None", "assert", "hass", ".", "states", ".", "get", "(", "\"sensor.filtered_realistic_humidity\"", ")" ]
[ 315, 0 ]
[ 363, 64 ]
python
en
['en', 'da', 'en']
True
TestFilterSensor.setup_method
(self, method)
Set up things to be run when tests are started.
Set up things to be run when tests are started.
def setup_method(self, method): """Set up things to be run when tests are started.""" self.hass = get_test_home_assistant() self.hass.config.components.add("history") raw_values = [20, 19, 18, 21, 22, 0] self.values = [] timestamp = dt_util.utcnow() for val in raw_values: self.values.append( ha.State("sensor.test_monitored", val, last_updated=timestamp) ) timestamp += timedelta(minutes=1)
[ "def", "setup_method", "(", "self", ",", "method", ")", ":", "self", ".", "hass", "=", "get_test_home_assistant", "(", ")", "self", ".", "hass", ".", "config", ".", "components", ".", "add", "(", "\"history\"", ")", "raw_values", "=", "[", "20", ",", "19", ",", "18", ",", "21", ",", "22", ",", "0", "]", "self", ".", "values", "=", "[", "]", "timestamp", "=", "dt_util", ".", "utcnow", "(", ")", "for", "val", "in", "raw_values", ":", "self", ".", "values", ".", "append", "(", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "val", ",", "last_updated", "=", "timestamp", ")", ")", "timestamp", "+=", "timedelta", "(", "minutes", "=", "1", ")" ]
[ 31, 4 ]
[ 43, 45 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.teardown_method
(self, method)
Stop everything that was started.
Stop everything that was started.
def teardown_method(self, method): """Stop everything that was started.""" self.hass.stop()
[ "def", "teardown_method", "(", "self", ",", "method", ")", ":", "self", ".", "hass", ".", "stop", "(", ")" ]
[ 45, 4 ]
[ 47, 24 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.init_recorder
(self)
Initialize the recorder.
Initialize the recorder.
def init_recorder(self): """Initialize the recorder.""" init_recorder_component(self.hass) self.hass.start()
[ "def", "init_recorder", "(", "self", ")", ":", "init_recorder_component", "(", "self", ".", "hass", ")", "self", ".", "hass", ".", "start", "(", ")" ]
[ 49, 4 ]
[ 52, 25 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.test_setup_fail
(self)
Test setup fails if the configured filter doesn't exist.
Test setup fails if the configured filter doesn't exist.
def test_setup_fail(self): """Test setup fails if the configured filter doesn't exist.""" config = { "sensor": { "platform": "filter", "entity_id": "sensor.test_monitored", "filters": [{"filter": "nonexisting"}], } } with assert_setup_component(0): assert setup_component(self.hass, "sensor", config) self.hass.block_till_done()
[ "def", "test_setup_fail", "(", "self", ")", ":", "config", "=", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "\"filter\"", ",", "\"entity_id\"", ":", "\"sensor.test_monitored\"", ",", "\"filters\"", ":", "[", "{", "\"filter\"", ":", "\"nonexisting\"", "}", "]", ",", "}", "}", "with", "assert_setup_component", "(", "0", ")", ":", "assert", "setup_component", "(", "self", ".", "hass", ",", "\"sensor\"", ",", "config", ")", "self", ".", "hass", ".", "block_till_done", "(", ")" ]
[ 54, 4 ]
[ 65, 39 ]
python
en
['de', 'en', 'en']
True
TestFilterSensor.test_chain
(self)
Test if filter chaining works.
Test if filter chaining works.
def test_chain(self): """Test if filter chaining works.""" config = { "sensor": { "platform": "filter", "name": "test", "entity_id": "sensor.test_monitored", "filters": [ {"filter": "outlier", "window_size": 10, "radius": 4.0}, {"filter": "lowpass", "time_constant": 10, "precision": 2}, {"filter": "throttle", "window_size": 1}, ], } } with assert_setup_component(1, "sensor"): assert setup_component(self.hass, "sensor", config) self.hass.block_till_done() for value in self.values: self.hass.states.set(config["sensor"]["entity_id"], value.state) self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert "18.05" == state.state
[ "def", "test_chain", "(", "self", ")", ":", "config", "=", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "\"filter\"", ",", "\"name\"", ":", "\"test\"", ",", "\"entity_id\"", ":", "\"sensor.test_monitored\"", ",", "\"filters\"", ":", "[", "{", "\"filter\"", ":", "\"outlier\"", ",", "\"window_size\"", ":", "10", ",", "\"radius\"", ":", "4.0", "}", ",", "{", "\"filter\"", ":", "\"lowpass\"", ",", "\"time_constant\"", ":", "10", ",", "\"precision\"", ":", "2", "}", ",", "{", "\"filter\"", ":", "\"throttle\"", ",", "\"window_size\"", ":", "1", "}", ",", "]", ",", "}", "}", "with", "assert_setup_component", "(", "1", ",", "\"sensor\"", ")", ":", "assert", "setup_component", "(", "self", ".", "hass", ",", "\"sensor\"", ",", "config", ")", "self", ".", "hass", ".", "block_till_done", "(", ")", "for", "value", "in", "self", ".", "values", ":", "self", ".", "hass", ".", "states", ".", "set", "(", "config", "[", "\"sensor\"", "]", "[", "\"entity_id\"", "]", ",", "value", ".", "state", ")", "self", ".", "hass", ".", "block_till_done", "(", ")", "state", "=", "self", ".", "hass", ".", "states", ".", "get", "(", "\"sensor.test\"", ")", "assert", "\"18.05\"", "==", "state", ".", "state" ]
[ 67, 4 ]
[ 91, 41 ]
python
en
['nl', 'en', 'en']
True
TestFilterSensor.test_chain_history
(self, missing=False)
Test if filter chaining works.
Test if filter chaining works.
def test_chain_history(self, missing=False): """Test if filter chaining works.""" self.init_recorder() config = { "history": {}, "sensor": { "platform": "filter", "name": "test", "entity_id": "sensor.test_monitored", "filters": [ {"filter": "outlier", "window_size": 10, "radius": 4.0}, {"filter": "lowpass", "time_constant": 10, "precision": 2}, {"filter": "throttle", "window_size": 1}, ], }, } t_0 = dt_util.utcnow() - timedelta(minutes=1) t_1 = dt_util.utcnow() - timedelta(minutes=2) t_2 = dt_util.utcnow() - timedelta(minutes=3) t_3 = dt_util.utcnow() - timedelta(minutes=4) if missing: fake_states = {} else: fake_states = { "sensor.test_monitored": [ ha.State("sensor.test_monitored", 18.0, last_changed=t_0), ha.State("sensor.test_monitored", "unknown", last_changed=t_1), ha.State("sensor.test_monitored", 19.0, last_changed=t_2), ha.State("sensor.test_monitored", 18.2, last_changed=t_3), ] } with patch( "homeassistant.components.history.state_changes_during_period", return_value=fake_states, ): with patch( "homeassistant.components.history.get_last_state_changes", return_value=fake_states, ): with assert_setup_component(1, "sensor"): assert setup_component(self.hass, "sensor", config) self.hass.block_till_done() for value in self.values: self.hass.states.set(config["sensor"]["entity_id"], value.state) self.hass.block_till_done() state = self.hass.states.get("sensor.test") if missing: assert "18.05" == state.state else: assert "17.05" == state.state
[ "def", "test_chain_history", "(", "self", ",", "missing", "=", "False", ")", ":", "self", ".", "init_recorder", "(", ")", "config", "=", "{", "\"history\"", ":", "{", "}", ",", "\"sensor\"", ":", "{", "\"platform\"", ":", "\"filter\"", ",", "\"name\"", ":", "\"test\"", ",", "\"entity_id\"", ":", "\"sensor.test_monitored\"", ",", "\"filters\"", ":", "[", "{", "\"filter\"", ":", "\"outlier\"", ",", "\"window_size\"", ":", "10", ",", "\"radius\"", ":", "4.0", "}", ",", "{", "\"filter\"", ":", "\"lowpass\"", ",", "\"time_constant\"", ":", "10", ",", "\"precision\"", ":", "2", "}", ",", "{", "\"filter\"", ":", "\"throttle\"", ",", "\"window_size\"", ":", "1", "}", ",", "]", ",", "}", ",", "}", "t_0", "=", "dt_util", ".", "utcnow", "(", ")", "-", "timedelta", "(", "minutes", "=", "1", ")", "t_1", "=", "dt_util", ".", "utcnow", "(", ")", "-", "timedelta", "(", "minutes", "=", "2", ")", "t_2", "=", "dt_util", ".", "utcnow", "(", ")", "-", "timedelta", "(", "minutes", "=", "3", ")", "t_3", "=", "dt_util", ".", "utcnow", "(", ")", "-", "timedelta", "(", "minutes", "=", "4", ")", "if", "missing", ":", "fake_states", "=", "{", "}", "else", ":", "fake_states", "=", "{", "\"sensor.test_monitored\"", ":", "[", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "18.0", ",", "last_changed", "=", "t_0", ")", ",", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "\"unknown\"", ",", "last_changed", "=", "t_1", ")", ",", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "19.0", ",", "last_changed", "=", "t_2", ")", ",", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "18.2", ",", "last_changed", "=", "t_3", ")", ",", "]", "}", "with", "patch", "(", "\"homeassistant.components.history.state_changes_during_period\"", ",", "return_value", "=", "fake_states", ",", ")", ":", "with", "patch", "(", "\"homeassistant.components.history.get_last_state_changes\"", ",", "return_value", "=", "fake_states", ",", ")", ":", "with", "assert_setup_component", "(", "1", ",", "\"sensor\"", ")", ":", "assert", "setup_component", "(", "self", ".", "hass", ",", "\"sensor\"", ",", "config", ")", "self", ".", "hass", ".", "block_till_done", "(", ")", "for", "value", "in", "self", ".", "values", ":", "self", ".", "hass", ".", "states", ".", "set", "(", "config", "[", "\"sensor\"", "]", "[", "\"entity_id\"", "]", ",", "value", ".", "state", ")", "self", ".", "hass", ".", "block_till_done", "(", ")", "state", "=", "self", ".", "hass", ".", "states", ".", "get", "(", "\"sensor.test\"", ")", "if", "missing", ":", "assert", "\"18.05\"", "==", "state", ".", "state", "else", ":", "assert", "\"17.05\"", "==", "state", ".", "state" ]
[ 93, 4 ]
[ 146, 49 ]
python
en
['nl', 'en', 'en']
True
TestFilterSensor.test_chain_history_missing
(self)
Test if filter chaining works when recorder is enabled but the source is not recorded.
Test if filter chaining works when recorder is enabled but the source is not recorded.
def test_chain_history_missing(self): """Test if filter chaining works when recorder is enabled but the source is not recorded.""" return self.test_chain_history(missing=True)
[ "def", "test_chain_history_missing", "(", "self", ")", ":", "return", "self", ".", "test_chain_history", "(", "missing", "=", "True", ")" ]
[ 148, 4 ]
[ 150, 52 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.test_history_time
(self)
Test loading from history based on a time window.
Test loading from history based on a time window.
def test_history_time(self): """Test loading from history based on a time window.""" self.init_recorder() config = { "history": {}, "sensor": { "platform": "filter", "name": "test", "entity_id": "sensor.test_monitored", "filters": [{"filter": "time_throttle", "window_size": "00:01"}], }, } t_0 = dt_util.utcnow() - timedelta(minutes=1) t_1 = dt_util.utcnow() - timedelta(minutes=2) t_2 = dt_util.utcnow() - timedelta(minutes=3) fake_states = { "sensor.test_monitored": [ ha.State("sensor.test_monitored", 18.0, last_changed=t_0), ha.State("sensor.test_monitored", 19.0, last_changed=t_1), ha.State("sensor.test_monitored", 18.2, last_changed=t_2), ] } with patch( "homeassistant.components.history.state_changes_during_period", return_value=fake_states, ): with patch( "homeassistant.components.history.get_last_state_changes", return_value=fake_states, ): with assert_setup_component(1, "sensor"): assert setup_component(self.hass, "sensor", config) self.hass.block_till_done() self.hass.block_till_done() state = self.hass.states.get("sensor.test") assert "18.0" == state.state
[ "def", "test_history_time", "(", "self", ")", ":", "self", ".", "init_recorder", "(", ")", "config", "=", "{", "\"history\"", ":", "{", "}", ",", "\"sensor\"", ":", "{", "\"platform\"", ":", "\"filter\"", ",", "\"name\"", ":", "\"test\"", ",", "\"entity_id\"", ":", "\"sensor.test_monitored\"", ",", "\"filters\"", ":", "[", "{", "\"filter\"", ":", "\"time_throttle\"", ",", "\"window_size\"", ":", "\"00:01\"", "}", "]", ",", "}", ",", "}", "t_0", "=", "dt_util", ".", "utcnow", "(", ")", "-", "timedelta", "(", "minutes", "=", "1", ")", "t_1", "=", "dt_util", ".", "utcnow", "(", ")", "-", "timedelta", "(", "minutes", "=", "2", ")", "t_2", "=", "dt_util", ".", "utcnow", "(", ")", "-", "timedelta", "(", "minutes", "=", "3", ")", "fake_states", "=", "{", "\"sensor.test_monitored\"", ":", "[", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "18.0", ",", "last_changed", "=", "t_0", ")", ",", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "19.0", ",", "last_changed", "=", "t_1", ")", ",", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "18.2", ",", "last_changed", "=", "t_2", ")", ",", "]", "}", "with", "patch", "(", "\"homeassistant.components.history.state_changes_during_period\"", ",", "return_value", "=", "fake_states", ",", ")", ":", "with", "patch", "(", "\"homeassistant.components.history.get_last_state_changes\"", ",", "return_value", "=", "fake_states", ",", ")", ":", "with", "assert_setup_component", "(", "1", ",", "\"sensor\"", ")", ":", "assert", "setup_component", "(", "self", ".", "hass", ",", "\"sensor\"", ",", "config", ")", "self", ".", "hass", ".", "block_till_done", "(", ")", "self", ".", "hass", ".", "block_till_done", "(", ")", "state", "=", "self", ".", "hass", ".", "states", ".", "get", "(", "\"sensor.test\"", ")", "assert", "\"18.0\"", "==", "state", ".", "state" ]
[ 152, 4 ]
[ 189, 44 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.test_outlier
(self)
Test if outlier filter works.
Test if outlier filter works.
def test_outlier(self): """Test if outlier filter works.""" filt = OutlierFilter(window_size=3, precision=2, entity=None, radius=4.0) for state in self.values: filtered = filt.filter_state(state) assert 21 == filtered.state
[ "def", "test_outlier", "(", "self", ")", ":", "filt", "=", "OutlierFilter", "(", "window_size", "=", "3", ",", "precision", "=", "2", ",", "entity", "=", "None", ",", "radius", "=", "4.0", ")", "for", "state", "in", "self", ".", "values", ":", "filtered", "=", "filt", ".", "filter_state", "(", "state", ")", "assert", "21", "==", "filtered", ".", "state" ]
[ 191, 4 ]
[ 196, 35 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.test_outlier_step
(self)
Test step-change handling in outlier. Test if outlier filter handles long-running step-changes correctly. It should converge to no longer filter once just over half the window_size is occupied by the new post step-change values.
Test step-change handling in outlier.
def test_outlier_step(self): """ Test step-change handling in outlier. Test if outlier filter handles long-running step-changes correctly. It should converge to no longer filter once just over half the window_size is occupied by the new post step-change values. """ filt = OutlierFilter(window_size=3, precision=2, entity=None, radius=1.1) self.values[-1].state = 22 for state in self.values: filtered = filt.filter_state(state) assert 22 == filtered.state
[ "def", "test_outlier_step", "(", "self", ")", ":", "filt", "=", "OutlierFilter", "(", "window_size", "=", "3", ",", "precision", "=", "2", ",", "entity", "=", "None", ",", "radius", "=", "1.1", ")", "self", ".", "values", "[", "-", "1", "]", ".", "state", "=", "22", "for", "state", "in", "self", ".", "values", ":", "filtered", "=", "filt", ".", "filter_state", "(", "state", ")", "assert", "22", "==", "filtered", ".", "state" ]
[ 198, 4 ]
[ 210, 35 ]
python
en
['en', 'error', 'th']
False
TestFilterSensor.test_initial_outlier
(self)
Test issue #13363.
Test issue #13363.
def test_initial_outlier(self): """Test issue #13363.""" filt = OutlierFilter(window_size=3, precision=2, entity=None, radius=4.0) out = ha.State("sensor.test_monitored", 4000) for state in [out] + self.values: filtered = filt.filter_state(state) assert 21 == filtered.state
[ "def", "test_initial_outlier", "(", "self", ")", ":", "filt", "=", "OutlierFilter", "(", "window_size", "=", "3", ",", "precision", "=", "2", ",", "entity", "=", "None", ",", "radius", "=", "4.0", ")", "out", "=", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "4000", ")", "for", "state", "in", "[", "out", "]", "+", "self", ".", "values", ":", "filtered", "=", "filt", ".", "filter_state", "(", "state", ")", "assert", "21", "==", "filtered", ".", "state" ]
[ 212, 4 ]
[ 218, 35 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.test_unknown_state_outlier
(self)
Test issue #32395.
Test issue #32395.
def test_unknown_state_outlier(self): """Test issue #32395.""" filt = OutlierFilter(window_size=3, precision=2, entity=None, radius=4.0) out = ha.State("sensor.test_monitored", "unknown") for state in [out] + self.values + [out]: try: filtered = filt.filter_state(state) except ValueError: assert state.state == "unknown" assert 21 == filtered.state
[ "def", "test_unknown_state_outlier", "(", "self", ")", ":", "filt", "=", "OutlierFilter", "(", "window_size", "=", "3", ",", "precision", "=", "2", ",", "entity", "=", "None", ",", "radius", "=", "4.0", ")", "out", "=", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "\"unknown\"", ")", "for", "state", "in", "[", "out", "]", "+", "self", ".", "values", "+", "[", "out", "]", ":", "try", ":", "filtered", "=", "filt", ".", "filter_state", "(", "state", ")", "except", "ValueError", ":", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "21", "==", "filtered", ".", "state" ]
[ 220, 4 ]
[ 229, 35 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.test_precision_zero
(self)
Test if precision of zero returns an integer.
Test if precision of zero returns an integer.
def test_precision_zero(self): """Test if precision of zero returns an integer.""" filt = LowPassFilter(window_size=10, precision=0, entity=None, time_constant=10) for state in self.values: filtered = filt.filter_state(state) assert isinstance(filtered.state, int)
[ "def", "test_precision_zero", "(", "self", ")", ":", "filt", "=", "LowPassFilter", "(", "window_size", "=", "10", ",", "precision", "=", "0", ",", "entity", "=", "None", ",", "time_constant", "=", "10", ")", "for", "state", "in", "self", ".", "values", ":", "filtered", "=", "filt", ".", "filter_state", "(", "state", ")", "assert", "isinstance", "(", "filtered", ".", "state", ",", "int", ")" ]
[ 231, 4 ]
[ 236, 46 ]
python
en
['en', 'lb', 'en']
True
TestFilterSensor.test_lowpass
(self)
Test if lowpass filter works.
Test if lowpass filter works.
def test_lowpass(self): """Test if lowpass filter works.""" filt = LowPassFilter(window_size=10, precision=2, entity=None, time_constant=10) out = ha.State("sensor.test_monitored", "unknown") for state in [out] + self.values + [out]: try: filtered = filt.filter_state(state) except ValueError: assert state.state == "unknown" assert 18.05 == filtered.state
[ "def", "test_lowpass", "(", "self", ")", ":", "filt", "=", "LowPassFilter", "(", "window_size", "=", "10", ",", "precision", "=", "2", ",", "entity", "=", "None", ",", "time_constant", "=", "10", ")", "out", "=", "ha", ".", "State", "(", "\"sensor.test_monitored\"", ",", "\"unknown\"", ")", "for", "state", "in", "[", "out", "]", "+", "self", ".", "values", "+", "[", "out", "]", ":", "try", ":", "filtered", "=", "filt", ".", "filter_state", "(", "state", ")", "except", "ValueError", ":", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "18.05", "==", "filtered", ".", "state" ]
[ 238, 4 ]
[ 247, 38 ]
python
en
['nl', 'en', 'en']
True
TestFilterSensor.test_range
(self)
Test if range filter works.
Test if range filter works.
def test_range(self): """Test if range filter works.""" lower = 10 upper = 20 filt = RangeFilter( entity=None, precision=2, lower_bound=lower, upper_bound=upper ) for unf_state in self.values: unf = float(unf_state.state) filtered = filt.filter_state(unf_state) if unf < lower: assert lower == filtered.state elif unf > upper: assert upper == filtered.state else: assert unf == filtered.state
[ "def", "test_range", "(", "self", ")", ":", "lower", "=", "10", "upper", "=", "20", "filt", "=", "RangeFilter", "(", "entity", "=", "None", ",", "precision", "=", "2", ",", "lower_bound", "=", "lower", ",", "upper_bound", "=", "upper", ")", "for", "unf_state", "in", "self", ".", "values", ":", "unf", "=", "float", "(", "unf_state", ".", "state", ")", "filtered", "=", "filt", ".", "filter_state", "(", "unf_state", ")", "if", "unf", "<", "lower", ":", "assert", "lower", "==", "filtered", ".", "state", "elif", "unf", ">", "upper", ":", "assert", "upper", "==", "filtered", ".", "state", "else", ":", "assert", "unf", "==", "filtered", ".", "state" ]
[ 249, 4 ]
[ 264, 44 ]
python
da
['nl', 'da', 'en']
False
TestFilterSensor.test_range_zero
(self)
Test if range filter works with zeroes as bounds.
Test if range filter works with zeroes as bounds.
def test_range_zero(self): """Test if range filter works with zeroes as bounds.""" lower = 0 upper = 0 filt = RangeFilter( entity=None, precision=2, lower_bound=lower, upper_bound=upper ) for unf_state in self.values: unf = float(unf_state.state) filtered = filt.filter_state(unf_state) if unf < lower: assert lower == filtered.state elif unf > upper: assert upper == filtered.state else: assert unf == filtered.state
[ "def", "test_range_zero", "(", "self", ")", ":", "lower", "=", "0", "upper", "=", "0", "filt", "=", "RangeFilter", "(", "entity", "=", "None", ",", "precision", "=", "2", ",", "lower_bound", "=", "lower", ",", "upper_bound", "=", "upper", ")", "for", "unf_state", "in", "self", ".", "values", ":", "unf", "=", "float", "(", "unf_state", ".", "state", ")", "filtered", "=", "filt", ".", "filter_state", "(", "unf_state", ")", "if", "unf", "<", "lower", ":", "assert", "lower", "==", "filtered", ".", "state", "elif", "unf", ">", "upper", ":", "assert", "upper", "==", "filtered", ".", "state", "else", ":", "assert", "unf", "==", "filtered", ".", "state" ]
[ 266, 4 ]
[ 281, 44 ]
python
en
['en', 'en', 'en']
True
TestFilterSensor.test_throttle
(self)
Test if throttle filter works.
Test if throttle filter works.
def test_throttle(self): """Test if throttle filter works.""" filt = ThrottleFilter(window_size=3, precision=2, entity=None) filtered = [] for state in self.values: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) assert [20, 21] == [f.state for f in filtered]
[ "def", "test_throttle", "(", "self", ")", ":", "filt", "=", "ThrottleFilter", "(", "window_size", "=", "3", ",", "precision", "=", "2", ",", "entity", "=", "None", ")", "filtered", "=", "[", "]", "for", "state", "in", "self", ".", "values", ":", "new_state", "=", "filt", ".", "filter_state", "(", "state", ")", "if", "not", "filt", ".", "skip_processing", ":", "filtered", ".", "append", "(", "new_state", ")", "assert", "[", "20", ",", "21", "]", "==", "[", "f", ".", "state", "for", "f", "in", "filtered", "]" ]
[ 283, 4 ]
[ 291, 54 ]
python
en
['nl', 'en', 'en']
True
TestFilterSensor.test_time_throttle
(self)
Test if time_throttle filter works.
Test if time_throttle filter works.
def test_time_throttle(self): """Test if time_throttle filter works.""" filt = TimeThrottleFilter( window_size=timedelta(minutes=2), precision=2, entity=None ) filtered = [] for state in self.values: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) assert [20, 18, 22] == [f.state for f in filtered]
[ "def", "test_time_throttle", "(", "self", ")", ":", "filt", "=", "TimeThrottleFilter", "(", "window_size", "=", "timedelta", "(", "minutes", "=", "2", ")", ",", "precision", "=", "2", ",", "entity", "=", "None", ")", "filtered", "=", "[", "]", "for", "state", "in", "self", ".", "values", ":", "new_state", "=", "filt", ".", "filter_state", "(", "state", ")", "if", "not", "filt", ".", "skip_processing", ":", "filtered", ".", "append", "(", "new_state", ")", "assert", "[", "20", ",", "18", ",", "22", "]", "==", "[", "f", ".", "state", "for", "f", "in", "filtered", "]" ]
[ 293, 4 ]
[ 303, 58 ]
python
en
['nl', 'en', 'en']
True
TestFilterSensor.test_time_sma
(self)
Test if time_sma filter works.
Test if time_sma filter works.
def test_time_sma(self): """Test if time_sma filter works.""" filt = TimeSMAFilter( window_size=timedelta(minutes=2), precision=2, entity=None, type="last" ) for state in self.values: filtered = filt.filter_state(state) assert 21.5 == filtered.state
[ "def", "test_time_sma", "(", "self", ")", ":", "filt", "=", "TimeSMAFilter", "(", "window_size", "=", "timedelta", "(", "minutes", "=", "2", ")", ",", "precision", "=", "2", ",", "entity", "=", "None", ",", "type", "=", "\"last\"", ")", "for", "state", "in", "self", ".", "values", ":", "filtered", "=", "filt", ".", "filter_state", "(", "state", ")", "assert", "21.5", "==", "filtered", ".", "state" ]
[ 305, 4 ]
[ 312, 37 ]
python
en
['en', 'en', 'en']
True
dedup
(lst)
Preserves order
Preserves order
def dedup(lst): """Preserves order""" new_lst = [] for item in lst: if not item: continue elif item in new_lst: continue else: new_lst.append(item) return new_lst
[ "def", "dedup", "(", "lst", ")", ":", "new_lst", "=", "[", "]", "for", "item", "in", "lst", ":", "if", "not", "item", ":", "continue", "elif", "item", "in", "new_lst", ":", "continue", "else", ":", "new_lst", ".", "append", "(", "item", ")", "return", "new_lst" ]
[ 1246, 0 ]
[ 1256, 18 ]
python
en
['en', 'af', 'en']
False
TatoebaConverter.resolve_lang_code
(self, r)
R is a row in ported
R is a row in ported
def resolve_lang_code(self, r) -> Tuple[List[str], str, str]: """R is a row in ported""" short_pair = r.short_pair src, tgt = short_pair.split("-") src_tags, src_multilingual = self.get_tags(src, r.src_name) assert isinstance(src_tags, list) tgt_tags, tgt_multilingual = self.get_tags(tgt, r.tgt_name) assert isinstance(tgt_tags, list) return dedup(src_tags + tgt_tags), src_multilingual, tgt_multilingual
[ "def", "resolve_lang_code", "(", "self", ",", "r", ")", "->", "Tuple", "[", "List", "[", "str", "]", ",", "str", ",", "str", "]", ":", "short_pair", "=", "r", ".", "short_pair", "src", ",", "tgt", "=", "short_pair", ".", "split", "(", "\"-\"", ")", "src_tags", ",", "src_multilingual", "=", "self", ".", "get_tags", "(", "src", ",", "r", ".", "src_name", ")", "assert", "isinstance", "(", "src_tags", ",", "list", ")", "tgt_tags", ",", "tgt_multilingual", "=", "self", ".", "get_tags", "(", "tgt", ",", "r", ".", "tgt_name", ")", "assert", "isinstance", "(", "tgt_tags", ",", "list", ")", "return", "dedup", "(", "src_tags", "+", "tgt_tags", ")", ",", "src_multilingual", ",", "tgt_multilingual" ]
[ 149, 4 ]
[ 158, 77 ]
python
en
['en', 'en', 'en']
True
TatoebaConverter.write_model_card
( self, hf_model_id: str, repo_root=DEFAULT_REPO, dry_run=False, )
Copy the most recent model's readme section from opus, and add metadata. upload command: aws s3 sync model_card_dir s3://models.huggingface.co/bert/Helsinki-NLP/ --dryrun
Copy the most recent model's readme section from opus, and add metadata. upload command: aws s3 sync model_card_dir s3://models.huggingface.co/bert/Helsinki-NLP/ --dryrun
def write_model_card( self, hf_model_id: str, repo_root=DEFAULT_REPO, dry_run=False, ) -> str: """ Copy the most recent model's readme section from opus, and add metadata. upload command: aws s3 sync model_card_dir s3://models.huggingface.co/bert/Helsinki-NLP/ --dryrun """ short_pair = remove_prefix(hf_model_id, "opus-mt-") extra_metadata = self.metadata.loc[short_pair].drop("2m") extra_metadata["short_pair"] = short_pair lang_tags, src_multilingual, tgt_multilingual = self.resolve_lang_code(extra_metadata) opus_name = f"{extra_metadata.src_alpha3}-{extra_metadata.tgt_alpha3}" # opus_name: str = self.convert_hf_name_to_opus_name(hf_model_name) assert repo_root in ("OPUS-MT-train", "Tatoeba-Challenge") opus_readme_path = Path(repo_root).joinpath("models", opus_name, "README.md") assert opus_readme_path.exists(), f"Readme file {opus_readme_path} not found" opus_src, opus_tgt = [x.split("+") for x in opus_name.split("-")] readme_url = f"https://github.com/Helsinki-NLP/{repo_root}/tree/master/models/{opus_name}/README.md" s, t = ",".join(opus_src), ",".join(opus_tgt) metadata = { "hf_name": short_pair, "source_languages": s, "target_languages": t, "opus_readme_url": readme_url, "original_repo": repo_root, "tags": ["translation"], "languages": lang_tags, } lang_tags = l2front_matter(lang_tags) metadata["src_constituents"] = self.constituents[s] metadata["tgt_constituents"] = self.constituents[t] metadata["src_multilingual"] = src_multilingual metadata["tgt_multilingual"] = tgt_multilingual metadata.update(extra_metadata) metadata.update(get_system_metadata(repo_root)) # combine with Tatoeba markdown extra_markdown = f"### {short_pair}\n\n* source group: {metadata['src_name']} \n* target group: {metadata['tgt_name']} \n* OPUS readme: [{opus_name}]({readme_url})\n" content = opus_readme_path.open().read() content = content.split("\n# ")[-1] # Get the lowest level 1 header in the README -- the most recent model. splat = content.split("*")[2:] content = "*".join(splat) # BETTER FRONT MATTER LOGIC content = ( FRONT_MATTER_TEMPLATE.format(lang_tags) + extra_markdown + "\n* " + content.replace("download", "download original " "weights") ) items = "\n\n".join([f"- {k}: {v}" for k, v in metadata.items()]) sec3 = "\n### System Info: \n" + items content += sec3 if dry_run: return content, metadata sub_dir = self.model_card_dir / hf_model_id sub_dir.mkdir(exist_ok=True) dest = sub_dir / "README.md" dest.open("w").write(content) pd.Series(metadata).to_json(sub_dir / "metadata.json") return content, metadata
[ "def", "write_model_card", "(", "self", ",", "hf_model_id", ":", "str", ",", "repo_root", "=", "DEFAULT_REPO", ",", "dry_run", "=", "False", ",", ")", "->", "str", ":", "short_pair", "=", "remove_prefix", "(", "hf_model_id", ",", "\"opus-mt-\"", ")", "extra_metadata", "=", "self", ".", "metadata", ".", "loc", "[", "short_pair", "]", ".", "drop", "(", "\"2m\"", ")", "extra_metadata", "[", "\"short_pair\"", "]", "=", "short_pair", "lang_tags", ",", "src_multilingual", ",", "tgt_multilingual", "=", "self", ".", "resolve_lang_code", "(", "extra_metadata", ")", "opus_name", "=", "f\"{extra_metadata.src_alpha3}-{extra_metadata.tgt_alpha3}\"", "# opus_name: str = self.convert_hf_name_to_opus_name(hf_model_name)", "assert", "repo_root", "in", "(", "\"OPUS-MT-train\"", ",", "\"Tatoeba-Challenge\"", ")", "opus_readme_path", "=", "Path", "(", "repo_root", ")", ".", "joinpath", "(", "\"models\"", ",", "opus_name", ",", "\"README.md\"", ")", "assert", "opus_readme_path", ".", "exists", "(", ")", ",", "f\"Readme file {opus_readme_path} not found\"", "opus_src", ",", "opus_tgt", "=", "[", "x", ".", "split", "(", "\"+\"", ")", "for", "x", "in", "opus_name", ".", "split", "(", "\"-\"", ")", "]", "readme_url", "=", "f\"https://github.com/Helsinki-NLP/{repo_root}/tree/master/models/{opus_name}/README.md\"", "s", ",", "t", "=", "\",\"", ".", "join", "(", "opus_src", ")", ",", "\",\"", ".", "join", "(", "opus_tgt", ")", "metadata", "=", "{", "\"hf_name\"", ":", "short_pair", ",", "\"source_languages\"", ":", "s", ",", "\"target_languages\"", ":", "t", ",", "\"opus_readme_url\"", ":", "readme_url", ",", "\"original_repo\"", ":", "repo_root", ",", "\"tags\"", ":", "[", "\"translation\"", "]", ",", "\"languages\"", ":", "lang_tags", ",", "}", "lang_tags", "=", "l2front_matter", "(", "lang_tags", ")", "metadata", "[", "\"src_constituents\"", "]", "=", "self", ".", "constituents", "[", "s", "]", "metadata", "[", "\"tgt_constituents\"", "]", "=", "self", ".", "constituents", "[", "t", "]", "metadata", "[", "\"src_multilingual\"", "]", "=", "src_multilingual", "metadata", "[", "\"tgt_multilingual\"", "]", "=", "tgt_multilingual", "metadata", ".", "update", "(", "extra_metadata", ")", "metadata", ".", "update", "(", "get_system_metadata", "(", "repo_root", ")", ")", "# combine with Tatoeba markdown", "extra_markdown", "=", "f\"### {short_pair}\\n\\n* source group: {metadata['src_name']} \\n* target group: {metadata['tgt_name']} \\n* OPUS readme: [{opus_name}]({readme_url})\\n\"", "content", "=", "opus_readme_path", ".", "open", "(", ")", ".", "read", "(", ")", "content", "=", "content", ".", "split", "(", "\"\\n# \"", ")", "[", "-", "1", "]", "# Get the lowest level 1 header in the README -- the most recent model.", "splat", "=", "content", ".", "split", "(", "\"*\"", ")", "[", "2", ":", "]", "content", "=", "\"*\"", ".", "join", "(", "splat", ")", "# BETTER FRONT MATTER LOGIC", "content", "=", "(", "FRONT_MATTER_TEMPLATE", ".", "format", "(", "lang_tags", ")", "+", "extra_markdown", "+", "\"\\n* \"", "+", "content", ".", "replace", "(", "\"download\"", ",", "\"download original \"", "\"weights\"", ")", ")", "items", "=", "\"\\n\\n\"", ".", "join", "(", "[", "f\"- {k}: {v}\"", "for", "k", ",", "v", "in", "metadata", ".", "items", "(", ")", "]", ")", "sec3", "=", "\"\\n### System Info: \\n\"", "+", "items", "content", "+=", "sec3", "if", "dry_run", ":", "return", "content", ",", "metadata", "sub_dir", "=", "self", ".", "model_card_dir", "/", "hf_model_id", "sub_dir", ".", "mkdir", "(", "exist_ok", "=", "True", ")", 
"dest", "=", "sub_dir", "/", "\"README.md\"", "dest", ".", "open", "(", "\"w\"", ")", ".", "write", "(", "content", ")", "pd", ".", "Series", "(", "metadata", ")", ".", "to_json", "(", "sub_dir", "/", "\"metadata.json\"", ")", "return", "content", ",", "metadata" ]
[ 160, 4 ]
[ 233, 32 ]
python
en
['en', 'error', 'th']
False
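A hedged usage sketch (the model id is an example and `converter` stands for an already-built TatoebaConverter); note that despite the `-> str` annotation the function actually returns a `(content, metadata)` pair:

# dry_run=True renders the card text and metadata without touching model_card_dir.
content, metadata = converter.write_model_card("opus-mt-eng-fra", dry_run=True)
print(metadata["hf_name"], metadata["opus_readme_url"])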
async_setup
(hass: HomeAssistant, config: dict)
Set up the NEW_NAME integration.
Set up the NEW_NAME integration.
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the NEW_NAME integration."""
    return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "dict", ")", ":", "return", "True" ]
[ 10, 0 ]
[ 12, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass, config_entry, async_add_entities)
Set up Z-Wave Cover from Config Entry.
Set up Z-Wave Cover from Config Entry.
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Cover from Config Entry."""

    @callback
    def async_add_cover(values):
        """Add Z-Wave Cover."""
        if values.primary.command_class == CommandClass.BARRIER_OPERATOR:
            cover = ZwaveGarageDoorBarrier(values)
        else:
            cover = ZWaveCoverEntity(values)
        async_add_entities([cover])

    hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append(
        async_dispatcher_connect(hass, f"{DOMAIN}_new_{COVER_DOMAIN}", async_add_cover)
    )
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "@", "callback", "def", "async_add_cover", "(", "values", ")", ":", "\"\"\"Add Z-Wave Cover.\"\"\"", "if", "values", ".", "primary", ".", "command_class", "==", "CommandClass", ".", "BARRIER_OPERATOR", ":", "cover", "=", "ZwaveGarageDoorBarrier", "(", "values", ")", "else", ":", "cover", "=", "ZWaveCoverEntity", "(", "values", ")", "async_add_entities", "(", "[", "cover", "]", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry", ".", "entry_id", "]", "[", "DATA_UNSUBSCRIBE", "]", ".", "append", "(", "async_dispatcher_connect", "(", "hass", ",", "f\"{DOMAIN}_new_{COVER_DOMAIN}\"", ",", "async_add_cover", ")", ")" ]
[ 23, 0 ]
[ 38, 5 ]
python
en
['en', 'en', 'en']
True
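A minimal sketch of exercising the dispatcher hook registered above (illustrative only; `values` stands in for the Z-Wave value collection the integration normally passes):

from homeassistant.helpers.dispatcher import async_dispatcher_send

# Firing the same signal name triggers async_add_cover, which picks
# ZwaveGarageDoorBarrier for BARRIER_OPERATOR values and ZWaveCoverEntity otherwise.
async_dispatcher_send(hass, f"{DOMAIN}_new_{COVER_DOMAIN}", values)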
percent_to_zwave_position
(value)
Convert position in 0-100 scale to 0-99 scale. `value` -- (int) Position byte value from 0-100.
Convert position in 0-100 scale to 0-99 scale.
def percent_to_zwave_position(value):
    """Convert position in 0-100 scale to 0-99 scale.

    `value` -- (int) Position byte value from 0-100.
    """
    if value > 0:
        return max(1, round((value / 100) * 99))
    return 0
[ "def", "percent_to_zwave_position", "(", "value", ")", ":", "if", "value", ">", "0", ":", "return", "max", "(", "1", ",", "round", "(", "(", "value", "/", "100", ")", "*", "99", ")", ")", "return", "0" ]
[ 41, 0 ]
[ 48, 12 ]
python
en
['en', 'en', 'en']
True
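A quick sanity check of the scaling, derived directly from the formula above:

assert percent_to_zwave_position(0) == 0     # closed stays 0
assert percent_to_zwave_position(1) == 1     # any open position is clamped to at least 1
assert percent_to_zwave_position(50) == 50   # round((50 / 100) * 99) == round(49.5) == 50
assert percent_to_zwave_position(100) == 99  # fully open maps to the Z-Wave maximum of 99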
ZWaveCoverEntity.supported_features
(self)
Flag supported features.
Flag supported features.
def supported_features(self):
        """Flag supported features."""
        return SUPPORTED_FEATURES_POSITION
[ "def", "supported_features", "(", "self", ")", ":", "return", "SUPPORTED_FEATURES_POSITION" ]
[ 55, 4 ]
[ 57, 42 ]
python
en
['da', 'en', 'en']
True
ZWaveCoverEntity.is_closed
(self)
Return true if cover is closed.
Return true if cover is closed.
def is_closed(self):
        """Return true if cover is closed."""
        return self.values.primary.value == 0
[ "def", "is_closed", "(", "self", ")", ":", "return", "self", ".", "values", ".", "primary", ".", "value", "==", "0" ]
[ 60, 4 ]
[ 62, 45 ]
python
en
['en', 'en', 'en']
True
ZWaveCoverEntity.current_cover_position
(self)
Return the current position of cover where 0 means closed and 100 is fully open.
Return the current position of cover where 0 means closed and 100 is fully open.
def current_cover_position(self):
        """Return the current position of cover where 0 means closed and 100 is fully open."""
        return round((self.values.primary.value / 99) * 100)
[ "def", "current_cover_position", "(", "self", ")", ":", "return", "round", "(", "(", "self", ".", "values", ".", "primary", ".", "value", "/", "99", ")", "*", "100", ")" ]
[ 65, 4 ]
[ 67, 60 ]
python
en
['en', 'en', 'en']
True
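Reading back, the 0-99 device value is rescaled to 0-100. A small arithmetic check of the expression above (plain numbers, not taken from a live device):

round((99 / 99) * 100)  # 100: device maximum reads back as fully open
round((0 / 99) * 100)   # 0: closed
round((50 / 99) * 100)  # 51: intermediate values shift slightly, the two scalings are not exact inverses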
ZWaveCoverEntity.async_set_cover_position
(self, **kwargs)
Move the cover to a specific position.
Move the cover to a specific position.
async def async_set_cover_position(self, **kwargs):
        """Move the cover to a specific position."""
        self.values.primary.send_value(percent_to_zwave_position(kwargs[ATTR_POSITION]))
[ "async", "def", "async_set_cover_position", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "values", ".", "primary", ".", "send_value", "(", "percent_to_zwave_position", "(", "kwargs", "[", "ATTR_POSITION", "]", ")", ")" ]
[ 69, 4 ]
[ 71, 88 ]
python
en
['en', 'en', 'en']
True
ZWaveCoverEntity.async_open_cover
(self, **kwargs)
Open the cover.
Open the cover.
async def async_open_cover(self, **kwargs):
        """Open the cover."""
        self.values.primary.send_value(99)
[ "async", "def", "async_open_cover", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "values", ".", "primary", ".", "send_value", "(", "99", ")" ]
[ 73, 4 ]
[ 75, 42 ]
python
en
['en', 'en', 'en']
True
ZWaveCoverEntity.async_close_cover
(self, **kwargs)
Close cover.
Close cover.
async def async_close_cover(self, **kwargs):
        """Close cover."""
        self.values.primary.send_value(0)
[ "async", "def", "async_close_cover", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "values", ".", "primary", ".", "send_value", "(", "0", ")" ]
[ 77, 4 ]
[ 79, 41 ]
python
en
['en', 'la', 'en']
False
ZwaveGarageDoorBarrier.supported_features
(self)
Flag supported features.
Flag supported features.
def supported_features(self):
        """Flag supported features."""
        return SUPPORT_GARAGE
[ "def", "supported_features", "(", "self", ")", ":", "return", "SUPPORT_GARAGE" ]
[ 86, 4 ]
[ 88, 29 ]
python
en
['da', 'en', 'en']
True
ZwaveGarageDoorBarrier.device_class
(self)
Return the class of this device, from component DEVICE_CLASSES.
Return the class of this device, from component DEVICE_CLASSES.
def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        return DEVICE_CLASS_GARAGE
[ "def", "device_class", "(", "self", ")", ":", "return", "DEVICE_CLASS_GARAGE" ]
[ 91, 4 ]
[ 93, 34 ]
python
en
['en', 'en', 'en']
True
ZwaveGarageDoorBarrier.is_opening
(self)
Return true if cover is in an opening state.
Return true if cover is in an opening state.
def is_opening(self):
        """Return true if cover is in an opening state."""
        return self.values.primary.value[VALUE_SELECTED_ID] == 3
[ "def", "is_opening", "(", "self", ")", ":", "return", "self", ".", "values", ".", "primary", ".", "value", "[", "VALUE_SELECTED_ID", "]", "==", "3" ]
[ 96, 4 ]
[ 98, 64 ]
python
en
['en', 'en', 'en']
True
ZwaveGarageDoorBarrier.is_closing
(self)
Return true if cover is in a closing state.
Return true if cover is in a closing state.
def is_closing(self):
        """Return true if cover is in a closing state."""
        return self.values.primary.value[VALUE_SELECTED_ID] == 1
[ "def", "is_closing", "(", "self", ")", ":", "return", "self", ".", "values", ".", "primary", ".", "value", "[", "VALUE_SELECTED_ID", "]", "==", "1" ]
[ 101, 4 ]
[ 103, 64 ]
python
en
['en', 'en', 'en']
True
ZwaveGarageDoorBarrier.is_closed
(self)
Return the current position of Zwave garage door.
Return the current position of Zwave garage door.
def is_closed(self):
        """Return the current position of Zwave garage door."""
        return self.values.primary.value[VALUE_SELECTED_ID] == 0
[ "def", "is_closed", "(", "self", ")", ":", "return", "self", ".", "values", ".", "primary", ".", "value", "[", "VALUE_SELECTED_ID", "]", "==", "0" ]
[ 106, 4 ]
[ 108, 64 ]
python
en
['en', 'nl', 'en']
True
ZwaveGarageDoorBarrier.async_close_cover
(self, **kwargs)
Close the garage door.
Close the garage door.
async def async_close_cover(self, **kwargs):
        """Close the garage door."""
        self.values.primary.send_value(0)
[ "async", "def", "async_close_cover", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "values", ".", "primary", ".", "send_value", "(", "0", ")" ]
[ 110, 4 ]
[ 112, 41 ]
python
en
['en', 'nl', 'en']
True
ZwaveGarageDoorBarrier.async_open_cover
(self, **kwargs)
Open the garage door.
Open the garage door.
async def async_open_cover(self, **kwargs):
        """Open the garage door."""
        self.values.primary.send_value(4)
[ "async", "def", "async_open_cover", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "values", ".", "primary", ".", "send_value", "(", "4", ")" ]
[ 114, 4 ]
[ 116, 41 ]
python
en
['en', 'nl', 'en']
True
_add_player
(hass, async_add_entities, host, port=None, name=None)
Add Bluesound players.
Add Bluesound players.
def _add_player(hass, async_add_entities, host, port=None, name=None):
    """Add Bluesound players."""
    if host in [x.host for x in hass.data[DATA_BLUESOUND]]:
        return

    @callback
    def _init_player(event=None):
        """Start polling."""
        hass.async_create_task(player.async_init())

    @callback
    def _start_polling(event=None):
        """Start polling."""
        player.start_polling()

    @callback
    def _stop_polling():
        """Stop polling."""
        player.stop_polling()

    @callback
    def _add_player_cb():
        """Add player after first sync fetch."""
        async_add_entities([player])
        _LOGGER.info("Added device with name: %s", player.name)

        if hass.is_running:
            _start_polling()
        else:
            hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _start_polling)

        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _stop_polling)

    player = BluesoundPlayer(hass, host, port, name, _add_player_cb)
    hass.data[DATA_BLUESOUND].append(player)

    if hass.is_running:
        _init_player()
    else:
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _init_player)
[ "def", "_add_player", "(", "hass", ",", "async_add_entities", ",", "host", ",", "port", "=", "None", ",", "name", "=", "None", ")", ":", "if", "host", "in", "[", "x", ".", "host", "for", "x", "in", "hass", ".", "data", "[", "DATA_BLUESOUND", "]", "]", ":", "return", "@", "callback", "def", "_init_player", "(", "event", "=", "None", ")", ":", "\"\"\"Start polling.\"\"\"", "hass", ".", "async_create_task", "(", "player", ".", "async_init", "(", ")", ")", "@", "callback", "def", "_start_polling", "(", "event", "=", "None", ")", ":", "\"\"\"Start polling.\"\"\"", "player", ".", "start_polling", "(", ")", "@", "callback", "def", "_stop_polling", "(", ")", ":", "\"\"\"Stop polling.\"\"\"", "player", ".", "stop_polling", "(", ")", "@", "callback", "def", "_add_player_cb", "(", ")", ":", "\"\"\"Add player after first sync fetch.\"\"\"", "async_add_entities", "(", "[", "player", "]", ")", "_LOGGER", ".", "info", "(", "\"Added device with name: %s\"", ",", "player", ".", "name", ")", "if", "hass", ".", "is_running", ":", "_start_polling", "(", ")", "else", ":", "hass", ".", "bus", ".", "async_listen_once", "(", "EVENT_HOMEASSISTANT_START", ",", "_start_polling", ")", "hass", ".", "bus", ".", "async_listen_once", "(", "EVENT_HOMEASSISTANT_STOP", ",", "_stop_polling", ")", "player", "=", "BluesoundPlayer", "(", "hass", ",", "host", ",", "port", ",", "name", ",", "_add_player_cb", ")", "hass", ".", "data", "[", "DATA_BLUESOUND", "]", ".", "append", "(", "player", ")", "if", "hass", ".", "is_running", ":", "_init_player", "(", ")", "else", ":", "hass", ".", "bus", ".", "async_listen_once", "(", "EVENT_HOMEASSISTANT_START", ",", "_init_player", ")" ]
[ 106, 0 ]
[ 145, 75 ]
python
en
['en', 'ny', 'en']
True
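A minimal wiring sketch for the helper above (host, port and name are placeholder values; in practice discovery or the platform setup supplies them):

# Creates a BluesoundPlayer, registers it in hass.data[DATA_BLUESOUND], and
# either initializes it right away or waits for EVENT_HOMEASSISTANT_START.
_add_player(hass, async_add_entities, "192.168.1.50", 11000, "Living Room")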
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the Bluesound platforms.
Set up the Bluesound platforms.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Bluesound platforms."""
    if DATA_BLUESOUND not in hass.data:
        hass.data[DATA_BLUESOUND] = []

    if discovery_info:
        _add_player(
            hass,
            async_add_entities,
            discovery_info.get(CONF_HOST),
            discovery_info.get(CONF_PORT),
        )
        return

    hosts = config.get(CONF_HOSTS)
    if hosts:
        for host in hosts:
            _add_player(
                hass,
                async_add_entities,
                host.get(CONF_HOST),
                host.get(CONF_PORT),
                host.get(CONF_NAME),
            )

    async def async_service_handler(service):
        """Map services to method of Bluesound devices."""
        method = SERVICE_TO_METHOD.get(service.service)
        if not method:
            return

        params = {
            key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID
        }
        entity_ids = service.data.get(ATTR_ENTITY_ID)
        if entity_ids:
            target_players = [
                player
                for player in hass.data[DATA_BLUESOUND]
                if player.entity_id in entity_ids
            ]
        else:
            target_players = hass.data[DATA_BLUESOUND]

        for player in target_players:
            await getattr(player, method["method"])(**params)

    for service in SERVICE_TO_METHOD:
        schema = SERVICE_TO_METHOD[service]["schema"]
        hass.services.async_register(
            DOMAIN, service, async_service_handler, schema=schema
        )
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "if", "DATA_BLUESOUND", "not", "in", "hass", ".", "data", ":", "hass", ".", "data", "[", "DATA_BLUESOUND", "]", "=", "[", "]", "if", "discovery_info", ":", "_add_player", "(", "hass", ",", "async_add_entities", ",", "discovery_info", ".", "get", "(", "CONF_HOST", ")", ",", "discovery_info", ".", "get", "(", "CONF_PORT", ")", ",", ")", "return", "hosts", "=", "config", ".", "get", "(", "CONF_HOSTS", ")", "if", "hosts", ":", "for", "host", "in", "hosts", ":", "_add_player", "(", "hass", ",", "async_add_entities", ",", "host", ".", "get", "(", "CONF_HOST", ")", ",", "host", ".", "get", "(", "CONF_PORT", ")", ",", "host", ".", "get", "(", "CONF_NAME", ")", ",", ")", "async", "def", "async_service_handler", "(", "service", ")", ":", "\"\"\"Map services to method of Bluesound devices.\"\"\"", "method", "=", "SERVICE_TO_METHOD", ".", "get", "(", "service", ".", "service", ")", "if", "not", "method", ":", "return", "params", "=", "{", "key", ":", "value", "for", "key", ",", "value", "in", "service", ".", "data", ".", "items", "(", ")", "if", "key", "!=", "ATTR_ENTITY_ID", "}", "entity_ids", "=", "service", ".", "data", ".", "get", "(", "ATTR_ENTITY_ID", ")", "if", "entity_ids", ":", "target_players", "=", "[", "player", "for", "player", "in", "hass", ".", "data", "[", "DATA_BLUESOUND", "]", "if", "player", ".", "entity_id", "in", "entity_ids", "]", "else", ":", "target_players", "=", "hass", ".", "data", "[", "DATA_BLUESOUND", "]", "for", "player", "in", "target_players", ":", "await", "getattr", "(", "player", ",", "method", "[", "\"method\"", "]", ")", "(", "*", "*", "params", ")", "for", "service", "in", "SERVICE_TO_METHOD", ":", "schema", "=", "SERVICE_TO_METHOD", "[", "service", "]", "[", "\"schema\"", "]", "hass", ".", "services", ".", "async_register", "(", "DOMAIN", ",", "service", ",", "async_service_handler", ",", "schema", "=", "schema", ")" ]
[ 148, 0 ]
[ 199, 9 ]
python
en
['en', 'cs', 'en']
True
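For orientation, the two input paths handled above expect data shaped roughly as follows (the CONF_* keys come from the component's constants; addresses and names are placeholders):

# Discovered player: a single host/port pair.
discovery_info = {CONF_HOST: "192.168.1.50", CONF_PORT: 11000}

# Static configuration: CONF_HOSTS maps to a list of per-player entries.
config = {
    CONF_HOSTS: [
        {CONF_HOST: "192.168.1.50", CONF_PORT: 11000, CONF_NAME: "Living Room"},
    ]
}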
BluesoundPlayer.__init__
(self, hass, host, port=None, name=None, init_callback=None)
Initialize the media player.
Initialize the media player.
def __init__(self, hass, host, port=None, name=None, init_callback=None):
        """Initialize the media player."""
        self.host = host
        self._hass = hass
        self.port = port
        self._polling_session = async_get_clientsession(hass)
        self._polling_task = None  # The actual polling task.
        self._name = name
        self._icon = None
        self._capture_items = []
        self._services_items = []
        self._preset_items = []
        self._sync_status = {}
        self._status = None
        self._last_status_update = None
        self._is_online = False
        self._retry_remove = None
        self._muted = False
        self._master = None
        self._is_master = False
        self._group_name = None
        self._group_list = []
        self._bluesound_device_name = None

        self._init_callback = init_callback
        if self.port is None:
            self.port = DEFAULT_PORT
[ "def", "__init__", "(", "self", ",", "hass", ",", "host", ",", "port", "=", "None", ",", "name", "=", "None", ",", "init_callback", "=", "None", ")", ":", "self", ".", "host", "=", "host", "self", ".", "_hass", "=", "hass", "self", ".", "port", "=", "port", "self", ".", "_polling_session", "=", "async_get_clientsession", "(", "hass", ")", "self", ".", "_polling_task", "=", "None", "# The actual polling task.", "self", ".", "_name", "=", "name", "self", ".", "_icon", "=", "None", "self", ".", "_capture_items", "=", "[", "]", "self", ".", "_services_items", "=", "[", "]", "self", ".", "_preset_items", "=", "[", "]", "self", ".", "_sync_status", "=", "{", "}", "self", ".", "_status", "=", "None", "self", ".", "_last_status_update", "=", "None", "self", ".", "_is_online", "=", "False", "self", ".", "_retry_remove", "=", "None", "self", ".", "_muted", "=", "False", "self", ".", "_master", "=", "None", "self", ".", "_is_master", "=", "False", "self", ".", "_group_name", "=", "None", "self", ".", "_group_list", "=", "[", "]", "self", ".", "_bluesound_device_name", "=", "None", "self", ".", "_init_callback", "=", "init_callback", "if", "self", ".", "port", "is", "None", ":", "self", ".", "port", "=", "DEFAULT_PORT" ]
[ 205, 4 ]
[ 231, 36 ]
python
en
['en', 'en', 'en']
True
BluesoundPlayer._try_get_index
(string, search_string)
Get the index.
Get the index.
def _try_get_index(string, search_string):
        """Get the index."""
        try:
            return string.index(search_string)
        except ValueError:
            return -1
[ "def", "_try_get_index", "(", "string", ",", "search_string", ")", ":", "try", ":", "return", "string", ".", "index", "(", "search_string", ")", "except", "ValueError", ":", "return", "-", "1" ]
[ 237, 4 ]
[ 242, 21 ]
python
en
['en', 'nl', 'en']
True
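The helper just turns str.index's ValueError into a -1 sentinel; assuming it is the @staticmethod its signature suggests:

BluesoundPlayer._try_get_index("Living Room", "Room")     # 7
BluesoundPlayer._try_get_index("Living Room", "Kitchen")  # -1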
BluesoundPlayer.force_update_sync_status
(self, on_updated_cb=None, raise_timeout=False)
Update the internal status.
Update the internal status.
async def force_update_sync_status(self, on_updated_cb=None, raise_timeout=False):
        """Update the internal status."""
        resp = await self.send_bluesound_command(
            "SyncStatus", raise_timeout, raise_timeout
        )

        if not resp:
            return None
        self._sync_status = resp["SyncStatus"].copy()

        if not self._name:
            self._name = self._sync_status.get("@name", self.host)
        if not self._bluesound_device_name:
            self._bluesound_device_name = self._sync_status.get("@name", self.host)
        if not self._icon:
            self._icon = self._sync_status.get("@icon", self.host)

        master = self._sync_status.get("master")
        if master is not None:
            self._is_master = False
            master_host = master.get("#text")
            master_device = [
                device
                for device in self._hass.data[DATA_BLUESOUND]
                if device.host == master_host
            ]
            if master_device and master_host != self.host:
                self._master = master_device[0]
            else:
                self._master = None
                _LOGGER.error("Master not found %s", master_host)
        else:
            if self._master is not None:
                self._master = None
            slaves = self._sync_status.get("slave")
            self._is_master = slaves is not None

        if on_updated_cb:
            on_updated_cb()
        return True
[ "async", "def", "force_update_sync_status", "(", "self", ",", "on_updated_cb", "=", "None", ",", "raise_timeout", "=", "False", ")", ":", "resp", "=", "await", "self", ".", "send_bluesound_command", "(", "\"SyncStatus\"", ",", "raise_timeout", ",", "raise_timeout", ")", "if", "not", "resp", ":", "return", "None", "self", ".", "_sync_status", "=", "resp", "[", "\"SyncStatus\"", "]", ".", "copy", "(", ")", "if", "not", "self", ".", "_name", ":", "self", ".", "_name", "=", "self", ".", "_sync_status", ".", "get", "(", "\"@name\"", ",", "self", ".", "host", ")", "if", "not", "self", ".", "_bluesound_device_name", ":", "self", ".", "_bluesound_device_name", "=", "self", ".", "_sync_status", ".", "get", "(", "\"@name\"", ",", "self", ".", "host", ")", "if", "not", "self", ".", "_icon", ":", "self", ".", "_icon", "=", "self", ".", "_sync_status", ".", "get", "(", "\"@icon\"", ",", "self", ".", "host", ")", "master", "=", "self", ".", "_sync_status", ".", "get", "(", "\"master\"", ")", "if", "master", "is", "not", "None", ":", "self", ".", "_is_master", "=", "False", "master_host", "=", "master", ".", "get", "(", "\"#text\"", ")", "master_device", "=", "[", "device", "for", "device", "in", "self", ".", "_hass", ".", "data", "[", "DATA_BLUESOUND", "]", "if", "device", ".", "host", "==", "master_host", "]", "if", "master_device", "and", "master_host", "!=", "self", ".", "host", ":", "self", ".", "_master", "=", "master_device", "[", "0", "]", "else", ":", "self", ".", "_master", "=", "None", "_LOGGER", ".", "error", "(", "\"Master not found %s\"", ",", "master_host", ")", "else", ":", "if", "self", ".", "_master", "is", "not", "None", ":", "self", ".", "_master", "=", "None", "slaves", "=", "self", ".", "_sync_status", ".", "get", "(", "\"slave\"", ")", "self", ".", "_is_master", "=", "slaves", "is", "not", "None", "if", "on_updated_cb", ":", "on_updated_cb", "(", ")", "return", "True" ]
[ 244, 4 ]
[ 284, 19 ]
python
en
['en', 'sn', 'en']
True
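The accessors above imply a parsed SyncStatus payload of roughly this shape (keys inferred from the function body; values are placeholders):

resp = {
    "SyncStatus": {
        "@name": "Living Room",               # fallback for _name and _bluesound_device_name
        "@icon": "/images/players/icon.png",  # fallback for _icon
        "master": {"#text": "192.168.1.40"},  # only present when grouped under another player
        # "slave": [...]                      # present on the master side instead; sets _is_master
    }
}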
BluesoundPlayer._start_poll_command
(self)
Loop which polls the status of the player.
Loop which polls the status of the player.
async def _start_poll_command(self):
        """Loop which polls the status of the player."""
        try:
            while True:
                await self.async_update_status()
        except (asyncio.TimeoutError, ClientError, BluesoundPlayer._TimeoutException):
            _LOGGER.info("Node %s is offline, retrying later", self._name)
            await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
            self.start_polling()
        except CancelledError:
            _LOGGER.debug("Stopping the polling of node %s", self._name)
        except Exception:
            _LOGGER.exception("Unexpected error in %s", self._name)
            raise
[ "async", "def", "_start_poll_command", "(", "self", ")", ":", "try", ":", "while", "True", ":", "await", "self", ".", "async_update_status", "(", ")", "except", "(", "asyncio", ".", "TimeoutError", ",", "ClientError", ",", "BluesoundPlayer", ".", "_TimeoutException", ")", ":", "_LOGGER", ".", "info", "(", "\"Node %s is offline, retrying later\"", ",", "self", ".", "_name", ")", "await", "asyncio", ".", "sleep", "(", "NODE_OFFLINE_CHECK_TIMEOUT", ")", "self", ".", "start_polling", "(", ")", "except", "CancelledError", ":", "_LOGGER", ".", "debug", "(", "\"Stopping the polling of node %s\"", ",", "self", ".", "_name", ")", "except", "Exception", ":", "_LOGGER", ".", "exception", "(", "\"Unexpected error in %s\"", ",", "self", ".", "_name", ")", "raise" ]
[ 286, 4 ]
[ 301, 17 ]
python
en
['en', 'en', 'en']
True
BluesoundPlayer.start_polling
(self)
Start the polling task.
Start the polling task.
def start_polling(self):
        """Start the polling task."""
        self._polling_task = self._hass.async_create_task(self._start_poll_command())
[ "def", "start_polling", "(", "self", ")", ":", "self", ".", "_polling_task", "=", "self", ".", "_hass", ".", "async_create_task", "(", "self", ".", "_start_poll_command", "(", ")", ")" ]
[ 303, 4 ]
[ 305, 85 ]
python
en
['en', 'no', 'en']
True
BluesoundPlayer.stop_polling
(self)
Stop the polling task.
Stop the polling task.
def stop_polling(self):
        """Stop the polling task."""
        self._polling_task.cancel()
[ "def", "stop_polling", "(", "self", ")", ":", "self", ".", "_polling_task", ".", "cancel", "(", ")" ]
[ 307, 4 ]
[ 309, 35 ]
python
en
['en', 'en', 'en']
True
BluesoundPlayer.async_init
(self, triggered=None)
Initialize the player async.
Initialize the player async.
async def async_init(self, triggered=None):
        """Initialize the player async."""
        try:
            if self._retry_remove is not None:
                self._retry_remove()
                self._retry_remove = None

            await self.force_update_sync_status(self._init_callback, True)
        except (asyncio.TimeoutError, ClientError):
            _LOGGER.info("Node %s is offline, retrying later", self.host)
            self._retry_remove = async_track_time_interval(
                self._hass, self.async_init, NODE_RETRY_INITIATION
            )
        except Exception:
            _LOGGER.exception("Unexpected when initiating error in %s", self.host)
            raise
[ "async", "def", "async_init", "(", "self", ",", "triggered", "=", "None", ")", ":", "try", ":", "if", "self", ".", "_retry_remove", "is", "not", "None", ":", "self", ".", "_retry_remove", "(", ")", "self", ".", "_retry_remove", "=", "None", "await", "self", ".", "force_update_sync_status", "(", "self", ".", "_init_callback", ",", "True", ")", "except", "(", "asyncio", ".", "TimeoutError", ",", "ClientError", ")", ":", "_LOGGER", ".", "info", "(", "\"Node %s is offline, retrying later\"", ",", "self", ".", "host", ")", "self", ".", "_retry_remove", "=", "async_track_time_interval", "(", "self", ".", "_hass", ",", "self", ".", "async_init", ",", "NODE_RETRY_INITIATION", ")", "except", "Exception", ":", "_LOGGER", ".", "exception", "(", "\"Unexpected when initiating error in %s\"", ",", "self", ".", "host", ")", "raise" ]
[ 311, 4 ]
[ 326, 17 ]
python
en
['en', 'en', 'en']
True