Schema of the records below (column: type, observed range):

body: string, lengths 26–98.2k
body_hash: int64, range -9,222,864,604,528,158,000 to 9,221,803,474B
docstring: string, lengths 1–16.8k
path: string, lengths 5–230
name: string, lengths 1–96
repository_name: string, lengths 7–89
lang: string, 1 class (python)
body_without_docstring: string, lengths 20–98.2k
def test_resized_viewbox_no_width_height(self): 'Truncate decimals' svg = ElementTree.fromstring((XML_HEADER + b'<svg viewBox="-10.23 32.18 75.876 75.956"></svg>')) result = ElementTree.tostring(svg_util.make_square(svg)) self.assertTrue((b'width="80"' in result)) self.assertTrue((b'height="80"' in result)) self.assertTrue((b'viewBox="-10 32 75 75"' in result))
-8,967,835,290,666,746,000
Truncate decimals
peacecorps/peacecorps/tests/test_util_svg.py
test_resized_viewbox_no_width_height
18F/peacecorps-site
python
def test_resized_viewbox_no_width_height(self): svg = ElementTree.fromstring((XML_HEADER + b'<svg viewBox="-10.23 32.18 75.876 75.956"></svg>')) result = ElementTree.tostring(svg_util.make_square(svg)) self.assertTrue((b'width="80"' in result)) self.assertTrue((b'height="80"' in result)) self.assertTrue((b'viewBox="-10 32 75 75"' in result))
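The assertions above pin down the behavior under test: each viewBox component is truncated toward zero (-10.23 becomes -10, 75.876 becomes 75), not rounded. A minimal sketch of just that truncation step, independent of the real svg_util.make_square (whose square-sizing logic is not shown in this record):

def truncate_viewbox(viewbox: str) -> str:
    # int(float(v)) truncates toward zero: '-10.23' -> -10, '75.876' -> 75
    return ' '.join(str(int(float(v))) for v in viewbox.split())

assert truncate_viewbox('-10.23 32.18 75.876 75.956') == '-10 32 75 75'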
def GetChromeProxyRequestHeaderValue(self, key): 'Get a specific Chrome-Proxy request header value.\n\n Returns:\n The value of the Chrome-Proxy request header entry matching the\n given key. Returns None if no such key is present.\n ' if ('Chrome-Proxy' not in self.response.request_headers): return None chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy'] values = [v.strip() for v in chrome_proxy_request_header.split(',')] for value in values: kvp = value.split('=', 1) if ((len(kvp) == 2) and (kvp[0].strip() == key)): return kvp[1].strip() return None
-2,043,771,418,356,114,200
Get a specific Chrome-Proxy request header value. Returns: The value of the Chrome-Proxy request header entry matching the given key. Returns None if no such key is present.
third_party/webrtc/src/chromium/src/tools/chrome_proxy/common/chrome_proxy_metrics.py
GetChromeProxyRequestHeaderValue
Teamxrtc/webrtc-streaming-node
python
def GetChromeProxyRequestHeaderValue(self, key): 'Get a specific Chrome-Proxy request header value.\n\n Returns:\n The value of the Chrome-Proxy request header entry matching the\n given key. Returns None if no such key is present.\n ' if ('Chrome-Proxy' not in self.response.request_headers): return None chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy'] values = [v.strip() for v in chrome_proxy_request_header.split(',')] for value in values: kvp = value.split('=', 1) if ((len(kvp) == 2) and (kvp[0].strip() == key)): return kvp[1].strip() return None
def GetChromeProxyClientType(self): 'Get the client type directive from the Chrome-Proxy request header.\n\n Returns:\n The client type directive from the Chrome-Proxy request header for the\n request that led to this response. For example, if the request header\n "Chrome-Proxy: c=android" is present, then this method would return\n "android". Returns None if no client type directive is present.\n ' return self.GetChromeProxyRequestHeaderValue('c')
7,091,982,491,205,581,000
Get the client type directive from the Chrome-Proxy request header. Returns: The client type directive from the Chrome-Proxy request header for the request that led to this response. For example, if the request header "Chrome-Proxy: c=android" is present, then this method would return "android". Returns None if no client type directive is present.
third_party/webrtc/src/chromium/src/tools/chrome_proxy/common/chrome_proxy_metrics.py
GetChromeProxyClientType
Teamxrtc/webrtc-streaming-node
python
def GetChromeProxyClientType(self): 'Get the client type directive from the Chrome-Proxy request header.\n\n Returns:\n The client type directive from the Chrome-Proxy request header for the\n request that led to this response. For example, if the request header\n "Chrome-Proxy: c=android" is present, then this method would return\n "android". Returns None if no client type directive is present.\n ' return self.GetChromeProxyRequestHeaderValue('c')
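Both methods above reduce to splitting the Chrome-Proxy header value on commas and then each directive on the first '='. A self-contained sketch of the same parsing, with a made-up header value for illustration:

def get_directive(header_value: str, key: str):
    # Directives are comma-separated key=value pairs, e.g. 'c=android, ps=123'
    for part in header_value.split(','):
        kvp = part.strip().split('=', 1)
        if (len(kvp) == 2) and (kvp[0].strip() == key):
            return kvp[1].strip()
    return None

assert get_directive('c=android, ps=123', 'c') == 'android'
assert get_directive('c=android, ps=123', 'x') is None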
def matched_sample_distribution(floats_arr: np.ndarray, samples_no: int, granularity: int=100, logmode: bool=False) -> np.ndarray: '\n Tries to guess a distribution of floats and sample from it.\n Uses np.histogram with the number of bins equal to the granularity parameter. For each\n sample, selects which bin to sample and then picks from the bin a float according to a\n uniform distribution. If logmode is enabled, the histogram is built in log-space, as is\n the sampling.\n\n :param floats_arr: array of floats for which to match the distribution\n :param samples_no: number of random samples to retrieve\n :param granularity: granularity at which to operate\n :param logmode: if True, sample in log-space\n :return: samples drawn from the empirically matched distribution\n ' if logmode: floats_arr = np.log(floats_arr) (hist, bin_edges) = np.histogram(floats_arr, bins=granularity, density=True) pad = np.arange(granularity) locations = np.random.choice(pad, samples_no, p=(hist / hist.sum())) samples = [] for i in locations: samples.append(np.random.uniform(bin_edges[i], bin_edges[(i + 1)])) if logmode: return np.exp(samples) else: return np.array(samples)
7,744,731,314,882,504,000
Tries to guess a distribution of floats and sample from it. Uses np.histogram with the number of bins equal to the granularity parameter. For each sample, selects which bin to sample and then picks from the bin a float according to a uniform distribution. If logmode is enabled, the histogram is built in log-space, as is the sampling. :param floats_arr: array of floats for which to match the distribution :param samples_no: number of random samples to retrieve :param granularity: granularity at which to operate :param logmode: if True, sample in log-space :return: samples drawn from the empirically matched distribution
bioflow/algorithms_bank/sampling_policies.py
matched_sample_distribution
chiffa/BioFlow
python
def matched_sample_distribution(floats_arr: np.ndarray, samples_no: int, granularity: int=100, logmode: bool=False) -> np.ndarray: '\n Tries to guess a distribution of floats and sample from it.\n Uses np.histogram with the number of bins equal to the granularity parameter. For each\n sample, selects which bin to sample and then picks from the bin a float according to a\n uniform distribution. If logmode is enabled, the histogram is built in log-space, as is\n the sampling.\n\n :param floats_arr: array of floats for which to match the distribution\n :param samples_no: number of random samples to retrieve\n :param granularity: granularity at which to operate\n :param logmode: if True, sample in log-space\n :return: samples drawn from the empirically matched distribution\n ' if logmode: floats_arr = np.log(floats_arr) (hist, bin_edges) = np.histogram(floats_arr, bins=granularity, density=True) pad = np.arange(granularity) locations = np.random.choice(pad, samples_no, p=(hist / hist.sum())) samples = [] for i in locations: samples.append(np.random.uniform(bin_edges[i], bin_edges[(i + 1)])) if logmode: return np.exp(samples) else: return np.array(samples)
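A usage sketch for matched_sample_distribution, assuming the module import below works in the project environment; it resamples a lognormal source in log-space, and the two empirical distributions should roughly agree:

import numpy as np
from bioflow.algorithms_bank.sampling_policies import matched_sample_distribution

source = np.random.lognormal(mean=0.0, sigma=1.0, size=10000)
resampled = matched_sample_distribution(source, 5000, granularity=100, logmode=True)
# The means should land close to each other if the histogram match worked
print(np.mean(source), np.mean(resampled))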
def _reduce_distribution(floats_arr: np.ndarray): '\n Basically gets a distribution in the [0, 1] range in 100 bins, rounded to the nearest 0.01. Used for\n hashing and distribution matching.\n\n :param floats_arr: floats for which to calculate the rounded distribution\n :return: rounded distribution\n ' normalized_arr = (floats_arr / np.max(floats_arr)) bins = np.linspace(0, 1.001, 101) (hist, bin_edges) = np.histogram(normalized_arr, bins=bins, density=True) rounded_hist = np.array((hist * 100)).astype(int) return rounded_hist
-7,937,689,576,270,980,000
Basically gets a distribution in the [0, 1] range in 100 bins, rounded to the nearest 0.01. Used for hashing and distribution matching. :param floats_arr: floats for which to calculate the rounded distribution :return: rounded distribution
bioflow/algorithms_bank/sampling_policies.py
_reduce_distribution
chiffa/BioFlow
python
def _reduce_distribution(floats_arr: np.ndarray): '\n Basically gets a distribution in the [0, 1] range in 100 bins, rounded to the nearest 0.01. Used for\n hashing and distribution matching.\n\n :param floats_arr: floats for which to calculate the rounded distribution\n :return: rounded distribution\n ' normalized_arr = (floats_arr / np.max(floats_arr)) bins = np.linspace(0, 1.001, 101) (hist, bin_edges) = np.histogram(normalized_arr, bins=bins, density=True) rounded_hist = np.array((hist * 100)).astype(int) return rounded_hist
def _characterize_set(sample: Union[(List[int], List[Tuple[(int, float)]])]): '\n None-robust helper function to characterize a sample set by its length, the nature of items in\n the sample and the eventual distribution of weights within the sample.\n\n :param sample: sample to characterize\n :return: set length (0 if None), 1 if items are ids, 2 if ids and weights (0 if\n None), rounded distribution ([] if None or items are ids)\n ' if (sample is None): return (0, 0, []) if (len(sample) == 1): if _is_int(sample[0]): return (1, 1, []) else: return (1, 2, []) if _is_int(sample[0]): rounded_hist = ([1] * 100) rounded_hist = np.array(rounded_hist).astype(int) return (len(sample), 1, rounded_hist.tolist()) else: rounded_hist = _reduce_distribution(np.array(sample).astype(float)[:, 1]) return (len(sample), 2, rounded_hist.tolist())
5,761,955,160,226,254,000
None-robust helper function to characterize a sample set by its length, the nature of items in the sample and the eventual distribution of weights within the sample. :param sample: sample to characterize :return: set length (0 if None), 1 if items are ids, 2 if ids and weights (0 if None), rounded distribution ([] if None or items are ids)
bioflow/algorithms_bank/sampling_policies.py
_characterize_set
chiffa/BioFlow
python
def _characterize_set(sample: Union[(List[int], List[Tuple[(int, float)]])]): '\n None-robust helper function to characterize a sample set by its length, the nature of items in\n the sample and the eventual distribution of weights within the sample.\n\n :param sample: sample to characterize\n :return: set length (0 if None), 1 if items are ids, 2 if ids and weights (0 if\n None), rounded distribution ([] if None or items are ids)\n ' if (sample is None): return (0, 0, []) if (len(sample) == 1): if _is_int(sample[0]): return (1, 1, []) else: return (1, 2, []) if _is_int(sample[0]): rounded_hist = ([1] * 100) rounded_hist = np.array(rounded_hist).astype(int) return (len(sample), 1, rounded_hist.tolist()) else: rounded_hist = _reduce_distribution(np.array(sample).astype(float)[:, 1]) return (len(sample), 2, rounded_hist.tolist())
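A quick illustration of the three return shapes documented above; the weighted case also exercises _reduce_distribution internally. This assumes the private helpers are importable for experimentation:

from bioflow.algorithms_bank.sampling_policies import _characterize_set

print(_characterize_set(None))                  # (0, 0, [])
print(_characterize_set([5, 7, 9]))             # (3, 1, [1, 1, ...]) - ids only
print(_characterize_set([(5, 0.2), (7, 0.8)]))  # (2, 2, <rounded histogram>)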
def characterize_flow_parameters(sample: Union[(List[int], List[Tuple[(int, float)]])], secondary_sample: Union[(List[int], List[Tuple[(int, float)]], None)], sparse_rounds: int): '\n Characterizes the primary and secondary sets and computes their hash that can be used to\n match similar samples for random sampling.\n\n :param sample: primary set\n :param secondary_sample: secondary set\n :param sparse_rounds: if sparse rounds are to be performed\n :return: first set length, shape, hist, second set length, shape, hist, sparse rounds, hash\n ' (prim_len, prim_shape, prim_hist) = _characterize_set(sample) (sec_len, sec_shape, sec_hist) = _characterize_set(secondary_sample) _hash = hashlib.md5(json.dumps([prim_len, prim_shape, prim_hist, sec_len, sec_shape, sec_hist, sparse_rounds]).encode('utf-8')).hexdigest() log.debug(('hashed flow parameters from:\n%d/%d/%s; \n%d/%d/%s; \n%d \nto %s' % (prim_len, prim_shape, prim_hist, sec_len, sec_shape, sec_hist, sparse_rounds, _hash))) return (prim_len, prim_shape, prim_hist, sec_len, sec_shape, sec_hist, sparse_rounds, _hash)
7,264,387,114,084,906,000
Characterizes the primary and secondary sets and computes their hash that can be used to match similar samples for random sampling. :param sample: primary set :param secondary_sample: secondary set :param sparse_rounds: if sparse rounds are to be performed :return: first set length, shape, hist, second set length, shape, hist, sparse rounds, hash
bioflow/algorithms_bank/sampling_policies.py
characterize_flow_parameters
chiffa/BioFlow
python
def characterize_flow_parameters(sample: Union[(List[int], List[Tuple[(int, float)]])], secondary_sample: Union[(List[int], List[Tuple[(int, float)]], None)], sparse_rounds: int): '\n Characterizes the primary and secondary sets and computes their hash that can be used to\n match similar samples for random sampling.\n\n :param sample: primary set\n :param secondary_sample: secondary set\n :param sparse_rounds: if sparse rounds are to be performed\n :return: first set length, shape, hist, second set length, shape, hist, sparse rounds, hash\n ' (prim_len, prim_shape, prim_hist) = _characterize_set(sample) (sec_len, sec_shape, sec_hist) = _characterize_set(secondary_sample) _hash = hashlib.md5(json.dumps([prim_len, prim_shape, prim_hist, sec_len, sec_shape, sec_hist, sparse_rounds]).encode('utf-8')).hexdigest() log.debug(('hashed flow parameters from:\n%d/%d/%s; \n%d/%d/%s; \n%d \nto %s' % (prim_len, prim_shape, prim_hist, sec_len, sec_shape, sec_hist, sparse_rounds, _hash))) return (prim_len, prim_shape, prim_hist, sec_len, sec_shape, sec_hist, sparse_rounds, _hash)
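The hashing idiom above (md5 over a JSON dump of the characterization tuple) is what lets statistically equivalent samples collide on purpose. The pattern in isolation, with hypothetical parameter values:

import hashlib
import json

def parameters_hash(*parts) -> str:
    # json.dumps produces the same string for equal inputs,
    # so equal characterizations hash to the same digest
    return hashlib.md5(json.dumps(list(parts)).encode('utf-8')).hexdigest()

print(parameters_hash(3, 1, [1] * 100, 0, 0, [], 0))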
def _sample_floats(floats, float_sampling_method='exact', matched_distro_precision: int=100): '\n A wrapper method to sample a float distribution according to a sampling method\n\n :param floats: floats to sample from\n :param float_sampling_method: exact (permutation of weights) | distro (trying to match the\n empirical distribution) | logdistro (trying to match the empirical distribution in the log\n space)\n :param matched_distro_precision: how closely to try to match the distribution (granularity\n parameter pass-through to the matched_sample_distribution)\n :return: sample of floats\n ' if (float_sampling_method == 'exact'): ret_floats = floats.copy() np.random.shuffle(ret_floats) return ret_floats if (float_sampling_method == 'distro'): return matched_sample_distribution(floats, len(floats), granularity=matched_distro_precision) if (float_sampling_method == 'logdistro'): return matched_sample_distribution(floats, len(floats), granularity=matched_distro_precision, logmode=True) raise ValueError(('Unknown float_sampling_method: %s' % float_sampling_method))
47,125,547,739,960,570
A wrapper method to sample a float distribution according to a sampling method :param floats: floats to sample from :param float_sampling_method: exact (permutation of weights) | distro (trying to match the empirical distribution) | logdistro (trying to match the empirical distribution in the log space) :param matched_distro_precision: how closely to try to match the distribution (granularity parameter pass-through to the matched_sample_distribution) :return: sample of floats
bioflow/algorithms_bank/sampling_policies.py
_sample_floats
chiffa/BioFlow
python
def _sample_floats(floats, float_sampling_method='exact', matched_distro_precision: int=100): '\n A wrapper method to sample a float distribution according to a sampling method\n\n :param floats: floats to sample from\n :param float_sampling_method: exact (permutation of weights) | distro (trying to match the\n empirical distribution) | logdistro (trying to match the empirical distribution in the log\n space)\n :param matched_distro_precision: how closely to try to match the distribution (granularity\n parameter pass-through to the matched_sample_distribution)\n :return: sample of floats\n ' if (float_sampling_method == 'exact'): ret_floats = floats.copy() np.random.shuffle(ret_floats) return ret_floats if (float_sampling_method == 'distro'): return matched_sample_distribution(floats, len(floats), granularity=matched_distro_precision) if (float_sampling_method == 'logdistro'): return matched_sample_distribution(floats, len(floats), granularity=matched_distro_precision, logmode=True) raise ValueError(('Unknown float_sampling_method: %s' % float_sampling_method))
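The three modes differ in what they preserve: 'exact' keeps the exact multiset of weights (a permutation), while 'distro' and 'logdistro' only match the empirical distribution. A hedged comparison, assuming the same module import as above:

import numpy as np
from bioflow.algorithms_bank.sampling_policies import _sample_floats

weights = np.random.lognormal(size=1000)
exact = _sample_floats(weights, 'exact')    # same values, shuffled
distro = _sample_floats(weights, 'distro')  # new values, similar histogram
print(sorted(weights)[:3], sorted(exact)[:3])  # identical after sorting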
def matched_sampling(sample, secondary_sample, background, samples, float_sampling_method='exact'): '\n The general random sampling strategy that samples sets of the same size and shape as the primary\n and secondary sample sets and, if they are weighted, tries to match the random sample weights\n according to the float sampling method.\n\n :param sample: primary sample set\n :param secondary_sample: secondary sample set\n :param background: background of ids (and potentially weights) from which to sample\n :param samples: number of random samples wanted\n :param float_sampling_method: exact/distro/logdistro. the sampling parametrization method ingesting\n all the parameters in a single string argument in the general case; here, a pass-through\n parameter for the _sample_floats function if samples are weighted and the distribution of\n weights is being matched.\n :return: generator of (sample index, primary random sample, secondary random sample) triples\n ' if _is_int(background[0]): background_ids = np.array(background) background_whg = np.ones_like(background_ids).astype(float) else: background_ids = np.array(background)[:, 0] background_whg = np.array(background)[:, 1] log.debug(('debug sum %s, type: %s, all:%s' % (np.sum(background_whg), type(background_whg), background_whg))) background_whg /= np.sum(background_whg) if (secondary_sample is None): if _is_int(sample[0]): for i in range(0, samples): selected = np.random.choice(background_ids, len(sample), p=background_whg, replace=False) (yield (i, selected, None)) else: for i in range(0, samples): id_loads = np.random.choice(background_ids, len(sample), p=background_whg, replace=False) float_part = _sample_floats(np.array(sample)[:, 1], float_sampling_method) ids_and_floats = [(_id, _float) for (_id, _float) in zip(id_loads, float_part)] (yield (i, ids_and_floats, None)) elif _is_int(sample[0]): for i in range(0, samples): selected = np.random.choice(background_ids, (len(sample) + len(secondary_sample)), p=background_whg, replace=False) np.random.shuffle(selected) (yield (i, selected[:len(sample)], selected[(- len(secondary_sample)):])) else: for i in range(0, samples): selected = np.random.choice(background_ids, (len(sample) + len(secondary_sample)), p=background_whg, replace=False) np.random.shuffle(selected) id_loads = selected[:len(sample)] float_part = _sample_floats(np.array(sample)[:, 1], float_sampling_method) ids_and_floats = [(_id, _float) for (_id, _float) in zip(id_loads, float_part)] sec_id_loads = selected[(- len(secondary_sample)):] sec_float_part = _sample_floats(np.array(secondary_sample)[:, 1], float_sampling_method) sec_ids_and_floats = [(_id, _float) for (_id, _float) in zip(sec_id_loads, sec_float_part)] (yield (i, ids_and_floats, sec_ids_and_floats))
703,084,849,194,356,000
The general random sampling strategy that samples sets of the same size and shape as the primary and secondary sample sets and, if they are weighted, tries to match the random sample weights according to the float sampling method. :param sample: primary sample set :param secondary_sample: secondary sample set :param background: background of ids (and potentially weights) from which to sample :param samples: number of random samples wanted :param float_sampling_method: exact/distro/logdistro. the sampling parametrization method ingesting all the parameters in a single string argument in the general case; here, a pass-through parameter for the _sample_floats function if samples are weighted and the distribution of weights is being matched. :return: generator of (sample index, primary random sample, secondary random sample) triples
bioflow/algorithms_bank/sampling_policies.py
matched_sampling
chiffa/BioFlow
python
def matched_sampling(sample, secondary_sample, background, samples, float_sampling_method='exact'): '\n The general random sampling strategy that samples sets of the same size and shape as the primary\n and secondary sample sets and, if they are weighted, tries to match the random sample weights\n according to the float sampling method.\n\n :param sample: primary sample set\n :param secondary_sample: secondary sample set\n :param background: background of ids (and potentially weights) from which to sample\n :param samples: number of random samples wanted\n :param float_sampling_method: exact/distro/logdistro. the sampling parametrization method ingesting\n all the parameters in a single string argument in the general case; here, a pass-through\n parameter for the _sample_floats function if samples are weighted and the distribution of\n weights is being matched.\n :return: generator of (sample index, primary random sample, secondary random sample) triples\n ' if _is_int(background[0]): background_ids = np.array(background) background_whg = np.ones_like(background_ids).astype(float) else: background_ids = np.array(background)[:, 0] background_whg = np.array(background)[:, 1] log.debug(('debug sum %s, type: %s, all:%s' % (np.sum(background_whg), type(background_whg), background_whg))) background_whg /= np.sum(background_whg) if (secondary_sample is None): if _is_int(sample[0]): for i in range(0, samples): selected = np.random.choice(background_ids, len(sample), p=background_whg, replace=False) (yield (i, selected, None)) else: for i in range(0, samples): id_loads = np.random.choice(background_ids, len(sample), p=background_whg, replace=False) float_part = _sample_floats(np.array(sample)[:, 1], float_sampling_method) ids_and_floats = [(_id, _float) for (_id, _float) in zip(id_loads, float_part)] (yield (i, ids_and_floats, None)) elif _is_int(sample[0]): for i in range(0, samples): selected = np.random.choice(background_ids, (len(sample) + len(secondary_sample)), p=background_whg, replace=False) np.random.shuffle(selected) (yield (i, selected[:len(sample)], selected[(- len(secondary_sample)):])) else: for i in range(0, samples): selected = np.random.choice(background_ids, (len(sample) + len(secondary_sample)), p=background_whg, replace=False) np.random.shuffle(selected) id_loads = selected[:len(sample)] float_part = _sample_floats(np.array(sample)[:, 1], float_sampling_method) ids_and_floats = [(_id, _float) for (_id, _float) in zip(id_loads, float_part)] sec_id_loads = selected[(- len(secondary_sample)):] sec_float_part = _sample_floats(np.array(secondary_sample)[:, 1], float_sampling_method) sec_ids_and_floats = [(_id, _float) for (_id, _float) in zip(sec_id_loads, sec_float_part)] (yield (i, ids_and_floats, sec_ids_and_floats))
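matched_sampling is a generator yielding (index, primary, secondary) triples, so callers iterate over it rather than call it once. A consumption sketch for the unweighted, no-secondary-set case (import assumed as above):

from bioflow.algorithms_bank.sampling_policies import matched_sampling

background = list(range(1000))  # unweighted ids
sample = [3, 14, 159]

for i, random_ids, secondary in matched_sampling(sample, None, background, samples=5):
    assert secondary is None
    print(i, random_ids)  # three ids drawn from the background each round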
def __init__(__self__, *, resource_group_name: pulumi.Input[str], id: Optional[pulumi.Input[str]]=None, location: Optional[pulumi.Input[str]]=None, manual_private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]=None, private_endpoint_name: Optional[pulumi.Input[str]]=None, private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]=None, subnet: Optional[pulumi.Input['SubnetArgs']]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None): "\n The set of arguments for constructing a PrivateEndpoint resource.\n :param pulumi.Input[str] resource_group_name: The name of the resource group.\n :param pulumi.Input[str] id: Resource ID.\n :param pulumi.Input[str] location: Resource location.\n :param pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]] manual_private_link_service_connections: A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.\n :param pulumi.Input[str] private_endpoint_name: The name of the private endpoint.\n :param pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]] private_link_service_connections: A grouping of information about the connection to the remote resource.\n :param pulumi.Input['SubnetArgs'] subnet: The ID of the subnet from which the private IP will be allocated.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.\n " pulumi.set(__self__, 'resource_group_name', resource_group_name) if (id is not None): pulumi.set(__self__, 'id', id) if (location is not None): pulumi.set(__self__, 'location', location) if (manual_private_link_service_connections is not None): pulumi.set(__self__, 'manual_private_link_service_connections', manual_private_link_service_connections) if (private_endpoint_name is not None): pulumi.set(__self__, 'private_endpoint_name', private_endpoint_name) if (private_link_service_connections is not None): pulumi.set(__self__, 'private_link_service_connections', private_link_service_connections) if (subnet is not None): pulumi.set(__self__, 'subnet', subnet) if (tags is not None): pulumi.set(__self__, 'tags', tags)
5,233,328,910,972,875,000
The set of arguments for constructing a PrivateEndpoint resource. :param pulumi.Input[str] resource_group_name: The name of the resource group. :param pulumi.Input[str] id: Resource ID. :param pulumi.Input[str] location: Resource location. :param pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]] manual_private_link_service_connections: A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource. :param pulumi.Input[str] private_endpoint_name: The name of the private endpoint. :param pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]] private_link_service_connections: A grouping of information about the connection to the remote resource. :param pulumi.Input['SubnetArgs'] subnet: The ID of the subnet from which the private IP will be allocated. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
__init__
polivbr/pulumi-azure-native
python
def __init__(__self__, *, resource_group_name: pulumi.Input[str], id: Optional[pulumi.Input[str]]=None, location: Optional[pulumi.Input[str]]=None, manual_private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]=None, private_endpoint_name: Optional[pulumi.Input[str]]=None, private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]=None, subnet: Optional[pulumi.Input['SubnetArgs']]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None): "\n The set of arguments for constructing a PrivateEndpoint resource.\n :param pulumi.Input[str] resource_group_name: The name of the resource group.\n :param pulumi.Input[str] id: Resource ID.\n :param pulumi.Input[str] location: Resource location.\n :param pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]] manual_private_link_service_connections: A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.\n :param pulumi.Input[str] private_endpoint_name: The name of the private endpoint.\n :param pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]] private_link_service_connections: A grouping of information about the connection to the remote resource.\n :param pulumi.Input['SubnetArgs'] subnet: The ID of the subnet from which the private IP will be allocated.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.\n " pulumi.set(__self__, 'resource_group_name', resource_group_name) if (id is not None): pulumi.set(__self__, 'id', id) if (location is not None): pulumi.set(__self__, 'location', location) if (manual_private_link_service_connections is not None): pulumi.set(__self__, 'manual_private_link_service_connections', manual_private_link_service_connections) if (private_endpoint_name is not None): pulumi.set(__self__, 'private_endpoint_name', private_endpoint_name) if (private_link_service_connections is not None): pulumi.set(__self__, 'private_link_service_connections', private_link_service_connections) if (subnet is not None): pulumi.set(__self__, 'subnet', subnet) if (tags is not None): pulumi.set(__self__, 'tags', tags)
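A construction sketch for the args class above, assuming the v20190901 module re-exports PrivateEndpointArgs and SubnetArgs; every concrete value is a hypothetical placeholder, not a value from the source:

import pulumi_azure_native.network.v20190901 as network

args = network.PrivateEndpointArgs(
    resource_group_name='example-rg',  # hypothetical resource group
    location='westeurope',             # hypothetical region
    subnet=network.SubnetArgs(id='/subscriptions/.../subnets/example'),  # hypothetical ID
    tags={'env': 'dev'},
)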
@property @pulumi.getter(name='resourceGroupName') def resource_group_name(self) -> pulumi.Input[str]: '\n The name of the resource group.\n ' return pulumi.get(self, 'resource_group_name')
5,898,586,357,340,442,000
The name of the resource group.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
resource_group_name
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='resourceGroupName') def resource_group_name(self) -> pulumi.Input[str]: '\n \n ' return pulumi.get(self, 'resource_group_name')
@property @pulumi.getter def id(self) -> Optional[pulumi.Input[str]]: '\n Resource ID.\n ' return pulumi.get(self, 'id')
4,003,078,074,025,280,500
Resource ID.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
id
polivbr/pulumi-azure-native
python
@property @pulumi.getter def id(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'id')
@property @pulumi.getter def location(self) -> Optional[pulumi.Input[str]]: '\n Resource location.\n ' return pulumi.get(self, 'location')
5,685,883,695,381,965,000
Resource location.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
location
polivbr/pulumi-azure-native
python
@property @pulumi.getter def location(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'location')
@property @pulumi.getter(name='manualPrivateLinkServiceConnections') def manual_private_link_service_connections(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]: '\n A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.\n ' return pulumi.get(self, 'manual_private_link_service_connections')
-6,351,649,816,882,946,000
A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
manual_private_link_service_connections
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='manualPrivateLinkServiceConnections') def manual_private_link_service_connections(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]: '\n \n ' return pulumi.get(self, 'manual_private_link_service_connections')
@property @pulumi.getter(name='privateEndpointName') def private_endpoint_name(self) -> Optional[pulumi.Input[str]]: '\n The name of the private endpoint.\n ' return pulumi.get(self, 'private_endpoint_name')
-8,703,745,273,820,877,000
The name of the private endpoint.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
private_endpoint_name
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='privateEndpointName') def private_endpoint_name(self) -> Optional[pulumi.Input[str]]: '\n \n ' return pulumi.get(self, 'private_endpoint_name')
@property @pulumi.getter(name='privateLinkServiceConnections') def private_link_service_connections(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]: '\n A grouping of information about the connection to the remote resource.\n ' return pulumi.get(self, 'private_link_service_connections')
-5,282,137,409,068,493,000
A grouping of information about the connection to the remote resource.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
private_link_service_connections
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='privateLinkServiceConnections') def private_link_service_connections(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PrivateLinkServiceConnectionArgs']]]]: '\n \n ' return pulumi.get(self, 'private_link_service_connections')
@property @pulumi.getter def subnet(self) -> Optional[pulumi.Input['SubnetArgs']]: '\n The ID of the subnet from which the private IP will be allocated.\n ' return pulumi.get(self, 'subnet')
-2,245,546,924,618,331,100
The ID of the subnet from which the private IP will be allocated.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
subnet
polivbr/pulumi-azure-native
python
@property @pulumi.getter def subnet(self) -> Optional[pulumi.Input['SubnetArgs']]: '\n \n ' return pulumi.get(self, 'subnet')
@property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]: '\n Resource tags.\n ' return pulumi.get(self, 'tags')
-2,047,115,851,061,118,500
Resource tags.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
tags
polivbr/pulumi-azure-native
python
@property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]: '\n \n ' return pulumi.get(self, 'tags')
@overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]=None, id: Optional[pulumi.Input[str]]=None, location: Optional[pulumi.Input[str]]=None, manual_private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]]]=None, private_endpoint_name: Optional[pulumi.Input[str]]=None, private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]]]=None, resource_group_name: Optional[pulumi.Input[str]]=None, subnet: Optional[pulumi.Input[pulumi.InputType['SubnetArgs']]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, __props__=None): "\n Private endpoint resource.\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] id: Resource ID.\n :param pulumi.Input[str] location: Resource location.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]] manual_private_link_service_connections: A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.\n :param pulumi.Input[str] private_endpoint_name: The name of the private endpoint.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]] private_link_service_connections: A grouping of information about the connection to the remote resource.\n :param pulumi.Input[str] resource_group_name: The name of the resource group.\n :param pulumi.Input[pulumi.InputType['SubnetArgs']] subnet: The ID of the subnet from which the private IP will be allocated.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.\n " ...
-4,391,438,611,083,021,300
Private endpoint resource. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] id: Resource ID. :param pulumi.Input[str] location: Resource location. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]] manual_private_link_service_connections: A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource. :param pulumi.Input[str] private_endpoint_name: The name of the private endpoint. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]] private_link_service_connections: A grouping of information about the connection to the remote resource. :param pulumi.Input[str] resource_group_name: The name of the resource group. :param pulumi.Input[pulumi.InputType['SubnetArgs']] subnet: The ID of the subnet from which the private IP will be allocated. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
__init__
polivbr/pulumi-azure-native
python
@overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]=None, id: Optional[pulumi.Input[str]]=None, location: Optional[pulumi.Input[str]]=None, manual_private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]]]=None, private_endpoint_name: Optional[pulumi.Input[str]]=None, private_link_service_connections: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]]]=None, resource_group_name: Optional[pulumi.Input[str]]=None, subnet: Optional[pulumi.Input[pulumi.InputType['SubnetArgs']]]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, __props__=None): "\n Private endpoint resource.\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] id: Resource ID.\n :param pulumi.Input[str] location: Resource location.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]] manual_private_link_service_connections: A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.\n :param pulumi.Input[str] private_endpoint_name: The name of the private endpoint.\n :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionArgs']]]] private_link_service_connections: A grouping of information about the connection to the remote resource.\n :param pulumi.Input[str] resource_group_name: The name of the resource group.\n :param pulumi.Input[pulumi.InputType['SubnetArgs']] subnet: The ID of the subnet from which the private IP will be allocated.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.\n " ...
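And a resource-creation sketch through the overload above; again, all names and IDs are hypothetical placeholders, and the PrivateLinkServiceConnectionArgs fields are assumed from the Azure API shape:

import pulumi_azure_native.network.v20190901 as network

endpoint = network.PrivateEndpoint(
    'example-endpoint',  # hypothetical Pulumi resource name
    resource_group_name='example-rg',
    subnet=network.SubnetArgs(id='/subscriptions/.../subnets/example'),
    private_link_service_connections=[
        network.PrivateLinkServiceConnectionArgs(
            name='example-connection',
            private_link_service_id='/subscriptions/.../privateLinkServices/example',
        ),
    ],
)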
@overload def __init__(__self__, resource_name: str, args: PrivateEndpointArgs, opts: Optional[pulumi.ResourceOptions]=None): "\n Private endpoint resource.\n\n :param str resource_name: The name of the resource.\n :param PrivateEndpointArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n " ...
5,082,290,568,587,639,000
Private endpoint resource. :param str resource_name: The name of the resource. :param PrivateEndpointArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
__init__
polivbr/pulumi-azure-native
python
@overload def __init__(__self__, resource_name: str, args: PrivateEndpointArgs, opts: Optional[pulumi.ResourceOptions]=None): "\n Private endpoint resource.\n\n :param str resource_name: The name of the resource.\n :param PrivateEndpointArgs args: The arguments to use to populate this resource's properties.\n :param pulumi.ResourceOptions opts: Options for the resource.\n " ...
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]=None) -> 'PrivateEndpoint': "\n Get an existing PrivateEndpoint resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n " opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = PrivateEndpointArgs.__new__(PrivateEndpointArgs) __props__.__dict__['etag'] = None __props__.__dict__['location'] = None __props__.__dict__['manual_private_link_service_connections'] = None __props__.__dict__['name'] = None __props__.__dict__['network_interfaces'] = None __props__.__dict__['private_link_service_connections'] = None __props__.__dict__['provisioning_state'] = None __props__.__dict__['subnet'] = None __props__.__dict__['tags'] = None __props__.__dict__['type'] = None return PrivateEndpoint(resource_name, opts=opts, __props__=__props__)
3,226,211,340,263,033,000
Get an existing PrivateEndpoint resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
get
polivbr/pulumi-azure-native
python
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]=None) -> 'PrivateEndpoint': "\n Get an existing PrivateEndpoint resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n " opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = PrivateEndpointArgs.__new__(PrivateEndpointArgs) __props__.__dict__['etag'] = None __props__.__dict__['location'] = None __props__.__dict__['manual_private_link_service_connections'] = None __props__.__dict__['name'] = None __props__.__dict__['network_interfaces'] = None __props__.__dict__['private_link_service_connections'] = None __props__.__dict__['provisioning_state'] = None __props__.__dict__['subnet'] = None __props__.__dict__['tags'] = None __props__.__dict__['type'] = None return PrivateEndpoint(resource_name, opts=opts, __props__=__props__)
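get rehydrates an existing resource purely from its provider ID; note that all output properties start as None in __props__ and are filled in by the engine. A usage sketch with a hypothetical ID:

import pulumi
import pulumi_azure_native.network.v20190901 as network

existing = network.PrivateEndpoint.get(
    'imported-endpoint',  # hypothetical local name
    '/subscriptions/.../resourceGroups/example-rg/providers/Microsoft.Network/privateEndpoints/example-endpoint',
)
pulumi.export('endpoint_name', existing.name)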
@property @pulumi.getter def etag(self) -> pulumi.Output[str]: '\n A unique read-only string that changes whenever the resource is updated.\n ' return pulumi.get(self, 'etag')
5,960,741,373,667,297,000
A unique read-only string that changes whenever the resource is updated.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
etag
polivbr/pulumi-azure-native
python
@property @pulumi.getter def etag(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'etag')
@property @pulumi.getter def location(self) -> pulumi.Output[Optional[str]]: '\n Resource location.\n ' return pulumi.get(self, 'location')
-6,585,394,763,848,456,000
Resource location.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
location
polivbr/pulumi-azure-native
python
@property @pulumi.getter def location(self) -> pulumi.Output[Optional[str]]: '\n \n ' return pulumi.get(self, 'location')
@property @pulumi.getter(name='manualPrivateLinkServiceConnections') def manual_private_link_service_connections(self) -> pulumi.Output[Optional[Sequence['outputs.PrivateLinkServiceConnectionResponse']]]: '\n A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.\n ' return pulumi.get(self, 'manual_private_link_service_connections')
7,044,445,811,079,634,000
A grouping of information about the connection to the remote resource. Used when the network admin does not have access to approve connections to the remote resource.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
manual_private_link_service_connections
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='manualPrivateLinkServiceConnections') def manual_private_link_service_connections(self) -> pulumi.Output[Optional[Sequence['outputs.PrivateLinkServiceConnectionResponse']]]: '\n \n ' return pulumi.get(self, 'manual_private_link_service_connections')
@property @pulumi.getter def name(self) -> pulumi.Output[str]: '\n Resource name.\n ' return pulumi.get(self, 'name')
4,695,236,134,441,039,000
Resource name.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
name
polivbr/pulumi-azure-native
python
@property @pulumi.getter def name(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'name')
@property @pulumi.getter(name='networkInterfaces') def network_interfaces(self) -> pulumi.Output[Sequence['outputs.NetworkInterfaceResponse']]: '\n An array of references to the network interfaces created for this private endpoint.\n ' return pulumi.get(self, 'network_interfaces')
-2,116,992,413,112,385,500
An array of references to the network interfaces created for this private endpoint.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
network_interfaces
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='networkInterfaces') def network_interfaces(self) -> pulumi.Output[Sequence['outputs.NetworkInterfaceResponse']]: '\n \n ' return pulumi.get(self, 'network_interfaces')
@property @pulumi.getter(name='privateLinkServiceConnections') def private_link_service_connections(self) -> pulumi.Output[Optional[Sequence['outputs.PrivateLinkServiceConnectionResponse']]]: '\n A grouping of information about the connection to the remote resource.\n ' return pulumi.get(self, 'private_link_service_connections')
3,713,283,600,789,212,000
A grouping of information about the connection to the remote resource.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
private_link_service_connections
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='privateLinkServiceConnections') def private_link_service_connections(self) -> pulumi.Output[Optional[Sequence['outputs.PrivateLinkServiceConnectionResponse']]]: '\n \n ' return pulumi.get(self, 'private_link_service_connections')
@property @pulumi.getter(name='provisioningState') def provisioning_state(self) -> pulumi.Output[str]: '\n The provisioning state of the private endpoint resource.\n ' return pulumi.get(self, 'provisioning_state')
-3,047,066,359,649,695,000
The provisioning state of the private endpoint resource.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
provisioning_state
polivbr/pulumi-azure-native
python
@property @pulumi.getter(name='provisioningState') def provisioning_state(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'provisioning_state')
@property @pulumi.getter def subnet(self) -> pulumi.Output[Optional['outputs.SubnetResponse']]: '\n The ID of the subnet from which the private IP will be allocated.\n ' return pulumi.get(self, 'subnet')
3,362,244,572,786,356,700
The ID of the subnet from which the private IP will be allocated.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
subnet
polivbr/pulumi-azure-native
python
@property @pulumi.getter def subnet(self) -> pulumi.Output[Optional['outputs.SubnetResponse']]: '\n \n ' return pulumi.get(self, 'subnet')
@property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[(str, str)]]]: '\n Resource tags.\n ' return pulumi.get(self, 'tags')
-2,929,197,049,816,896,000
Resource tags.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
tags
polivbr/pulumi-azure-native
python
@property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[(str, str)]]]: '\n \n ' return pulumi.get(self, 'tags')
@property @pulumi.getter def type(self) -> pulumi.Output[str]: '\n Resource type.\n ' return pulumi.get(self, 'type')
2,132,950,812,122,862,800
Resource type.
sdk/python/pulumi_azure_native/network/v20190901/private_endpoint.py
type
polivbr/pulumi-azure-native
python
@property @pulumi.getter def type(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'type')
def concat(xs, axis=1): 'Concatenates given variables along an axis.\n\n Args:\n xs (tuple of Variables): Variables to be concatenated.\n axis (int): Axis that the input arrays are concatenated along.\n\n Returns:\n ~chainer.Variable: Output variable.\n\n ' return Concat(axis=axis)(*xs)
3,311,227,305,912,610,300
Concatenates given variables along an axis. Args: xs (tuple of Variables): Variables to be concatenated. axis (int): Axis that the input arrays are concatenated along. Returns: ~chainer.Variable: Output variable.
chainer/functions/concat.py
concat
umitanuki/chainer
python
def concat(xs, axis=1): 'Concatenates given variables along an axis.\n\n Args:\n xs (tuple of Variables): Variables to be concatenated.\n axis (int): Axis that the input arrays are concatenated along.\n\n Returns:\n ~chainer.Variable: Output variable.\n\n ' return Concat(axis=axis)(*xs)
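A usage sketch for concat, assuming the classic Chainer API where functions apply to Variable objects:

import numpy as np
import chainer
from chainer import functions as F

x1 = chainer.Variable(np.zeros((2, 3), dtype=np.float32))
x2 = chainer.Variable(np.ones((2, 2), dtype=np.float32))
y = F.concat((x1, x2), axis=1)  # concatenate along columns
print(y.data.shape)  # (2, 5)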
def encoder_from_encoder_spec(encoder_spec, chosen_locations, num_known_timesteps, forecast_window_size, output_window_size, static_features=None, static_overrides=None, covariates=None, forecasted_covariates=None, covariate_overrides=None, ts_categorical_features=None, random_seed=0, static_scalers=None, ts_scalers=None, trainable=True): 'Returns a `FeatureEncoder` built as specified in the `encoder_spec`.' encoder_kwargs = encoder_spec.encoder_kwargs if (encoder_spec.encoder_type == 'gam'): gam_kwargs = {} for kwarg in encoder_kwargs: if (kwarg == 'link_fn'): gam_kwargs['link_fn'] = encoder_kwargs['link_fn'] elif (kwarg == 'distribution'): gam_kwargs['distribution'] = encoder_kwargs['distribution'] elif (kwarg == 'initial_bias'): gam_kwargs['initial_bias'] = encoder_kwargs['initial_bias'] elif (kwarg == 'location_dependent_bias'): gam_kwargs['location_dependent_bias'] = encoder_kwargs['location_dependent_bias'] elif (kwarg == 'lower_bound'): gam_kwargs['lower_bound'] = encoder_kwargs['lower_bound'] elif (kwarg == 'upper_bound'): gam_kwargs['upper_bound'] = encoder_kwargs['upper_bound'] elif (kwarg == 'use_fixed_covariate_mask'): gam_kwargs['use_fixed_covariate_mask'] = encoder_kwargs['use_fixed_covariate_mask'] else: raise ValueError(f'Unexpected kwarg: {kwarg} passed to encoder of type {encoder_spec.encoder_type}') return gam_encoder.GamEncoder(chosen_locations, num_known_timesteps, forecast_window_size=forecast_window_size, output_window_size=output_window_size, static_features=static_features, static_scalers=static_scalers, static_overrides=static_overrides, covariates=covariates, ts_scalers=ts_scalers, forecasted_covariates=forecasted_covariates, covariate_overrides=covariate_overrides, static_feature_specs=encoder_spec.static_feature_specs, covariate_feature_specs=encoder_spec.covariate_feature_specs, ts_categorical_features=ts_categorical_features, covariate_feature_time_offset=encoder_spec.covariate_feature_time_offset, covariate_feature_window=encoder_spec.covariate_feature_window, random_seed=random_seed, name=encoder_spec.encoder_name, trainable=trainable, **gam_kwargs) elif (encoder_spec.encoder_type == 'static'): return variable_encoders.StaticEncoder() elif (encoder_spec.encoder_type == 'passthrough'): return variable_encoders.PassThroughEncoder(chosen_locations, num_known_timesteps, forecast_window_size, covariates=covariates, forecasted_covariates=forecasted_covariates, covariate_overrides=covariate_overrides, covariate_feature_specs=encoder_spec.covariate_feature_specs, ts_categorical_features=ts_categorical_features, name=encoder_spec.encoder_name) elif (encoder_spec.encoder_type == 'vaccine'): return variable_encoders.VaccineEncoder(chosen_locations, num_known_timesteps, forecast_window_size, covariates=covariates, forecasted_covariates=forecasted_covariates, covariate_overrides=covariate_overrides, covariate_feature_specs=encoder_spec.covariate_feature_specs, ts_categorical_features=ts_categorical_features, name=encoder_spec.encoder_name, vaccine_type=encoder_spec.vaccine_type) else: raise ValueError(f'encoder_spec passed in with invalid encoder_type: {encoder_spec.encoder_type}')
-6,044,850,341,288,337,000
Returns a `FeatureEncoder` built as specified in the `encoder_spec`.
covid_epidemiology/src/models/encoders/variable_encoder_builder.py
encoder_from_encoder_spec
04mayukh/google-research
python
def encoder_from_encoder_spec(encoder_spec, chosen_locations, num_known_timesteps, forecast_window_size, output_window_size, static_features=None, static_overrides=None, covariates=None, forecasted_covariates=None, covariate_overrides=None, ts_categorical_features=None, random_seed=0, static_scalers=None, ts_scalers=None, trainable=True): encoder_kwargs = encoder_spec.encoder_kwargs if (encoder_spec.encoder_type == 'gam'): gam_kwargs = {} for kwarg in encoder_kwargs: if (kwarg == 'link_fn'): gam_kwargs['link_fn'] = encoder_kwargs['link_fn'] elif (kwarg == 'distribution'): gam_kwargs['distribution'] = encoder_kwargs['distribution'] elif (kwarg == 'initial_bias'): gam_kwargs['initial_bias'] = encoder_kwargs['initial_bias'] elif (kwarg == 'location_dependent_bias'): gam_kwargs['location_dependent_bias'] = encoder_kwargs['location_dependent_bias'] elif (kwarg == 'lower_bound'): gam_kwargs['lower_bound'] = encoder_kwargs['lower_bound'] elif (kwarg == 'upper_bound'): gam_kwargs['upper_bound'] = encoder_kwargs['upper_bound'] elif (kwarg == 'use_fixed_covariate_mask'): gam_kwargs['use_fixed_covariate_mask'] = encoder_kwargs['use_fixed_covariate_mask'] else: raise ValueError(f'Unexpected kwarg: {kwarg} passed to encoder of type {encoder_spec.encoder_type}') return gam_encoder.GamEncoder(chosen_locations, num_known_timesteps, forecast_window_size=forecast_window_size, output_window_size=output_window_size, static_features=static_features, static_scalers=static_scalers, static_overrides=static_overrides, covariates=covariates, ts_scalers=ts_scalers, forecasted_covariates=forecasted_covariates, covariate_overrides=covariate_overrides, static_feature_specs=encoder_spec.static_feature_specs, covariate_feature_specs=encoder_spec.covariate_feature_specs, ts_categorical_features=ts_categorical_features, covariate_feature_time_offset=encoder_spec.covariate_feature_time_offset, covariate_feature_window=encoder_spec.covariate_feature_window, random_seed=random_seed, name=encoder_spec.encoder_name, trainable=trainable, **gam_kwargs) elif (encoder_spec.encoder_type == 'static'): return variable_encoders.StaticEncoder() elif (encoder_spec.encoder_type == 'passthrough'): return variable_encoders.PassThroughEncoder(chosen_locations, num_known_timesteps, forecast_window_size, covariates=covariates, forecasted_covariates=forecasted_covariates, covariate_overrides=covariate_overrides, covariate_feature_specs=encoder_spec.covariate_feature_specs, ts_categorical_features=ts_categorical_features, name=encoder_spec.encoder_name) elif (encoder_spec.encoder_type == 'vaccine'): return variable_encoders.VaccineEncoder(chosen_locations, num_known_timesteps, forecast_window_size, covariates=covariates, forecasted_covariates=forecasted_covariates, covariate_overrides=covariate_overrides, covariate_feature_specs=encoder_spec.covariate_feature_specs, ts_categorical_features=ts_categorical_features, name=encoder_spec.encoder_name, vaccine_type=encoder_spec.vaccine_type) else: raise ValueError(f'encoder_spec passed in with invalid encoder_type: {encoder_spec.encoder_type}')
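The long elif chain above only whitelists known kwargs before forwarding them. A sketch of the same validation as a set-based check; this is a refactor suggestion, not the project's code:

_ALLOWED_GAM_KWARGS = {
    'link_fn', 'distribution', 'initial_bias', 'location_dependent_bias',
    'lower_bound', 'upper_bound', 'use_fixed_covariate_mask',
}

def filter_gam_kwargs(encoder_kwargs, encoder_type):
    # Reject anything outside the whitelist, mirroring the original error message
    unexpected = set(encoder_kwargs) - _ALLOWED_GAM_KWARGS
    if unexpected:
        raise ValueError(f'Unexpected kwargs: {sorted(unexpected)} passed to encoder of type {encoder_type}')
    return dict(encoder_kwargs)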
def generate_caseRunConfigurations(self, library): ' Generates caseRunConfigurations for testcases in library relevant to this event\n\n :param library: Library\n :type library: tplib.Library\n :return: CaseRunConfigurations\n :rtype: CaseRunConfigurationsList\n ' caseruns = CaseRunConfigurationsList() for testplan in self.filter_testPlans(library): testplan_configurations = ConfigurationsList(testplan.configurations, merge_method=self.settings.get('library', 'defaultCaseConfigMergeMethod')) for testcase in testplan.verificationTestCases: caserun_configurations = testplan_configurations.merge(testcase.configurations) for configuration in caserun_configurations: caseruns.append(CaseRunConfiguration(testcase, configuration, [testplan])) return caseruns
-6,780,673,543,612,013,000
Generates caseRunConfigurations for testcases in library relevant to this event :param library: Library :type library: tplib.Library :return: CaseRunConfigurations :rtype: CaseRunConfigurationsList
libpermian/events/base.py
generate_caseRunConfigurations
rhinstaller/permian
python
def generate_caseRunConfigurations(self, library): ' Generates caseRunConfigurations for testcases in library relevant to this event\n\n :param library: Library\n :type library: tplib.Library\n :return: CaseRunConfigurations\n :rtype: CaseRunConfigurationsList\n ' caseruns = CaseRunConfigurationsList() for testplan in self.filter_testPlans(library): testplan_configurations = ConfigurationsList(testplan.configurations, merge_method=self.settings.get('library', 'defaultCaseConfigMergeMethod')) for testcase in testplan.verificationTestCases: caserun_configurations = testplan_configurations.merge(testcase.configurations) for configuration in caserun_configurations: caseruns.append(CaseRunConfiguration(testcase, configuration, [testplan])) return caseruns
def handles_testplan_artifact_type(self, artifact_type): '\n Decide if this event is relevant to the provided artifact_type (which\n is found in test plan).\n ' return dotted_startswith(self.type, artifact_type)
2,603,444,795,506,606,600
Decide if this event is relevant to the provided artifact_type (which is found in test plan).
libpermian/events/base.py
handles_testplan_artifact_type
rhinstaller/permian
python
def handles_testplan_artifact_type(self, artifact_type): '\n Decide if this event is relevant to the provided artifact_type (which\n is found in test plan).\n ' return dotted_startswith(self.type, artifact_type)
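dotted_startswith is not defined in this record; below is a plausible implementation, assuming event types form dot-separated hierarchies so that an event of type 'github.push' handles the 'github' artifact type (the exact semantics are an assumption):

def dotted_startswith(value, prefix):
    # 'github.push' starts with 'github' but not with the bare prefix 'git'
    return value == prefix or value.startswith(prefix + '.')

assert dotted_startswith('github.push', 'github')
assert not dotted_startswith('github.push', 'git')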
def filter_testPlans(self, library): ' Filters testplans from the library based on:\n - event type and testplan.artifact_type\n - testplan execute_on filter\n\n :param library: pipeline library\n :type library: tplib.Library\n :return: Filtered testplans\n :rtype: list of tplib.TestPlan\n ' return library.getTestPlansByQuery('event.handles_testplan_artifact_type(tp.artifact_type) and tp.eval_execute_on(event=event)', event=self)
39,500,111,890,816,350
Filters testplans from the library based on:
- event type and testplan.artifact_type
- testplan execute_on filter

:param library: pipeline library
:type library: tplib.Library
:return: Filtered testplans
:rtype: list of tplib.TestPlan
libpermian/events/base.py
filter_testPlans
rhinstaller/permian
python
def filter_testPlans(self, library): return library.getTestPlansByQuery('event.handles_testplan_artifact_type(tp.artifact_type) and tp.eval_execute_on(event=event)', event=self)
@property def additional_testplans_data(self): ' Event can provide additional testplans. Returns python\n dicts, as if they were tplib files read by yaml.safe_load.\n\n :return: list of testplan data\n :rtype: tuple\n ' return None
-4,555,565,715,031,543,000
Event can provide additional testplans. Returns python dicts, as if they were tplib files read by yaml.safe_load. :return: list of testplan data :rtype: tuple
libpermian/events/base.py
additional_testplans_data
rhinstaller/permian
python
@property def additional_testplans_data(self): return None
@property def additional_requrements_data(self): ' Event can provide additional requirements. Returns python\n dicts, as if they were tplib files read by yaml.safe_load.\n\n :return: list of requirements data\n :rtype: tuple\n ' return None
5,101,496,771,720,215,000
Event can provide additional requirements. Returns python
dicts, as if they were tplib files read by yaml.safe_load.

:return: list of requirements data
:rtype: tuple
libpermian/events/base.py
additional_requrements_data
rhinstaller/permian
python
@property def additional_requrements_data(self): return None
@property def additional_testcases_data(self): ' Event can provide additional testcases. Returns python\n dicts, as if they were tplib files read by yaml.safe_load.\n\n :return: list of testcases data\n :rtype: tuple\n ' return None
-3,993,988,197,691,913,000
Event can provide additional testcases. Returns python dicts, as if they were tplib files read by yaml.safe_load. :return: list of testcases data :rtype: tuple
libpermian/events/base.py
additional_testcases_data
rhinstaller/permian
python
@property def additional_testcases_data(self): return None
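The three additional_*_data properties are hooks for subclasses; returning None means the event contributes nothing. A hedged sketch of how a concrete event might override one of them; the placeholder base class and the dict contents are illustrative, not from the permian codebase:

class Event:  # placeholder for libpermian.events.base.Event
    @property
    def additional_testplans_data(self):
        return None

class DemoEvent(Event):
    @property
    def additional_testplans_data(self):
        # Same shape a tplib YAML file would produce via yaml.safe_load.
        return ({'name': 'demo plan', 'artifact_type': 'demo'},)

print(DemoEvent().additional_testplans_data)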
def __init__(self, all_for_sec=None, days=None, per_day=None): '\n Keyword args:\n all_for_sec (int): The length of time to keep the specified snapshots. Measured in seconds.\n days (int): The number of days to keep the snapshots after the `all_for_sec` period has passed.\n per_day (int): The number of snapshots to keep per day after the `all_for_sec` period has passed.\n ' if (all_for_sec is not None): self.all_for_sec = all_for_sec if (days is not None): self.days = days if (per_day is not None): self.per_day = per_day
5,860,685,410,813,140,000
Keyword args: all_for_sec (int): The length of time to keep the specified snapshots. Measured in seconds. days (int): The number of days to keep the snapshots after the `all_for_sec` period has passed. per_day (int): The number of snapshots to keep per day after the `all_for_sec` period has passed.
pypureclient/flasharray/FA_2_1/models/retention_policy.py
__init__
Flav-STOR-WL/py-pure-client
python
def __init__(self, all_for_sec=None, days=None, per_day=None): if (all_for_sec is not None): self.all_for_sec = all_for_sec if (days is not None): self.days = days if (per_day is not None): self.per_day = per_day
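A hedged usage sketch for RetentionPolicy. The keyword arguments follow the docstring above, but the import path is an assumption based on the file's location in py-pure-client; verify it against the installed package:

from pypureclient.flasharray import RetentionPolicy  # assumed import path

policy = RetentionPolicy(all_for_sec=7 * 24 * 3600,  # keep everything for a week
                         days=30,                    # then keep dailies for 30 days
                         per_day=4)                  # at 4 snapshots per day
print(policy.to_dict())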
def to_dict(self): 'Returns the model properties as a dict' result = {} for (attr, _) in six.iteritems(self.swagger_types): if hasattr(self, attr): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(RetentionPolicy, dict): for (key, value) in self.items(): result[key] = value return result
7,346,697,134,304,610,000
Returns the model properties as a dict
pypureclient/flasharray/FA_2_1/models/retention_policy.py
to_dict
Flav-STOR-WL/py-pure-client
python
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.swagger_types): if hasattr(self, attr): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value if issubclass(RetentionPolicy, dict): for (key, value) in self.items(): result[key] = value return result
def to_str(self): 'Returns the string representation of the model' return pprint.pformat(self.to_dict())
5,849,158,643,760,736,000
Returns the string representation of the model
pypureclient/flasharray/FA_2_1/models/retention_policy.py
to_str
Flav-STOR-WL/py-pure-client
python
def to_str(self): return pprint.pformat(self.to_dict())
def __repr__(self): 'For `print` and `pprint`' return self.to_str()
-8,960,031,694,814,905,000
For `print` and `pprint`
pypureclient/flasharray/FA_2_1/models/retention_policy.py
__repr__
Flav-STOR-WL/py-pure-client
python
def __repr__(self): return self.to_str()
def __eq__(self, other): 'Returns true if both objects are equal' if (not isinstance(other, RetentionPolicy)): return False return (self.__dict__ == other.__dict__)
-5,835,544,153,022,462,000
Returns true if both objects are equal
pypureclient/flasharray/FA_2_1/models/retention_policy.py
__eq__
Flav-STOR-WL/py-pure-client
python
def __eq__(self, other): if (not isinstance(other, RetentionPolicy)): return False return (self.__dict__ == other.__dict__)
def __ne__(self, other): 'Returns true if both objects are not equal' return (not (self == other))
7,764,124,047,908,058,000
Returns true if both objects are not equal
pypureclient/flasharray/FA_2_1/models/retention_policy.py
__ne__
Flav-STOR-WL/py-pure-client
python
def __ne__(self, other): return (not (self == other))
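to_dict, to_str, __repr__, __eq__ and __ne__ follow the boilerplate that swagger-codegen emits for every model. A self-contained miniature of the same pattern; TinyModel is illustrative and not part of py-pure-client:

import pprint

class TinyModel:
    swagger_types = {'days': 'int'}  # attribute name -> declared type

    def __init__(self, days=None):
        if days is not None:
            self.days = days

    def to_dict(self):
        # Only attributes that were actually set end up in the dict.
        return {attr: getattr(self, attr) for attr in self.swagger_types if hasattr(self, attr)}

    def to_str(self):
        return pprint.pformat(self.to_dict())

    __repr__ = to_str

    def __eq__(self, other):
        return isinstance(other, TinyModel) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

assert TinyModel(days=7) == TinyModel(days=7)
print(TinyModel(days=7))  # {'days': 7}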
def list(self, group_name: str, service_name: str, project_name: str, task_type: Optional[str]=None, **kwargs) -> AsyncIterable['models.TaskList']: 'Get tasks in a service.\n\n The services resource is the top-level resource that represents the Database Migration Service.\n This method returns a list of tasks owned by a service resource. Some tasks may have a status\n of Unknown, which indicates that an error occurred while querying the status of that task.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_type: Filter tasks by task type.\n :type task_type: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either TaskList or the result of cls(response)\n :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.TaskList]\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' def prepare_request(next_link=None): header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') if (not next_link): url = self.list.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') if (task_type is not None): query_parameters['taskType'] = self._serialize.query('task_type', task_type, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('TaskList', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return ((deserialized.next_link or None), AsyncList(list_of_elem)) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged(get_next, extract_data)
245,612,584,995,217,300
Get tasks in a service. The services resource is the top-level resource that represents the Database Migration Service. This method returns a list of tasks owned by a service resource. Some tasks may have a status of Unknown, which indicates that an error occurred while querying the status of that task. :param group_name: Name of the resource group. :type group_name: str :param service_name: Name of the service. :type service_name: str :param project_name: Name of the project. :type project_name: str :param task_type: Filter tasks by task type. :type task_type: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either TaskList or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.TaskList] :raises: ~azure.core.exceptions.HttpResponseError
src/datamigration/azext_datamigration/vendored_sdks/datamigration/aio/operations/_tasks_operations.py
list
Hamster-Huey/azure-cli-extensions
python
def list(self, group_name: str, service_name: str, project_name: str, task_type: Optional[str]=None, **kwargs) -> AsyncIterable['models.TaskList']: 'Get tasks in a service.\n\n The services resource is the top-level resource that represents the Database Migration Service.\n This method returns a list of tasks owned by a service resource. Some tasks may have a status\n of Unknown, which indicates that an error occurred while querying the status of that task.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_type: Filter tasks by task type.\n :type task_type: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either TaskList or the result of cls(response)\n :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datamigration.models.TaskList]\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' def prepare_request(next_link=None): header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') if (not next_link): url = self.list.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') if (task_type is not None): query_parameters['taskType'] = self._serialize.query('task_type', task_type, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('TaskList', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return ((deserialized.next_link or None), AsyncList(list_of_elem)) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged(get_next, extract_data)
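list returns an AsyncItemPaged, so callers iterate it with async for rather than awaiting it. A hedged usage sketch: the client class name is assumed from the vendored SDK layout, and the resource names are placeholders:

import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datamigration.aio import DataMigrationServiceClient  # assumed client class

async def show_tasks():
    credential = DefaultAzureCredential()
    client = DataMigrationServiceClient(credential, '<subscription-id>')
    # Each page is fetched lazily as the loop advances.
    async for task in client.tasks.list('my-rg', 'my-dms', 'my-project'):
        print(task.name)

asyncio.run(show_tasks())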
async def create_or_update(self, group_name: str, service_name: str, project_name: str, task_name: str, parameters: 'models.ProjectTask', **kwargs) -> 'models.ProjectTask': 'Create or update task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The PUT method creates a new task or updates an existing one, although since tasks\n have no mutable custom properties, there is little reason to update an existing one.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param parameters: Information about the task.\n :type parameters: ~azure.mgmt.datamigration.models.ProjectTask\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' content_type = kwargs.pop('content_type', 'application/json') accept = 'application/json' url = self.create_or_update.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header('content_type', content_type, 'str') header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(parameters, 'ProjectTask') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200, 201]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if (response.status_code == 200): deserialized = self._deserialize('ProjectTask', pipeline_response) if (response.status_code == 201): deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
-2,965,820,772,915,707,000
Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS instance. The PUT method creates a new task or updates an existing one, although since tasks have no mutable custom properties, there is little reason to update an existing one. :param group_name: Name of the resource group. :type group_name: str :param service_name: Name of the service. :type service_name: str :param project_name: Name of the project. :type project_name: str :param task_name: Name of the Task. :type task_name: str :param parameters: Information about the task. :type parameters: ~azure.mgmt.datamigration.models.ProjectTask :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask, or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises: ~azure.core.exceptions.HttpResponseError
src/datamigration/azext_datamigration/vendored_sdks/datamigration/aio/operations/_tasks_operations.py
create_or_update
Hamster-Huey/azure-cli-extensions
python
async def create_or_update(self, group_name: str, service_name: str, project_name: str, task_name: str, parameters: 'models.ProjectTask', **kwargs) -> 'models.ProjectTask': 'Create or update task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The PUT method creates a new task or updates an existing one, although since tasks\n have no mutable custom properties, there is little reason to update an existing one.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param parameters: Information about the task.\n :type parameters: ~azure.mgmt.datamigration.models.ProjectTask\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' content_type = kwargs.pop('content_type', 'application/json') accept = 'application/json' url = self.create_or_update.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header('content_type', content_type, 'str') header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(parameters, 'ProjectTask') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200, 201]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if (response.status_code == 200): deserialized = self._deserialize('ProjectTask', pipeline_response) if (response.status_code == 201): deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
async def get(self, group_name: str, service_name: str, project_name: str, task_name: str, expand: Optional[str]=None, **kwargs) -> 'models.ProjectTask': 'Get task information.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The GET method retrieves information about a task.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param expand: Expand the response.\n :type expand: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' url = self.get.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') if (expand is not None): query_parameters['$expand'] = self._serialize.query('expand', expand, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
4,512,955,439,058,534,000
Get task information. The tasks resource is a nested, proxy-only resource representing work performed by a DMS instance. The GET method retrieves information about a task. :param group_name: Name of the resource group. :type group_name: str :param service_name: Name of the service. :type service_name: str :param project_name: Name of the project. :type project_name: str :param task_name: Name of the Task. :type task_name: str :param expand: Expand the response. :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask, or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises: ~azure.core.exceptions.HttpResponseError
src/datamigration/azext_datamigration/vendored_sdks/datamigration/aio/operations/_tasks_operations.py
get
Hamster-Huey/azure-cli-extensions
python
async def get(self, group_name: str, service_name: str, project_name: str, task_name: str, expand: Optional[str]=None, **kwargs) -> 'models.ProjectTask': 'Get task information.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The GET method retrieves information about a task.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param expand: Expand the response.\n :type expand: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' url = self.get.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') if (expand is not None): query_parameters['$expand'] = self._serialize.query('expand', expand, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
async def delete(self, group_name: str, service_name: str, project_name: str, task_name: str, delete_running_tasks: Optional[bool]=None, **kwargs) -> None: "Delete task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The DELETE method deletes a task, canceling it first if it's running.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param delete_running_tasks: Delete the resource even if it contains running tasks.\n :type delete_running_tasks: bool\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: None, or the result of cls(response)\n :rtype: None\n :raises: ~azure.core.exceptions.HttpResponseError\n " cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' url = self.delete.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') if (delete_running_tasks is not None): query_parameters['deleteRunningTasks'] = self._serialize.query('delete_running_tasks', delete_running_tasks, 'bool') header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200, 204]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {})
4,686,494,169,580,922,000
Delete task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS instance. The DELETE method deletes a task, canceling it first if it's running. :param group_name: Name of the resource group. :type group_name: str :param service_name: Name of the service. :type service_name: str :param project_name: Name of the project. :type project_name: str :param task_name: Name of the Task. :type task_name: str :param delete_running_tasks: Delete the resource even if it contains running tasks. :type delete_running_tasks: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError
src/datamigration/azext_datamigration/vendored_sdks/datamigration/aio/operations/_tasks_operations.py
delete
Hamster-Huey/azure-cli-extensions
python
async def delete(self, group_name: str, service_name: str, project_name: str, task_name: str, delete_running_tasks: Optional[bool]=None, **kwargs) -> None: "Delete task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The DELETE method deletes a task, canceling it first if it's running.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param delete_running_tasks: Delete the resource even if it contains running tasks.\n :type delete_running_tasks: bool\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: None, or the result of cls(response)\n :rtype: None\n :raises: ~azure.core.exceptions.HttpResponseError\n " cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' url = self.delete.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') if (delete_running_tasks is not None): query_parameters['deleteRunningTasks'] = self._serialize.query('delete_running_tasks', delete_running_tasks, 'bool') header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200, 204]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {})
async def update(self, group_name: str, service_name: str, project_name: str, task_name: str, parameters: 'models.ProjectTask', **kwargs) -> 'models.ProjectTask': 'Create or update task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The PATCH method updates an existing task, but since tasks have no mutable custom\n properties, there is little reason to do so.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param parameters: Information about the task.\n :type parameters: ~azure.mgmt.datamigration.models.ProjectTask\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' content_type = kwargs.pop('content_type', 'application/json') accept = 'application/json' url = self.update.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header('content_type', content_type, 'str') header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(parameters, 'ProjectTask') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
-7,988,383,540,604,401,000
Create or update task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS instance. The PATCH method updates an existing task, but since tasks have no mutable custom properties, there is little reason to do so. :param group_name: Name of the resource group. :type group_name: str :param service_name: Name of the service. :type service_name: str :param project_name: Name of the project. :type project_name: str :param task_name: Name of the Task. :type task_name: str :param parameters: Information about the task. :type parameters: ~azure.mgmt.datamigration.models.ProjectTask :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask, or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises: ~azure.core.exceptions.HttpResponseError
src/datamigration/azext_datamigration/vendored_sdks/datamigration/aio/operations/_tasks_operations.py
update
Hamster-Huey/azure-cli-extensions
python
async def update(self, group_name: str, service_name: str, project_name: str, task_name: str, parameters: 'models.ProjectTask', **kwargs) -> 'models.ProjectTask': 'Create or update task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. The PATCH method updates an existing task, but since tasks have no mutable custom\n properties, there is little reason to do so.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param parameters: Information about the task.\n :type parameters: ~azure.mgmt.datamigration.models.ProjectTask\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' content_type = kwargs.pop('content_type', 'application/json') accept = 'application/json' url = self.update.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header('content_type', content_type, 'str') header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(parameters, 'ProjectTask') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
async def cancel(self, group_name: str, service_name: str, project_name: str, task_name: str, **kwargs) -> 'models.ProjectTask': "Cancel a task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. This method cancels a task if it's currently queued or running.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n " cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' url = self.cancel.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
9,121,790,174,815,307,000
Cancel a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS instance. This method cancels a task if it's currently queued or running. :param group_name: Name of the resource group. :type group_name: str :param service_name: Name of the service. :type service_name: str :param project_name: Name of the project. :type project_name: str :param task_name: Name of the Task. :type task_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ProjectTask, or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.ProjectTask :raises: ~azure.core.exceptions.HttpResponseError
src/datamigration/azext_datamigration/vendored_sdks/datamigration/aio/operations/_tasks_operations.py
cancel
Hamster-Huey/azure-cli-extensions
python
async def cancel(self, group_name: str, service_name: str, project_name: str, task_name: str, **kwargs) -> 'models.ProjectTask': "Cancel a task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. This method cancels a task if it's currently queued or running.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ProjectTask, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.ProjectTask\n :raises: ~azure.core.exceptions.HttpResponseError\n " cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' accept = 'application/json' url = self.cancel.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProjectTask', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
async def command(self, group_name: str, service_name: str, project_name: str, task_name: str, parameters: 'models.CommandProperties', **kwargs) -> 'models.CommandProperties': 'Execute a command on a task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. This method executes a command on a running task.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param parameters: Command to execute.\n :type parameters: ~azure.mgmt.datamigration.models.CommandProperties\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: CommandProperties, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.CommandProperties\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' content_type = kwargs.pop('content_type', 'application/json') accept = 'application/json' url = self.command.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header('content_type', content_type, 'str') header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(parameters, 'CommandProperties') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('CommandProperties', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
6,424,933,398,628,367,000
Execute a command on a task. The tasks resource is a nested, proxy-only resource representing work performed by a DMS instance. This method executes a command on a running task. :param group_name: Name of the resource group. :type group_name: str :param service_name: Name of the service. :type service_name: str :param project_name: Name of the project. :type project_name: str :param task_name: Name of the Task. :type task_name: str :param parameters: Command to execute. :type parameters: ~azure.mgmt.datamigration.models.CommandProperties :keyword callable cls: A custom type or function that will be passed the direct response :return: CommandProperties, or the result of cls(response) :rtype: ~azure.mgmt.datamigration.models.CommandProperties :raises: ~azure.core.exceptions.HttpResponseError
src/datamigration/azext_datamigration/vendored_sdks/datamigration/aio/operations/_tasks_operations.py
command
Hamster-Huey/azure-cli-extensions
python
async def command(self, group_name: str, service_name: str, project_name: str, task_name: str, parameters: 'models.CommandProperties', **kwargs) -> 'models.CommandProperties': 'Execute a command on a task.\n\n The tasks resource is a nested, proxy-only resource representing work performed by a DMS\n instance. This method executes a command on a running task.\n\n :param group_name: Name of the resource group.\n :type group_name: str\n :param service_name: Name of the service.\n :type service_name: str\n :param project_name: Name of the project.\n :type project_name: str\n :param task_name: Name of the Task.\n :type task_name: str\n :param parameters: Command to execute.\n :type parameters: ~azure.mgmt.datamigration.models.CommandProperties\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: CommandProperties, or the result of cls(response)\n :rtype: ~azure.mgmt.datamigration.models.CommandProperties\n :raises: ~azure.core.exceptions.HttpResponseError\n ' cls = kwargs.pop('cls', None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop('error_map', {})) api_version = '2021-10-30-preview' content_type = kwargs.pop('content_type', 'application/json') accept = 'application/json' url = self.command.metadata['url'] path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'groupName': self._serialize.url('group_name', group_name, 'str'), 'serviceName': self._serialize.url('service_name', service_name, 'str'), 'projectName': self._serialize.url('project_name', project_name, 'str'), 'taskName': self._serialize.url('task_name', task_name, 'str')} url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header('content_type', content_type, 'str') header_parameters['Accept'] = self._serialize.header('accept', accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(parameters, 'CommandProperties') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs)) response = pipeline_response.http_response if (response.status_code not in [200]): map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.ApiError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('CommandProperties', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
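A hedged sketch tying the task operations together: cancel a running task, then delete it while forcing removal of running tasks. It continues the hypothetical client from the listing example above, and the attribute path task.properties.state is an assumption about the ProjectTask model:

async def stop_and_remove(client):
    task = await client.tasks.cancel('my-rg', 'my-dms', 'my-project', 'my-task')
    print(task.properties.state)  # ProjectTask returned by cancel
    await client.tasks.delete('my-rg', 'my-dms', 'my-project', 'my-task',
                              delete_running_tasks=True)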
def _maybe_create_dir(path): "Create directory (and parents) if they don't exist." try: os.makedirs(path) except OSError: if (not os.path.isdir(path)): raise
6,105,453,633,667,462,000
Create directory (and parents) if they don't exist.
parcels/scripts/get_examples.py
_maybe_create_dir
becgorton/parcels
python
def _maybe_create_dir(path): try: os.makedirs(path) except OSError: if (not os.path.isdir(path)): raise
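The try/except around os.makedirs is the Python 2 compatible way to tolerate an existing directory; on Python 3.2+ the same effect is a single call:

import os

# Creates parents and ignores an existing directory, but still raises
# if a non-directory file already occupies the path.
os.makedirs('some/target/path', exist_ok=True)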
def copy_data_and_examples_from_package_to(target_path): 'Copy example data from the Parcels directory.\n\n Return those paths of the list `file_names` that were not found in the\n package.\n\n ' examples_in_package = pkg_resources.resource_filename('parcels', 'examples') try: shutil.copytree(examples_in_package, target_path) except Exception as e: print(e) pass
7,610,559,079,219,055,000
Copy example data from the Parcels directory.

Return those paths of the list `file_names` that were not found in the
package.
parcels/scripts/get_examples.py
copy_data_and_examples_from_package_to
becgorton/parcels
python
def copy_data_and_examples_from_package_to(target_path): examples_in_package = pkg_resources.resource_filename('parcels', 'examples') try: shutil.copytree(examples_in_package, target_path) except Exception as e: print(e) pass
def set_jupyter_kernel_to_python_version(path, python_version=2): 'Set notebook kernelspec to desired python version.\n\n This also drops all other metadata from the notebook.\n ' for file_name in glob(os.path.join(path, '*.ipynb')): with open(file_name, 'r') as f: notebook_data = json.load(f) notebook_data['metadata'] = {'kernelspec': {'display_name': 'Python {}'.format(python_version), 'language': 'python', 'name': 'python{}'.format(python_version)}} with open(file_name, 'w') as f: json.dump(notebook_data, f, indent=2)
1,613,269,214,994,440,000
Set notebook kernelspec to desired python version.

This also drops all other metadata from the notebook.
parcels/scripts/get_examples.py
set_jupyter_kernel_to_python_version
becgorton/parcels
python
def set_jupyter_kernel_to_python_version(path, python_version=2): for file_name in glob(os.path.join(path, '*.ipynb')): with open(file_name, 'r') as f: notebook_data = json.load(f) notebook_data['metadata'] = {'kernelspec': {'display_name': 'Python {}'.format(python_version), 'language': 'python', 'name': 'python{}'.format(python_version)}} with open(file_name, 'w') as f: json.dump(notebook_data, f, indent=2)
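After set_jupyter_kernel_to_python_version runs, every notebook's metadata is reduced to a single kernelspec block. For python_version=3 the rewritten field is:

{'kernelspec': {'display_name': 'Python 3',
                'language': 'python',
                'name': 'python3'}}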
def _still_to_download(file_names, target_path): 'Only return the files that are not yet present on disk.' for fn in list(file_names): if os.path.exists(os.path.join(target_path, fn)): file_names.remove(fn) return file_names
8,455,357,851,695,611,000
Only return the files that are not yet present on disk.
parcels/scripts/get_examples.py
_still_to_download
becgorton/parcels
python
def _still_to_download(file_names, target_path): for fn in list(file_names): if os.path.exists(os.path.join(target_path, fn)): file_names.remove(fn) return file_names
def download_files(source_url, file_names, target_path): 'Mirror file_names from source_url to target_path.' _maybe_create_dir(target_path) pbar = ProgressBar() print(('Downloading %s ...' % source_url.split('/')[(- 1)])) for filename in pbar(file_names): _maybe_create_dir(os.path.join(target_path, os.path.dirname(filename))) if (not os.path.exists(os.path.join(target_path, filename))): download_url = ((source_url + '/') + filename) src = urlopen(download_url) with open(os.path.join(target_path, filename), 'wb') as dst: dst.write(src.read())
-9,082,911,184,324,230,000
Mirror file_names from source_url to target_path.
parcels/scripts/get_examples.py
download_files
becgorton/parcels
python
def download_files(source_url, file_names, target_path): _maybe_create_dir(target_path) pbar = ProgressBar() print(('Downloading %s ...' % source_url.split('/')[(- 1)])) for filename in pbar(file_names): _maybe_create_dir(os.path.join(target_path, os.path.dirname(filename))) if (not os.path.exists(os.path.join(target_path, filename))): download_url = ((source_url + '/') + filename) src = urlopen(download_url) with open(os.path.join(target_path, filename), 'wb') as dst: dst.write(src.read())
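A hedged usage sketch; the URL and file list below are placeholders, not the project's real example-data server:

source = 'https://example.org/parcels-data'            # hypothetical mirror
wanted = ['GlobCurrent/file_000.nc', 'GlobCurrent/file_001.nc']
download_files(source, wanted, 'my_parcels_examples')
# Files already present under the target path are skipped.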
def main(target_path=None): 'Get example scripts, example notebooks, and example data.\n\n Copy the examples from the package directory and get the example data either\n from the package directory or from the Parcels website.\n ' if (target_path is None): parser = argparse.ArgumentParser(description='Get Parcels example data.') parser.add_argument('target_path', help='Where to put the tutorials? (This path will be created.)') args = parser.parse_args() target_path = args.target_path if os.path.exists(target_path): print('Error: {} already exists.'.format(target_path)) return copy_data_and_examples_from_package_to(target_path) set_jupyter_kernel_to_python_version(target_path, python_version=sys.version_info[0]) remaining_example_data_files = _still_to_download(example_data_files, target_path) download_files(example_data_url, remaining_example_data_files, target_path)
37,062,962,504,163,710
Get example scripts, example notebooks, and example data. Copy the examples from the package directory and get the example data either from the package directory or from the Parcels website.
parcels/scripts/get_examples.py
main
becgorton/parcels
python
def main(target_path=None): if (target_path is None): parser = argparse.ArgumentParser(description='Get Parcels example data.') parser.add_argument('target_path', help='Where to put the tutorials? (This path will be created.)') args = parser.parse_args() target_path = args.target_path if os.path.exists(target_path): print('Error: {} already exists.'.format(target_path)) return copy_data_and_examples_from_package_to(target_path) set_jupyter_kernel_to_python_version(target_path, python_version=sys.version_info[0]) remaining_example_data_files = _still_to_download(example_data_files, target_path) download_files(example_data_url, remaining_example_data_files, target_path)
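Putting it together, the entry point can be driven from Python or from a shell; the console-script name below is a guess at the installed entry point, so treat it as illustrative:

# From Python:
main(target_path='parcels_examples')

# From a shell (hypothetical console-script name):
#   parcels_get_examples parcels_examples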
def _WatchBucket(self): 'Creates a watch on a bucket given in self.args.' self.CheckArguments() identifier = None client_token = None if self.sub_opts: for (o, a) in self.sub_opts: if (o == '-i'): identifier = a if (o == '-t'): client_token = a identifier = (identifier or str(uuid.uuid4())) watch_url = self.args[0] bucket_arg = self.args[(- 1)] if (not watch_url.lower().startswith('https://')): raise CommandException('The application URL must be an https:// URL.') bucket_url = StorageUrlFromString(bucket_arg) if (not (bucket_url.IsBucket() and (bucket_url.scheme == 'gs'))): raise CommandException(('The %s command can only be used with gs:// bucket URLs.' % self.command_name)) if (not bucket_url.IsBucket()): raise CommandException(('URL must name a bucket for the %s command.' % self.command_name)) self.logger.info('Watching bucket %s with application URL %s ...', bucket_url, watch_url) try: channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name, watch_url, identifier, token=client_token, provider=bucket_url.scheme) except AccessDeniedException as e: self.logger.warn(NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(watch_error=str(e), watch_url=watch_url)) raise channel_id = channel.id resource_id = channel.resourceId client_token = channel.token self.logger.info('Successfully created watch notification channel.') self.logger.info('Watch channel identifier: %s', channel_id) self.logger.info('Canonicalized resource identifier: %s', resource_id) self.logger.info('Client state token: %s', client_token) return 0
-8,251,014,348,727,324,000
Creates a watch on a bucket given in self.args.
gslib/commands/notification.py
_WatchBucket
BobiGilburd/gsutil
python
def _WatchBucket(self): self.CheckArguments() identifier = None client_token = None if self.sub_opts: for (o, a) in self.sub_opts: if (o == '-i'): identifier = a if (o == '-t'): client_token = a identifier = (identifier or str(uuid.uuid4())) watch_url = self.args[0] bucket_arg = self.args[(- 1)] if (not watch_url.lower().startswith('https://')): raise CommandException('The application URL must be an https:// URL.') bucket_url = StorageUrlFromString(bucket_arg) if (not (bucket_url.IsBucket() and (bucket_url.scheme == 'gs'))): raise CommandException(('The %s command can only be used with gs:// bucket URLs.' % self.command_name)) if (not bucket_url.IsBucket()): raise CommandException(('URL must name a bucket for the %s command.' % self.command_name)) self.logger.info('Watching bucket %s with application URL %s ...', bucket_url, watch_url) try: channel = self.gsutil_api.WatchBucket(bucket_url.bucket_name, watch_url, identifier, token=client_token, provider=bucket_url.scheme) except AccessDeniedException as e: self.logger.warn(NOTIFICATION_AUTHORIZATION_FAILED_MESSAGE.format(watch_error=str(e), watch_url=watch_url)) raise channel_id = channel.id resource_id = channel.resourceId client_token = channel.token self.logger.info('Successfully created watch notification channel.') self.logger.info('Watch channel identifier: %s', channel_id) self.logger.info('Canonicalized resource identifier: %s', resource_id) self.logger.info('Client state token: %s', client_token) return 0
def _ListChannels(self, bucket_arg): 'Lists active channel watches on a bucket given in self.args.' bucket_url = StorageUrlFromString(bucket_arg) if (not (bucket_url.IsBucket() and (bucket_url.scheme == 'gs'))): raise CommandException(('The %s command can only be used with gs:// bucket URLs.' % self.command_name)) if (not bucket_url.IsBucket()): raise CommandException(('URL must name a bucket for the %s command.' % self.command_name)) channels = self.gsutil_api.ListChannels(bucket_url.bucket_name, provider='gs').items self.logger.info('Bucket %s has the following active Object Change Notifications:', bucket_url.bucket_name) for (idx, channel) in enumerate(channels): self.logger.info('\tNotification channel %d:', (idx + 1)) self.logger.info('\t\tChannel identifier: %s', channel.channel_id) self.logger.info('\t\tResource identifier: %s', channel.resource_id) self.logger.info('\t\tApplication URL: %s', channel.push_url) self.logger.info('\t\tCreated by: %s', channel.subscriber_email) self.logger.info('\t\tCreation time: %s', str(datetime.fromtimestamp((channel.creation_time_ms / 1000)))) return 0
6,163,886,225,114,070,000
Lists active channel watches on a bucket given in self.args.
gslib/commands/notification.py
_ListChannels
BobiGilburd/gsutil
python
def _ListChannels(self, bucket_arg): bucket_url = StorageUrlFromString(bucket_arg) if (not (bucket_url.IsBucket() and (bucket_url.scheme == 'gs'))): raise CommandException(('The %s command can only be used with gs:// bucket URLs.' % self.command_name)) if (not bucket_url.IsBucket()): raise CommandException(('URL must name a bucket for the %s command.' % self.command_name)) channels = self.gsutil_api.ListChannels(bucket_url.bucket_name, provider='gs').items self.logger.info('Bucket %s has the following active Object Change Notifications:', bucket_url.bucket_name) for (idx, channel) in enumerate(channels): self.logger.info('\tNotification channel %d:', (idx + 1)) self.logger.info('\t\tChannel identifier: %s', channel.channel_id) self.logger.info('\t\tResource identifier: %s', channel.resource_id) self.logger.info('\t\tApplication URL: %s', channel.push_url) self.logger.info('\t\tCreated by: %s', channel.subscriber_email) self.logger.info('\t\tCreation time: %s', str(datetime.fromtimestamp((channel.creation_time_ms / 1000)))) return 0
def _CreateTopic(self, pubsub_topic, service_account): 'Assures that a topic exists, creating it if necessary.\n\n Also adds GCS as a publisher on that bucket, if necessary.\n\n Args:\n pubsub_topic: name of the Cloud Pub/Sub topic to use/create.\n service_account: the GCS service account that needs publish permission.\n\n Returns:\n true if we modified IAM permissions, otherwise false.\n ' pubsub_api = PubsubApi(logger=self.logger) try: pubsub_api.GetTopic(topic_name=pubsub_topic) self.logger.debug('Topic %s already exists', pubsub_topic) except NotFoundException: self.logger.debug('Creating topic %s', pubsub_topic) pubsub_api.CreateTopic(topic_name=pubsub_topic) self.logger.info('Created Cloud Pub/Sub topic %s', pubsub_topic) policy = pubsub_api.GetTopicIamPolicy(topic_name=pubsub_topic) binding = Binding(role='roles/pubsub.publisher', members=[('serviceAccount:%s' % service_account)]) if (binding not in policy.bindings): policy.bindings.append(binding) pubsub_api.SetTopicIamPolicy(topic_name=pubsub_topic, policy=policy) return True else: self.logger.debug('GCS already has publish permission to topic %s.', pubsub_topic) return False
-5,651,226,351,373,527,000
Assures that a topic exists, creating it if necessary. Also adds GCS as a publisher on that bucket, if necessary. Args: pubsub_topic: name of the Cloud Pub/Sub topic to use/create. service_account: the GCS service account that needs publish permission. Returns: true if we modified IAM permissions, otherwise false.
gslib/commands/notification.py
_CreateTopic
BobiGilburd/gsutil
python
def _CreateTopic(self, pubsub_topic, service_account): pubsub_api = PubsubApi(logger=self.logger) try: pubsub_api.GetTopic(topic_name=pubsub_topic) self.logger.debug('Topic %s already exists', pubsub_topic) except NotFoundException: self.logger.debug('Creating topic %s', pubsub_topic) pubsub_api.CreateTopic(topic_name=pubsub_topic) self.logger.info('Created Cloud Pub/Sub topic %s', pubsub_topic) policy = pubsub_api.GetTopicIamPolicy(topic_name=pubsub_topic) binding = Binding(role='roles/pubsub.publisher', members=[('serviceAccount:%s' % service_account)]) if (binding not in policy.bindings): policy.bindings.append(binding) pubsub_api.SetTopicIamPolicy(topic_name=pubsub_topic, policy=policy) return True else: self.logger.debug('GCS already has publish permission to topic %s.', pubsub_topic) return False
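The get-or-create-then-bind flow above is a reusable idempotency pattern; a standalone sketch against a stubbed client, not the real PubsubApi:

def ensure_publisher(api, topic, service_account):
    # Hypothetical stand-in mirroring _CreateTopic's control flow.
    try:
        api.GetTopic(topic_name=topic)          # topic already there?
    except NotFoundException:
        api.CreateTopic(topic_name=topic)       # create it if not
    policy = api.GetTopicIamPolicy(topic_name=topic)
    binding = Binding(role='roles/pubsub.publisher',
                      members=['serviceAccount:%s' % service_account])
    if binding not in policy.bindings:
        policy.bindings.append(binding)
        api.SetTopicIamPolicy(topic_name=topic, policy=policy)
        return True                             # permissions were modified
    return False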
def _EnumerateNotificationsFromArgs(self, accept_notification_configs=True): 'Yields bucket/notification tuples from command-line args.\n\n Given a list of strings that are bucket names (gs://foo) or notification\n config IDs, yield tuples of bucket names and their associated notifications.\n\n Args:\n accept_notification_configs: whether notification configs are valid args.\n Yields:\n Tuples of the form (bucket_name, Notification)\n ' path_regex = self._GetNotificationPathRegex() for list_entry in self.args: match = path_regex.match(list_entry) if match: if (not accept_notification_configs): raise CommandException(('%s %s accepts only bucket names, but you provided %s' % (self.command_name, self.subcommand_name, list_entry))) bucket_name = match.group('bucket') notification_id = match.group('notification') found = False for notification in self.gsutil_api.ListNotificationConfigs(bucket_name, provider='gs'): if (notification.id == notification_id): (yield (bucket_name, notification)) found = True break if (not found): raise NotFoundException(('Could not find notification %s' % list_entry)) else: storage_url = StorageUrlFromString(list_entry) if (not storage_url.IsCloudUrl()): raise CommandException(('The %s command must be used on cloud buckets or notification config names.' % self.command_name)) if (storage_url.scheme != 'gs'): raise CommandException('The %s command only works on gs:// buckets.') path = None if storage_url.IsProvider(): path = 'gs://*' elif storage_url.IsBucket(): path = list_entry if (not path): raise CommandException(('The %s command cannot be used on cloud objects, only buckets' % self.command_name)) for blr in self.WildcardIterator(path).IterBuckets(bucket_fields=['id']): for notification in self.gsutil_api.ListNotificationConfigs(blr.storage_url.bucket_name, provider='gs'): (yield (blr.storage_url.bucket_name, notification))
-556,215,268,115,043,140
Yields bucket/notification tuples from command-line args. Given a list of strings that are bucket names (gs://foo) or notification config IDs, yield tuples of bucket names and their associated notifications. Args: accept_notification_configs: whether notification configs are valid args. Yields: Tuples of the form (bucket_name, Notification)
gslib/commands/notification.py
_EnumerateNotificationsFromArgs
BobiGilburd/gsutil
python
def _EnumerateNotificationsFromArgs(self, accept_notification_configs=True): path_regex = self._GetNotificationPathRegex() for list_entry in self.args: match = path_regex.match(list_entry) if match: if (not accept_notification_configs): raise CommandException(('%s %s accepts only bucket names, but you provided %s' % (self.command_name, self.subcommand_name, list_entry))) bucket_name = match.group('bucket') notification_id = match.group('notification') found = False for notification in self.gsutil_api.ListNotificationConfigs(bucket_name, provider='gs'): if (notification.id == notification_id): (yield (bucket_name, notification)) found = True break if (not found): raise NotFoundException(('Could not find notification %s' % list_entry)) else: storage_url = StorageUrlFromString(list_entry) if (not storage_url.IsCloudUrl()): raise CommandException(('The %s command must be used on cloud buckets or notification config names.' % self.command_name)) if (storage_url.scheme != 'gs'): raise CommandException('The %s command only works on gs:// buckets.') path = None if storage_url.IsProvider(): path = 'gs://*' elif storage_url.IsBucket(): path = list_entry if (not path): raise CommandException(('The %s command cannot be used on cloud objects, only buckets' % self.command_name)) for blr in self.WildcardIterator(path).IterBuckets(bucket_fields=['id']): for notification in self.gsutil_api.ListNotificationConfigs(blr.storage_url.bucket_name, provider='gs'): (yield (blr.storage_url.bucket_name, notification))
def RunCommand(self): 'Command entry point for the notification command.' self.subcommand_name = self.args.pop(0) if (self.subcommand_name in NotificationCommand.SUBCOMMANDS): metrics.LogCommandParams(subcommands=[self.subcommand_name]) return self._RunSubCommand(NotificationCommand.SUBCOMMANDS[self.subcommand_name]) else: raise CommandException(('Invalid subcommand "%s" for the %s command.' % (self.subcommand_name, self.command_name)))
5,564,108,628,043,477,000
Command entry point for the notification command.
gslib/commands/notification.py
RunCommand
BobiGilburd/gsutil
python
def RunCommand(self): self.subcommand_name = self.args.pop(0) if (self.subcommand_name in NotificationCommand.SUBCOMMANDS): metrics.LogCommandParams(subcommands=[self.subcommand_name]) return self._RunSubCommand(NotificationCommand.SUBCOMMANDS[self.subcommand_name]) else: raise CommandException(('Invalid subcommand "%s" for the %s command.' % (self.subcommand_name, self.command_name)))
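The dispatch above relies on a class-level SUBCOMMANDS mapping from name to handler; a minimal self-contained sketch of the same pattern (all names here are invented):

class MiniCommand(object):
    SUBCOMMANDS = {}   # hypothetical dispatch table: name -> handler

    def RunCommand(self, args):
        name = args.pop(0)
        handler = self.SUBCOMMANDS.get(name)
        if handler is None:
            raise ValueError('Invalid subcommand "%s"' % name)
        return handler(self, args)

def _list(self, args):
    print('listing', args)
    return 0

MiniCommand.SUBCOMMANDS['list'] = _list
MiniCommand().RunCommand(['list', 'gs://bucket'])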
def plot_plane(ax, distances: list, z_coords: list, label: str=None, decorate: bool=True, show_half: bool=False, **kwargs): '\n Plot plane.\n\n Args:\n ax: matplotlib ax.\n distances (list): List of plane intervals.\n z_coords (list): List of z coordinate of each plane.\n label (str): Plot label.\n decorate (bool): If True, ax is decorated.\n show_half: If True, atom planes which are periodically equivalent are\n not shown.\n ' if decorate: xlabel = 'Distance' ylabel = 'Height' else: xlabel = ylabel = None _distances = deepcopy(distances) _z_coords = deepcopy(z_coords) _distances.insert(0, distances[(- 1)]) _distances.append(distances[0]) _z_coords.insert(0, (- distances[(- 1)])) _z_coords.append((z_coords[(- 1)] + distances[0])) c = np.sum(distances) fixed_z_coords = ((np.array(_z_coords) + (distances[0] / 2)) - (c / 2)) num = len(fixed_z_coords) bulk_distance = _distances[int((num / 4))] if show_half: n = int(((num + 2) / 4)) _distances = _distances[n:(3 * n)] fixed_z_coords = fixed_z_coords[n:(3 * n)] line_chart(ax=ax, xdata=_distances, ydata=fixed_z_coords, xlabel=xlabel, ylabel=ylabel, label=label, sort_by='y', **kwargs) if decorate: xmin = (bulk_distance - 0.025) xmax = (bulk_distance + 0.025) if show_half: ax.hlines(0, xmin=(xmin - 0.01), xmax=(xmax + 0.01), linestyle='--', color='k', linewidth=1.0) else: tb_idx = [1, int((num / 2)), (num - 1)] for idx in tb_idx: ax.hlines((fixed_z_coords[idx] - (distances[0] / 2)), xmin=(xmin - 0.01), xmax=(xmax + 0.01), linestyle='--', color='k', linewidth=1.0)
4,762,202,496,574,044,000
Plot plane. Args: ax: matplotlib ax. distances (list): List of plane intervals. z_coords (list): List of z coordinate of each plane. label (str): Plot label. decorate (bool): If True, ax is decorated. show_half: If True, atom planes which are periodically equivalent are not shown.
twinpy/plot/twinboundary.py
plot_plane
kei0822kei/twinpy
python
def plot_plane(ax, distances: list, z_coords: list, label: str=None, decorate: bool=True, show_half: bool=False, **kwargs): if decorate: xlabel = 'Distance' ylabel = 'Height' else: xlabel = ylabel = None _distances = deepcopy(distances) _z_coords = deepcopy(z_coords) _distances.insert(0, distances[(- 1)]) _distances.append(distances[0]) _z_coords.insert(0, (- distances[(- 1)])) _z_coords.append((z_coords[(- 1)] + distances[0])) c = np.sum(distances) fixed_z_coords = ((np.array(_z_coords) + (distances[0] / 2)) - (c / 2)) num = len(fixed_z_coords) bulk_distance = _distances[int((num / 4))] if show_half: n = int(((num + 2) / 4)) _distances = _distances[n:(3 * n)] fixed_z_coords = fixed_z_coords[n:(3 * n)] line_chart(ax=ax, xdata=_distances, ydata=fixed_z_coords, xlabel=xlabel, ylabel=ylabel, label=label, sort_by='y', **kwargs) if decorate: xmin = (bulk_distance - 0.025) xmax = (bulk_distance + 0.025) if show_half: ax.hlines(0, xmin=(xmin - 0.01), xmax=(xmax + 0.01), linestyle='--', color='k', linewidth=1.0) else: tb_idx = [1, int((num / 2)), (num - 1)] for idx in tb_idx: ax.hlines((fixed_z_coords[idx] - (distances[0] / 2)), xmin=(xmin - 0.01), xmax=(xmax + 0.01), linestyle='--', color='k', linewidth=1.0)
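A hedged matplotlib usage sketch; the spacings and heights are invented, and it assumes the module's line_chart helper imports cleanly:

import numpy as np
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
# Invented twin-boundary plane spacings and z coordinates.
distances = [2.3, 2.4, 2.3, 2.4, 2.3, 2.4, 2.3, 2.4]
z_coords = list(np.cumsum([0.0] + distances[:-1]))
plot_plane(ax, distances=distances, z_coords=z_coords, label='relaxed')
plt.show()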
def plot_angle(ax, angles: list, z_coords: list, label: str=None, decorate: bool=True): '\n Plot angle.\n\n Args:\n ax: matplotlib ax.\n angles (list): List of angles of each plane.\n z_coords (list): List of z coordinate of each plane.\n label (str): Plot label.\n decorate (bool): If True, ax is decorated.\n ' if decorate: xlabel = 'Angle' ylabel = 'Height' else: xlabel = ylabel = None _angles = deepcopy(angles) _z_coords = deepcopy(z_coords) _angles.append(angles[0]) _z_coords.append((z_coords[(- 1)] + z_coords[1])) line_chart(ax=ax, xdata=_angles, ydata=_z_coords, xlabel=xlabel, ylabel=ylabel, label=label, sort_by='y') if decorate: num = len(_z_coords) tb_idx = [0, int((num / 2)), (num - 1)] bulk_angle = angles[int((num / 4))] for idx in tb_idx: ax.hlines(_z_coords[idx], xmin=(- 1), xmax=(bulk_angle + 2), linestyle='--', linewidth=1.5)
8,997,156,145,619,759,000
Plot angle. Args: ax: matplotlib ax. angles (list): List of angles of each plane. z_coords (list): List of z coordinate of each plane. label (str): Plot label. decorate (bool): If True, ax is decorated.
twinpy/plot/twinboundary.py
plot_angle
kei0822kei/twinpy
python
def plot_angle(ax, angles: list, z_coords: list, label: str=None, decorate: bool=True): if decorate: xlabel = 'Angle' ylabel = 'Height' else: xlabel = ylabel = None _angles = deepcopy(angles) _z_coords = deepcopy(z_coords) _angles.append(angles[0]) _z_coords.append((z_coords[(- 1)] + z_coords[1])) line_chart(ax=ax, xdata=_angles, ydata=_z_coords, xlabel=xlabel, ylabel=ylabel, label=label, sort_by='y') if decorate: num = len(_z_coords) tb_idx = [0, int((num / 2)), (num - 1)] bulk_angle = angles[int((num / 4))] for idx in tb_idx: ax.hlines(_z_coords[idx], xmin=(- 1), xmax=(bulk_angle + 2), linestyle='--', linewidth=1.5)
def plot_pair_distance(ax, pair_distances: list, z_coords: list, label: str=None, decorate: bool=True): '\n Plot pair distance.\n\n Args:\n ax: matplotlib ax.\n pair_distances (list): List of A-B pair distances, which is originally\n primitive pair in HCP structure.\n z_coords (list): List of z coordinate of each plane.\n label (str): Plot label.\n decorate (bool): If True, ax is decorated.\n ' if decorate: xlabel = 'Pair Distance' ylabel = 'Height' else: xlabel = ylabel = None _pair_distances = deepcopy(pair_distances) _z_coords = deepcopy(z_coords) _pair_distances.append(pair_distances[0]) _z_coords.append((z_coords[(- 1)] + z_coords[1])) line_chart(ax=ax, xdata=_pair_distances, ydata=_z_coords, xlabel=xlabel, ylabel=ylabel, label=label, sort_by='y') if decorate: num = len(_z_coords) tb_idx = [0, int((num / 2)), (num - 1)] bulk_pair_distance = pair_distances[int((num / 4))] for idx in tb_idx: ax.hlines(_z_coords[idx], xmin=(- 1), xmax=(bulk_pair_distance + 2), linestyle='--', linewidth=1.5)
999,060,658,695,816,800
Plot pair distance. Args: ax: matplotlib ax. pair_distances (list): List of A-B pair distances, which is originally primitive pair in HCP structure. z_coords (list): List of z coordinate of each plane. label (str): Plot label. decorate (bool): If True, ax is decorated.
twinpy/plot/twinboundary.py
plot_pair_distance
kei0822kei/twinpy
python
def plot_pair_distance(ax, pair_distances: list, z_coords: list, label: str=None, decorate: bool=True): if decorate: xlabel = 'Pair Distance' ylabel = 'Height' else: xlabel = ylabel = None _pair_distances = deepcopy(pair_distances) _z_coords = deepcopy(z_coords) _pair_distances.append(pair_distances[0]) _z_coords.append((z_coords[(- 1)] + z_coords[1])) line_chart(ax=ax, xdata=_pair_distances, ydata=_z_coords, xlabel=xlabel, ylabel=ylabel, label=label, sort_by='y') if decorate: num = len(_z_coords) tb_idx = [0, int((num / 2)), (num - 1)] bulk_pair_distance = pair_distances[int((num / 4))] for idx in tb_idx: ax.hlines(_z_coords[idx], xmin=(- 1), xmax=(bulk_pair_distance + 2), linestyle='--', linewidth=1.5)
def single_gpu_test(model, data_loader): ' Test model with single GPU, used for visualization.\n\n Args:\n model (nn.Module): Model to be tested.\n data_loader (nn.Dataloader): Pytorch data loader.\n\n Returns:\n dict: test results\n ' model.eval() results = dict() results['texts'] = [] results['img_info'] = [] results['glimpses'] = [] results['scores'] = [] dataset = data_loader.dataset prog_bar = mmcv.ProgressBar(len(dataset)) for (i, data) in enumerate(data_loader): with torch.no_grad(): result = model(return_loss=False, rescale=True, **data) texts = result['text'] glimpses = result['glimpses'] glimpses = glimpses.cpu().numpy() img_infos = result['img_info'] scores = result['scores'] scores = scores.cpu().numpy() scores = scores.reshape((- 1)) batch_size = len(texts) results['texts'].extend(texts) results['img_info'].extend(img_infos) results['glimpses'].extend(glimpses) results['scores'].extend(scores) for _ in range(batch_size): prog_bar.update() new_glimpse = np.stack(results['glimpses']) results['glimpses'] = new_glimpse return results
-7,651,348,466,465,535,000
Test model with single GPU, used for visualization. Args: model (nn.Module): Model to be tested. data_loader (nn.Dataloader): Pytorch data loader. Returns: dict: test results
davarocr/davarocr/davar_videotext/apis/test.py
single_gpu_test
hikopensource/DAVAR-Lab-OCR
python
def single_gpu_test(model, data_loader): model.eval() results = dict() results['texts'] = [] results['img_info'] = [] results['glimpses'] = [] results['scores'] = [] dataset = data_loader.dataset prog_bar = mmcv.ProgressBar(len(dataset)) for (i, data) in enumerate(data_loader): with torch.no_grad(): result = model(return_loss=False, rescale=True, **data) texts = result['text'] glimpses = result['glimpses'] glimpses = glimpses.cpu().numpy() img_infos = result['img_info'] scores = result['scores'] scores = scores.cpu().numpy() scores = scores.reshape((- 1)) batch_size = len(texts) results['texts'].extend(texts) results['img_info'].extend(img_infos) results['glimpses'].extend(glimpses) results['scores'].extend(scores) for _ in range(batch_size): prog_bar.update() new_glimpse = np.stack(results['glimpses']) results['glimpses'] = new_glimpse return results
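A hedged driving sketch in the usual mmcv style; it assumes a model and data loader were already built by the caller:

from mmcv.parallel import MMDataParallel

# 'model' and 'data_loader' are assumed to exist already.
model = MMDataParallel(model, device_ids=[0])
results = single_gpu_test(model, data_loader)
print(len(results['texts']), results['glimpses'].shape)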
def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', suffix='', autoscale=True): "\n Calculates the omni-directional pitch angle distribution (summed and averaged)\n from the individual tplot variables\n \n Parameters:\n probe: str \n probe, valid values for MMS probes are ['1','2','3','4']. \n\n level: str\n indicates level of data processing. the default if no level is specified is 'sitl'\n\n datatype: str\n Valid datatypes for FPI are:\n Quicklook: ['des', 'dis'] \n SITL: '' (none; loads both electron and ion data from single CDF)\n L1b/L2: ['des-dist', 'dis-dist', 'dis-moms', 'des-moms']\n\n data_rate: str\n instrument data rates for FPI include 'brst' and 'fast'. The\n default is 'fast'.\n\n suffix: str\n The tplot variable names will be given this suffix. By default, \n no suffix is added.\n\n autoscale: bool\n If set, use the default zrange; otherwise, use the min and max of the data for the zrange\n\n Returns:\n List of tplot variables created.\n\n " out_vars = [] if isinstance(datatype, str): if ((datatype == '*') or (datatype == '')): if (level.lower() == 'ql'): datatype = ['des', 'dis'] else: datatype = ['des-dist', 'dis-dist'] if isinstance(datatype, str): datatype = [datatype] for dtype in datatype: species = dtype[1] if (level.lower() == 'sitl'): spec_str_format = 'PitchAngDist' obs_str_format = ('_fpi_' + species) else: spec_str_format = 'pitchAngDist' obs_str_format = (('_d' + species) + 's_') obsstr = (('mms' + str(probe)) + obs_str_format) if (level.lower() == 'l2'): spec_str_format = 'pitchangdist' pad_vars = [((((((obsstr + spec_str_format) + '_') + erange) + 'en_') + data_rate) + suffix) for erange in ['low', 'mid', 'high']] else: pad_vars = [(((((obsstr + spec_str_format) + '_') + erange) + 'En') + suffix) for erange in ['low', 'mid', 'high']] pad_avg_name = ((obsstr + 'PitchAngDist_avg') + suffix) low_en = get_data(pad_vars[0]) mid_en = get_data(pad_vars[1]) high_en = get_data(pad_vars[2]) if ((low_en is None) or (mid_en is None) or (high_en is None)): v3_low_pad = tnames(((pad_vars[0].lower() + '_') + data_rate)) v3_mid_pad = tnames(((pad_vars[1].lower() + '_') + data_rate)) v3_high_pad = tnames(((pad_vars[2].lower() + '_') + data_rate)) if ((v3_low_pad == []) or (v3_mid_pad == []) or (v3_high_pad == [])): continue low_en = get_data(v3_low_pad[0]) mid_en = get_data(v3_mid_pad[0]) high_en = get_data(v3_high_pad[0]) pad_avg_name = pad_avg_name.lower() e_pad_sum = ((low_en.y + mid_en.y) + high_en.y) e_pad_avg = (e_pad_sum / 3.0) if (level == 'l2'): pad_avg_name = pad_avg_name.lower() if (species == 'e'): species_str = 'electron' elif (species == 'i'): species_str = 'ion' if (level == 'ql'): store_data(((obsstr + 'PitchAngDist_sum') + suffix), data={'x': low_en.times, 'y': e_pad_sum, 'v': low_en.v}) options(((obsstr + 'PitchAngDist_sum') + suffix), 'ytitle', (((('MMS' + str(probe)) + ' \\ ') + species_str) + ' \\ PAD \\ SUM')) options(((obsstr + 'PitchAngDist_sum') + suffix), 'yrange', [0, 180]) options(((obsstr + 'PitchAngDist_sum') + suffix), 'zlog', True) options(((obsstr + 'PitchAngDist_sum') + suffix), 'spec', True) options(((obsstr + 'PitchAngDist_sum') + suffix), 'Colormap', 'jet') out_vars.append(((obsstr + 'PitchAngDist_sum') + suffix)) store_data(pad_avg_name, data={'x': low_en.times, 'y': e_pad_avg, 'v': low_en.v}) options(pad_avg_name, 'ztitle', 'eV/(cm!U2!N s sr eV)') options(pad_avg_name, 'ytitle', (((('MMS' + str(probe)) + ' \\ ') + species_str) + ' \\ PAD \\ AVG')) options(pad_avg_name, 'yrange', [0, 180]) options(pad_avg_name, 'zlog', True) options(pad_avg_name, 'spec', True) options(pad_avg_name, 'Colormap', 'jet') out_vars.append(pad_avg_name) return out_vars
-5,691,318,386,164,602,000
Calculates the omni-directional pitch angle distribution (summed and averaged) from the individual tplot variables Parameters: probe: str probe, valid values for MMS probes are ['1','2','3','4']. level: str indicates level of data processing. the default if no level is specified is 'sitl' datatype: str Valid datatypes for FPI are: Quicklook: ['des', 'dis'] SITL: '' (none; loads both electron and ion data from single CDF) L1b/L2: ['des-dist', 'dis-dist', 'dis-moms', 'des-moms'] data_rate: str instrument data rates for FPI include 'brst' and 'fast'. The default is 'fast'. suffix: str The tplot variable names will be given this suffix. By default, no suffix is added. autoscale: bool If set, use the default zrange; otherwise, use the min and max of the data for the zrange Returns: List of tplot variables created.
pyspedas/mms/fpi/mms_load_fpi_calc_pad.py
mms_load_fpi_calc_pad
shihikoo/pyspedas
python
def mms_load_fpi_calc_pad(probe='1', level='sitl', datatype='', data_rate='', suffix='', autoscale=True): out_vars = [] if isinstance(datatype, str): if ((datatype == '*') or (datatype == '')): if (level.lower() == 'ql'): datatype = ['des', 'dis'] else: datatype = ['des-dist', 'dis-dist'] if isinstance(datatype, str): datatype = [datatype] for dtype in datatype: species = dtype[1] if (level.lower() == 'sitl'): spec_str_format = 'PitchAngDist' obs_str_format = ('_fpi_' + species) else: spec_str_format = 'pitchAngDist' obs_str_format = (('_d' + species) + 's_') obsstr = (('mms' + str(probe)) + obs_str_format) if (level.lower() == 'l2'): spec_str_format = 'pitchangdist' pad_vars = [((((((obsstr + spec_str_format) + '_') + erange) + 'en_') + data_rate) + suffix) for erange in ['low', 'mid', 'high']] else: pad_vars = [(((((obsstr + spec_str_format) + '_') + erange) + 'En') + suffix) for erange in ['low', 'mid', 'high']] pad_avg_name = ((obsstr + 'PitchAngDist_avg') + suffix) low_en = get_data(pad_vars[0]) mid_en = get_data(pad_vars[1]) high_en = get_data(pad_vars[2]) if ((low_en is None) or (mid_en is None) or (high_en is None)): v3_low_pad = tnames(((pad_vars[0].lower() + '_') + data_rate)) v3_mid_pad = tnames(((pad_vars[1].lower() + '_') + data_rate)) v3_high_pad = tnames(((pad_vars[2].lower() + '_') + data_rate)) if ((v3_low_pad == []) or (v3_mid_pad == []) or (v3_high_pad == [])): continue low_en = get_data(v3_low_pad[0]) mid_en = get_data(v3_mid_pad[0]) high_en = get_data(v3_high_pad[0]) pad_avg_name = pad_avg_name.lower() e_pad_sum = ((low_en.y + mid_en.y) + high_en.y) e_pad_avg = (e_pad_sum / 3.0) if (level == 'l2'): pad_avg_name = pad_avg_name.lower() if (species == 'e'): species_str = 'electron' elif (species == 'i'): species_str = 'ion' if (level == 'ql'): store_data(((obsstr + 'PitchAngDist_sum') + suffix), data={'x': low_en.times, 'y': e_pad_sum, 'v': low_en.v}) options(((obsstr + 'PitchAngDist_sum') + suffix), 'ytitle', (((('MMS' + str(probe)) + ' \\ ') + species_str) + ' \\ PAD \\ SUM')) options(((obsstr + 'PitchAngDist_sum') + suffix), 'yrange', [0, 180]) options(((obsstr + 'PitchAngDist_sum') + suffix), 'zlog', True) options(((obsstr + 'PitchAngDist_sum') + suffix), 'spec', True) options(((obsstr + 'PitchAngDist_sum') + suffix), 'Colormap', 'jet') out_vars.append(((obsstr + 'PitchAngDist_sum') + suffix)) store_data(pad_avg_name, data={'x': low_en.times, 'y': e_pad_avg, 'v': low_en.v}) options(pad_avg_name, 'ztitle', 'eV/(cm!U2!N s sr eV)') options(pad_avg_name, 'ytitle', (((('MMS' + str(probe)) + ' \\ ') + species_str) + ' \\ PAD \\ AVG')) options(pad_avg_name, 'yrange', [0, 180]) options(pad_avg_name, 'zlog', True) options(pad_avg_name, 'spec', True) options(pad_avg_name, 'Colormap', 'jet') out_vars.append(pad_avg_name) return out_vars
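A hedged usage sketch; it assumes the FPI tplot variables were loaded first (e.g. via pyspedas.mms.fpi), and the time range and probe are arbitrary:

import pyspedas

pyspedas.mms.fpi(trange=['2015-10-16', '2015-10-17'], probe='1',
                 data_rate='fast', level='l2', datatype='des-dist')
new_vars = mms_load_fpi_calc_pad(probe='1', level='l2',
                                 datatype='des-dist', data_rate='fast')
print(new_vars)   # e.g. the *_pitchangdist_avg variable names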
def read_text_file(filename, encoding='utf-8'): '\n Reads a file under python3 with encoding (default UTF-8).\n Also works under python2, without encoding.\n Uses the EAFP (https://docs.python.org/2/glossary.html#term-eafp)\n principle.\n ' try: with open(filename, 'r', encoding) as f: r = f.read() except TypeError: with open(filename, 'r') as f: r = f.read() return r
6,698,377,301,607,065,000
Reads a file under python3 with encoding (default UTF-8). Also works under python2, without encoding. Uses the EAFP (https://docs.python.org/2/glossary.html#term-eafp) principle.
Corrfunc/__init__.py
read_text_file
dfm/suave
python
def read_text_file(filename, encoding='utf-8'): try: with open(filename, 'r', encoding=encoding) as f: r = f.read() except TypeError: with open(filename, 'r') as f: r = f.read() return r
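Usage is straightforward; the TypeError fallback exists for Python 2, where open() has no encoding parameter (file names here are hypothetical):

text = read_text_file('README.rst')                    # UTF-8 by default
legacy = read_text_file('legacy.txt', encoding='latin-1')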
def write_text_file(filename, contents, encoding='utf-8'): '\n Writes a file under python3 with encoding (default UTF-8).\n Also works under python2, without encoding.\n Uses the EAFP (https://docs.python.org/2/glossary.html#term-eafp)\n principle.\n ' try: with open(filename, 'w', encoding) as f: f.write(contents) except TypeError: with open(filename, 'w') as f: f.write(contents)
-7,734,683,783,031,064,000
Writes a file under python3 with encoding (default UTF-8). Also works under python2, without encoding. Uses the EAFP (https://docs.python.org/2/glossary.html#term-eafp) principle.
Corrfunc/__init__.py
write_text_file
dfm/suave
python
def write_text_file(filename, contents, encoding='utf-8'): try: with open(filename, 'w', encoding=encoding) as f: f.write(contents) except TypeError: with open(filename, 'w') as f: f.write(contents)
def which(program, mode=(os.F_OK | os.X_OK), path=None): '\n Mimics the Unix utility which.\n For python3.3+, shutil.which provides all of the required functionality.\n An implementation is provided in case shutil.which does\n not exist.\n\n :param program: (required) string\n Name of program (can be fully-qualified path as well)\n :param mode: (optional) integer flag bits\n Permissions to check for in the executable\n Default: os.F_OK (file exists) | os.X_OK (executable file)\n :param path: (optional) string\n A custom path list to check against. Implementation taken from\n shutil.py.\n\n Returns:\n A fully qualified path to program as resolved by path or\n user environment.\n Returns None when program can not be resolved.\n ' try: from shutil import which as shwhich return shwhich(program, mode, path) except ImportError: def is_exe(fpath): return (os.path.isfile(fpath) and os.access(fpath, os.X_OK)) (fpath, _) = os.path.split(program) if fpath: if is_exe(program): return program else: if (path is None): path = os.environ.get('PATH', os.defpath) if (not path): return None path = path.split(os.pathsep) for pathdir in path: pathdir = pathdir.strip('"') exe_file = os.path.join(pathdir, program) if is_exe(exe_file): return exe_file return None
5,508,605,140,352,203,000
Mimics the Unix utility which. For python3.3+, shutil.which provides all of the required functionality. An implementation is provided in case shutil.which does not exist. :param program: (required) string Name of program (can be fully-qualified path as well) :param mode: (optional) integer flag bits Permissions to check for in the executable Default: os.F_OK (file exists) | os.X_OK (executable file) :param path: (optional) string A custom path list to check against. Implementation taken from shutil.py. Returns: A fully qualified path to program as resolved by path or user environment. Returns None when program can not be resolved.
Corrfunc/__init__.py
which
dfm/suave
python
def which(program, mode=(os.F_OK | os.X_OK), path=None): try: from shutil import which as shwhich return shwhich(program, mode, path) except ImportError: def is_exe(fpath): return (os.path.isfile(fpath) and os.access(fpath, os.X_OK)) (fpath, _) = os.path.split(program) if fpath: if is_exe(program): return program else: if (path is None): path = os.environ.get('PATH', os.defpath) if (not path): return None path = path.split(os.pathsep) for pathdir in path: pathdir = pathdir.strip('"') exe_file = os.path.join(pathdir, program) if is_exe(exe_file): return exe_file return None
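A small sketch; results depend entirely on the caller's environment:

print(which('python'))                       # e.g. '/usr/bin/python', or None
print(which('no-such-program'))              # None
print(which('ls', path='/bin:/usr/bin'))     # restrict the search path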
def removeElements(self, head, val): '\n :type head: ListNode\n :type val: int\n :rtype: ListNode\n ' while head: if (head.val == val): head = head.next else: break if (not head): return head cur = head pre = cur while cur: if (cur.val != val): pre = cur cur = cur.next else: cur = cur.next pre.next = cur return head
-4,401,839,335,475,699,000
:type head: ListNode :type val: int :rtype: ListNode
src/main/python/leetcode-python/easy/203.Remove Linked List Elements.py
removeElements
sonymoon/algorithm
python
def removeElements(self, head, val): while head: if (head.val == val): head = head.next else: break if (not head): return head cur = head pre = cur while cur: if (cur.val != val): pre = cur cur = cur.next else: cur = cur.next pre.next = cur return head
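The two-phase approach (advance head past leading matches, then splice with a trailing pointer) can be exercised as below; ListNode is redefined so the sketch is self-contained, and Solution is assumed to be the usual LeetCode wrapper class:

class ListNode(object):
    def __init__(self, val):
        self.val = val
        self.next = None

def build(vals):
    # Build a singly linked list from a Python list.
    dummy = ListNode(0)
    cur = dummy
    for v in vals:
        cur.next = ListNode(v)
        cur = cur.next
    return dummy.next

node = Solution().removeElements(build([1, 2, 6, 3, 4, 5, 6]), 6)
out = []
while node:
    out.append(node.val)
    node = node.next
print(out)   # [1, 2, 3, 4, 5]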
@property def ConfigFlags(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['ConfigFlags'])
1,462,245,169,808,264,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
ConfigFlags
Vibaswan/ixnetwork_restpy
python
@property def ConfigFlags(self): return self._get_attribute(self._SDM_ATT_MAP['ConfigFlags'])
@property def DataPathId(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['DataPathId'])
8,598,606,963,486,531,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
DataPathId
Vibaswan/ixnetwork_restpy
python
@property def DataPathId(self): return self._get_attribute(self._SDM_ATT_MAP['DataPathId'])
@property def DataPathIdAsHex(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['DataPathIdAsHex'])
804,803,447,564,948,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
DataPathIdAsHex
Vibaswan/ixnetwork_restpy
python
@property def DataPathIdAsHex(self): return self._get_attribute(self._SDM_ATT_MAP['DataPathIdAsHex'])
@property def ErrorCode(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['ErrorCode'])
5,799,400,849,845,319,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
ErrorCode
Vibaswan/ixnetwork_restpy
python
@property def ErrorCode(self): return self._get_attribute(self._SDM_ATT_MAP['ErrorCode'])
@property def ErrorType(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['ErrorType'])
-7,110,418,267,919,914,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
ErrorType
Vibaswan/ixnetwork_restpy
python
@property def ErrorType(self): return self._get_attribute(self._SDM_ATT_MAP['ErrorType'])
@property def Latency(self): '\n Returns\n -------\n - number: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['Latency'])
-4,419,917,132,471,396,000
Returns ------- - number: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
Latency
Vibaswan/ixnetwork_restpy
python
@property def Latency(self): return self._get_attribute(self._SDM_ATT_MAP['Latency'])
@property def LocalIp(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['LocalIp'])
-5,974,784,433,635,664,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
LocalIp
Vibaswan/ixnetwork_restpy
python
@property def LocalIp(self): return self._get_attribute(self._SDM_ATT_MAP['LocalIp'])
@property def MissSendLength(self): '\n Returns\n -------\n - number: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['MissSendLength'])
1,972,557,766,165,532,200
Returns ------- - number: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
MissSendLength
Vibaswan/ixnetwork_restpy
python
@property def MissSendLength(self): return self._get_attribute(self._SDM_ATT_MAP['MissSendLength'])
@property def NegotiatedVersion(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['NegotiatedVersion'])
-1,810,185,652,348,757,800
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
NegotiatedVersion
Vibaswan/ixnetwork_restpy
python
@property def NegotiatedVersion(self): return self._get_attribute(self._SDM_ATT_MAP['NegotiatedVersion'])
@property def RemoteIp(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['RemoteIp'])
-8,948,208,293,276,165,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
RemoteIp
Vibaswan/ixnetwork_restpy
python
@property def RemoteIp(self): return self._get_attribute(self._SDM_ATT_MAP['RemoteIp'])
@property def ReplyState(self): '\n Returns\n -------\n - str: NOT DEFINED\n ' return self._get_attribute(self._SDM_ATT_MAP['ReplyState'])
-4,484,793,304,505,861,000
Returns ------- - str: NOT DEFINED
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
ReplyState
Vibaswan/ixnetwork_restpy
python
@property def ReplyState(self): return self._get_attribute(self._SDM_ATT_MAP['ReplyState'])
def find(self, ConfigFlags=None, DataPathId=None, DataPathIdAsHex=None, ErrorCode=None, ErrorType=None, Latency=None, LocalIp=None, MissSendLength=None, NegotiatedVersion=None, RemoteIp=None, ReplyState=None): 'Finds and retrieves switchConfigLearnedInformation resources from the server.\n\n All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve switchConfigLearnedInformation resources from the server.\n To retrieve an exact match ensure the parameter value starts with ^ and ends with $\n By default the find method takes no parameters and will retrieve all switchConfigLearnedInformation resources from the server.\n\n Args\n ----\n - ConfigFlags (str): NOT DEFINED\n - DataPathId (str): NOT DEFINED\n - DataPathIdAsHex (str): NOT DEFINED\n - ErrorCode (str): NOT DEFINED\n - ErrorType (str): NOT DEFINED\n - Latency (number): NOT DEFINED\n - LocalIp (str): NOT DEFINED\n - MissSendLength (number): NOT DEFINED\n - NegotiatedVersion (str): NOT DEFINED\n - RemoteIp (str): NOT DEFINED\n - ReplyState (str): NOT DEFINED\n\n Returns\n -------\n - self: This instance with matching switchConfigLearnedInformation resources retrieved from the server available through an iterator or index\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n ' return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
2,930,267,066,556,517,400
Finds and retrieves switchConfigLearnedInformation resources from the server. All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve switchConfigLearnedInformation resources from the server. To retrieve an exact match ensure the parameter value starts with ^ and ends with $ By default the find method takes no parameters and will retrieve all switchConfigLearnedInformation resources from the server. Args ---- - ConfigFlags (str): NOT DEFINED - DataPathId (str): NOT DEFINED - DataPathIdAsHex (str): NOT DEFINED - ErrorCode (str): NOT DEFINED - ErrorType (str): NOT DEFINED - Latency (number): NOT DEFINED - LocalIp (str): NOT DEFINED - MissSendLength (number): NOT DEFINED - NegotiatedVersion (str): NOT DEFINED - RemoteIp (str): NOT DEFINED - ReplyState (str): NOT DEFINED Returns ------- - self: This instance with matching switchConfigLearnedInformation resources retrieved from the server available through an iterator or index Raises ------ - ServerError: The server has encountered an uncategorized error condition
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
find
Vibaswan/ixnetwork_restpy
python
def find(self, ConfigFlags=None, DataPathId=None, DataPathIdAsHex=None, ErrorCode=None, ErrorType=None, Latency=None, LocalIp=None, MissSendLength=None, NegotiatedVersion=None, RemoteIp=None, ReplyState=None): return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
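A hedged sketch of the anchored-regex semantics described in the docstring; it assumes learned_info is an already-retrieved switchConfigLearnedInformation collection:

# '^...$' anchors make the server-side regex an exact match.
matches = learned_info.find(LocalIp='^10\\.0\\.0\\.1$')
for item in matches:
    print(item.DataPathId, item.ReplyState)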
def read(self, href): 'Retrieves a single instance of switchConfigLearnedInformation data from the server.\n\n Args\n ----\n - href (str): An href to the instance to be retrieved\n\n Returns\n -------\n - self: This instance with the switchConfigLearnedInformation resources from the server available through an iterator or index\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n ' return self._read(href)
469,075,206,055,040,000
Retrieves a single instance of switchConfigLearnedInformation data from the server. Args ---- - href (str): An href to the instance to be retrieved Returns ------- - self: This instance with the switchConfigLearnedInformation resources from the server available through an iterator or index Raises ------ - NotFoundError: The requested resource does not exist on the server - ServerError: The server has encountered an uncategorized error condition
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/switchconfiglearnedinformation_e983ca5da0eadbba93d1ce1d2903a5b7.py
read
Vibaswan/ixnetwork_restpy
python
def read(self, href): return self._read(href)
@pytest.fixture(name='light') async def light_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_light: Light): 'Fixture for a single light for testing the switch platform.' Light.__config__.validate_assignment = False light_obj = mock_light.copy(deep=True) light_obj._api = mock_entry.api light_obj.name = 'Test Light' light_obj.is_ssh_enabled = False light_obj.light_device_settings.is_indicator_enabled = False mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.lights = {light_obj.id: light_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 2, 1) (yield light_obj) Light.__config__.validate_assignment = True
8,335,336,190,965,998,000
Fixture for a single light for testing the switch platform.
tests/components/unifiprotect/test_switch.py
light_fixture
LW-Ho/home-assistant
python
@pytest.fixture(name='light') async def light_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_light: Light): Light.__config__.validate_assignment = False light_obj = mock_light.copy(deep=True) light_obj._api = mock_entry.api light_obj.name = 'Test Light' light_obj.is_ssh_enabled = False light_obj.light_device_settings.is_indicator_enabled = False mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.lights = {light_obj.id: light_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 2, 1) (yield light_obj) Light.__config__.validate_assignment = True
@pytest.fixture(name='camera') async def camera_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera): 'Fixture for a single camera for testing the switch platform.' Camera.__config__.validate_assignment = False camera_obj = mock_camera.copy(deep=True) camera_obj._api = mock_entry.api camera_obj.channels[0]._api = mock_entry.api camera_obj.channels[1]._api = mock_entry.api camera_obj.channels[2]._api = mock_entry.api camera_obj.name = 'Test Camera' camera_obj.recording_settings.mode = RecordingMode.DETECTIONS camera_obj.feature_flags.has_led_status = True camera_obj.feature_flags.has_hdr = True camera_obj.feature_flags.video_modes = [VideoMode.DEFAULT, VideoMode.HIGH_FPS] camera_obj.feature_flags.has_privacy_mask = True camera_obj.feature_flags.has_speaker = True camera_obj.feature_flags.has_smart_detect = True camera_obj.feature_flags.smart_detect_types = [SmartDetectObjectType.PERSON, SmartDetectObjectType.VEHICLE] camera_obj.is_ssh_enabled = False camera_obj.led_settings.is_enabled = False camera_obj.hdr_mode = False camera_obj.video_mode = VideoMode.DEFAULT camera_obj.remove_privacy_zone() camera_obj.speaker_settings.are_system_sounds_enabled = False camera_obj.osd_settings.is_name_enabled = False camera_obj.osd_settings.is_date_enabled = False camera_obj.osd_settings.is_logo_enabled = False camera_obj.osd_settings.is_debug_enabled = False camera_obj.smart_detect_settings.object_types = [] mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.cameras = {camera_obj.id: camera_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 12, 11) (yield camera_obj) Camera.__config__.validate_assignment = True
-5,056,614,102,248,992,000
Fixture for a single camera for testing the switch platform.
tests/components/unifiprotect/test_switch.py
camera_fixture
LW-Ho/home-assistant
python
@pytest.fixture(name='camera') async def camera_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera): Camera.__config__.validate_assignment = False camera_obj = mock_camera.copy(deep=True) camera_obj._api = mock_entry.api camera_obj.channels[0]._api = mock_entry.api camera_obj.channels[1]._api = mock_entry.api camera_obj.channels[2]._api = mock_entry.api camera_obj.name = 'Test Camera' camera_obj.recording_settings.mode = RecordingMode.DETECTIONS camera_obj.feature_flags.has_led_status = True camera_obj.feature_flags.has_hdr = True camera_obj.feature_flags.video_modes = [VideoMode.DEFAULT, VideoMode.HIGH_FPS] camera_obj.feature_flags.has_privacy_mask = True camera_obj.feature_flags.has_speaker = True camera_obj.feature_flags.has_smart_detect = True camera_obj.feature_flags.smart_detect_types = [SmartDetectObjectType.PERSON, SmartDetectObjectType.VEHICLE] camera_obj.is_ssh_enabled = False camera_obj.led_settings.is_enabled = False camera_obj.hdr_mode = False camera_obj.video_mode = VideoMode.DEFAULT camera_obj.remove_privacy_zone() camera_obj.speaker_settings.are_system_sounds_enabled = False camera_obj.osd_settings.is_name_enabled = False camera_obj.osd_settings.is_date_enabled = False camera_obj.osd_settings.is_logo_enabled = False camera_obj.osd_settings.is_debug_enabled = False camera_obj.smart_detect_settings.object_types = [] mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.cameras = {camera_obj.id: camera_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 12, 11) (yield camera_obj) Camera.__config__.validate_assignment = True
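A hedged sketch of a test consuming one of these fixtures; the entity id is a guess derived from the device name via Home Assistant's usual slug rules:

async def test_camera_has_hdr_switch(hass, camera):
    # Hypothetical assertion; entity id inferred from name='Test Camera'.
    state = hass.states.get('switch.test_camera_hdr_mode')
    assert state is not None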
@pytest.fixture(name='camera_none') async def camera_none_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera): 'Fixture for a single camera for testing the switch platform.' Camera.__config__.validate_assignment = False camera_obj = mock_camera.copy(deep=True) camera_obj._api = mock_entry.api camera_obj.channels[0]._api = mock_entry.api camera_obj.channels[1]._api = mock_entry.api camera_obj.channels[2]._api = mock_entry.api camera_obj.name = 'Test Camera' camera_obj.recording_settings.mode = RecordingMode.DETECTIONS camera_obj.feature_flags.has_led_status = False camera_obj.feature_flags.has_hdr = False camera_obj.feature_flags.video_modes = [VideoMode.DEFAULT] camera_obj.feature_flags.has_privacy_mask = False camera_obj.feature_flags.has_speaker = False camera_obj.feature_flags.has_smart_detect = False camera_obj.is_ssh_enabled = False camera_obj.osd_settings.is_name_enabled = False camera_obj.osd_settings.is_date_enabled = False camera_obj.osd_settings.is_logo_enabled = False camera_obj.osd_settings.is_debug_enabled = False mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.cameras = {camera_obj.id: camera_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 5, 4) (yield camera_obj) Camera.__config__.validate_assignment = True
-4,512,770,344,431,090,000
Fixture for a single camera for testing the switch platform.
tests/components/unifiprotect/test_switch.py
camera_none_fixture
LW-Ho/home-assistant
python
@pytest.fixture(name='camera_none') async def camera_none_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera): Camera.__config__.validate_assignment = False camera_obj = mock_camera.copy(deep=True) camera_obj._api = mock_entry.api camera_obj.channels[0]._api = mock_entry.api camera_obj.channels[1]._api = mock_entry.api camera_obj.channels[2]._api = mock_entry.api camera_obj.name = 'Test Camera' camera_obj.recording_settings.mode = RecordingMode.DETECTIONS camera_obj.feature_flags.has_led_status = False camera_obj.feature_flags.has_hdr = False camera_obj.feature_flags.video_modes = [VideoMode.DEFAULT] camera_obj.feature_flags.has_privacy_mask = False camera_obj.feature_flags.has_speaker = False camera_obj.feature_flags.has_smart_detect = False camera_obj.is_ssh_enabled = False camera_obj.osd_settings.is_name_enabled = False camera_obj.osd_settings.is_date_enabled = False camera_obj.osd_settings.is_logo_enabled = False camera_obj.osd_settings.is_debug_enabled = False mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.cameras = {camera_obj.id: camera_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 5, 4) (yield camera_obj) Camera.__config__.validate_assignment = True
@pytest.fixture(name='camera_privacy') async def camera_privacy_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera): 'Fixture for a single camera for testing the switch platform.' Camera.__config__.validate_assignment = False camera_obj = mock_camera.copy(deep=True) camera_obj._api = mock_entry.api camera_obj.channels[0]._api = mock_entry.api camera_obj.channels[1]._api = mock_entry.api camera_obj.channels[2]._api = mock_entry.api camera_obj.name = 'Test Camera' camera_obj.recording_settings.mode = RecordingMode.NEVER camera_obj.feature_flags.has_led_status = False camera_obj.feature_flags.has_hdr = False camera_obj.feature_flags.video_modes = [VideoMode.DEFAULT] camera_obj.feature_flags.has_privacy_mask = True camera_obj.feature_flags.has_speaker = False camera_obj.feature_flags.has_smart_detect = False camera_obj.add_privacy_zone() camera_obj.is_ssh_enabled = False camera_obj.osd_settings.is_name_enabled = False camera_obj.osd_settings.is_date_enabled = False camera_obj.osd_settings.is_logo_enabled = False camera_obj.osd_settings.is_debug_enabled = False mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.cameras = {camera_obj.id: camera_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 6, 5) (yield camera_obj) Camera.__config__.validate_assignment = True
-6,874,387,683,140,718,000
Fixture for a single camera for testing the switch platform.
tests/components/unifiprotect/test_switch.py
camera_privacy_fixture
LW-Ho/home-assistant
python
@pytest.fixture(name='camera_privacy') async def camera_privacy_fixture(hass: HomeAssistant, mock_entry: MockEntityFixture, mock_camera: Camera): Camera.__config__.validate_assignment = False camera_obj = mock_camera.copy(deep=True) camera_obj._api = mock_entry.api camera_obj.channels[0]._api = mock_entry.api camera_obj.channels[1]._api = mock_entry.api camera_obj.channels[2]._api = mock_entry.api camera_obj.name = 'Test Camera' camera_obj.recording_settings.mode = RecordingMode.NEVER camera_obj.feature_flags.has_led_status = False camera_obj.feature_flags.has_hdr = False camera_obj.feature_flags.video_modes = [VideoMode.DEFAULT] camera_obj.feature_flags.has_privacy_mask = True camera_obj.feature_flags.has_speaker = False camera_obj.feature_flags.has_smart_detect = False camera_obj.add_privacy_zone() camera_obj.is_ssh_enabled = False camera_obj.osd_settings.is_name_enabled = False camera_obj.osd_settings.is_date_enabled = False camera_obj.osd_settings.is_logo_enabled = False camera_obj.osd_settings.is_debug_enabled = False mock_entry.api.bootstrap.reset_objects() mock_entry.api.bootstrap.cameras = {camera_obj.id: camera_obj} (await hass.config_entries.async_setup(mock_entry.entry.entry_id)) (await hass.async_block_till_done()) assert_entity_counts(hass, Platform.SWITCH, 6, 5) (yield camera_obj) Camera.__config__.validate_assignment = True
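All three camera fixtures share the same async yield-fixture shape: mutate a deep copy, register it with the mocked bootstrap, set up the config entry, yield to the test, then undo the global change. A minimal sketch of that shape (assuming pytest-asyncio or an equivalent async plugin, as Home Assistant's test suite provides):

import pytest

VALIDATE = True  # stands in for Camera.__config__.validate_assignment

@pytest.fixture(name='device')
async def device_fixture():
    global VALIDATE
    VALIDATE = False                   # setup: relax validation for the test
    device = {'name': 'Test Device'}   # stand-in for the copied Camera mock
    yield device                       # the test body runs while suspended here
    VALIDATE = True                    # teardown runs even if the test fails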
async def test_switch_setup_light(hass: HomeAssistant, mock_entry: MockEntityFixture, light: Light): 'Test switch entity setup for light devices.' entity_registry = er.async_get(hass) description = LIGHT_SWITCHES[1] (unique_id, entity_id) = ids_from_device_description(Platform.SWITCH, light, description) entity = entity_registry.async_get(entity_id) assert entity assert (entity.unique_id == unique_id) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION) description = LIGHT_SWITCHES[0] unique_id = f'{light.id}_{description.key}' entity_id = f"switch.test_light_{description.name.lower().replace(' ', '_')}" entity = entity_registry.async_get(entity_id) assert entity assert (entity.disabled is True) assert (entity.unique_id == unique_id) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION)
745,013,318,945,976,300
Test switch entity setup for light devices.
tests/components/unifiprotect/test_switch.py
test_switch_setup_light
LW-Ho/home-assistant
python
async def test_switch_setup_light(hass: HomeAssistant, mock_entry: MockEntityFixture, light: Light): entity_registry = er.async_get(hass) description = LIGHT_SWITCHES[1] (unique_id, entity_id) = ids_from_device_description(Platform.SWITCH, light, description) entity = entity_registry.async_get(entity_id) assert entity assert (entity.unique_id == unique_id) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION) description = LIGHT_SWITCHES[0] unique_id = f'{light.id}_{description.key}' entity_id = f"switch.test_light_{description.name.lower().replace(' ', '_')}" entity = entity_registry.async_get(entity_id) assert entity assert (entity.disabled is True) assert (entity.unique_id == unique_id) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION)
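The second half of the test relies on an enable_entity helper defined elsewhere in the integration's test package. A hedged approximation of what such a helper typically does, using only public Home Assistant test APIs (this is a sketch, not the helper's actual code):

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

async def enable_entity(hass: HomeAssistant, entry_id: str, entity_id: str) -> None:
    # Clear the disabled_by flag in the entity registry...
    registry = er.async_get(hass)
    registry.async_update_entity(entity_id, disabled_by=None)
    # ...then reload the config entry so the platform actually creates
    # the newly enabled entity.
    await hass.config_entries.async_reload(entry_id)
    await hass.async_block_till_done()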
async def test_switch_setup_camera_all(hass: HomeAssistant, mock_entry: MockEntityFixture, camera: Camera): 'Test switch entity setup for camera devices (all enabled feature flags).' entity_registry = er.async_get(hass) for description in CAMERA_SWITCHES_BASIC: (unique_id, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) entity = entity_registry.async_get(entity_id) assert entity assert (entity.unique_id == unique_id) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION) description = CAMERA_SWITCHES[0] description_entity_name = description.name.lower().replace(':', '').replace(' ', '_') unique_id = f'{camera.id}_{description.key}' entity_id = f'switch.test_camera_{description_entity_name}' entity = entity_registry.async_get(entity_id) assert entity assert (entity.disabled is True) assert (entity.unique_id == unique_id) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION)
-5,643,885,727,098,207,000
Test switch entity setup for camera devices (all enabled feature flags).
tests/components/unifiprotect/test_switch.py
test_switch_setup_camera_all
LW-Ho/home-assistant
python
async def test_switch_setup_camera_all(hass: HomeAssistant, mock_entry: MockEntityFixture, camera: Camera): entity_registry = er.async_get(hass) for description in CAMERA_SWITCHES_BASIC: (unique_id, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) entity = entity_registry.async_get(entity_id) assert entity assert (entity.unique_id == unique_id) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION) description = CAMERA_SWITCHES[0] description_entity_name = description.name.lower().replace(':', '').replace(' ', '_') unique_id = f'{camera.id}_{description.key}' entity_id = f'switch.test_camera_{description_entity_name}' entity = entity_registry.async_get(entity_id) assert entity assert (entity.disabled is True) assert (entity.unique_id == unique_id) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION)
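Both camera setup tests derive the entity id by lower-casing the switch description's name, stripping ':' and replacing spaces with underscores. A tiny sketch of that construction (the description name below is an example value, not a real CAMERA_SWITCHES entry):

def switch_entity_id(device_name: str, description_name: str) -> str:
    # Mirrors the string munging in the tests above.
    slug = description_name.lower().replace(':', '').replace(' ', '_')
    return f"switch.{device_name.lower().replace(' ', '_')}_{slug}"

assert switch_entity_id('Test Camera', 'Detections: Person') == 'switch.test_camera_detections_person'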
async def test_switch_setup_camera_none(hass: HomeAssistant, mock_entry: MockEntityFixture, camera_none: Camera): 'Test switch entity setup for camera devices (no enabled feature flags).' entity_registry = er.async_get(hass) for description in CAMERA_SWITCHES_BASIC: if (description.ufp_required_field is not None): continue (unique_id, entity_id) = ids_from_device_description(Platform.SWITCH, camera_none, description) entity = entity_registry.async_get(entity_id) assert entity assert (entity.unique_id == unique_id) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION) description = CAMERA_SWITCHES[0] description_entity_name = description.name.lower().replace(':', '').replace(' ', '_') unique_id = f'{camera_none.id}_{description.key}' entity_id = f'switch.test_camera_{description_entity_name}' entity = entity_registry.async_get(entity_id) assert entity assert (entity.disabled is True) assert (entity.unique_id == unique_id) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION)
2,929,762,239,909,379,600
Test switch entity setup for camera devices (no enabled feature flags).
tests/components/unifiprotect/test_switch.py
test_switch_setup_camera_none
LW-Ho/home-assistant
python
async def test_switch_setup_camera_none(hass: HomeAssistant, mock_entry: MockEntityFixture, camera_none: Camera): entity_registry = er.async_get(hass) for description in CAMERA_SWITCHES_BASIC: if (description.ufp_required_field is not None): continue (unique_id, entity_id) = ids_from_device_description(Platform.SWITCH, camera_none, description) entity = entity_registry.async_get(entity_id) assert entity assert (entity.unique_id == unique_id) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION) description = CAMERA_SWITCHES[0] description_entity_name = description.name.lower().replace(':', '').replace(' ', '_') unique_id = f'{camera_none.id}_{description.key}' entity_id = f'switch.test_camera_{description_entity_name}' entity = entity_registry.async_get(entity_id) assert entity assert (entity.disabled is True) assert (entity.unique_id == unique_id) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) state = hass.states.get(entity_id) assert state assert (state.state == STATE_OFF) assert (state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION)
async def test_switch_light_status(hass: HomeAssistant, light: Light): 'Tests status light switch for lights.' description = LIGHT_SWITCHES[1] light.__fields__['set_status_light'] = Mock() light.set_status_light = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, light, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) light.set_status_light.assert_called_once_with(True) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) light.set_status_light.assert_called_with(False)
-7,921,302,965,398,987,000
Tests status light switch for lights.
tests/components/unifiprotect/test_switch.py
test_switch_light_status
LW-Ho/home-assistant
python
async def test_switch_light_status(hass: HomeAssistant, light: Light): description = LIGHT_SWITCHES[1] light.__fields__['set_status_light'] = Mock() light.set_status_light = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, light, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) light.set_status_light.assert_called_once_with(True) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) light.set_status_light.assert_called_with(False)
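Stubbing a method on a pydantic v1 model takes two steps in these tests: register the attribute name in __fields__ so the model's __setattr__ accepts it, then assign an AsyncMock. A self-contained sketch of the pattern (the Light model here is a stand-in, not the real pyunifiprotect class):

import asyncio
from unittest.mock import AsyncMock, Mock
from pydantic import BaseModel

class Light(BaseModel):
    id: str

light = Light(id='abc')

# Without the fake field entry, pydantic v1 rejects the assignment with
# '"Light" object has no field "set_status_light"'.
light.__fields__['set_status_light'] = Mock()
light.set_status_light = AsyncMock()

# The stub records awaited calls exactly as the assertions above expect.
asyncio.run(light.set_status_light(True))
light.set_status_light.assert_called_once_with(True)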
async def test_switch_camera_ssh(hass: HomeAssistant, camera: Camera, mock_entry: MockEntityFixture): 'Tests SSH switch for cameras.' description = CAMERA_SWITCHES[0] camera.__fields__['set_ssh'] = Mock() camera.set_ssh = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_ssh.assert_called_once_with(True) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_ssh.assert_called_with(False)
-5,878,164,364,571,807,000
Tests SSH switch for cameras.
tests/components/unifiprotect/test_switch.py
test_switch_camera_ssh
LW-Ho/home-assistant
python
async def test_switch_camera_ssh(hass: HomeAssistant, camera: Camera, mock_entry: MockEntityFixture): description = CAMERA_SWITCHES[0] camera.__fields__['set_ssh'] = Mock() camera.set_ssh = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await enable_entity(hass, mock_entry.entry.entry_id, entity_id)) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_ssh.assert_called_once_with(True) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_ssh.assert_called_with(False)
@pytest.mark.parametrize('description', CAMERA_SWITCHES_NO_EXTRA) async def test_switch_camera_simple(hass: HomeAssistant, camera: Camera, description: ProtectSwitchEntityDescription): 'Tests all simple switches for cameras.' assert (description.ufp_set_method is not None) camera.__fields__[description.ufp_set_method] = Mock() setattr(camera, description.ufp_set_method, AsyncMock()) set_method = getattr(camera, description.ufp_set_method) (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) set_method.assert_called_once_with(True) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) set_method.assert_called_with(False)
-7,129,835,431,222,116,000
Tests all simple switches for cameras.
tests/components/unifiprotect/test_switch.py
test_switch_camera_simple
LW-Ho/home-assistant
python
@pytest.mark.parametrize('description', CAMERA_SWITCHES_NO_EXTRA) async def test_switch_camera_simple(hass: HomeAssistant, camera: Camera, description: ProtectSwitchEntityDescription): assert (description.ufp_set_method is not None) camera.__fields__[description.ufp_set_method] = Mock() setattr(camera, description.ufp_set_method, AsyncMock()) set_method = getattr(camera, description.ufp_set_method) (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) set_method.assert_called_once_with(True) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) set_method.assert_called_with(False)
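pytest.mark.parametrize expands the single test body above into one collected test item per switch description. A minimal sketch of the mechanism with illustrative values (not the real CAMERA_SWITCHES_NO_EXTRA):

import pytest

KEYS = ['ssh', 'status_light', 'hdr_mode']  # illustrative keys only

@pytest.mark.parametrize('key', KEYS)
def test_per_key(key: str) -> None:
    # Collected as test_per_key[ssh], test_per_key[status_light], ...
    assert key in KEYS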
async def test_switch_camera_highfps(hass: HomeAssistant, camera: Camera): 'Tests High FPS switch for cameras.' description = CAMERA_SWITCHES[3] camera.__fields__['set_video_mode'] = Mock() camera.set_video_mode = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_video_mode.assert_called_once_with(VideoMode.HIGH_FPS) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_video_mode.assert_called_with(VideoMode.DEFAULT)
-8,116,559,982,682,837,000
Tests High FPS switch for cameras.
tests/components/unifiprotect/test_switch.py
test_switch_camera_highfps
LW-Ho/home-assistant
python
async def test_switch_camera_highfps(hass: HomeAssistant, camera: Camera): description = CAMERA_SWITCHES[3] camera.__fields__['set_video_mode'] = Mock() camera.set_video_mode = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_video_mode.assert_called_once_with(VideoMode.HIGH_FPS) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_video_mode.assert_called_with(VideoMode.DEFAULT)
async def test_switch_camera_privacy(hass: HomeAssistant, camera: Camera): 'Tests Privacy Mode switch for cameras.' description = CAMERA_SWITCHES[4] camera.__fields__['set_privacy'] = Mock() camera.set_privacy = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_privacy.assert_called_once_with(True, 0, RecordingMode.NEVER) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_privacy.assert_called_with(False, camera.mic_volume, camera.recording_settings.mode)
-806,886,069,751,213,200
Tests Privacy Mode switch for cameras.
tests/components/unifiprotect/test_switch.py
test_switch_camera_privacy
LW-Ho/home-assistant
python
async def test_switch_camera_privacy(hass: HomeAssistant, camera: Camera): description = CAMERA_SWITCHES[4] camera.__fields__['set_privacy'] = Mock() camera.set_privacy = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera, description) (await hass.services.async_call('switch', 'turn_on', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_privacy.assert_called_once_with(True, 0, RecordingMode.NEVER) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera.set_privacy.assert_called_with(False, camera.mic_volume, camera.recording_settings.mode)
async def test_switch_camera_privacy_already_on(hass: HomeAssistant, camera_privacy: Camera): 'Tests Privacy Mode switch for cameras with privacy mode defaulted on.' description = CAMERA_SWITCHES[4] camera_privacy.__fields__['set_privacy'] = Mock() camera_privacy.set_privacy = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera_privacy, description) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera_privacy.set_privacy.assert_called_once_with(False, 100, RecordingMode.ALWAYS)
150,585,740,807,563,780
Tests Privacy Mode switch for cameras with privacy mode defaulted on.
tests/components/unifiprotect/test_switch.py
test_switch_camera_privacy_already_on
LW-Ho/home-assistant
python
async def test_switch_camera_privacy_already_on(hass: HomeAssistant, camera_privacy: Camera): description = CAMERA_SWITCHES[4] camera_privacy.__fields__['set_privacy'] = Mock() camera_privacy.set_privacy = AsyncMock() (_, entity_id) = ids_from_device_description(Platform.SWITCH, camera_privacy, description) (await hass.services.async_call('switch', 'turn_off', {ATTR_ENTITY_ID: entity_id}, blocking=True)) camera_privacy.set_privacy.assert_called_once_with(False, 100, RecordingMode.ALWAYS)
def __init__(self, svm_kernel='linear', svm_c=0.1, fs=250, bands=None, time_windows=None, riem_opt='Riemann', rho=0.1, filter_type='butter', filter_order=2, random_state=None): ' Constructor\n\n Parameters\n ----------\n\n svm_kernel: str {\'linear\', \'sigmoid\', \'rbf\'}\n kernel used for classifier\n\n svm_c: float\n regularization parameter for the classifier\n\n fs: int\n sampling rate of the data\n\n bands: list of int\n bandwidths used in filterbanks (default: [2, 4, 8, 16, 32])\n\n time_windows: list of list of ints, shape = (N, 2)\n time windows used, in seconds (default: [[2.5, 6]])\n\n riem_opt: str {"Riemann", "Riemann_Euclid", "Whitened_Euclid", "No_Adaptation"}\n type of riemannian used\n\n rho: float\n Normalization parameter for the covariance matrix of the riemannian\n\n filter_type: str {"butter", "fir"}\n Type of the filter\n\n filter_order: int\n Order of the filter\n\n random_state: int or None\n random seed used in the SVM\n ' if (svm_kernel == 'linear'): self.classifier = LinearSVC(C=svm_c, loss='hinge', random_state=random_state, tol=1e-05) else: self.classifier = SVC(C=svm_c, kernel=svm_kernel, degree=10, gamma='auto', cache_size=10000, random_state=random_state) if (bands is None): bandwidths = np.array([2, 4, 8, 16, 32]) else: bandwidths = np.array(bands) filter_bank = load_filterbank(bandwidths, fs, order=filter_order, max_freq=40, ftype=filter_type) if (time_windows is None): time_windows = (np.array([[2.5, 6]]) * fs).astype(int) else: time_windows = (np.array(time_windows) * fs).astype(int) self.riemannian = RiemannianMultiscale(filter_bank, time_windows, riem_opt=riem_opt, rho=rho, vectorized=True) self.no_bands = filter_bank.shape[0] self.no_time_windows = time_windows.shape[0] self.no_riem = None self.no_features = None
985,681,782,856,049,500
Constructor

Parameters
----------
svm_kernel: str {'linear', 'sigmoid', 'rbf'}
    kernel used for classifier

svm_c: float
    regularization parameter for the classifier

fs: int
    sampling rate of the data

bands: list of int
    bandwidths used in filterbanks (default: [2, 4, 8, 16, 32])

time_windows: list of list of ints, shape = (N, 2)
    time windows used, in seconds (default: [[2.5, 6]])

riem_opt: str {"Riemann", "Riemann_Euclid", "Whitened_Euclid", "No_Adaptation"}
    type of riemannian used

rho: float
    Normalization parameter for the covariance matrix of the riemannian

filter_type: str {"butter", "fir"}
    Type of the filter

filter_order: int
    Order of the filter

random_state: int or None
    random seed used in the SVM
multiscale_bci_python/riemannian_model.py
__init__
pulp-platform/multispectral-riemannian
python
def __init__(self, svm_kernel='linear', svm_c=0.1, fs=250, bands=None, time_windows=None, riem_opt='Riemann', rho=0.1, filter_type='butter', filter_order=2, random_state=None): if (svm_kernel == 'linear'): self.classifier = LinearSVC(C=svm_c, loss='hinge', random_state=random_state, tol=1e-05) else: self.classifier = SVC(C=svm_c, kernel=svm_kernel, degree=10, gamma='auto', cache_size=10000, random_state=random_state) if (bands is None): bandwidths = np.array([2, 4, 8, 16, 32]) else: bandwidths = np.array(bands) filter_bank = load_filterbank(bandwidths, fs, order=filter_order, max_freq=40, ftype=filter_type) if (time_windows is None): time_windows = (np.array([[2.5, 6]]) * fs).astype(int) else: time_windows = (np.array(time_windows) * fs).astype(int) self.riemannian = RiemannianMultiscale(filter_bank, time_windows, riem_opt=riem_opt, rho=rho, vectorized=True) self.no_bands = filter_bank.shape[0] self.no_time_windows = time_windows.shape[0] self.no_riem = None self.no_features = None
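A hedged usage sketch of this constructor; the enclosing class name (RiemannianModel) and the import are assumptions inferred from the file path, and the data shapes are illustrative:

import numpy as np
from riemannian_model import RiemannianModel  # assumed import, per the file path

model = RiemannianModel(svm_kernel='linear', svm_c=0.1, fs=250,
                        riem_opt='Riemann', filter_type='butter',
                        filter_order=2, random_state=42)

# 50 trials, 22 channels, 7 s at 250 Hz; the default [2.5, 6] s window
# (samples 625..1500) fits inside this trial length.
samples = np.random.randn(50, 22, 1750)
labels = np.random.randint(0, 4, size=50)
model.fit(samples, labels)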
def fit(self, samples, labels): ' Training\n\n Parameters\n ----------\n\n samples: np.array, size=(N, C, T)\n training samples\n\n labels: np.array, size=(N)\n training labels\n ' assert (len(samples.shape) == 3) no_channels = samples.shape[1] self.no_riem = int(((no_channels * (no_channels + 1)) / 2)) self.no_features = ((self.no_riem * self.no_bands) * self.no_time_windows) features = self.riemannian.fit(samples) self.classifier.fit(features, labels)
-7,846,581,110,877,279,000
Training

Parameters
----------
samples: np.array, size=(N, C, T)
    training samples

labels: np.array, size=(N)
    training labels
multiscale_bci_python/riemannian_model.py
fit
pulp-platform/multispectral-riemannian
python
def fit(self, samples, labels): assert (len(samples.shape) == 3) no_channels = samples.shape[1] self.no_riem = int(((no_channels * (no_channels + 1)) / 2)) self.no_features = ((self.no_riem * self.no_bands) * self.no_time_windows) features = self.riemannian.fit(samples) self.classifier.fit(features, labels)
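The feature count computed in fit() follows from vectorizing the upper triangle (including the diagonal) of one C-by-C covariance matrix per frequency band and per time window. A short worked sketch with illustrative values (the channel and band counts are examples, not taken from this record):

no_channels = 22                                  # C channels (illustrative)
no_riem = no_channels * (no_channels + 1) // 2    # C*(C+1)/2 = 253 entries per matrix
no_bands = 43                                     # depends on the filter bank (illustrative)
no_time_windows = 1
no_features = no_riem * no_bands * no_time_windows
print(no_features)                                # 10879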