body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
9ca76e0340f9c6ad0e2ba552e10e43d1ec671a37429a95399c914b11ee2214d5
def __init__(self, model, input_size=(1, 1, 32, 32), bits=32): 'Estimates the size of PyTorch models in memory for a given input size.' self.model = model self.input_size = input_size self.bits = 32 return
Estimates the size of PyTorch models in memory for a given input size.
src/deps/pretorched/models/utils/core.py
__init__
ericotjo001/neuron-descriptions
5
python
def __init__(self, model, input_size=(1, 1, 32, 32), bits=32): self.model = model self.input_size = input_size self.bits = 32 return
def __init__(self, model, input_size=(1, 1, 32, 32), bits=32): self.model = model self.input_size = input_size self.bits = 32 return<|docstring|>Estimates the size of PyTorch models in memory for a given input size.<|endoftext|>
6b4e126c5d460f96848108dea498865e82f6fb88ee44a254833272a762d57ae8
def get_parameter_sizes(self): 'Get sizes of all parameters in `model`.' mods = list(self.model.modules()) sizes = [] for i in range(1, len(mods)): m = mods[i] p = list(m.parameters()) for j in range(len(p)): sizes.append(np.array(p[j].size())) self.param_sizes = sizes return
Get sizes of all parameters in `model`.
src/deps/pretorched/models/utils/core.py
get_parameter_sizes
ericotjo001/neuron-descriptions
5
python
def get_parameter_sizes(self): mods = list(self.model.modules()) sizes = [] for i in range(1, len(mods)): m = mods[i] p = list(m.parameters()) for j in range(len(p)): sizes.append(np.array(p[j].size())) self.param_sizes = sizes return
def get_parameter_sizes(self): mods = list(self.model.modules()) sizes = [] for i in range(1, len(mods)): m = mods[i] p = list(m.parameters()) for j in range(len(p)): sizes.append(np.array(p[j].size())) self.param_sizes = sizes return<|docstring|>Get sizes of all parameters in `model`.<|endoftext|>
7ea06add834fee3ad661972eea7909c2035329c0183432dc4e51a28697c57a68
def get_output_sizes(self): 'Run sample input through each layer to get output sizes.' input_ = torch.FloatTensor(*self.input_size) mods = list(self.model.modules()) out_sizes = [] for i in range(1, len(mods)): m = mods[i] out = m(input_) out_sizes.append(np.array(out.size())) input_ = out self.out_sizes = out_sizes return
Run sample input through each layer to get output sizes.
src/deps/pretorched/models/utils/core.py
get_output_sizes
ericotjo001/neuron-descriptions
5
python
def get_output_sizes(self): input_ = torch.FloatTensor(*self.input_size) mods = list(self.model.modules()) out_sizes = [] for i in range(1, len(mods)): m = mods[i] out = m(input_) out_sizes.append(np.array(out.size())) input_ = out self.out_sizes = out_sizes return
def get_output_sizes(self): input_ = torch.FloatTensor(*self.input_size) mods = list(self.model.modules()) out_sizes = [] for i in range(1, len(mods)): m = mods[i] out = m(input_) out_sizes.append(np.array(out.size())) input_ = out self.out_sizes = out_sizes return<|docstring|>Run sample input through each layer to get output sizes.<|endoftext|>
11b7701f5e8efd49fe75d4258aae83994a2caf177153da806f9c8d2fdcaf4edc
def calc_param_bits(self): 'Calculate total number of bits to store `model` parameters.' total_bits = 0 for i in range(len(self.param_sizes)): s = self.param_sizes[i] bits = (np.prod(np.array(s)) * self.bits) total_bits += bits self.param_bits = total_bits return
Calculate total number of bits to store `model` parameters.
src/deps/pretorched/models/utils/core.py
calc_param_bits
ericotjo001/neuron-descriptions
5
python
def calc_param_bits(self): total_bits = 0 for i in range(len(self.param_sizes)): s = self.param_sizes[i] bits = (np.prod(np.array(s)) * self.bits) total_bits += bits self.param_bits = total_bits return
def calc_param_bits(self): total_bits = 0 for i in range(len(self.param_sizes)): s = self.param_sizes[i] bits = (np.prod(np.array(s)) * self.bits) total_bits += bits self.param_bits = total_bits return<|docstring|>Calculate total number of bits to store `model` parameters.<|endoftext|>
d39579ba919412504a4e18753c4af6a9bd3cd838d272fb42f145860e82d126dc
def calc_forward_backward_bits(self): 'Calculate bits to store forward and backward pass.' total_bits = 0 for i in range(len(self.out_sizes)): s = self.out_sizes[i] bits = (np.prod(np.array(s)) * self.bits) total_bits += bits self.forward_backward_bits = (total_bits * 2) return
Calculate bits to store forward and backward pass.
src/deps/pretorched/models/utils/core.py
calc_forward_backward_bits
ericotjo001/neuron-descriptions
5
python
def calc_forward_backward_bits(self): total_bits = 0 for i in range(len(self.out_sizes)): s = self.out_sizes[i] bits = (np.prod(np.array(s)) * self.bits) total_bits += bits self.forward_backward_bits = (total_bits * 2) return
def calc_forward_backward_bits(self): total_bits = 0 for i in range(len(self.out_sizes)): s = self.out_sizes[i] bits = (np.prod(np.array(s)) * self.bits) total_bits += bits self.forward_backward_bits = (total_bits * 2) return<|docstring|>Calculate bits to store forward and backward pass.<|endoftext|>
fea4808d4777169ccc27ce3c7115d1fe91257676ac3ac7e273e0b4a449c5318d
def calc_input_bits(self): 'Calculate bits to store input.' self.input_bits = (np.prod(np.array(self.input_size)) * self.bits) return
Calculate bits to store input.
src/deps/pretorched/models/utils/core.py
calc_input_bits
ericotjo001/neuron-descriptions
5
python
def calc_input_bits(self): self.input_bits = (np.prod(np.array(self.input_size)) * self.bits) return
def calc_input_bits(self): self.input_bits = (np.prod(np.array(self.input_size)) * self.bits) return<|docstring|>Calculate bits to store input.<|endoftext|>
0641030e62b3fa48f970083b5c86c27fba59c4448a4b081189346c06fd373a47
def estimate_size(self): 'Estimate model size in memory in megabytes and bits.' self.get_parameter_sizes() self.get_output_sizes() self.calc_param_bits() self.calc_forward_backward_bits() self.calc_input_bits() total = ((self.param_bits + self.forward_backward_bits) + self.input_bits) total_megabytes = ((total / 8) / (1024 ** 2)) return (total_megabytes, total)
Estimate model size in memory in megabytes and bits.
src/deps/pretorched/models/utils/core.py
estimate_size
ericotjo001/neuron-descriptions
5
python
def estimate_size(self): self.get_parameter_sizes() self.get_output_sizes() self.calc_param_bits() self.calc_forward_backward_bits() self.calc_input_bits() total = ((self.param_bits + self.forward_backward_bits) + self.input_bits) total_megabytes = ((total / 8) / (1024 ** 2)) return (total_megabytes, total)
def estimate_size(self): self.get_parameter_sizes() self.get_output_sizes() self.calc_param_bits() self.calc_forward_backward_bits() self.calc_input_bits() total = ((self.param_bits + self.forward_backward_bits) + self.input_bits) total_megabytes = ((total / 8) / (1024 ** 2)) return (total_megabytes, total)<|docstring|>Estimate model size in memory in megabytes and bits.<|endoftext|>
2e0821401e0724944e02b94c71a3aa0763cf2a57e4c7c553cf1c10f6caff0961
def get_input_string_port(self, port_name, default=None): '\n Get input string port value\n :param port_name:\n :param default:\n :return: :rtype:\n ' if self.__string_input_ports: return self.__string_input_ports.get(port_name, default) return default
Get input string port value :param port_name: :param default: :return: :rtype:
gbdx_task_template/gbdx_task_interface.py
get_input_string_port
michaelconnor00/gbdx-task-template
1
python
def get_input_string_port(self, port_name, default=None): '\n Get input string port value\n :param port_name:\n :param default:\n :return: :rtype:\n ' if self.__string_input_ports: return self.__string_input_ports.get(port_name, default) return default
def get_input_string_port(self, port_name, default=None): '\n Get input string port value\n :param port_name:\n :param default:\n :return: :rtype:\n ' if self.__string_input_ports: return self.__string_input_ports.get(port_name, default) return default<|docstring|>Get input string port value :param port_name: :param default: :return: :rtype:<|endoftext|>
0f1919fc8067a678f21482763aaa70bf4de8b662b75df484d722a0da9a09ac95
def get_input_data_port(self, port_name): '\n Get the input location for a specific port\n :param port_name:\n :return: :rtype:\n ' return os.path.join(self.input_path, port_name)
Get the input location for a specific port :param port_name: :return: :rtype:
gbdx_task_template/gbdx_task_interface.py
get_input_data_port
michaelconnor00/gbdx-task-template
1
python
def get_input_data_port(self, port_name): '\n Get the input location for a specific port\n :param port_name:\n :return: :rtype:\n ' return os.path.join(self.input_path, port_name)
def get_input_data_port(self, port_name): '\n Get the input location for a specific port\n :param port_name:\n :return: :rtype:\n ' return os.path.join(self.input_path, port_name)<|docstring|>Get the input location for a specific port :param port_name: :return: :rtype:<|endoftext|>
8a241521b2a9d086e7b2260f045fb53b7c5580c54ed02aa1e6be34a02dd1ad28
def get_output_data_port(self, port_name): '\n Get the output location for a specific port\n :param port_name:\n :return: :rtype:\n ' return os.path.join(self.output_path, port_name)
Get the output location for a specific port :param port_name: :return: :rtype:
gbdx_task_template/gbdx_task_interface.py
get_output_data_port
michaelconnor00/gbdx-task-template
1
python
def get_output_data_port(self, port_name): '\n Get the output location for a specific port\n :param port_name:\n :return: :rtype:\n ' return os.path.join(self.output_path, port_name)
def get_output_data_port(self, port_name): '\n Get the output location for a specific port\n :param port_name:\n :return: :rtype:\n ' return os.path.join(self.output_path, port_name)<|docstring|>Get the output location for a specific port :param port_name: :return: :rtype:<|endoftext|>
9fac99f83ab127932ffbdfdf41b8c08cb675c8cf5c5036b9af0ab45924d11846
def set_output_string_port(self, port_name, value): '\n Set output string port value\n :param port_name:\n :param value:\n :return: :rtype:\n ' if (not self.__string_output_ports): self.__string_output_ports = {} self.__string_output_ports[port_name] = value
Set output string port value :param port_name: :param value: :return: :rtype:
gbdx_task_template/gbdx_task_interface.py
set_output_string_port
michaelconnor00/gbdx-task-template
1
python
def set_output_string_port(self, port_name, value): '\n Set output string port value\n :param port_name:\n :param value:\n :return: :rtype:\n ' if (not self.__string_output_ports): self.__string_output_ports = {} self.__string_output_ports[port_name] = value
def set_output_string_port(self, port_name, value): '\n Set output string port value\n :param port_name:\n :param value:\n :return: :rtype:\n ' if (not self.__string_output_ports): self.__string_output_ports = {} self.__string_output_ports[port_name] = value<|docstring|>Set output string port value :param port_name: :param value: :return: :rtype:<|endoftext|>
fe4b0fcf78bdc032113dbf6a542513bed4347230147726bfc987f4562951ae1c
def invoke(self): '\n The do something method\n :rtype : bool\n :raise RuntimeError:\n ' raise RuntimeError('JobRunner Baseclass invoke is not callable')
The do something method :rtype : bool :raise RuntimeError:
gbdx_task_template/gbdx_task_interface.py
invoke
michaelconnor00/gbdx-task-template
1
python
def invoke(self): '\n The do something method\n :rtype : bool\n :raise RuntimeError:\n ' raise RuntimeError('JobRunner Baseclass invoke is not callable')
def invoke(self): '\n The do something method\n :rtype : bool\n :raise RuntimeError:\n ' raise RuntimeError('JobRunner Baseclass invoke is not callable')<|docstring|>The do something method :rtype : bool :raise RuntimeError:<|endoftext|>
1839ec0e456019016dd2e33269741b1419012bfffca3d2e43c96575b8a6fe2b1
def finalize(self, success_or_fail, message=''): "\n :param success_or_fail: string that is 'success' or 'fail'\n :param message:\n " if self.__string_output_ports: with open(os.path.join(self.output_path, 'ports.json'), 'w') as opf: json.dump(self.__string_output_ports, opf, indent=4) with open(os.path.join(self.base_path, 'status.json'), 'w') as sf: json.dump({'status': success_or_fail, 'reason': message}, sf, indent=4)
:param success_or_fail: string that is 'success' or 'fail' :param message:
gbdx_task_template/gbdx_task_interface.py
finalize
michaelconnor00/gbdx-task-template
1
python
def finalize(self, success_or_fail, message=): "\n :param success_or_fail: string that is 'success' or 'fail'\n :param message:\n " if self.__string_output_ports: with open(os.path.join(self.output_path, 'ports.json'), 'w') as opf: json.dump(self.__string_output_ports, opf, indent=4) with open(os.path.join(self.base_path, 'status.json'), 'w') as sf: json.dump({'status': success_or_fail, 'reason': message}, sf, indent=4)
def finalize(self, success_or_fail, message=): "\n :param success_or_fail: string that is 'success' or 'fail'\n :param message:\n " if self.__string_output_ports: with open(os.path.join(self.output_path, 'ports.json'), 'w') as opf: json.dump(self.__string_output_ports, opf, indent=4) with open(os.path.join(self.base_path, 'status.json'), 'w') as sf: json.dump({'status': success_or_fail, 'reason': message}, sf, indent=4)<|docstring|>:param success_or_fail: string that is 'success' or 'fail' :param message:<|endoftext|>
29f4dc00a3bba5c43dc9e41f4d1d95e91c24fc835d03d79dc9587d1aa5a90bb7
def test_nirsport_v2_matches_snirf(nirx_snirf): 'Test NIRSport2 raw files return same data as snirf.' (raw, raw_snirf) = nirx_snirf _reorder_nirx(raw_snirf) assert (raw.ch_names == raw_snirf.ch_names) assert_allclose(raw._data, raw_snirf._data) assert_allclose(raw.annotations.onset, raw_snirf.annotations.onset) assert_array_equal(raw.ch_names, raw_snirf.ch_names)
Test NIRSport2 raw files return same data as snirf.
mne/io/nirx/tests/test_nirx.py
test_nirsport_v2_matches_snirf
snwnde/mne-python
0
python
def test_nirsport_v2_matches_snirf(nirx_snirf): (raw, raw_snirf) = nirx_snirf _reorder_nirx(raw_snirf) assert (raw.ch_names == raw_snirf.ch_names) assert_allclose(raw._data, raw_snirf._data) assert_allclose(raw.annotations.onset, raw_snirf.annotations.onset) assert_array_equal(raw.ch_names, raw_snirf.ch_names)
def test_nirsport_v2_matches_snirf(nirx_snirf): (raw, raw_snirf) = nirx_snirf _reorder_nirx(raw_snirf) assert (raw.ch_names == raw_snirf.ch_names) assert_allclose(raw._data, raw_snirf._data) assert_allclose(raw.annotations.onset, raw_snirf.annotations.onset) assert_array_equal(raw.ch_names, raw_snirf.ch_names)<|docstring|>Test NIRSport2 raw files return same data as snirf.<|endoftext|>
9b833ae2ae02bfd4c00e3ce936300b744e17dfd1126aaa9e634f9515f4c02f04
@requires_testing_data @pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') def test_nirsport_v2(): 'Test NIRSport2 file.' raw = read_raw_nirx(nirsport2, preload=True) assert (raw._data.shape == (40, 128)) allowed_distance_error = 0.005 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S2_D2 760').info), [0.04], atol=allowed_distance_error) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][2][3:5] == 'D6') assert_allclose(mni_locs[2], [(- 0.0841), (- 0.0138), 0.0248], atol=allowed_dist_error) assert (raw.info['ch_names'][34][3:5] == 'D5') assert_allclose(mni_locs[34], [0.0845, (- 0.0451), (- 0.0123)], atol=allowed_dist_error) locs = [ch['loc'][3:6] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][:2] == 'S1') assert_allclose(mni_locs[0], [(- 0.0848), (- 0.0162), (- 0.0163)], atol=allowed_dist_error) assert (raw.info['ch_names'][9][:2] == 'S2') assert_allclose(mni_locs[9], [(- 0.0), (- 0.1195), 0.0142], atol=allowed_dist_error) assert (raw.info['ch_names'][39][:2] == 'S8') assert_allclose(mni_locs[34], [0.0828, (- 0.046), 0.0285], atol=allowed_dist_error) assert (len(raw.annotations) == 3) assert (raw.annotations.description[0] == '1.0') assert (raw.annotations.description[2] == '6.0') assert_allclose(np.diff(raw.annotations.onset), [2.3, 3.1], atol=0.1) mon = raw.get_montage() assert (len(mon.dig) == 27)
Test NIRSport2 file.
mne/io/nirx/tests/test_nirx.py
test_nirsport_v2
snwnde/mne-python
0
python
@requires_testing_data @pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') def test_nirsport_v2(): raw = read_raw_nirx(nirsport2, preload=True) assert (raw._data.shape == (40, 128)) allowed_distance_error = 0.005 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S2_D2 760').info), [0.04], atol=allowed_distance_error) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][2][3:5] == 'D6') assert_allclose(mni_locs[2], [(- 0.0841), (- 0.0138), 0.0248], atol=allowed_dist_error) assert (raw.info['ch_names'][34][3:5] == 'D5') assert_allclose(mni_locs[34], [0.0845, (- 0.0451), (- 0.0123)], atol=allowed_dist_error) locs = [ch['loc'][3:6] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][:2] == 'S1') assert_allclose(mni_locs[0], [(- 0.0848), (- 0.0162), (- 0.0163)], atol=allowed_dist_error) assert (raw.info['ch_names'][9][:2] == 'S2') assert_allclose(mni_locs[9], [(- 0.0), (- 0.1195), 0.0142], atol=allowed_dist_error) assert (raw.info['ch_names'][39][:2] == 'S8') assert_allclose(mni_locs[34], [0.0828, (- 0.046), 0.0285], atol=allowed_dist_error) assert (len(raw.annotations) == 3) assert (raw.annotations.description[0] == '1.0') assert (raw.annotations.description[2] == '6.0') assert_allclose(np.diff(raw.annotations.onset), [2.3, 3.1], atol=0.1) mon = raw.get_montage() assert (len(mon.dig) == 27)
@requires_testing_data @pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') def test_nirsport_v2(): raw = read_raw_nirx(nirsport2, preload=True) assert (raw._data.shape == (40, 128)) allowed_distance_error = 0.005 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S2_D2 760').info), [0.04], atol=allowed_distance_error) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][2][3:5] == 'D6') assert_allclose(mni_locs[2], [(- 0.0841), (- 0.0138), 0.0248], atol=allowed_dist_error) assert (raw.info['ch_names'][34][3:5] == 'D5') assert_allclose(mni_locs[34], [0.0845, (- 0.0451), (- 0.0123)], atol=allowed_dist_error) locs = [ch['loc'][3:6] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][:2] == 'S1') assert_allclose(mni_locs[0], [(- 0.0848), (- 0.0162), (- 0.0163)], atol=allowed_dist_error) assert (raw.info['ch_names'][9][:2] == 'S2') assert_allclose(mni_locs[9], [(- 0.0), (- 0.1195), 0.0142], atol=allowed_dist_error) assert (raw.info['ch_names'][39][:2] == 'S8') assert_allclose(mni_locs[34], [0.0828, (- 0.046), 0.0285], atol=allowed_dist_error) assert (len(raw.annotations) == 3) assert (raw.annotations.description[0] == '1.0') assert (raw.annotations.description[2] == '6.0') assert_allclose(np.diff(raw.annotations.onset), [2.3, 3.1], atol=0.1) mon = raw.get_montage() assert (len(mon.dig) == 27)<|docstring|>Test NIRSport2 file.<|endoftext|>
6d217aee4f9f2a0d618300178e9fd96105879da45c82229b60b73ce3dddd179e
@requires_testing_data @pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') def test_nirsport_v1_wo_sat(): 'Test NIRSport1 file with no saturation.' raw = read_raw_nirx(nirsport1_wo_sat, preload=True) assert (raw._data.shape == (26, 164)) assert (raw.info['sfreq'] == 10.416667) assert (np.sum(np.isnan(raw.get_data())) == 0) raw = read_raw_nirx(nirsport1_wo_sat, preload=True, saturated='nan') data = raw.get_data() assert (data.shape == (26, 164)) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_wo_sat, saturated='annotate') data = raw.get_data() assert (data.shape == (26, 164)) assert (np.sum(np.isnan(data)) == 0)
Test NIRSport1 file with no saturation.
mne/io/nirx/tests/test_nirx.py
test_nirsport_v1_wo_sat
snwnde/mne-python
0
python
@requires_testing_data @pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') def test_nirsport_v1_wo_sat(): raw = read_raw_nirx(nirsport1_wo_sat, preload=True) assert (raw._data.shape == (26, 164)) assert (raw.info['sfreq'] == 10.416667) assert (np.sum(np.isnan(raw.get_data())) == 0) raw = read_raw_nirx(nirsport1_wo_sat, preload=True, saturated='nan') data = raw.get_data() assert (data.shape == (26, 164)) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_wo_sat, saturated='annotate') data = raw.get_data() assert (data.shape == (26, 164)) assert (np.sum(np.isnan(data)) == 0)
@requires_testing_data @pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') def test_nirsport_v1_wo_sat(): raw = read_raw_nirx(nirsport1_wo_sat, preload=True) assert (raw._data.shape == (26, 164)) assert (raw.info['sfreq'] == 10.416667) assert (np.sum(np.isnan(raw.get_data())) == 0) raw = read_raw_nirx(nirsport1_wo_sat, preload=True, saturated='nan') data = raw.get_data() assert (data.shape == (26, 164)) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_wo_sat, saturated='annotate') data = raw.get_data() assert (data.shape == (26, 164)) assert (np.sum(np.isnan(data)) == 0)<|docstring|>Test NIRSport1 file with no saturation.<|endoftext|>
ef00a20f91fbadb22d0804a3c930658e64f41925bed51d5b3d588901c34a801a
@pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') @requires_testing_data def test_nirsport_v1_w_sat(): 'Test NIRSport1 file with NaNs but not in channel of interest.' raw = read_raw_nirx(nirsport1_w_sat) data = raw.get_data() assert (data.shape == (26, 176)) assert (raw.info['sfreq'] == 10.416667) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_w_sat, saturated='nan') data = raw.get_data() assert (data.shape == (26, 176)) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_w_sat, saturated='annotate') data = raw.get_data() assert (data.shape == (26, 176)) assert (np.sum(np.isnan(data)) == 0)
Test NIRSport1 file with NaNs but not in channel of interest.
mne/io/nirx/tests/test_nirx.py
test_nirsport_v1_w_sat
snwnde/mne-python
0
python
@pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') @requires_testing_data def test_nirsport_v1_w_sat(): raw = read_raw_nirx(nirsport1_w_sat) data = raw.get_data() assert (data.shape == (26, 176)) assert (raw.info['sfreq'] == 10.416667) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_w_sat, saturated='nan') data = raw.get_data() assert (data.shape == (26, 176)) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_w_sat, saturated='annotate') data = raw.get_data() assert (data.shape == (26, 176)) assert (np.sum(np.isnan(data)) == 0)
@pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') @requires_testing_data def test_nirsport_v1_w_sat(): raw = read_raw_nirx(nirsport1_w_sat) data = raw.get_data() assert (data.shape == (26, 176)) assert (raw.info['sfreq'] == 10.416667) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_w_sat, saturated='nan') data = raw.get_data() assert (data.shape == (26, 176)) assert (np.sum(np.isnan(data)) == 0) raw = read_raw_nirx(nirsport1_w_sat, saturated='annotate') data = raw.get_data() assert (data.shape == (26, 176)) assert (np.sum(np.isnan(data)) == 0)<|docstring|>Test NIRSport1 file with NaNs but not in channel of interest.<|endoftext|>
2131d3dbcacbcb0631c80dcb43a7e19fbc20001e0a39d76d9e489ca7b0c6b37a
@pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') @requires_testing_data @pytest.mark.parametrize('preload', (True, False)) @pytest.mark.parametrize('meas_date', (None, 'orig')) def test_nirsport_v1_w_bad_sat(preload, meas_date): 'Test NIRSport1 file with NaNs.' fname = nirsport1_w_fullsat raw = read_raw_nirx(fname, preload=preload) data = raw.get_data() assert (not np.isnan(data).any()) assert (len(raw.annotations) == 5) raw_ignore = read_raw_nirx(fname, saturated='ignore', preload=preload) assert_allclose(raw_ignore.get_data(), data) assert (len(raw_ignore.annotations) == 2) assert (not any((('NAN' in d) for d in raw_ignore.annotations.description))) raw_nan = read_raw_nirx(fname, saturated='nan', preload=preload) data_nan = raw_nan.get_data() assert np.isnan(data_nan).any() assert (not np.allclose(raw_nan.get_data(), data)) raw_nan_annot = raw_ignore.copy() if (meas_date is None): raw.set_meas_date(None) raw_nan.set_meas_date(None) raw_nan_annot.set_meas_date(None) nan_annots = annotate_nan(raw_nan) assert (nan_annots.orig_time == raw_nan.info['meas_date']) raw_nan_annot.set_annotations(nan_annots) use_mask = np.where((raw.annotations.description == 'BAD_SATURATED')) for key in ('onset', 'duration'): a = getattr(raw_nan_annot.annotations, key)[::2] b = getattr(raw.annotations, key)[use_mask] assert_allclose(a, b)
Test NIRSport1 file with NaNs.
mne/io/nirx/tests/test_nirx.py
test_nirsport_v1_w_bad_sat
snwnde/mne-python
0
python
@pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') @requires_testing_data @pytest.mark.parametrize('preload', (True, False)) @pytest.mark.parametrize('meas_date', (None, 'orig')) def test_nirsport_v1_w_bad_sat(preload, meas_date): fname = nirsport1_w_fullsat raw = read_raw_nirx(fname, preload=preload) data = raw.get_data() assert (not np.isnan(data).any()) assert (len(raw.annotations) == 5) raw_ignore = read_raw_nirx(fname, saturated='ignore', preload=preload) assert_allclose(raw_ignore.get_data(), data) assert (len(raw_ignore.annotations) == 2) assert (not any((('NAN' in d) for d in raw_ignore.annotations.description))) raw_nan = read_raw_nirx(fname, saturated='nan', preload=preload) data_nan = raw_nan.get_data() assert np.isnan(data_nan).any() assert (not np.allclose(raw_nan.get_data(), data)) raw_nan_annot = raw_ignore.copy() if (meas_date is None): raw.set_meas_date(None) raw_nan.set_meas_date(None) raw_nan_annot.set_meas_date(None) nan_annots = annotate_nan(raw_nan) assert (nan_annots.orig_time == raw_nan.info['meas_date']) raw_nan_annot.set_annotations(nan_annots) use_mask = np.where((raw.annotations.description == 'BAD_SATURATED')) for key in ('onset', 'duration'): a = getattr(raw_nan_annot.annotations, key)[::2] b = getattr(raw.annotations, key)[use_mask] assert_allclose(a, b)
@pytest.mark.filterwarnings('ignore:.*Extraction of measurement.*:') @requires_testing_data @pytest.mark.parametrize('preload', (True, False)) @pytest.mark.parametrize('meas_date', (None, 'orig')) def test_nirsport_v1_w_bad_sat(preload, meas_date): fname = nirsport1_w_fullsat raw = read_raw_nirx(fname, preload=preload) data = raw.get_data() assert (not np.isnan(data).any()) assert (len(raw.annotations) == 5) raw_ignore = read_raw_nirx(fname, saturated='ignore', preload=preload) assert_allclose(raw_ignore.get_data(), data) assert (len(raw_ignore.annotations) == 2) assert (not any((('NAN' in d) for d in raw_ignore.annotations.description))) raw_nan = read_raw_nirx(fname, saturated='nan', preload=preload) data_nan = raw_nan.get_data() assert np.isnan(data_nan).any() assert (not np.allclose(raw_nan.get_data(), data)) raw_nan_annot = raw_ignore.copy() if (meas_date is None): raw.set_meas_date(None) raw_nan.set_meas_date(None) raw_nan_annot.set_meas_date(None) nan_annots = annotate_nan(raw_nan) assert (nan_annots.orig_time == raw_nan.info['meas_date']) raw_nan_annot.set_annotations(nan_annots) use_mask = np.where((raw.annotations.description == 'BAD_SATURATED')) for key in ('onset', 'duration'): a = getattr(raw_nan_annot.annotations, key)[::2] b = getattr(raw.annotations, key)[use_mask] assert_allclose(a, b)<|docstring|>Test NIRSport1 file with NaNs.<|endoftext|>
4afcb1cd56c650713ef68b89ee9f76e683aac40aec6c1516ed96467ff561b3c4
@requires_testing_data def test_nirx_hdr_load(): 'Test reading NIRX files using path to header file.' fname = (fname_nirx_15_2_short + '/NIRS-2019-08-23_001.hdr') raw = read_raw_nirx(fname, preload=True) assert (raw._data.shape == (26, 145)) assert (raw.info['sfreq'] == 12.5)
Test reading NIRX files using path to header file.
mne/io/nirx/tests/test_nirx.py
test_nirx_hdr_load
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_hdr_load(): fname = (fname_nirx_15_2_short + '/NIRS-2019-08-23_001.hdr') raw = read_raw_nirx(fname, preload=True) assert (raw._data.shape == (26, 145)) assert (raw.info['sfreq'] == 12.5)
@requires_testing_data def test_nirx_hdr_load(): fname = (fname_nirx_15_2_short + '/NIRS-2019-08-23_001.hdr') raw = read_raw_nirx(fname, preload=True) assert (raw._data.shape == (26, 145)) assert (raw.info['sfreq'] == 12.5)<|docstring|>Test reading NIRX files using path to header file.<|endoftext|>
a2a4743d31061cff0e045d7b93376c4cd8602c3061b7958f99e837a2a818e781
@requires_testing_data def test_nirx_missing_warn(): 'Test reading NIRX files when missing data.' with pytest.raises(FileNotFoundError, match='does not exist'): read_raw_nirx((fname_nirx_15_2_short + '1'), preload=True)
Test reading NIRX files when missing data.
mne/io/nirx/tests/test_nirx.py
test_nirx_missing_warn
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_missing_warn(): with pytest.raises(FileNotFoundError, match='does not exist'): read_raw_nirx((fname_nirx_15_2_short + '1'), preload=True)
@requires_testing_data def test_nirx_missing_warn(): with pytest.raises(FileNotFoundError, match='does not exist'): read_raw_nirx((fname_nirx_15_2_short + '1'), preload=True)<|docstring|>Test reading NIRX files when missing data.<|endoftext|>
f8f744e73dcfcb05ec99054f60077d48304aa6e484b94286d299fee4091aa458
@requires_testing_data def test_nirx_missing_evt(tmp_path): 'Test reading NIRX files when missing data.' shutil.copytree(fname_nirx_15_2_short, (str(tmp_path) + '/data/')) os.rename(((tmp_path / 'data') / 'NIRS-2019-08-23_001.evt'), ((tmp_path / 'data') / 'NIRS-2019-08-23_001.xxx')) fname = ((tmp_path / 'data') / 'NIRS-2019-08-23_001.hdr') raw = read_raw_nirx(fname, preload=True) assert (raw.annotations.onset.shape == (0,))
Test reading NIRX files when missing data.
mne/io/nirx/tests/test_nirx.py
test_nirx_missing_evt
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_missing_evt(tmp_path): shutil.copytree(fname_nirx_15_2_short, (str(tmp_path) + '/data/')) os.rename(((tmp_path / 'data') / 'NIRS-2019-08-23_001.evt'), ((tmp_path / 'data') / 'NIRS-2019-08-23_001.xxx')) fname = ((tmp_path / 'data') / 'NIRS-2019-08-23_001.hdr') raw = read_raw_nirx(fname, preload=True) assert (raw.annotations.onset.shape == (0,))
@requires_testing_data def test_nirx_missing_evt(tmp_path): shutil.copytree(fname_nirx_15_2_short, (str(tmp_path) + '/data/')) os.rename(((tmp_path / 'data') / 'NIRS-2019-08-23_001.evt'), ((tmp_path / 'data') / 'NIRS-2019-08-23_001.xxx')) fname = ((tmp_path / 'data') / 'NIRS-2019-08-23_001.hdr') raw = read_raw_nirx(fname, preload=True) assert (raw.annotations.onset.shape == (0,))<|docstring|>Test reading NIRX files when missing data.<|endoftext|>
a379ee95574e83d36a40e02936fa4730c188e40f342088268244f779d98bd72f
@requires_testing_data def test_nirx_dat_warn(tmp_path): 'Test reading NIRX files when missing data.' shutil.copytree(fname_nirx_15_2_short, (str(tmp_path) + '/data/')) os.rename(((tmp_path / 'data') / 'NIRS-2019-08-23_001.dat'), ((tmp_path / 'data') / 'NIRS-2019-08-23_001.tmp')) fname = ((tmp_path / 'data') / 'NIRS-2019-08-23_001.hdr') with pytest.raises(RuntimeWarning, match='A single dat'): read_raw_nirx(fname, preload=True)
Test reading NIRX files when missing data.
mne/io/nirx/tests/test_nirx.py
test_nirx_dat_warn
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_dat_warn(tmp_path): shutil.copytree(fname_nirx_15_2_short, (str(tmp_path) + '/data/')) os.rename(((tmp_path / 'data') / 'NIRS-2019-08-23_001.dat'), ((tmp_path / 'data') / 'NIRS-2019-08-23_001.tmp')) fname = ((tmp_path / 'data') / 'NIRS-2019-08-23_001.hdr') with pytest.raises(RuntimeWarning, match='A single dat'): read_raw_nirx(fname, preload=True)
@requires_testing_data def test_nirx_dat_warn(tmp_path): shutil.copytree(fname_nirx_15_2_short, (str(tmp_path) + '/data/')) os.rename(((tmp_path / 'data') / 'NIRS-2019-08-23_001.dat'), ((tmp_path / 'data') / 'NIRS-2019-08-23_001.tmp')) fname = ((tmp_path / 'data') / 'NIRS-2019-08-23_001.hdr') with pytest.raises(RuntimeWarning, match='A single dat'): read_raw_nirx(fname, preload=True)<|docstring|>Test reading NIRX files when missing data.<|endoftext|>
e28e4322038518d83fdcd824eab0e01c5e53eaffc54f44241842915372e31dbe
@requires_testing_data def test_nirx_15_2_short(): 'Test reading NIRX files.' raw = read_raw_nirx(fname_nirx_15_2_short, preload=True) assert (raw._data.shape == (26, 145)) assert (raw.info['sfreq'] == 12.5) assert (raw.info['meas_date'] == dt.datetime(2019, 8, 23, 7, 37, 4, 540000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:4] == ['S1_D1 760', 'S1_D1 850', 'S1_D9 760', 'S1_D9 850']) assert (raw.info['ch_names'][24:26] == ['S5_D13 760', 'S5_D13 850']) assert (raw.info['chs'][0]['loc'][9] == 760) assert (raw.info['chs'][1]['loc'][9] == 850) assert (raw.info['subject_info'] == dict(sex=1, first_name='MNE', middle_name='Test', last_name='Recording', birthday=(2014, 8, 23), his_id='MNE_Test_Recording')) allowed_distance_error = 0.0002 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S2_D10 760').info), [0.0086], atol=allowed_distance_error) is_short = short_channels(raw.info) assert_array_equal(is_short[:9:2], [False, True, False, True, False]) is_short = short_channels(raw.info, threshold=0.003) assert_array_equal(is_short[:3:2], [False, False]) is_short = short_channels(raw.info, threshold=50) assert_array_equal(is_short[:3:2], [True, True]) assert_array_equal(raw.annotations.description, ['3.0', '2.0', '1.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][4][3:5] == 'D3') assert_allclose(mni_locs[4], [0.0846, (- 0.0142), (- 0.0156)], atol=allowed_dist_error) assert (raw.info['ch_names'][8][3:5] == 'D2') assert_allclose(mni_locs[8], [0.0207, (- 0.1062), 0.0484], atol=allowed_dist_error) assert (raw.info['ch_names'][12][3:5] == 'D4') 
assert_allclose(mni_locs[12], [(- 0.0196), 0.0821, 0.0275], atol=allowed_dist_error) assert (raw.info['ch_names'][16][3:5] == 'D5') assert_allclose(mni_locs[16], [(- 0.036), 0.0276, 0.0778], atol=allowed_dist_error) assert (raw.info['ch_names'][19][3:5] == 'D6') assert_allclose(mni_locs[19], [0.0352, 0.0283, 0.078], atol=allowed_dist_error) assert (raw.info['ch_names'][21][3:5] == 'D7') assert_allclose(mni_locs[21], [0.0388, (- 0.0477), 0.0932], atol=allowed_dist_error)
Test reading NIRX files.
mne/io/nirx/tests/test_nirx.py
test_nirx_15_2_short
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_15_2_short(): raw = read_raw_nirx(fname_nirx_15_2_short, preload=True) assert (raw._data.shape == (26, 145)) assert (raw.info['sfreq'] == 12.5) assert (raw.info['meas_date'] == dt.datetime(2019, 8, 23, 7, 37, 4, 540000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:4] == ['S1_D1 760', 'S1_D1 850', 'S1_D9 760', 'S1_D9 850']) assert (raw.info['ch_names'][24:26] == ['S5_D13 760', 'S5_D13 850']) assert (raw.info['chs'][0]['loc'][9] == 760) assert (raw.info['chs'][1]['loc'][9] == 850) assert (raw.info['subject_info'] == dict(sex=1, first_name='MNE', middle_name='Test', last_name='Recording', birthday=(2014, 8, 23), his_id='MNE_Test_Recording')) allowed_distance_error = 0.0002 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S2_D10 760').info), [0.0086], atol=allowed_distance_error) is_short = short_channels(raw.info) assert_array_equal(is_short[:9:2], [False, True, False, True, False]) is_short = short_channels(raw.info, threshold=0.003) assert_array_equal(is_short[:3:2], [False, False]) is_short = short_channels(raw.info, threshold=50) assert_array_equal(is_short[:3:2], [True, True]) assert_array_equal(raw.annotations.description, ['3.0', '2.0', '1.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][4][3:5] == 'D3') assert_allclose(mni_locs[4], [0.0846, (- 0.0142), (- 0.0156)], atol=allowed_dist_error) assert (raw.info['ch_names'][8][3:5] == 'D2') assert_allclose(mni_locs[8], [0.0207, (- 0.1062), 0.0484], atol=allowed_dist_error) assert (raw.info['ch_names'][12][3:5] == 'D4') assert_allclose(mni_locs[12], [(- 
0.0196), 0.0821, 0.0275], atol=allowed_dist_error) assert (raw.info['ch_names'][16][3:5] == 'D5') assert_allclose(mni_locs[16], [(- 0.036), 0.0276, 0.0778], atol=allowed_dist_error) assert (raw.info['ch_names'][19][3:5] == 'D6') assert_allclose(mni_locs[19], [0.0352, 0.0283, 0.078], atol=allowed_dist_error) assert (raw.info['ch_names'][21][3:5] == 'D7') assert_allclose(mni_locs[21], [0.0388, (- 0.0477), 0.0932], atol=allowed_dist_error)
@requires_testing_data def test_nirx_15_2_short(): raw = read_raw_nirx(fname_nirx_15_2_short, preload=True) assert (raw._data.shape == (26, 145)) assert (raw.info['sfreq'] == 12.5) assert (raw.info['meas_date'] == dt.datetime(2019, 8, 23, 7, 37, 4, 540000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:4] == ['S1_D1 760', 'S1_D1 850', 'S1_D9 760', 'S1_D9 850']) assert (raw.info['ch_names'][24:26] == ['S5_D13 760', 'S5_D13 850']) assert (raw.info['chs'][0]['loc'][9] == 760) assert (raw.info['chs'][1]['loc'][9] == 850) assert (raw.info['subject_info'] == dict(sex=1, first_name='MNE', middle_name='Test', last_name='Recording', birthday=(2014, 8, 23), his_id='MNE_Test_Recording')) allowed_distance_error = 0.0002 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S2_D10 760').info), [0.0086], atol=allowed_distance_error) is_short = short_channels(raw.info) assert_array_equal(is_short[:9:2], [False, True, False, True, False]) is_short = short_channels(raw.info, threshold=0.003) assert_array_equal(is_short[:3:2], [False, False]) is_short = short_channels(raw.info, threshold=50) assert_array_equal(is_short[:3:2], [True, True]) assert_array_equal(raw.annotations.description, ['3.0', '2.0', '1.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][4][3:5] == 'D3') assert_allclose(mni_locs[4], [0.0846, (- 0.0142), (- 0.0156)], atol=allowed_dist_error) assert (raw.info['ch_names'][8][3:5] == 'D2') assert_allclose(mni_locs[8], [0.0207, (- 0.1062), 0.0484], atol=allowed_dist_error) assert (raw.info['ch_names'][12][3:5] == 'D4') assert_allclose(mni_locs[12], [(- 
0.0196), 0.0821, 0.0275], atol=allowed_dist_error) assert (raw.info['ch_names'][16][3:5] == 'D5') assert_allclose(mni_locs[16], [(- 0.036), 0.0276, 0.0778], atol=allowed_dist_error) assert (raw.info['ch_names'][19][3:5] == 'D6') assert_allclose(mni_locs[19], [0.0352, 0.0283, 0.078], atol=allowed_dist_error) assert (raw.info['ch_names'][21][3:5] == 'D7') assert_allclose(mni_locs[21], [0.0388, (- 0.0477), 0.0932], atol=allowed_dist_error)<|docstring|>Test reading NIRX files.<|endoftext|>
e33c13ddf7e14dc9b3576a35d77dd1fd8945c31b0ea45525f4d1ab5ba1c7c9d7
@requires_testing_data def test_nirx_15_3_short(): 'Test reading NIRX files.' raw = read_raw_nirx(fname_nirx_15_3_short, preload=True) assert (raw._data.shape == (26, 220)) assert (raw.info['sfreq'] == 12.5) assert (raw.info['ch_names'][:4] == ['S1_D2 760', 'S1_D2 850', 'S1_D9 760', 'S1_D9 850']) assert (raw.info['ch_names'][24:26] == ['S5_D13 760', 'S5_D13 850']) assert (raw.info['chs'][0]['loc'][9] == 760) assert (raw.info['chs'][1]['loc'][9] == 850) assert (raw.info['subject_info'] == dict(birthday=(2020, 8, 18), sex=0, first_name='testMontage\\0ATestMontage', his_id='testMontage\\0ATestMontage')) allowed_distance_error = 0.001 assert_allclose(source_detector_distances(raw.copy().pick('S1_D2 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S5_D13 760').info), [0.0076], atol=allowed_distance_error) is_short = short_channels(raw.info) assert_array_equal(is_short[:9:2], [False, True, False, True, False]) is_short = short_channels(raw.info, threshold=0.003) assert_array_equal(is_short[:3:2], [False, False]) is_short = short_channels(raw.info, threshold=50) assert_array_equal(is_short[:3:2], [True, True]) assert_array_equal(raw.annotations.description, ['4.0', '2.0', '1.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D2') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][4][3:5] == 'D1') assert_allclose(mni_locs[4], [0.0846, (- 0.0142), (- 0.0156)], atol=allowed_dist_error) assert (raw.info['ch_names'][8][3:5] == 'D3') assert_allclose(mni_locs[8], [0.0207, (- 0.1062), 0.0484], atol=allowed_dist_error) assert (raw.info['ch_names'][12][3:5] == 'D4') assert_allclose(mni_locs[12], [(- 0.0196), 0.0821, 0.0275], atol=allowed_dist_error) assert 
(raw.info['ch_names'][16][3:5] == 'D5') assert_allclose(mni_locs[16], [(- 0.036), 0.0276, 0.0778], atol=allowed_dist_error) assert (raw.info['ch_names'][19][3:5] == 'D6') assert_allclose(mni_locs[19], [0.0388, (- 0.0477), 0.0932], atol=allowed_dist_error) assert (raw.info['ch_names'][21][3:5] == 'D7') assert_allclose(mni_locs[21], [(- 0.0394), (- 0.0483), 0.0928], atol=allowed_dist_error)
Test reading NIRX files.
mne/io/nirx/tests/test_nirx.py
test_nirx_15_3_short
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_15_3_short(): raw = read_raw_nirx(fname_nirx_15_3_short, preload=True) assert (raw._data.shape == (26, 220)) assert (raw.info['sfreq'] == 12.5) assert (raw.info['ch_names'][:4] == ['S1_D2 760', 'S1_D2 850', 'S1_D9 760', 'S1_D9 850']) assert (raw.info['ch_names'][24:26] == ['S5_D13 760', 'S5_D13 850']) assert (raw.info['chs'][0]['loc'][9] == 760) assert (raw.info['chs'][1]['loc'][9] == 850) assert (raw.info['subject_info'] == dict(birthday=(2020, 8, 18), sex=0, first_name='testMontage\\0ATestMontage', his_id='testMontage\\0ATestMontage')) allowed_distance_error = 0.001 assert_allclose(source_detector_distances(raw.copy().pick('S1_D2 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S5_D13 760').info), [0.0076], atol=allowed_distance_error) is_short = short_channels(raw.info) assert_array_equal(is_short[:9:2], [False, True, False, True, False]) is_short = short_channels(raw.info, threshold=0.003) assert_array_equal(is_short[:3:2], [False, False]) is_short = short_channels(raw.info, threshold=50) assert_array_equal(is_short[:3:2], [True, True]) assert_array_equal(raw.annotations.description, ['4.0', '2.0', '1.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D2') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][4][3:5] == 'D1') assert_allclose(mni_locs[4], [0.0846, (- 0.0142), (- 0.0156)], atol=allowed_dist_error) assert (raw.info['ch_names'][8][3:5] == 'D3') assert_allclose(mni_locs[8], [0.0207, (- 0.1062), 0.0484], atol=allowed_dist_error) assert (raw.info['ch_names'][12][3:5] == 'D4') assert_allclose(mni_locs[12], [(- 0.0196), 0.0821, 0.0275], atol=allowed_dist_error) assert (raw.info['ch_names'][16][3:5] == 'D5') 
assert_allclose(mni_locs[16], [(- 0.036), 0.0276, 0.0778], atol=allowed_dist_error) assert (raw.info['ch_names'][19][3:5] == 'D6') assert_allclose(mni_locs[19], [0.0388, (- 0.0477), 0.0932], atol=allowed_dist_error) assert (raw.info['ch_names'][21][3:5] == 'D7') assert_allclose(mni_locs[21], [(- 0.0394), (- 0.0483), 0.0928], atol=allowed_dist_error)
@requires_testing_data def test_nirx_15_3_short(): raw = read_raw_nirx(fname_nirx_15_3_short, preload=True) assert (raw._data.shape == (26, 220)) assert (raw.info['sfreq'] == 12.5) assert (raw.info['ch_names'][:4] == ['S1_D2 760', 'S1_D2 850', 'S1_D9 760', 'S1_D9 850']) assert (raw.info['ch_names'][24:26] == ['S5_D13 760', 'S5_D13 850']) assert (raw.info['chs'][0]['loc'][9] == 760) assert (raw.info['chs'][1]['loc'][9] == 850) assert (raw.info['subject_info'] == dict(birthday=(2020, 8, 18), sex=0, first_name='testMontage\\0ATestMontage', his_id='testMontage\\0ATestMontage')) allowed_distance_error = 0.001 assert_allclose(source_detector_distances(raw.copy().pick('S1_D2 760').info), [0.0304], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S5_D13 760').info), [0.0076], atol=allowed_distance_error) is_short = short_channels(raw.info) assert_array_equal(is_short[:9:2], [False, True, False, True, False]) is_short = short_channels(raw.info, threshold=0.003) assert_array_equal(is_short[:3:2], [False, False]) is_short = short_channels(raw.info, threshold=50) assert_array_equal(is_short[:3:2], [True, True]) assert_array_equal(raw.annotations.description, ['4.0', '2.0', '1.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D2') assert_allclose(mni_locs[0], [(- 0.0841), (- 0.0464), (- 0.0129)], atol=allowed_dist_error) assert (raw.info['ch_names'][4][3:5] == 'D1') assert_allclose(mni_locs[4], [0.0846, (- 0.0142), (- 0.0156)], atol=allowed_dist_error) assert (raw.info['ch_names'][8][3:5] == 'D3') assert_allclose(mni_locs[8], [0.0207, (- 0.1062), 0.0484], atol=allowed_dist_error) assert (raw.info['ch_names'][12][3:5] == 'D4') assert_allclose(mni_locs[12], [(- 0.0196), 0.0821, 0.0275], atol=allowed_dist_error) assert (raw.info['ch_names'][16][3:5] == 'D5') 
assert_allclose(mni_locs[16], [(- 0.036), 0.0276, 0.0778], atol=allowed_dist_error) assert (raw.info['ch_names'][19][3:5] == 'D6') assert_allclose(mni_locs[19], [0.0388, (- 0.0477), 0.0932], atol=allowed_dist_error) assert (raw.info['ch_names'][21][3:5] == 'D7') assert_allclose(mni_locs[21], [(- 0.0394), (- 0.0483), 0.0928], atol=allowed_dist_error)<|docstring|>Test reading NIRX files.<|endoftext|>
3684a4aa5cfaaadd70351fdca6bb639657747565dacbed976a4823a833ed9ae9
@requires_testing_data def test_locale_encoding(tmp_path): 'Test NIRx encoding.' fname = (tmp_path / 'latin') shutil.copytree(fname_nirx_15_2, fname) hdr_fname = op.join(fname, 'NIRS-2019-10-02_003.hdr') hdr = list() with open(hdr_fname, 'rb') as fid: hdr.extend((line for line in fid)) hdr[2] = b'Date="jeu. 13 f\xe9vr. 2020"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) read_raw_nirx(fname, verbose='debug') hdr[2] = b'Date="mi 13 dez 2020"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) read_raw_nirx(fname, verbose='debug') hdr[2] = b'Date="ven 24 gen 2020"\r\n' hdr[3] = b'Time="10:57:41.454"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) raw = read_raw_nirx(fname, verbose='debug') want_dt = dt.datetime(2020, 1, 24, 10, 57, 41, 454000, tzinfo=dt.timezone.utc) assert (raw.info['meas_date'] == want_dt)
Test NIRx encoding.
mne/io/nirx/tests/test_nirx.py
test_locale_encoding
snwnde/mne-python
0
python
@requires_testing_data def test_locale_encoding(tmp_path): fname = (tmp_path / 'latin') shutil.copytree(fname_nirx_15_2, fname) hdr_fname = op.join(fname, 'NIRS-2019-10-02_003.hdr') hdr = list() with open(hdr_fname, 'rb') as fid: hdr.extend((line for line in fid)) hdr[2] = b'Date="jeu. 13 f\xe9vr. 2020"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) read_raw_nirx(fname, verbose='debug') hdr[2] = b'Date="mi 13 dez 2020"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) read_raw_nirx(fname, verbose='debug') hdr[2] = b'Date="ven 24 gen 2020"\r\n' hdr[3] = b'Time="10:57:41.454"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) raw = read_raw_nirx(fname, verbose='debug') want_dt = dt.datetime(2020, 1, 24, 10, 57, 41, 454000, tzinfo=dt.timezone.utc) assert (raw.info['meas_date'] == want_dt)
@requires_testing_data def test_locale_encoding(tmp_path): fname = (tmp_path / 'latin') shutil.copytree(fname_nirx_15_2, fname) hdr_fname = op.join(fname, 'NIRS-2019-10-02_003.hdr') hdr = list() with open(hdr_fname, 'rb') as fid: hdr.extend((line for line in fid)) hdr[2] = b'Date="jeu. 13 f\xe9vr. 2020"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) read_raw_nirx(fname, verbose='debug') hdr[2] = b'Date="mi 13 dez 2020"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) read_raw_nirx(fname, verbose='debug') hdr[2] = b'Date="ven 24 gen 2020"\r\n' hdr[3] = b'Time="10:57:41.454"\r\n' with open(hdr_fname, 'wb') as fid: for line in hdr: fid.write(line) raw = read_raw_nirx(fname, verbose='debug') want_dt = dt.datetime(2020, 1, 24, 10, 57, 41, 454000, tzinfo=dt.timezone.utc) assert (raw.info['meas_date'] == want_dt)<|docstring|>Test NIRx encoding.<|endoftext|>
6e59f376fa60c3ecd653eef83fd09a462ce9d946511b9a25a87a365b5640de33
@requires_testing_data def test_nirx_15_2(): 'Test reading NIRX files.' raw = read_raw_nirx(fname_nirx_15_2, preload=True) assert (raw._data.shape == (64, 67)) assert (raw.info['sfreq'] == 3.90625) assert (raw.info['meas_date'] == dt.datetime(2019, 10, 2, 9, 8, 47, 511000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:4] == ['S1_D1 760', 'S1_D1 850', 'S1_D10 760', 'S1_D10 850']) assert (raw.info['subject_info'] == dict(sex=1, first_name='TestRecording', birthday=(1989, 10, 2), his_id='TestRecording')) assert_array_equal(raw.annotations.description, ['4.0', '6.0', '2.0']) print(raw.annotations.onset) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0292), 0.0852, (- 0.0142)], atol=allowed_dist_error) assert (raw.info['ch_names'][15][3:5] == 'D4') assert_allclose(mni_locs[15], [(- 0.0739), (- 0.0756), (- 0.0075)], atol=allowed_dist_error) assert ('fnirs_cw_amplitude' in raw) with pytest.raises(ValueError, match='Invalid value'): ('fnirs_raw' in raw) assert ('fnirs_od' not in raw) picks = pick_types(raw.info, fnirs='fnirs_cw_amplitude') assert (len(picks) > 0)
Test reading NIRX files.
mne/io/nirx/tests/test_nirx.py
test_nirx_15_2
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_15_2(): raw = read_raw_nirx(fname_nirx_15_2, preload=True) assert (raw._data.shape == (64, 67)) assert (raw.info['sfreq'] == 3.90625) assert (raw.info['meas_date'] == dt.datetime(2019, 10, 2, 9, 8, 47, 511000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:4] == ['S1_D1 760', 'S1_D1 850', 'S1_D10 760', 'S1_D10 850']) assert (raw.info['subject_info'] == dict(sex=1, first_name='TestRecording', birthday=(1989, 10, 2), his_id='TestRecording')) assert_array_equal(raw.annotations.description, ['4.0', '6.0', '2.0']) print(raw.annotations.onset) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0292), 0.0852, (- 0.0142)], atol=allowed_dist_error) assert (raw.info['ch_names'][15][3:5] == 'D4') assert_allclose(mni_locs[15], [(- 0.0739), (- 0.0756), (- 0.0075)], atol=allowed_dist_error) assert ('fnirs_cw_amplitude' in raw) with pytest.raises(ValueError, match='Invalid value'): ('fnirs_raw' in raw) assert ('fnirs_od' not in raw) picks = pick_types(raw.info, fnirs='fnirs_cw_amplitude') assert (len(picks) > 0)
@requires_testing_data def test_nirx_15_2(): raw = read_raw_nirx(fname_nirx_15_2, preload=True) assert (raw._data.shape == (64, 67)) assert (raw.info['sfreq'] == 3.90625) assert (raw.info['meas_date'] == dt.datetime(2019, 10, 2, 9, 8, 47, 511000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:4] == ['S1_D1 760', 'S1_D1 850', 'S1_D10 760', 'S1_D10 850']) assert (raw.info['subject_info'] == dict(sex=1, first_name='TestRecording', birthday=(1989, 10, 2), his_id='TestRecording')) assert_array_equal(raw.annotations.description, ['4.0', '6.0', '2.0']) print(raw.annotations.onset) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [(- 0.0292), 0.0852, (- 0.0142)], atol=allowed_dist_error) assert (raw.info['ch_names'][15][3:5] == 'D4') assert_allclose(mni_locs[15], [(- 0.0739), (- 0.0756), (- 0.0075)], atol=allowed_dist_error) assert ('fnirs_cw_amplitude' in raw) with pytest.raises(ValueError, match='Invalid value'): ('fnirs_raw' in raw) assert ('fnirs_od' not in raw) picks = pick_types(raw.info, fnirs='fnirs_cw_amplitude') assert (len(picks) > 0)<|docstring|>Test reading NIRX files.<|endoftext|>
d5978d1dfb4d88b274dc53e786f0bc5aa59a5e740aa9dab43f5c40cf35e540fb
@requires_testing_data def test_nirx_aurora_2021_9_6(): 'Test reading NIRX files.' raw = read_raw_nirx(nirsport2_2021_9_6, preload=True) assert (len(raw.annotations) == 3) assert (raw.annotations.description[0] == '1.0') assert (raw.annotations.description[2] == '3.0')
Test reading NIRX files.
mne/io/nirx/tests/test_nirx.py
test_nirx_aurora_2021_9_6
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_aurora_2021_9_6(): raw = read_raw_nirx(nirsport2_2021_9_6, preload=True) assert (len(raw.annotations) == 3) assert (raw.annotations.description[0] == '1.0') assert (raw.annotations.description[2] == '3.0')
@requires_testing_data def test_nirx_aurora_2021_9_6(): raw = read_raw_nirx(nirsport2_2021_9_6, preload=True) assert (len(raw.annotations) == 3) assert (raw.annotations.description[0] == '1.0') assert (raw.annotations.description[2] == '3.0')<|docstring|>Test reading NIRX files.<|endoftext|>
b4ca3f577914d905389af684857555b5c88b16d16ecb657e122e8f0df6735217
@requires_testing_data def test_nirx_15_0(): 'Test reading NIRX files.' raw = read_raw_nirx(fname_nirx_15_0, preload=True) assert (raw._data.shape == (20, 92)) assert (raw.info['sfreq'] == 6.25) assert (raw.info['meas_date'] == dt.datetime(2019, 10, 27, 13, 53, 34, 209000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:12] == ['S1_D1 760', 'S1_D1 850', 'S2_D2 760', 'S2_D2 850', 'S3_D3 760', 'S3_D3 850', 'S4_D4 760', 'S4_D4 850', 'S5_D5 760', 'S5_D5 850', 'S6_D6 760', 'S6_D6 850']) assert (raw.info['subject_info'] == {'birthday': (2004, 10, 27), 'first_name': 'NIRX', 'last_name': 'Test', 'sex': FIFF.FIFFV_SUBJ_SEX_UNKNOWN, 'his_id': 'NIRX_Test'}) assert_array_equal(raw.annotations.description, ['1.0', '2.0', '2.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [0.0287, (- 0.1143), (- 0.0332)], atol=allowed_dist_error) assert (raw.info['ch_names'][15][3:5] == 'D8') assert_allclose(mni_locs[15], [(- 0.0693), (- 0.048), 0.0657], atol=allowed_dist_error) allowed_distance_error = 0.0002 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.03], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S7_D7 760').info), [0.0392], atol=allowed_distance_error)
Test reading NIRX files.
mne/io/nirx/tests/test_nirx.py
test_nirx_15_0
snwnde/mne-python
0
python
@requires_testing_data def test_nirx_15_0(): raw = read_raw_nirx(fname_nirx_15_0, preload=True) assert (raw._data.shape == (20, 92)) assert (raw.info['sfreq'] == 6.25) assert (raw.info['meas_date'] == dt.datetime(2019, 10, 27, 13, 53, 34, 209000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:12] == ['S1_D1 760', 'S1_D1 850', 'S2_D2 760', 'S2_D2 850', 'S3_D3 760', 'S3_D3 850', 'S4_D4 760', 'S4_D4 850', 'S5_D5 760', 'S5_D5 850', 'S6_D6 760', 'S6_D6 850']) assert (raw.info['subject_info'] == {'birthday': (2004, 10, 27), 'first_name': 'NIRX', 'last_name': 'Test', 'sex': FIFF.FIFFV_SUBJ_SEX_UNKNOWN, 'his_id': 'NIRX_Test'}) assert_array_equal(raw.annotations.description, ['1.0', '2.0', '2.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [0.0287, (- 0.1143), (- 0.0332)], atol=allowed_dist_error) assert (raw.info['ch_names'][15][3:5] == 'D8') assert_allclose(mni_locs[15], [(- 0.0693), (- 0.048), 0.0657], atol=allowed_dist_error) allowed_distance_error = 0.0002 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.03], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S7_D7 760').info), [0.0392], atol=allowed_distance_error)
@requires_testing_data def test_nirx_15_0(): raw = read_raw_nirx(fname_nirx_15_0, preload=True) assert (raw._data.shape == (20, 92)) assert (raw.info['sfreq'] == 6.25) assert (raw.info['meas_date'] == dt.datetime(2019, 10, 27, 13, 53, 34, 209000, tzinfo=dt.timezone.utc)) assert (raw.info['ch_names'][:12] == ['S1_D1 760', 'S1_D1 850', 'S2_D2 760', 'S2_D2 850', 'S3_D3 760', 'S3_D3 850', 'S4_D4 760', 'S4_D4 850', 'S5_D5 760', 'S5_D5 850', 'S6_D6 760', 'S6_D6 850']) assert (raw.info['subject_info'] == {'birthday': (2004, 10, 27), 'first_name': 'NIRX', 'last_name': 'Test', 'sex': FIFF.FIFFV_SUBJ_SEX_UNKNOWN, 'his_id': 'NIRX_Test'}) assert_array_equal(raw.annotations.description, ['1.0', '2.0', '2.0']) allowed_dist_error = 0.0002 locs = [ch['loc'][6:9] for ch in raw.info['chs']] (head_mri_t, _) = _get_trans('fsaverage', 'head', 'mri') mni_locs = apply_trans(head_mri_t, locs) assert (raw.info['ch_names'][0][3:5] == 'D1') assert_allclose(mni_locs[0], [0.0287, (- 0.1143), (- 0.0332)], atol=allowed_dist_error) assert (raw.info['ch_names'][15][3:5] == 'D8') assert_allclose(mni_locs[15], [(- 0.0693), (- 0.048), 0.0657], atol=allowed_dist_error) allowed_distance_error = 0.0002 assert_allclose(source_detector_distances(raw.copy().pick('S1_D1 760').info), [0.03], atol=allowed_distance_error) assert_allclose(source_detector_distances(raw.copy().pick('S7_D7 760').info), [0.0392], atol=allowed_distance_error)<|docstring|>Test reading NIRX files.<|endoftext|>
c53cafa846b9f78a54ca105c1d5c4517b2e75313783903301f8798cd1cf607b8
@requires_testing_data @pytest.mark.parametrize('fname, boundary_decimal', ([fname_nirx_15_2_short, 1], [fname_nirx_15_2, 0], [fname_nirx_15_2, 0], [nirsport2_2021_9, 0])) def test_nirx_standard(fname, boundary_decimal): 'Test standard operations.' _test_raw_reader(read_raw_nirx, fname=fname, boundary_decimal=boundary_decimal)
Test standard operations.
mne/io/nirx/tests/test_nirx.py
test_nirx_standard
snwnde/mne-python
0
python
@requires_testing_data @pytest.mark.parametrize('fname, boundary_decimal', ([fname_nirx_15_2_short, 1], [fname_nirx_15_2, 0], [fname_nirx_15_2, 0], [nirsport2_2021_9, 0])) def test_nirx_standard(fname, boundary_decimal): _test_raw_reader(read_raw_nirx, fname=fname, boundary_decimal=boundary_decimal)
@requires_testing_data @pytest.mark.parametrize('fname, boundary_decimal', ([fname_nirx_15_2_short, 1], [fname_nirx_15_2, 0], [fname_nirx_15_2, 0], [nirsport2_2021_9, 0])) def test_nirx_standard(fname, boundary_decimal): _test_raw_reader(read_raw_nirx, fname=fname, boundary_decimal=boundary_decimal)<|docstring|>Test standard operations.<|endoftext|>
be5127df9043f3abe8e520d373afe2560d69e381ca1317ca9aad1491ef3b953d
@requires_testing_data @pytest.mark.parametrize('fname, want_order', [(fname_nirx_15_0, ['S1_D1', 'S2_D2', 'S3_D3', 'S4_D4', 'S5_D5', 'S6_D6', 'S7_D7', 'S8_D8', 'S9_D9', 'S10_D10']), (fname_nirx_15_2, ['S1_D1', 'S1_D10', 'S2_D1', 'S2_D2', 'S3_D2', 'S3_D3', 'S4_D3', 'S4_D4', 'S5_D4', 'S5_D5', 'S6_D5', 'S6_D6', 'S7_D6', 'S7_D7', 'S8_D7', 'S8_D8', 'S9_D8', 'S9_D9', 'S10_D9', 'S10_D10', 'S11_D11', 'S11_D12', 'S12_D12', 'S12_D13', 'S13_D13', 'S13_D14', 'S14_D14', 'S14_D15', 'S15_D15', 'S15_D16', 'S16_D11', 'S16_D16']), (fname_nirx_15_2_short, ['S1_D1', 'S1_D9', 'S2_D3', 'S2_D10', 'S3_D2', 'S3_D11', 'S4_D4', 'S4_D12', 'S5_D5', 'S5_D6', 'S5_D7', 'S5_D8', 'S5_D13']), (fname_nirx_15_3_short, ['S1_D2', 'S1_D9', 'S2_D1', 'S2_D10', 'S3_D3', 'S3_D11', 'S4_D4', 'S4_D12', 'S5_D5', 'S5_D6', 'S5_D7', 'S5_D8', 'S5_D13']), (nirsport1_wo_sat, ['S1_D4', 'S1_D5', 'S1_D6', 'S2_D5', 'S2_D6', 'S3_D5', 'S4_D1', 'S4_D3', 'S4_D4', 'S5_D1', 'S5_D2', 'S6_D1', 'S6_D3']), (nirsport2, ['S1_D1', 'S1_D6', 'S1_D9', 'S2_D2', 'S2_D10', 'S3_D5', 'S3_D7', 'S3_D11', 'S4_D8', 'S4_D12', 'S5_D3', 'S5_D13', 'S6_D4', 'S6_D14', 'S7_D1', 'S7_D6', 'S7_D15', 'S8_D5', 'S8_D7', 'S8_D16']), (nirsport2_2021_9, ['S1_D1', 'S1_D3', 'S2_D1', 'S2_D2', 'S2_D4', 'S3_D2', 'S3_D5', 'S4_D1', 'S4_D3', 'S4_D4', 'S4_D6', 'S5_D2', 'S5_D4', 'S5_D5', 'S5_D7', 'S6_D3', 'S6_D6', 'S7_D4', 'S7_D6', 'S7_D7', 'S8_D5', 'S8_D7'])]) def test_channel_order(fname, want_order): 'Test that logical channel order is preserved.' raw = read_raw_nirx(fname) ch_names = raw.ch_names prefixes = [ch_name.split()[0] for ch_name in ch_names] assert (prefixes[::2] == prefixes[1::2]) prefixes = prefixes[::2] assert (prefixes == want_order)
Test that logical channel order is preserved.
mne/io/nirx/tests/test_nirx.py
test_channel_order
snwnde/mne-python
0
python
@requires_testing_data @pytest.mark.parametrize('fname, want_order', [(fname_nirx_15_0, ['S1_D1', 'S2_D2', 'S3_D3', 'S4_D4', 'S5_D5', 'S6_D6', 'S7_D7', 'S8_D8', 'S9_D9', 'S10_D10']), (fname_nirx_15_2, ['S1_D1', 'S1_D10', 'S2_D1', 'S2_D2', 'S3_D2', 'S3_D3', 'S4_D3', 'S4_D4', 'S5_D4', 'S5_D5', 'S6_D5', 'S6_D6', 'S7_D6', 'S7_D7', 'S8_D7', 'S8_D8', 'S9_D8', 'S9_D9', 'S10_D9', 'S10_D10', 'S11_D11', 'S11_D12', 'S12_D12', 'S12_D13', 'S13_D13', 'S13_D14', 'S14_D14', 'S14_D15', 'S15_D15', 'S15_D16', 'S16_D11', 'S16_D16']), (fname_nirx_15_2_short, ['S1_D1', 'S1_D9', 'S2_D3', 'S2_D10', 'S3_D2', 'S3_D11', 'S4_D4', 'S4_D12', 'S5_D5', 'S5_D6', 'S5_D7', 'S5_D8', 'S5_D13']), (fname_nirx_15_3_short, ['S1_D2', 'S1_D9', 'S2_D1', 'S2_D10', 'S3_D3', 'S3_D11', 'S4_D4', 'S4_D12', 'S5_D5', 'S5_D6', 'S5_D7', 'S5_D8', 'S5_D13']), (nirsport1_wo_sat, ['S1_D4', 'S1_D5', 'S1_D6', 'S2_D5', 'S2_D6', 'S3_D5', 'S4_D1', 'S4_D3', 'S4_D4', 'S5_D1', 'S5_D2', 'S6_D1', 'S6_D3']), (nirsport2, ['S1_D1', 'S1_D6', 'S1_D9', 'S2_D2', 'S2_D10', 'S3_D5', 'S3_D7', 'S3_D11', 'S4_D8', 'S4_D12', 'S5_D3', 'S5_D13', 'S6_D4', 'S6_D14', 'S7_D1', 'S7_D6', 'S7_D15', 'S8_D5', 'S8_D7', 'S8_D16']), (nirsport2_2021_9, ['S1_D1', 'S1_D3', 'S2_D1', 'S2_D2', 'S2_D4', 'S3_D2', 'S3_D5', 'S4_D1', 'S4_D3', 'S4_D4', 'S4_D6', 'S5_D2', 'S5_D4', 'S5_D5', 'S5_D7', 'S6_D3', 'S6_D6', 'S7_D4', 'S7_D6', 'S7_D7', 'S8_D5', 'S8_D7'])]) def test_channel_order(fname, want_order): raw = read_raw_nirx(fname) ch_names = raw.ch_names prefixes = [ch_name.split()[0] for ch_name in ch_names] assert (prefixes[::2] == prefixes[1::2]) prefixes = prefixes[::2] assert (prefixes == want_order)
@requires_testing_data @pytest.mark.parametrize('fname, want_order', [(fname_nirx_15_0, ['S1_D1', 'S2_D2', 'S3_D3', 'S4_D4', 'S5_D5', 'S6_D6', 'S7_D7', 'S8_D8', 'S9_D9', 'S10_D10']), (fname_nirx_15_2, ['S1_D1', 'S1_D10', 'S2_D1', 'S2_D2', 'S3_D2', 'S3_D3', 'S4_D3', 'S4_D4', 'S5_D4', 'S5_D5', 'S6_D5', 'S6_D6', 'S7_D6', 'S7_D7', 'S8_D7', 'S8_D8', 'S9_D8', 'S9_D9', 'S10_D9', 'S10_D10', 'S11_D11', 'S11_D12', 'S12_D12', 'S12_D13', 'S13_D13', 'S13_D14', 'S14_D14', 'S14_D15', 'S15_D15', 'S15_D16', 'S16_D11', 'S16_D16']), (fname_nirx_15_2_short, ['S1_D1', 'S1_D9', 'S2_D3', 'S2_D10', 'S3_D2', 'S3_D11', 'S4_D4', 'S4_D12', 'S5_D5', 'S5_D6', 'S5_D7', 'S5_D8', 'S5_D13']), (fname_nirx_15_3_short, ['S1_D2', 'S1_D9', 'S2_D1', 'S2_D10', 'S3_D3', 'S3_D11', 'S4_D4', 'S4_D12', 'S5_D5', 'S5_D6', 'S5_D7', 'S5_D8', 'S5_D13']), (nirsport1_wo_sat, ['S1_D4', 'S1_D5', 'S1_D6', 'S2_D5', 'S2_D6', 'S3_D5', 'S4_D1', 'S4_D3', 'S4_D4', 'S5_D1', 'S5_D2', 'S6_D1', 'S6_D3']), (nirsport2, ['S1_D1', 'S1_D6', 'S1_D9', 'S2_D2', 'S2_D10', 'S3_D5', 'S3_D7', 'S3_D11', 'S4_D8', 'S4_D12', 'S5_D3', 'S5_D13', 'S6_D4', 'S6_D14', 'S7_D1', 'S7_D6', 'S7_D15', 'S8_D5', 'S8_D7', 'S8_D16']), (nirsport2_2021_9, ['S1_D1', 'S1_D3', 'S2_D1', 'S2_D2', 'S2_D4', 'S3_D2', 'S3_D5', 'S4_D1', 'S4_D3', 'S4_D4', 'S4_D6', 'S5_D2', 'S5_D4', 'S5_D5', 'S5_D7', 'S6_D3', 'S6_D6', 'S7_D4', 'S7_D6', 'S7_D7', 'S8_D5', 'S8_D7'])]) def test_channel_order(fname, want_order): raw = read_raw_nirx(fname) ch_names = raw.ch_names prefixes = [ch_name.split()[0] for ch_name in ch_names] assert (prefixes[::2] == prefixes[1::2]) prefixes = prefixes[::2] assert (prefixes == want_order)<|docstring|>Test that logical channel order is preserved.<|endoftext|>
9cca4c211c906acb96c21d301c44a957f4694666e74d5627df0bb65b487fc735
def create_user(self, email, name, password=None): 'Create a new user profile' if (not email): raise ValueError('Users must have an email address') email = self.normalize_email(email) user = self.model(email=email, name=name) user.set_password(password) user.save(using=self._db) return user
Create a new user profile
inventory_api/models.py
create_user
AxanderW/inventory-management-rest-api
0
python
def create_user(self, email, name, password=None): if (not email): raise ValueError('Users must have an email address') email = self.normalize_email(email) user = self.model(email=email, name=name) user.set_password(password) user.save(using=self._db) return user
def create_user(self, email, name, password=None): if (not email): raise ValueError('Users must have an email address') email = self.normalize_email(email) user = self.model(email=email, name=name) user.set_password(password) user.save(using=self._db) return user<|docstring|>Create a new user profile<|endoftext|>
33f7e27b708033167f892cedc734ca949964b623b7c2725c9011bd6f2019082f
def create_superuser(self, email, name, password): 'Create and save new super user' user = self.create_user(email, name, password) user.is_superuser = True user.is_staff = True user.save(using=self._db) return user
Create and save new super user
inventory_api/models.py
create_superuser
AxanderW/inventory-management-rest-api
0
python
def create_superuser(self, email, name, password): user = self.create_user(email, name, password) user.is_superuser = True user.is_staff = True user.save(using=self._db) return user
def create_superuser(self, email, name, password): user = self.create_user(email, name, password) user.is_superuser = True user.is_staff = True user.save(using=self._db) return user<|docstring|>Create and save new super user<|endoftext|>
4e6f5b3a14373008985928fc9d3572c7c290bfc754a88a237db04fed5279278c
def get_full_name(self): 'Retrieve full name of user' return self.name
Retrieve full name of user
inventory_api/models.py
get_full_name
AxanderW/inventory-management-rest-api
0
python
def get_full_name(self): return self.name
def get_full_name(self): return self.name<|docstring|>Retrieve full name of user<|endoftext|>
3f9b9517bba6b2a6fced933f130ab3aeaf9cb1ddec6b8a6c743a6835d464b1c6
def get_short_name(self): 'Retrieve short name of user' return self.name
Retrieve short name of user
inventory_api/models.py
get_short_name
AxanderW/inventory-management-rest-api
0
python
def get_short_name(self): return self.name
def get_short_name(self): return self.name<|docstring|>Retrieve short name of user<|endoftext|>
7f7caa5b1da1ade598ea4ce878b9ffe77b6724242697a5576851dd4cd70701ee
def __str__(self): 'Return string representation of user' return self.email
Return string representation of user
inventory_api/models.py
__str__
AxanderW/inventory-management-rest-api
0
python
def __str__(self): return self.email
def __str__(self): return self.email<|docstring|>Return string representation of user<|endoftext|>
9a728f661a6bab7252432f5fe815b42e70128b59b33febf92b8677b0d94f039d
def __str__(self): 'Return string representation of region' return self.name
Return string representation of region
inventory_api/models.py
__str__
AxanderW/inventory-management-rest-api
0
python
def __str__(self): return self.name
def __str__(self): return self.name<|docstring|>Return string representation of region<|endoftext|>
db0c6014ffd7fd41f73d2d4a09bee4f5d7966d736d2eb87b1e6e3cf1f86391a8
def __str__(self): 'Return string representation of category' return self.name
Return string representation of category
inventory_api/models.py
__str__
AxanderW/inventory-management-rest-api
0
python
def __str__(self): return self.name
def __str__(self): return self.name<|docstring|>Return string representation of category<|endoftext|>
c1ac3cd90320bb57c9a741a5cde0c430b42320adb3e1f21de0cacec897c9bde9
def __str__(self): 'Return string representation of brand' return self.name
Return string representation of brand
inventory_api/models.py
__str__
AxanderW/inventory-management-rest-api
0
python
def __str__(self): return self.name
def __str__(self): return self.name<|docstring|>Return string representation of brand<|endoftext|>
f008b4b7dd7e21d4c6bd7dca3f3660b7e2b2a95a2d7f7d1c0ec83c48d6df06ea
def __str__(self): 'Return string representation of product' return self.name
Return string representation of product
inventory_api/models.py
__str__
AxanderW/inventory-management-rest-api
0
python
def __str__(self): return self.name
def __str__(self): return self.name<|docstring|>Return string representation of product<|endoftext|>
2650094e97463bcb37982913a7eac9a1d73235f6a396bc568c86fb590baadb8a
def __str__(self): 'Return string representation of product item' return f'{self.product}: {self.id}'
Return string representation of product item
inventory_api/models.py
__str__
AxanderW/inventory-management-rest-api
0
python
def __str__(self): return f'{self.product}: {self.id}'
def __str__(self): return f'{self.product}: {self.id}'<|docstring|>Return string representation of product item<|endoftext|>
f4b7787c6c61947c68c5d821ea8a2a1569657409137a7e325ec234fd1e313f5d
def __init__(self, websession, username, password): 'Initialize the Connection Object.' self.__api = Connection(websession, username, password) self.__deviceMap = None self.__invertedMap = None self.__role = '' self.__dataLoaded = False self.__devices = {}
Initialize the Connection Object.
masterthermconnect/controller.py
__init__
sahulkrishan/python-masterthermconnect
0
python
def __init__(self, websession, username, password): self.__api = Connection(websession, username, password) self.__deviceMap = None self.__invertedMap = None self.__role = self.__dataLoaded = False self.__devices = {}
def __init__(self, websession, username, password): self.__api = Connection(websession, username, password) self.__deviceMap = None self.__invertedMap = None self.__role = self.__dataLoaded = False self.__devices = {}<|docstring|>Initialize the Connection Object.<|endoftext|>
a9567517e70f0ec5867a8c49c833e0d1f2383e803a37f566bf9c90434ea3647e
def __invertDeviceMap(self, map, keyList=[]): 'Invert the given map and return, this is a nested method.' invertedMap = {} for (key, item) in map.items(): newKeyList = keyList.copy() newKeyList.append(key) if (not isinstance(item, dict)): itemValue = item[1] if isinstance(itemValue, list): for listValue in itemValue: invertedMap[listValue] = newKeyList else: itemType = item[0] if (not ((itemValue == '') or (itemType == 'fixed'))): invertedMap[itemValue] = newKeyList else: invertedMap.update(self.__invertDeviceMap(item, newKeyList)) return invertedMap
Invert the given map and return, this is a nested method.
masterthermconnect/controller.py
__invertDeviceMap
sahulkrishan/python-masterthermconnect
0
python
def __invertDeviceMap(self, map, keyList=[]): invertedMap = {} for (key, item) in map.items(): newKeyList = keyList.copy() newKeyList.append(key) if (not isinstance(item, dict)): itemValue = item[1] if isinstance(itemValue, list): for listValue in itemValue: invertedMap[listValue] = newKeyList else: itemType = item[0] if (not ((itemValue == ) or (itemType == 'fixed'))): invertedMap[itemValue] = newKeyList else: invertedMap.update(self.__invertDeviceMap(item, newKeyList)) return invertedMap
def __invertDeviceMap(self, map, keyList=[]): invertedMap = {} for (key, item) in map.items(): newKeyList = keyList.copy() newKeyList.append(key) if (not isinstance(item, dict)): itemValue = item[1] if isinstance(itemValue, list): for listValue in itemValue: invertedMap[listValue] = newKeyList else: itemType = item[0] if (not ((itemValue == ) or (itemType == 'fixed'))): invertedMap[itemValue] = newKeyList else: invertedMap.update(self.__invertDeviceMap(item, newKeyList)) return invertedMap<|docstring|>Invert the given map and return, this is a nested method.<|endoftext|>
53859a3f2dafd2b58c363dfff78a731bc03ef66369b504ab451752b1035d00af
def __populateData(self, map, registers): 'Populate the Data from the fullData and DeviceMap.' data = {} for (key, item) in map.items(): if (not isinstance(item, dict)): itemType = item[0] itemValue = item[1] if (itemType == 'fixed'): data[key] = itemValue elif (itemType == 'bool'): if (itemValue == ''): data[key] = False else: data[key] = (registers[itemValue] == '1') elif (itemType == 'float'): if (itemValue == ''): data[key] = 0.0 else: data[key] = float(registers[itemValue]) elif (itemType == 'int'): if (itemValue == ''): data[key] = 0 else: data[key] = int(registers[itemValue]) elif (itemType == 'string'): if (itemValue == ''): data[key] = '' else: itemStr = '' for listValue in itemValue: itemStr = (itemStr + CHAR_MAP[int(registers[listValue])]) data[key] = itemStr else: data[key] = self.__populateData(map[key], registers) return data
Populate the Data from the fullData and DeviceMap.
masterthermconnect/controller.py
__populateData
sahulkrishan/python-masterthermconnect
0
python
def __populateData(self, map, registers): data = {} for (key, item) in map.items(): if (not isinstance(item, dict)): itemType = item[0] itemValue = item[1] if (itemType == 'fixed'): data[key] = itemValue elif (itemType == 'bool'): if (itemValue == ): data[key] = False else: data[key] = (registers[itemValue] == '1') elif (itemType == 'float'): if (itemValue == ): data[key] = 0.0 else: data[key] = float(registers[itemValue]) elif (itemType == 'int'): if (itemValue == ): data[key] = 0 else: data[key] = int(registers[itemValue]) elif (itemType == 'string'): if (itemValue == ): data[key] = else: itemStr = for listValue in itemValue: itemStr = (itemStr + CHAR_MAP[int(registers[listValue])]) data[key] = itemStr else: data[key] = self.__populateData(map[key], registers) return data
def __populateData(self, map, registers): data = {} for (key, item) in map.items(): if (not isinstance(item, dict)): itemType = item[0] itemValue = item[1] if (itemType == 'fixed'): data[key] = itemValue elif (itemType == 'bool'): if (itemValue == ): data[key] = False else: data[key] = (registers[itemValue] == '1') elif (itemType == 'float'): if (itemValue == ): data[key] = 0.0 else: data[key] = float(registers[itemValue]) elif (itemType == 'int'): if (itemValue == ): data[key] = 0 else: data[key] = int(registers[itemValue]) elif (itemType == 'string'): if (itemValue == ): data[key] = else: itemStr = for listValue in itemValue: itemStr = (itemStr + CHAR_MAP[int(registers[listValue])]) data[key] = itemStr else: data[key] = self.__populateData(map[key], registers) return data<|docstring|>Populate the Data from the fullData and DeviceMap.<|endoftext|>
a4146a6c72a54e3a56621daf8fcc8852f6b5440f903fa18009d42b67767d2e12
def __getPadName(self, pad, id): 'Build the Pad Name from the full data.' if (pad not in DEVICE_DATA_PADMAP): return '0' padName = '' padEmpty = '' fullData = self.__devices[id]['fullData'] for key in DEVICE_DATA_PADMAP[pad]['name'][1]: padName = (padName + CHAR_MAP[int(fullData[key])]) padEmpty = (padEmpty + '-') if (padName == padEmpty): padName = '0' return padName
Build the Pad Name from the full data.
masterthermconnect/controller.py
__getPadName
sahulkrishan/python-masterthermconnect
0
python
def __getPadName(self, pad, id): if (pad not in DEVICE_DATA_PADMAP): return '0' padName = padEmpty = fullData = self.__devices[id]['fullData'] for key in DEVICE_DATA_PADMAP[pad]['name'][1]: padName = (padName + CHAR_MAP[int(fullData[key])]) padEmpty = (padEmpty + '-') if (padName == padEmpty): padName = '0' return padName
def __getPadName(self, pad, id): if (pad not in DEVICE_DATA_PADMAP): return '0' padName = padEmpty = fullData = self.__devices[id]['fullData'] for key in DEVICE_DATA_PADMAP[pad]['name'][1]: padName = (padName + CHAR_MAP[int(fullData[key])]) padEmpty = (padEmpty + '-') if (padName == padEmpty): padName = '0' return padName<|docstring|>Build the Pad Name from the full data.<|endoftext|>
27d4653c5033f2a7dc7c9890e86a507096dbc34aff8c0bff7124036d3596d9f4
def __enabledPADs(self, id): 'Enable the Pads for the devices, decoded as best as possible.' fullData = self.__devices[id]['fullData'] padInfo = {} for i in range(2): padInfo[PAD_MAP[i]] = (fullData[DEVICE_SWITCH_MAP[i]] == '1') checkCode = int(fullData['I_104']) if (checkCode < 11): checkCode = 10 elif (checkCode <= 200): checkCode = 11 for i in range(2, 8): padname = self.__getPadName(PAD_MAP[i], id) if ((padname != '0') and (checkCode >= 11)): padInfo[PAD_MAP[i]] = (fullData[DEVICE_SWITCH_MAP[i]] == '1') else: padInfo[PAD_MAP[i]] = False padname = self.__getPadName(PAD_MAP[8], id) padInfo[PAD_MAP[8]] = False if (padname != '0'): if (checkCode >= 11): for i in range(7, 1, (- 1)): if padInfo[PAD_MAP[i]]: padInfo[PAD_MAP[8]] = True break else: padInfo[PAD_MAP[8]] = ((fullData[DEVICE_SWITCH_MAP[8]] == '1') and (float(fullData['A_190']) > 0.1)) return padInfo
Enable the Pads for the devices, decoded as best as possible.
masterthermconnect/controller.py
__enabledPADs
sahulkrishan/python-masterthermconnect
0
python
def __enabledPADs(self, id): fullData = self.__devices[id]['fullData'] padInfo = {} for i in range(2): padInfo[PAD_MAP[i]] = (fullData[DEVICE_SWITCH_MAP[i]] == '1') checkCode = int(fullData['I_104']) if (checkCode < 11): checkCode = 10 elif (checkCode <= 200): checkCode = 11 for i in range(2, 8): padname = self.__getPadName(PAD_MAP[i], id) if ((padname != '0') and (checkCode >= 11)): padInfo[PAD_MAP[i]] = (fullData[DEVICE_SWITCH_MAP[i]] == '1') else: padInfo[PAD_MAP[i]] = False padname = self.__getPadName(PAD_MAP[8], id) padInfo[PAD_MAP[8]] = False if (padname != '0'): if (checkCode >= 11): for i in range(7, 1, (- 1)): if padInfo[PAD_MAP[i]]: padInfo[PAD_MAP[8]] = True break else: padInfo[PAD_MAP[8]] = ((fullData[DEVICE_SWITCH_MAP[8]] == '1') and (float(fullData['A_190']) > 0.1)) return padInfo
def __enabledPADs(self, id): fullData = self.__devices[id]['fullData'] padInfo = {} for i in range(2): padInfo[PAD_MAP[i]] = (fullData[DEVICE_SWITCH_MAP[i]] == '1') checkCode = int(fullData['I_104']) if (checkCode < 11): checkCode = 10 elif (checkCode <= 200): checkCode = 11 for i in range(2, 8): padname = self.__getPadName(PAD_MAP[i], id) if ((padname != '0') and (checkCode >= 11)): padInfo[PAD_MAP[i]] = (fullData[DEVICE_SWITCH_MAP[i]] == '1') else: padInfo[PAD_MAP[i]] = False padname = self.__getPadName(PAD_MAP[8], id) padInfo[PAD_MAP[8]] = False if (padname != '0'): if (checkCode >= 11): for i in range(7, 1, (- 1)): if padInfo[PAD_MAP[i]]: padInfo[PAD_MAP[8]] = True break else: padInfo[PAD_MAP[8]] = ((fullData[DEVICE_SWITCH_MAP[8]] == '1') and (float(fullData['A_190']) > 0.1)) return padInfo<|docstring|>Enable the Pads for the devices, decoded as best as possible.<|endoftext|>
7242548ef644ae90e57a49da8324b75fe94104d9de2a87bcf4548980cdde9da2
async def __fullLoad(self): 'Perform a full load and create structure.' self.__dataLoaded = False for (id, device) in self.__devices.items(): module_id = device['info']['module_id'] device_id = device['info']['device_id'] deviceInfo = (await self.__api.getDeviceInfo(module_id, device_id)) for (key, item) in DEVICE_INFO_MAP.items(): if (item in deviceInfo): device['info'][key] = deviceInfo[item] deviceData = (await self.__api.getDeviceData(module_id, device_id)) device['lastUpdateTime'] = deviceData['timestamp'] device['updatedData'] = deviceData['data']['varfile_mt1_config1']['001'].copy() device['fullData'] = device['updatedData'].copy() self.__deviceMap = DEVICE_DATA_MAP enabledPads = self.__enabledPADs(id) for (pad, padEnabled) in enabledPads.items(): if (not padEnabled): self.__deviceMap['pads'].pop(pad, None) self.__invertedMap = self.__invertDeviceMap(self.__deviceMap) device['data'] = self.__populateData(self.__deviceMap, device['fullData']) self.__dataLoaded = True return True
Perform a full load and create structure.
masterthermconnect/controller.py
__fullLoad
sahulkrishan/python-masterthermconnect
0
python
async def __fullLoad(self): self.__dataLoaded = False for (id, device) in self.__devices.items(): module_id = device['info']['module_id'] device_id = device['info']['device_id'] deviceInfo = (await self.__api.getDeviceInfo(module_id, device_id)) for (key, item) in DEVICE_INFO_MAP.items(): if (item in deviceInfo): device['info'][key] = deviceInfo[item] deviceData = (await self.__api.getDeviceData(module_id, device_id)) device['lastUpdateTime'] = deviceData['timestamp'] device['updatedData'] = deviceData['data']['varfile_mt1_config1']['001'].copy() device['fullData'] = device['updatedData'].copy() self.__deviceMap = DEVICE_DATA_MAP enabledPads = self.__enabledPADs(id) for (pad, padEnabled) in enabledPads.items(): if (not padEnabled): self.__deviceMap['pads'].pop(pad, None) self.__invertedMap = self.__invertDeviceMap(self.__deviceMap) device['data'] = self.__populateData(self.__deviceMap, device['fullData']) self.__dataLoaded = True return True
async def __fullLoad(self): self.__dataLoaded = False for (id, device) in self.__devices.items(): module_id = device['info']['module_id'] device_id = device['info']['device_id'] deviceInfo = (await self.__api.getDeviceInfo(module_id, device_id)) for (key, item) in DEVICE_INFO_MAP.items(): if (item in deviceInfo): device['info'][key] = deviceInfo[item] deviceData = (await self.__api.getDeviceData(module_id, device_id)) device['lastUpdateTime'] = deviceData['timestamp'] device['updatedData'] = deviceData['data']['varfile_mt1_config1']['001'].copy() device['fullData'] = device['updatedData'].copy() self.__deviceMap = DEVICE_DATA_MAP enabledPads = self.__enabledPADs(id) for (pad, padEnabled) in enabledPads.items(): if (not padEnabled): self.__deviceMap['pads'].pop(pad, None) self.__invertedMap = self.__invertDeviceMap(self.__deviceMap) device['data'] = self.__populateData(self.__deviceMap, device['fullData']) self.__dataLoaded = True return True<|docstring|>Perform a full load and create structure.<|endoftext|>
189ebe1e707a136d884149761ae6c54125df40fec2f23f86a8291dce1d21d7d6
async def connect(self, updateData=True): 'Connect to the API, check the supported roles and update if required.' result = (await self.__api.connect()) self.__role = result['role'] if (not (result['role'] in SUPPORTED_ROLES)): raise MasterThermUnsupportedRole('2', ('Unsupported Role ' + result['role'])) self.__devices = {} for module in result['modules']: for device in module['config']: id = ((module['id'] + '-') + device['mb_addr']) self.__devices[id] = {'lastUpdateTime': '0', 'info': {'module_id': module['id'], 'module_name': module['module_name'], 'device_id': device['mb_addr'], 'device_name': device['mb_name']}, 'updatedData': {}, 'fullData': {}, 'data': {}} if updateData: return (await self.__fullLoad()) else: return True
Connect to the API, check the supported roles and update if required.
masterthermconnect/controller.py
connect
sahulkrishan/python-masterthermconnect
0
python
async def connect(self, updateData=True): result = (await self.__api.connect()) self.__role = result['role'] if (not (result['role'] in SUPPORTED_ROLES)): raise MasterThermUnsupportedRole('2', ('Unsupported Role ' + result['role'])) self.__devices = {} for module in result['modules']: for device in module['config']: id = ((module['id'] + '-') + device['mb_addr']) self.__devices[id] = {'lastUpdateTime': '0', 'info': {'module_id': module['id'], 'module_name': module['module_name'], 'device_id': device['mb_addr'], 'device_name': device['mb_name']}, 'updatedData': {}, 'fullData': {}, 'data': {}} if updateData: return (await self.__fullLoad()) else: return True
async def connect(self, updateData=True): result = (await self.__api.connect()) self.__role = result['role'] if (not (result['role'] in SUPPORTED_ROLES)): raise MasterThermUnsupportedRole('2', ('Unsupported Role ' + result['role'])) self.__devices = {} for module in result['modules']: for device in module['config']: id = ((module['id'] + '-') + device['mb_addr']) self.__devices[id] = {'lastUpdateTime': '0', 'info': {'module_id': module['id'], 'module_name': module['module_name'], 'device_id': device['mb_addr'], 'device_name': device['mb_name']}, 'updatedData': {}, 'fullData': {}, 'data': {}} if updateData: return (await self.__fullLoad()) else: return True<|docstring|>Connect to the API, check the supported roles and update if required.<|endoftext|>
80e1bb02cacc735d7f687a2a6ba48ce681ad89b3df8d835609eb2fb28a4bb171
async def refresh(self, fullLoad=False): 'Refresh or Reload all entries for all devices.' if (not (await self.__api.isConnected())): return False if fullLoad: return self.__fullLoad() if (not self.__dataLoaded): return False for id in self.__devices: device = self.__devices[id] module_id = device['info']['module_id'] device_id = device['info']['device_id'] deviceInfo = (await self.__api.getDeviceInfo(module_id, device_id)) if (deviceInfo['returncode'] == '0'): for (key, item) in DEVICE_INFO_MAP.items(): if (item in deviceInfo): device['info'][key] = deviceInfo[item] deviceData = (await self.__api.getDeviceData(module_id, device_id, last_update_time=device['lastUpdateTime'])) device['lastUpdateTime'] = deviceData['timestamp'] device['updatedData'] = deviceData['data']['varfile_mt1_config1']['001'].copy() device['fullData'].update(device['updatedData']) updateData = False for registerKey in device['updatedData']: if (registerKey in self.__invertedMap): updateData = True break if updateData: device['data'] = self.__populateData(self.__deviceMap, device['fullData']) return True
Refresh or Reload all entries for all devices.
masterthermconnect/controller.py
refresh
sahulkrishan/python-masterthermconnect
0
python
async def refresh(self, fullLoad=False): if (not (await self.__api.isConnected())): return False if fullLoad: return self.__fullLoad() if (not self.__dataLoaded): return False for id in self.__devices: device = self.__devices[id] module_id = device['info']['module_id'] device_id = device['info']['device_id'] deviceInfo = (await self.__api.getDeviceInfo(module_id, device_id)) if (deviceInfo['returncode'] == '0'): for (key, item) in DEVICE_INFO_MAP.items(): if (item in deviceInfo): device['info'][key] = deviceInfo[item] deviceData = (await self.__api.getDeviceData(module_id, device_id, last_update_time=device['lastUpdateTime'])) device['lastUpdateTime'] = deviceData['timestamp'] device['updatedData'] = deviceData['data']['varfile_mt1_config1']['001'].copy() device['fullData'].update(device['updatedData']) updateData = False for registerKey in device['updatedData']: if (registerKey in self.__invertedMap): updateData = True break if updateData: device['data'] = self.__populateData(self.__deviceMap, device['fullData']) return True
async def refresh(self, fullLoad=False): if (not (await self.__api.isConnected())): return False if fullLoad: return self.__fullLoad() if (not self.__dataLoaded): return False for id in self.__devices: device = self.__devices[id] module_id = device['info']['module_id'] device_id = device['info']['device_id'] deviceInfo = (await self.__api.getDeviceInfo(module_id, device_id)) if (deviceInfo['returncode'] == '0'): for (key, item) in DEVICE_INFO_MAP.items(): if (item in deviceInfo): device['info'][key] = deviceInfo[item] deviceData = (await self.__api.getDeviceData(module_id, device_id, last_update_time=device['lastUpdateTime'])) device['lastUpdateTime'] = deviceData['timestamp'] device['updatedData'] = deviceData['data']['varfile_mt1_config1']['001'].copy() device['fullData'].update(device['updatedData']) updateData = False for registerKey in device['updatedData']: if (registerKey in self.__invertedMap): updateData = True break if updateData: device['data'] = self.__populateData(self.__deviceMap, device['fullData']) return True<|docstring|>Refresh or Reload all entries for all devices.<|endoftext|>
331f571c492db4e1692ad89e7ebbb926c94fc81b8cc08a0649a7b75f225944e7
def getDevices(self): 'Return a List of the Devices with plus information.' deviceReturn = {} for (device_id, device) in self.__devices.items(): deviceReturn[device_id] = device['info'] return deviceReturn
Return a List of the Devices with plus information.
masterthermconnect/controller.py
getDevices
sahulkrishan/python-masterthermconnect
0
python
def getDevices(self): deviceReturn = {} for (device_id, device) in self.__devices.items(): deviceReturn[device_id] = device['info'] return deviceReturn
def getDevices(self): deviceReturn = {} for (device_id, device) in self.__devices.items(): deviceReturn[device_id] = device['info'] return deviceReturn<|docstring|>Return a List of the Devices with plus information.<|endoftext|>
8343eef208d7087849381d4425aa572acba34ca7bd90ba38b6c0b1313480053f
def getDeviceInfo(self, module_id, device_id): 'Get the Information for a specific device.' info = {} key = ((module_id + '-') + device_id) if (key in self.__devices): info = self.__devices[key]['info'] return info
Get the Information for a specific device.
masterthermconnect/controller.py
getDeviceInfo
sahulkrishan/python-masterthermconnect
0
python
def getDeviceInfo(self, module_id, device_id): info = {} key = ((module_id + '-') + device_id) if (key in self.__devices): info = self.__devices[key]['info'] return info
def getDeviceInfo(self, module_id, device_id): info = {} key = ((module_id + '-') + device_id) if (key in self.__devices): info = self.__devices[key]['info'] return info<|docstring|>Get the Information for a specific device.<|endoftext|>
1f80bbfe66b03991d29965c638871102e306f71e810ba5d0fc9962760ad587b5
def getDeviceRegisters(self, module_id, device_id, lastUpdated=False): 'Get the Device Register Data, if lastUpdated is True then get the latest update data.' data = {} key = ((module_id + '-') + device_id) if (key in self.__devices): if lastUpdated: data = self.__devices[key]['updatedData'] else: data = self.__devices[key]['fullData'] return data
Get the Device Register Data, if lastUpdated is True then get the latest update data.
masterthermconnect/controller.py
getDeviceRegisters
sahulkrishan/python-masterthermconnect
0
python
def getDeviceRegisters(self, module_id, device_id, lastUpdated=False): data = {} key = ((module_id + '-') + device_id) if (key in self.__devices): if lastUpdated: data = self.__devices[key]['updatedData'] else: data = self.__devices[key]['fullData'] return data
def getDeviceRegisters(self, module_id, device_id, lastUpdated=False): data = {} key = ((module_id + '-') + device_id) if (key in self.__devices): if lastUpdated: data = self.__devices[key]['updatedData'] else: data = self.__devices[key]['fullData'] return data<|docstring|>Get the Device Register Data, if lastUpdated is True then get the latest update data.<|endoftext|>
d002a6e747f4f96948df303938883ba50b1e80c7525b792be96d2fa580759b56
def getDeviceData(self, module_id, device_id): 'Get the Device Data, if lastUpdated is True then get the latest update data.' data = {} key = ((module_id + '-') + device_id) if (key in self.__devices): data = self.__devices[key]['data'] return data
Get the Device Data, if lastUpdated is True then get the latest update data.
masterthermconnect/controller.py
getDeviceData
sahulkrishan/python-masterthermconnect
0
python
def getDeviceData(self, module_id, device_id): data = {} key = ((module_id + '-') + device_id) if (key in self.__devices): data = self.__devices[key]['data'] return data
def getDeviceData(self, module_id, device_id): data = {} key = ((module_id + '-') + device_id) if (key in self.__devices): data = self.__devices[key]['data'] return data<|docstring|>Get the Device Data, if lastUpdated is True then get the latest update data.<|endoftext|>
7ea60a695647a315dc552ded4f7166c44681b75c069153a5f280dc0058061fe3
def getCurrentTemperature(self, module_id, device_id): 'Get the current temperature' data = self.getDeviceData(module_id, device_id) return 10.0
Get the current temperature
masterthermconnect/controller.py
getCurrentTemperature
sahulkrishan/python-masterthermconnect
0
python
def getCurrentTemperature(self, module_id, device_id): data = self.getDeviceData(module_id, device_id) return 10.0
def getCurrentTemperature(self, module_id, device_id): data = self.getDeviceData(module_id, device_id) return 10.0<|docstring|>Get the current temperature<|endoftext|>
4bfa84e7d3ee380638077bc8bc1ef5d44ad69b7b96ad12bb490139b380d32d30
def getTemperature(self, module_id, device_id): 'Get the requested temperature' return 11.0
Get the requested temperature
masterthermconnect/controller.py
getTemperature
sahulkrishan/python-masterthermconnect
0
python
def getTemperature(self, module_id, device_id): return 11.0
def getTemperature(self, module_id, device_id): return 11.0<|docstring|>Get the requested temperature<|endoftext|>
de867b88c91cab3db569b94fee781e48c59b641ce25061266907b12947b6ab7b
def setTemperature(self, module_id, device_id): 'Set a new temperature' return 12.0
Set a new temperature
masterthermconnect/controller.py
setTemperature
sahulkrishan/python-masterthermconnect
0
python
def setTemperature(self, module_id, device_id): return 12.0
def setTemperature(self, module_id, device_id): return 12.0<|docstring|>Set a new temperature<|endoftext|>
2dd2387f9e99967ffd090cbf05e4d56ed928458fbbee5bda6ef86b42a1c771c8
def _make_divisible(v, divisor, min_value=None): '\n This function is taken from the original tf repo.\n It ensures that all layers have a channel number that is divisible by 8\n It can be seen here:\n https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py\n :param v:\n :param divisor:\n :param min_value:\n :return:\n ' if (min_value is None): min_value = divisor new_v = max(min_value, ((int((v + (divisor / 2))) // divisor) * divisor)) if (new_v < (0.9 * v)): new_v = (new_v + divisor) return new_v
This function is taken from the original tf repo. It ensures that all layers have a channel number that is divisible by 8 It can be seen here: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py :param v: :param divisor: :param min_value: :return:
src/lib/models/networks/mobilenev2_withFPN.py
_make_divisible
MatthewK3023/FoodCalorieDetector
0
python
def _make_divisible(v, divisor, min_value=None): '\n This function is taken from the original tf repo.\n It ensures that all layers have a channel number that is divisible by 8\n It can be seen here:\n https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py\n :param v:\n :param divisor:\n :param min_value:\n :return:\n ' if (min_value is None): min_value = divisor new_v = max(min_value, ((int((v + (divisor / 2))) // divisor) * divisor)) if (new_v < (0.9 * v)): new_v = (new_v + divisor) return new_v
def _make_divisible(v, divisor, min_value=None): '\n This function is taken from the original tf repo.\n It ensures that all layers have a channel number that is divisible by 8\n It can be seen here:\n https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py\n :param v:\n :param divisor:\n :param min_value:\n :return:\n ' if (min_value is None): min_value = divisor new_v = max(min_value, ((int((v + (divisor / 2))) // divisor) * divisor)) if (new_v < (0.9 * v)): new_v = (new_v + divisor) return new_v<|docstring|>This function is taken from the original tf repo. It ensures that all layers have a channel number that is divisible by 8 It can be seen here: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py :param v: :param divisor: :param min_value: :return:<|endoftext|>
fea09023845c30d681b74937c2fa529fdbb82cca36f0cd3bbff4d3682d7ee8e0
def __init__(self, id=None, display_name=None, email=None): 'UserLiteProfileResponse - a model defined in Swagger' self._id = None self._display_name = None self._email = None self.discriminator = None self.id = id self.display_name = display_name self.email = email
UserLiteProfileResponse - a model defined in Swagger
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
__init__
Brantone/appcenter-sdks
0
python
def __init__(self, id=None, display_name=None, email=None): self._id = None self._display_name = None self._email = None self.discriminator = None self.id = id self.display_name = display_name self.email = email
def __init__(self, id=None, display_name=None, email=None): self._id = None self._display_name = None self._email = None self.discriminator = None self.id = id self.display_name = display_name self.email = email<|docstring|>UserLiteProfileResponse - a model defined in Swagger<|endoftext|>
fbd293c9039ffa3d701aadeaa44e01a058d8a1ba9696e7619c1954205e298df8
@property def id(self): 'Gets the id of this UserLiteProfileResponse. # noqa: E501\n\n The unique id (UUID) of the user # noqa: E501\n\n :return: The id of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._id
Gets the id of this UserLiteProfileResponse. # noqa: E501 The unique id (UUID) of the user # noqa: E501 :return: The id of this UserLiteProfileResponse. # noqa: E501 :rtype: string
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
id
Brantone/appcenter-sdks
0
python
@property def id(self): 'Gets the id of this UserLiteProfileResponse. # noqa: E501\n\n The unique id (UUID) of the user # noqa: E501\n\n :return: The id of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._id
@property def id(self): 'Gets the id of this UserLiteProfileResponse. # noqa: E501\n\n The unique id (UUID) of the user # noqa: E501\n\n :return: The id of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._id<|docstring|>Gets the id of this UserLiteProfileResponse. # noqa: E501 The unique id (UUID) of the user # noqa: E501 :return: The id of this UserLiteProfileResponse. # noqa: E501 :rtype: string<|endoftext|>
3b0950e0236eee5d5bd30f6582060a757f0e360ca593dfc9afae1c3db040d189
@id.setter def id(self, id): 'Sets the id of this UserLiteProfileResponse.\n\n The unique id (UUID) of the user # noqa: E501\n\n :param id: The id of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (id is None): raise ValueError('Invalid value for `id`, must not be `None`') self._id = id
Sets the id of this UserLiteProfileResponse. The unique id (UUID) of the user # noqa: E501 :param id: The id of this UserLiteProfileResponse. # noqa: E501 :type: string
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
id
Brantone/appcenter-sdks
0
python
@id.setter def id(self, id): 'Sets the id of this UserLiteProfileResponse.\n\n The unique id (UUID) of the user # noqa: E501\n\n :param id: The id of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (id is None): raise ValueError('Invalid value for `id`, must not be `None`') self._id = id
@id.setter def id(self, id): 'Sets the id of this UserLiteProfileResponse.\n\n The unique id (UUID) of the user # noqa: E501\n\n :param id: The id of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (id is None): raise ValueError('Invalid value for `id`, must not be `None`') self._id = id<|docstring|>Sets the id of this UserLiteProfileResponse. The unique id (UUID) of the user # noqa: E501 :param id: The id of this UserLiteProfileResponse. # noqa: E501 :type: string<|endoftext|>
40fc9318e78f21fab10b7e864ce27088408a802d86cee61d654615ed323c4c2a
@property def display_name(self): 'Gets the display_name of this UserLiteProfileResponse. # noqa: E501\n\n The full name of the user. Might for example be first and last name # noqa: E501\n\n :return: The display_name of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._display_name
Gets the display_name of this UserLiteProfileResponse. # noqa: E501 The full name of the user. Might for example be first and last name # noqa: E501 :return: The display_name of this UserLiteProfileResponse. # noqa: E501 :rtype: string
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
display_name
Brantone/appcenter-sdks
0
python
@property def display_name(self): 'Gets the display_name of this UserLiteProfileResponse. # noqa: E501\n\n The full name of the user. Might for example be first and last name # noqa: E501\n\n :return: The display_name of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._display_name
@property def display_name(self): 'Gets the display_name of this UserLiteProfileResponse. # noqa: E501\n\n The full name of the user. Might for example be first and last name # noqa: E501\n\n :return: The display_name of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._display_name<|docstring|>Gets the display_name of this UserLiteProfileResponse. # noqa: E501 The full name of the user. Might for example be first and last name # noqa: E501 :return: The display_name of this UserLiteProfileResponse. # noqa: E501 :rtype: string<|endoftext|>
17f4a6554ff2b12e6d9cbe70a448887cec8e6d66a68ceb3e646fcdee9bb7d20d
@display_name.setter def display_name(self, display_name): 'Sets the display_name of this UserLiteProfileResponse.\n\n The full name of the user. Might for example be first and last name # noqa: E501\n\n :param display_name: The display_name of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (display_name is None): raise ValueError('Invalid value for `display_name`, must not be `None`') self._display_name = display_name
Sets the display_name of this UserLiteProfileResponse. The full name of the user. Might for example be first and last name # noqa: E501 :param display_name: The display_name of this UserLiteProfileResponse. # noqa: E501 :type: string
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
display_name
Brantone/appcenter-sdks
0
python
@display_name.setter def display_name(self, display_name): 'Sets the display_name of this UserLiteProfileResponse.\n\n The full name of the user. Might for example be first and last name # noqa: E501\n\n :param display_name: The display_name of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (display_name is None): raise ValueError('Invalid value for `display_name`, must not be `None`') self._display_name = display_name
@display_name.setter def display_name(self, display_name): 'Sets the display_name of this UserLiteProfileResponse.\n\n The full name of the user. Might for example be first and last name # noqa: E501\n\n :param display_name: The display_name of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (display_name is None): raise ValueError('Invalid value for `display_name`, must not be `None`') self._display_name = display_name<|docstring|>Sets the display_name of this UserLiteProfileResponse. The full name of the user. Might for example be first and last name # noqa: E501 :param display_name: The display_name of this UserLiteProfileResponse. # noqa: E501 :type: string<|endoftext|>
cabbdaaa92d7e041f420398a9ac7480f8e59c399d752f882c6f6fc8332ded222
@property def email(self): 'Gets the email of this UserLiteProfileResponse. # noqa: E501\n\n The email address of the user # noqa: E501\n\n :return: The email of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._email
Gets the email of this UserLiteProfileResponse. # noqa: E501 The email address of the user # noqa: E501 :return: The email of this UserLiteProfileResponse. # noqa: E501 :rtype: string
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
email
Brantone/appcenter-sdks
0
python
@property def email(self): 'Gets the email of this UserLiteProfileResponse. # noqa: E501\n\n The email address of the user # noqa: E501\n\n :return: The email of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._email
@property def email(self): 'Gets the email of this UserLiteProfileResponse. # noqa: E501\n\n The email address of the user # noqa: E501\n\n :return: The email of this UserLiteProfileResponse. # noqa: E501\n :rtype: string\n ' return self._email<|docstring|>Gets the email of this UserLiteProfileResponse. # noqa: E501 The email address of the user # noqa: E501 :return: The email of this UserLiteProfileResponse. # noqa: E501 :rtype: string<|endoftext|>
016c8373b970e0cc89b61d481d45e5b100c22cba4e0977306bfebbbf6d8cad8a
@email.setter def email(self, email): 'Sets the email of this UserLiteProfileResponse.\n\n The email address of the user # noqa: E501\n\n :param email: The email of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (email is None): raise ValueError('Invalid value for `email`, must not be `None`') self._email = email
Sets the email of this UserLiteProfileResponse. The email address of the user # noqa: E501 :param email: The email of this UserLiteProfileResponse. # noqa: E501 :type: string
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
email
Brantone/appcenter-sdks
0
python
@email.setter def email(self, email): 'Sets the email of this UserLiteProfileResponse.\n\n The email address of the user # noqa: E501\n\n :param email: The email of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (email is None): raise ValueError('Invalid value for `email`, must not be `None`') self._email = email
@email.setter def email(self, email): 'Sets the email of this UserLiteProfileResponse.\n\n The email address of the user # noqa: E501\n\n :param email: The email of this UserLiteProfileResponse. # noqa: E501\n :type: string\n ' if (email is None): raise ValueError('Invalid value for `email`, must not be `None`') self._email = email<|docstring|>Sets the email of this UserLiteProfileResponse. The email address of the user # noqa: E501 :param email: The email of this UserLiteProfileResponse. # noqa: E501 :type: string<|endoftext|>
137ba0f026bd6074febc2e7ebe1fec840dba70990f936f32b47eaf0fb048bd4a
def to_dict(self): 'Returns the model properties as a dict' result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value return result
Returns the model properties as a dict
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
to_dict
Brantone/appcenter-sdks
0
python
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value return result
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) else: result[attr] = value return result<|docstring|>Returns the model properties as a dict<|endoftext|>
cbb19eaa2fc8a113d9e32f924ef280a7e97563f8915f94f65dab438997af2e99
def to_str(self): 'Returns the string representation of the model' return pprint.pformat(self.to_dict())
Returns the string representation of the model
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
to_str
Brantone/appcenter-sdks
0
python
def to_str(self): return pprint.pformat(self.to_dict())
def to_str(self): return pprint.pformat(self.to_dict())<|docstring|>Returns the string representation of the model<|endoftext|>
772243a2c2b3261a9b954d07aaf295e3c1242a579a495e2d6a5679c677861703
def __repr__(self): 'For `print` and `pprint`' return self.to_str()
For `print` and `pprint`
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
__repr__
Brantone/appcenter-sdks
0
python
def __repr__(self): return self.to_str()
def __repr__(self): return self.to_str()<|docstring|>For `print` and `pprint`<|endoftext|>
4484eef857bd73055bbad6c551448821cdc50614e8286a2c80b161712b1cbe9b
def __eq__(self, other): 'Returns true if both objects are equal' if (not isinstance(other, UserLiteProfileResponse)): return False return (self.__dict__ == other.__dict__)
Returns true if both objects are equal
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
__eq__
Brantone/appcenter-sdks
0
python
def __eq__(self, other): if (not isinstance(other, UserLiteProfileResponse)): return False return (self.__dict__ == other.__dict__)
def __eq__(self, other): if (not isinstance(other, UserLiteProfileResponse)): return False return (self.__dict__ == other.__dict__)<|docstring|>Returns true if both objects are equal<|endoftext|>
43dc6740163eb9fc1161d09cb2208a64c7ad0cc8d9c8637ac3264522d3ec7e42
def __ne__(self, other): 'Returns true if both objects are not equal' return (not (self == other))
Returns true if both objects are not equal
sdks/python/appcenter_sdk/models/UserLiteProfileResponse.py
__ne__
Brantone/appcenter-sdks
0
python
def __ne__(self, other): return (not (self == other))
def __ne__(self, other): return (not (self == other))<|docstring|>Returns true if both objects are not equal<|endoftext|>
9d43e52c88841d3006a99fafa4ecbb168718b8ffa07c4d5c1ad1da3bd55d4618
def locate_oc_binary(): ' Find and return oc binary file ' paths = (os.environ.get('PATH', os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS) oc_binary = 'oc' try: which_result = shutil.which(oc_binary, path=os.pathsep.join(paths)) if (which_result is not None): oc_binary = which_result except AttributeError: for path in paths: if os.path.exists(os.path.join(path, oc_binary)): oc_binary = os.path.join(path, oc_binary) break return oc_binary
Find and return oc binary file
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
locate_oc_binary
vizakua/openshift-tools
164
python
def locate_oc_binary(): ' ' paths = (os.environ.get('PATH', os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS) oc_binary = 'oc' try: which_result = shutil.which(oc_binary, path=os.pathsep.join(paths)) if (which_result is not None): oc_binary = which_result except AttributeError: for path in paths: if os.path.exists(os.path.join(path, oc_binary)): oc_binary = os.path.join(path, oc_binary) break return oc_binary
def locate_oc_binary(): ' ' paths = (os.environ.get('PATH', os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS) oc_binary = 'oc' try: which_result = shutil.which(oc_binary, path=os.pathsep.join(paths)) if (which_result is not None): oc_binary = which_result except AttributeError: for path in paths: if os.path.exists(os.path.join(path, oc_binary)): oc_binary = os.path.join(path, oc_binary) break return oc_binary<|docstring|>Find and return oc binary file<|endoftext|>
4709e40d223971cade72e77547f01bf716181c85cdd823773f91eb3ca0129809
def main(): '\n ansible oc module for scaling\n ' module = AnsibleModule(argument_spec=dict(kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'), state=dict(default='present', type='str', choices=['present', 'list']), debug=dict(default=False, type='bool'), kind=dict(default='dc', choices=['dc', 'rc'], type='str'), namespace=dict(default='default', type='str'), replicas=dict(default=None, type='int'), name=dict(default=None, type='str')), supports_check_mode=True) rval = OCScale.run_ansible(module.params, module.check_mode) if ('failed' in rval): module.fail_json(**rval) module.exit_json(**rval)
ansible oc module for scaling
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
main
vizakua/openshift-tools
164
python
def main(): '\n \n ' module = AnsibleModule(argument_spec=dict(kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'), state=dict(default='present', type='str', choices=['present', 'list']), debug=dict(default=False, type='bool'), kind=dict(default='dc', choices=['dc', 'rc'], type='str'), namespace=dict(default='default', type='str'), replicas=dict(default=None, type='int'), name=dict(default=None, type='str')), supports_check_mode=True) rval = OCScale.run_ansible(module.params, module.check_mode) if ('failed' in rval): module.fail_json(**rval) module.exit_json(**rval)
def main(): '\n \n ' module = AnsibleModule(argument_spec=dict(kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'), state=dict(default='present', type='str', choices=['present', 'list']), debug=dict(default=False, type='bool'), kind=dict(default='dc', choices=['dc', 'rc'], type='str'), namespace=dict(default='default', type='str'), replicas=dict(default=None, type='int'), name=dict(default=None, type='str')), supports_check_mode=True) rval = OCScale.run_ansible(module.params, module.check_mode) if ('failed' in rval): module.fail_json(**rval) module.exit_json(**rval)<|docstring|>ansible oc module for scaling<|endoftext|>
42d5c481c1a773e9f6647999754664973383d9fad77ce0f61dc556e550df5436
@property def separator(self): ' getter method for separator ' return self._separator
getter method for separator
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
separator
vizakua/openshift-tools
164
python
@property def separator(self): ' ' return self._separator
@property def separator(self): ' ' return self._separator<|docstring|>getter method for separator<|endoftext|>
85016b7ebc4da7b8408e6c94d1bde6e693ce5a3d6e5aa129b4e347c3b378ac14
@separator.setter def separator(self, inc_sep): ' setter method for separator ' self._separator = inc_sep
setter method for separator
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
separator
vizakua/openshift-tools
164
python
@separator.setter def separator(self, inc_sep): ' ' self._separator = inc_sep
@separator.setter def separator(self, inc_sep): ' ' self._separator = inc_sep<|docstring|>setter method for separator<|endoftext|>
cd1822b71b663c39df4f59c136c350c6b1789209b213cd4a6b9bcb7e52331008
@property def yaml_dict(self): ' getter method for yaml_dict ' return self.__yaml_dict
getter method for yaml_dict
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
yaml_dict
vizakua/openshift-tools
164
python
@property def yaml_dict(self): ' ' return self.__yaml_dict
@property def yaml_dict(self): ' ' return self.__yaml_dict<|docstring|>getter method for yaml_dict<|endoftext|>
6a226d1ca4c0b59248471df80b4f57b8f9c932322ce70f8c59bc277170f42142
@yaml_dict.setter def yaml_dict(self, value): ' setter method for yaml_dict ' self.__yaml_dict = value
setter method for yaml_dict
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
yaml_dict
vizakua/openshift-tools
164
python
@yaml_dict.setter def yaml_dict(self, value): ' ' self.__yaml_dict = value
@yaml_dict.setter def yaml_dict(self, value): ' ' self.__yaml_dict = value<|docstring|>setter method for yaml_dict<|endoftext|>
9d6689d9f5f4d8692f4e7c407bc877675b8a943977c55f0c8d7c8a0629b1aca9
@staticmethod def parse_key(key, sep='.'): 'parse the key allowing the appropriate separator' common_separators = list((Yedit.com_sep - set([sep]))) return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
parse the key allowing the appropriate separator
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
parse_key
vizakua/openshift-tools
164
python
@staticmethod def parse_key(key, sep='.'): common_separators = list((Yedit.com_sep - set([sep]))) return re.findall(Yedit.re_key.format(.join(common_separators)), key)
@staticmethod def parse_key(key, sep='.'): common_separators = list((Yedit.com_sep - set([sep]))) return re.findall(Yedit.re_key.format(.join(common_separators)), key)<|docstring|>parse the key allowing the appropriate separator<|endoftext|>
ba794a3c3b215463fa49a1e971ca6a00d7edcc07e6607784d5fdfbf8ea3073c5
@staticmethod def valid_key(key, sep='.'): 'validate the incoming key' common_separators = list((Yedit.com_sep - set([sep]))) if (not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key)): return False return True
validate the incoming key
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
valid_key
vizakua/openshift-tools
164
python
@staticmethod def valid_key(key, sep='.'): common_separators = list((Yedit.com_sep - set([sep]))) if (not re.match(Yedit.re_valid_key.format(.join(common_separators)), key)): return False return True
@staticmethod def valid_key(key, sep='.'): common_separators = list((Yedit.com_sep - set([sep]))) if (not re.match(Yedit.re_valid_key.format(.join(common_separators)), key)): return False return True<|docstring|>validate the incoming key<|endoftext|>
a28f0a9d77369d170708a960e5347b021348a284feaabc5e2c6d8a1acb695c9d
@staticmethod def remove_entry(data, key, index=None, value=None, sep='.'): ' remove data at location key ' if ((key == '') and isinstance(data, dict)): if (value is not None): data.pop(value) elif (index is not None): raise YeditException('remove_entry for a dictionary does not have an index {}'.format(index)) else: data.clear() return True elif ((key == '') and isinstance(data, list)): ind = None if (value is not None): try: ind = data.index(value) except ValueError: return False elif (index is not None): ind = index else: del data[:] if (ind is not None): data.pop(ind) return True if ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes[:(- 1)]: if (dict_key and isinstance(data, dict)): data = data.get(dict_key) elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: return None if key_indexes[(- 1)][0]: if (isinstance(data, list) and (int(key_indexes[(- 1)][0]) <= (len(data) - 1))): del data[int(key_indexes[(- 1)][0])] return True elif key_indexes[(- 1)][1]: if isinstance(data, dict): del data[key_indexes[(- 1)][1]] return True
remove data at location key
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
remove_entry
vizakua/openshift-tools
164
python
@staticmethod def remove_entry(data, key, index=None, value=None, sep='.'): ' ' if ((key == ) and isinstance(data, dict)): if (value is not None): data.pop(value) elif (index is not None): raise YeditException('remove_entry for a dictionary does not have an index {}'.format(index)) else: data.clear() return True elif ((key == ) and isinstance(data, list)): ind = None if (value is not None): try: ind = data.index(value) except ValueError: return False elif (index is not None): ind = index else: del data[:] if (ind is not None): data.pop(ind) return True if ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes[:(- 1)]: if (dict_key and isinstance(data, dict)): data = data.get(dict_key) elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: return None if key_indexes[(- 1)][0]: if (isinstance(data, list) and (int(key_indexes[(- 1)][0]) <= (len(data) - 1))): del data[int(key_indexes[(- 1)][0])] return True elif key_indexes[(- 1)][1]: if isinstance(data, dict): del data[key_indexes[(- 1)][1]] return True
@staticmethod def remove_entry(data, key, index=None, value=None, sep='.'): ' ' if ((key == ) and isinstance(data, dict)): if (value is not None): data.pop(value) elif (index is not None): raise YeditException('remove_entry for a dictionary does not have an index {}'.format(index)) else: data.clear() return True elif ((key == ) and isinstance(data, list)): ind = None if (value is not None): try: ind = data.index(value) except ValueError: return False elif (index is not None): ind = index else: del data[:] if (ind is not None): data.pop(ind) return True if ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes[:(- 1)]: if (dict_key and isinstance(data, dict)): data = data.get(dict_key) elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: return None if key_indexes[(- 1)][0]: if (isinstance(data, list) and (int(key_indexes[(- 1)][0]) <= (len(data) - 1))): del data[int(key_indexes[(- 1)][0])] return True elif key_indexes[(- 1)][1]: if isinstance(data, dict): del data[key_indexes[(- 1)][1]] return True<|docstring|>remove data at location key<|endoftext|>
5dd6df441bc53688448be5ed5f818cd409e017ac5ac38f0f70783719a362b0cf
@staticmethod def add_entry(data, key, item=None, sep='.'): " Get an item from a dictionary with key notation a.b.c\n d = {'a': {'b': 'c'}}}\n key = a#b\n return c\n " if (key == ''): pass elif ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes[:(- 1)]: if dict_key: if (isinstance(data, dict) and (dict_key in data) and data[dict_key]): data = data[dict_key] continue elif (data and (not isinstance(data, dict))): raise YeditException(('Unexpected item type found while going through key ' + 'path: {} (at key: {})'.format(key, dict_key))) data[dict_key] = {} data = data[dict_key] elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: raise YeditException('Unexpected item type found while going through key path: {}'.format(key)) if (key == ''): data = item elif (key_indexes[(- 1)][0] and isinstance(data, list) and (int(key_indexes[(- 1)][0]) <= (len(data) - 1))): data[int(key_indexes[(- 1)][0])] = item elif (key_indexes[(- 1)][1] and isinstance(data, dict)): data[key_indexes[(- 1)][1]] = item else: raise YeditException('Error adding to object at path: {}'.format(key)) return data
Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a#b return c
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
add_entry
vizakua/openshift-tools
164
python
@staticmethod def add_entry(data, key, item=None, sep='.'): " Get an item from a dictionary with key notation a.b.c\n d = {'a': {'b': 'c'}}}\n key = a#b\n return c\n " if (key == ): pass elif ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes[:(- 1)]: if dict_key: if (isinstance(data, dict) and (dict_key in data) and data[dict_key]): data = data[dict_key] continue elif (data and (not isinstance(data, dict))): raise YeditException(('Unexpected item type found while going through key ' + 'path: {} (at key: {})'.format(key, dict_key))) data[dict_key] = {} data = data[dict_key] elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: raise YeditException('Unexpected item type found while going through key path: {}'.format(key)) if (key == ): data = item elif (key_indexes[(- 1)][0] and isinstance(data, list) and (int(key_indexes[(- 1)][0]) <= (len(data) - 1))): data[int(key_indexes[(- 1)][0])] = item elif (key_indexes[(- 1)][1] and isinstance(data, dict)): data[key_indexes[(- 1)][1]] = item else: raise YeditException('Error adding to object at path: {}'.format(key)) return data
@staticmethod def add_entry(data, key, item=None, sep='.'): " Get an item from a dictionary with key notation a.b.c\n d = {'a': {'b': 'c'}}}\n key = a#b\n return c\n " if (key == ): pass elif ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes[:(- 1)]: if dict_key: if (isinstance(data, dict) and (dict_key in data) and data[dict_key]): data = data[dict_key] continue elif (data and (not isinstance(data, dict))): raise YeditException(('Unexpected item type found while going through key ' + 'path: {} (at key: {})'.format(key, dict_key))) data[dict_key] = {} data = data[dict_key] elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: raise YeditException('Unexpected item type found while going through key path: {}'.format(key)) if (key == ): data = item elif (key_indexes[(- 1)][0] and isinstance(data, list) and (int(key_indexes[(- 1)][0]) <= (len(data) - 1))): data[int(key_indexes[(- 1)][0])] = item elif (key_indexes[(- 1)][1] and isinstance(data, dict)): data[key_indexes[(- 1)][1]] = item else: raise YeditException('Error adding to object at path: {}'.format(key)) return data<|docstring|>Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a#b return c<|endoftext|>
886d67469e1ba3d59f5e0dc848f8da2e4b7156c98305917baf1d6f68cc725eaf
@staticmethod
def get_entry(data, key, sep='.'):
    """Resolve a dotted *key* path against *data* and return the value.

    Example: data={'a': {'b': 'c'}}, key='a.b' -> 'c'.
    Returns None whenever the path cannot be followed.
    """
    if key == '':
        # Empty key addresses the whole document.
        pass
    elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
        return None

    for arr_ind, dict_key in Yedit.parse_key(key, sep):
        if dict_key and isinstance(data, dict):
            data = data.get(dict_key)
        elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
            data = data[int(arr_ind)]
        else:
            # Path component does not match the current container type.
            return None

    return data
Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a.b return c
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
get_entry
vizakua/openshift-tools
164
python
@staticmethod def get_entry(data, key, sep='.'): " Get an item from a dictionary with key notation a.b.c\n d = {'a': {'b': 'c'}}}\n key = a.b\n return c\n " if (key == ): pass elif ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes: if (dict_key and isinstance(data, dict)): data = data.get(dict_key) elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: return None return data
@staticmethod def get_entry(data, key, sep='.'): " Get an item from a dictionary with key notation a.b.c\n d = {'a': {'b': 'c'}}}\n key = a.b\n return c\n " if (key == ): pass elif ((not (key and Yedit.valid_key(key, sep))) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for (arr_ind, dict_key) in key_indexes: if (dict_key and isinstance(data, dict)): data = data.get(dict_key) elif (arr_ind and isinstance(data, list) and (int(arr_ind) <= (len(data) - 1))): data = data[int(arr_ind)] else: return None return data<|docstring|>Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a.b return c<|endoftext|>
deb4d10a4519c12cc6d63fc7522c2bfc3bc26c617a874bfd3f7cebb54fa68919
@staticmethod def _write(filename, contents): ' Actually write the file contents to disk. This helps with mocking. ' tmp_filename = (filename + '.yedit') with open(tmp_filename, 'w') as yfd: fcntl.flock(yfd, (fcntl.LOCK_EX | fcntl.LOCK_NB)) yfd.write(contents) fcntl.flock(yfd, fcntl.LOCK_UN) os.rename(tmp_filename, filename)
Actually write the file contents to disk. This helps with mocking.
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
_write
vizakua/openshift-tools
164
python
@staticmethod def _write(filename, contents): ' ' tmp_filename = (filename + '.yedit') with open(tmp_filename, 'w') as yfd: fcntl.flock(yfd, (fcntl.LOCK_EX | fcntl.LOCK_NB)) yfd.write(contents) fcntl.flock(yfd, fcntl.LOCK_UN) os.rename(tmp_filename, filename)
@staticmethod def _write(filename, contents): ' ' tmp_filename = (filename + '.yedit') with open(tmp_filename, 'w') as yfd: fcntl.flock(yfd, (fcntl.LOCK_EX | fcntl.LOCK_NB)) yfd.write(contents) fcntl.flock(yfd, fcntl.LOCK_UN) os.rename(tmp_filename, filename)<|docstring|>Actually write the file contents to disk. This helps with mocking.<|endoftext|>
f0c8c8c80ea55aad0b741aebb4a6497a67708741cbf0f4e8173fa56d08b8b667
def write(self):
    """Serialize ``self.yaml_dict`` back to ``self.filename``.

    Honors ``self.content_type`` ('yaml' or 'json'); optionally copies
    the existing file to ``<filename>.orig`` first when ``self.backup``
    is set.  Returns (True, yaml_dict).  Raises YeditException when no
    filename is configured or the content type is unsupported.
    """
    if (not self.filename):
        raise YeditException('Please specify a filename.')
    if (self.backup and self.file_exists()):
        # Keep a backup of the current on-disk copy before overwriting.
        shutil.copy(self.filename, (self.filename + '.orig'))
    try:
        # ruamel.yaml: force block style on round-trip dumps.
        self.yaml_dict.fa.set_block_style()
    except AttributeError:
        # Plain dicts (or plain PyYAML objects) lack format attributes.
        pass
    if (self.content_type == 'yaml'):
        try:
            # Prefer the round-trip dumper (ruamel) to preserve layout.
            Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
        except AttributeError:
            # PyYAML fallback: RoundTripDumper is not available.
            Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
    elif (self.content_type == 'json'):
        Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
    else:
        raise YeditException(('Unsupported content_type: {}.'.format(self.content_type) + 'Please specify a content_type of yaml or json.'))
    return (True, self.yaml_dict)
write to file
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
write
vizakua/openshift-tools
164
python
def write(self): ' ' if (not self.filename): raise YeditException('Please specify a filename.') if (self.backup and self.file_exists()): shutil.copy(self.filename, (self.filename + '.orig')) try: self.yaml_dict.fa.set_block_style() except AttributeError: pass if (self.content_type == 'yaml'): try: Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper)) except AttributeError: Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False)) elif (self.content_type == 'json'): Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True)) else: raise YeditException(('Unsupported content_type: {}.'.format(self.content_type) + 'Please specify a content_type of yaml or json.')) return (True, self.yaml_dict)
def write(self): ' ' if (not self.filename): raise YeditException('Please specify a filename.') if (self.backup and self.file_exists()): shutil.copy(self.filename, (self.filename + '.orig')) try: self.yaml_dict.fa.set_block_style() except AttributeError: pass if (self.content_type == 'yaml'): try: Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper)) except AttributeError: Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False)) elif (self.content_type == 'json'): Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True)) else: raise YeditException(('Unsupported content_type: {}.'.format(self.content_type) + 'Please specify a content_type of yaml or json.')) return (True, self.yaml_dict)<|docstring|>write to file<|endoftext|>
dbf49ade23339bc0b06f5538fa1247644ea20e48e74a50e44afdad1225694552
def read(self):
    """Return the raw text of ``self.filename``.

    Returns None when no filename is configured or the file is absent.
    """
    if self.filename is None or not self.file_exists():
        return None
    with open(self.filename) as handle:
        return handle.read()
read from file
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
read
vizakua/openshift-tools
164
python
def read(self): ' ' if ((self.filename is None) or (not self.file_exists())): return None contents = None with open(self.filename) as yfd: contents = yfd.read() return contents
def read(self): ' ' if ((self.filename is None) or (not self.file_exists())): return None contents = None with open(self.filename) as yfd: contents = yfd.read() return contents<|docstring|>read from file<|endoftext|>
36725330824864117dd03a26168a5722868a03af5bae493b1db7b3c3daaaae42
def file_exists(self):
    """Return True when ``self.filename`` names an existing path."""
    return os.path.exists(self.filename)
return whether file exists
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
file_exists
vizakua/openshift-tools
164
python
def file_exists(self): ' ' if os.path.exists(self.filename): return True return False
def file_exists(self): ' ' if os.path.exists(self.filename): return True return False<|docstring|>return whether file exists<|endoftext|>
6894fe4f5e813f27cc0c8f49cda24d56bb00ddfdf55d85e11e2b7a0ee2ea037c
def load(self, content_type='yaml'):
    """Parse the file (or inline ``self.content``) into ``self.yaml_dict``.

    ``self.content`` takes precedence over the file on disk: a dict is
    adopted as-is, a string is parsed as *content_type* ('yaml'/'json').
    Returns the parsed structure, or None when nothing is available.
    Raises YeditException on a YAML parse error.
    """
    contents = self.read()
    if ((not contents) and (not self.content)):
        return None
    if self.content:
        if isinstance(self.content, dict):
            self.yaml_dict = self.content
            return self.yaml_dict
        elif isinstance(self.content, str):
            contents = self.content
    try:
        if ((content_type == 'yaml') and contents):
            try:
                # ruamel.yaml: request block style around the parse so
                # subsequent round-trip dumps keep their layout.
                self.yaml_dict.fa.set_block_style()
            except AttributeError:
                pass
            try:
                self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
            except AttributeError:
                # PyYAML fallback when RoundTripLoader is unavailable.
                self.yaml_dict = yaml.safe_load(contents)
            try:
                self.yaml_dict.fa.set_block_style()
            except AttributeError:
                pass
        elif ((content_type == 'json') and contents):
            self.yaml_dict = json.loads(contents)
    except yaml.YAMLError as err:
        raise YeditException('Problem with loading yaml file. {}'.format(err))
    return self.yaml_dict
return yaml file
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
load
vizakua/openshift-tools
164
python
def load(self, content_type='yaml'): ' ' contents = self.read() if ((not contents) and (not self.content)): return None if self.content: if isinstance(self.content, dict): self.yaml_dict = self.content return self.yaml_dict elif isinstance(self.content, str): contents = self.content try: if ((content_type == 'yaml') and contents): try: self.yaml_dict.fa.set_block_style() except AttributeError: pass try: self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader) except AttributeError: self.yaml_dict = yaml.safe_load(contents) try: self.yaml_dict.fa.set_block_style() except AttributeError: pass elif ((content_type == 'json') and contents): self.yaml_dict = json.loads(contents) except yaml.YAMLError as err: raise YeditException('Problem with loading yaml file. {}'.format(err)) return self.yaml_dict
def load(self, content_type='yaml'): ' ' contents = self.read() if ((not contents) and (not self.content)): return None if self.content: if isinstance(self.content, dict): self.yaml_dict = self.content return self.yaml_dict elif isinstance(self.content, str): contents = self.content try: if ((content_type == 'yaml') and contents): try: self.yaml_dict.fa.set_block_style() except AttributeError: pass try: self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader) except AttributeError: self.yaml_dict = yaml.safe_load(contents) try: self.yaml_dict.fa.set_block_style() except AttributeError: pass elif ((content_type == 'json') and contents): self.yaml_dict = json.loads(contents) except yaml.YAMLError as err: raise YeditException('Problem with loading yaml file. {}'.format(err)) return self.yaml_dict<|docstring|>return yaml file<|endoftext|>
7d906609fe2b375090fdf79ccd94f60d3620ffbf1b96d719c75861821e7e9569
def get(self, key):
    """Return the value stored at *key*, or None when the path is absent."""
    try:
        return Yedit.get_entry(self.yaml_dict, key, self.separator)
    except KeyError:
        return None
get a specified key
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
get
vizakua/openshift-tools
164
python
def get(self, key): ' ' try: entry = Yedit.get_entry(self.yaml_dict, key, self.separator) except KeyError: entry = None return entry
def get(self, key): ' ' try: entry = Yedit.get_entry(self.yaml_dict, key, self.separator) except KeyError: entry = None return entry<|docstring|>get a specified key<|endoftext|>
477659f481f27961c29ef33395d2341a92045b83328825de18d1c6159e9375d6
def pop(self, path, key_or_item):
    """Remove *key_or_item* from the dict or list located at *path*.

    Returns a (changed, yaml_dict) tuple; changed is False when the
    path is missing or the key/item is not present.
    """
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None
    if entry is None:
        return (False, self.yaml_dict)

    if isinstance(entry, dict):
        # Drop the key when present; otherwise report no change.
        if key_or_item in entry:
            entry.pop(key_or_item)
            return (True, self.yaml_dict)
        return (False, self.yaml_dict)

    if isinstance(entry, list):
        try:
            position = entry.index(key_or_item)
        except ValueError:
            return (False, self.yaml_dict)
        entry.pop(position)
        return (True, self.yaml_dict)

    return (False, self.yaml_dict)
remove a key, value pair from a dict or an item for a list
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
pop
vizakua/openshift-tools
164
python
def pop(self, path, key_or_item): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry is None): return (False, self.yaml_dict) if isinstance(entry, dict): if (key_or_item in entry): entry.pop(key_or_item) return (True, self.yaml_dict) return (False, self.yaml_dict) elif isinstance(entry, list): ind = None try: ind = entry.index(key_or_item) except ValueError: return (False, self.yaml_dict) entry.pop(ind) return (True, self.yaml_dict) return (False, self.yaml_dict)
def pop(self, path, key_or_item): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry is None): return (False, self.yaml_dict) if isinstance(entry, dict): if (key_or_item in entry): entry.pop(key_or_item) return (True, self.yaml_dict) return (False, self.yaml_dict) elif isinstance(entry, list): ind = None try: ind = entry.index(key_or_item) except ValueError: return (False, self.yaml_dict) entry.pop(ind) return (True, self.yaml_dict) return (False, self.yaml_dict)<|docstring|>remove a key, value pair from a dict or an item for a list<|endoftext|>
2046f8ebe27f7aed96a066bc63a45fbcdab7b73df77e5036101b19b5590bc615
def delete(self, path, index=None, value=None):
    """Remove the entry at *path* from the document.

    Returns (changed, yaml_dict); changed is False when the path does
    not exist or the removal fails.
    """
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None
    if entry is None:
        return (False, self.yaml_dict)

    if not Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator):
        return (False, self.yaml_dict)
    return (True, self.yaml_dict)
remove path from a dict
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
delete
vizakua/openshift-tools
164
python
def delete(self, path, index=None, value=None): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry is None): return (False, self.yaml_dict) result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator) if (not result): return (False, self.yaml_dict) return (True, self.yaml_dict)
def delete(self, path, index=None, value=None): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry is None): return (False, self.yaml_dict) result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator) if (not result): return (False, self.yaml_dict) return (True, self.yaml_dict)<|docstring|>remove path from a dict<|endoftext|>
486176c94c6c33c50ec852215edc99f6f3b140f2d63ed0f1cc2f9cab70d994c8
def exists(self, path, value):
    """Check whether *value* exists at *path*.

    For a list entry, membership is tested.  For a dict entry, a dict
    *value* matches when every one of its key/value pairs matches the
    entry; any other *value* is tested for key membership.  Otherwise
    the entry is compared to *value* directly.
    """
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None

    if isinstance(entry, list):
        if value in entry:
            return True
        return False
    elif isinstance(entry, dict):
        if isinstance(value, dict):
            # Bugfix: the original indexed entry[key] directly, raising
            # KeyError when a probed key was absent.  Use .get() with a
            # sentinel so a missing key simply means "does not match".
            missing = object()
            rval = False
            for (key, val) in value.items():
                if entry.get(key, missing) != val:
                    rval = False
                    break
                else:
                    rval = True
            return rval
        return (value in entry)
    return (entry == value)
check if value exists at path
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
exists
vizakua/openshift-tools
164
python
def exists(self, path, value): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if isinstance(entry, list): if (value in entry): return True return False elif isinstance(entry, dict): if isinstance(value, dict): rval = False for (key, val) in value.items(): if (entry[key] != val): rval = False break else: rval = True return rval return (value in entry) return (entry == value)
def exists(self, path, value): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if isinstance(entry, list): if (value in entry): return True return False elif isinstance(entry, dict): if isinstance(value, dict): rval = False for (key, val) in value.items(): if (entry[key] != val): rval = False break else: rval = True return rval return (value in entry) return (entry == value)<|docstring|>check if value exists at path<|endoftext|>
dc39bfa2c6b6d87a276d376029a74823962526714e8cf95aec990daa8b76a191
def append(self, path, value):
    """Append *value* to the list at *path*, creating the list if absent.

    Returns (changed, yaml_dict); changed is False when the entry at
    *path* is not a list.
    """
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None

    if entry is None:
        # Seed an empty list at the path so there is something to append to.
        self.put(path, [])
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)

    if not isinstance(entry, list):
        return (False, self.yaml_dict)

    entry.append(value)
    return (True, self.yaml_dict)
append value to a list
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
append
vizakua/openshift-tools
164
python
def append(self, path, value): try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry is None): self.put(path, []) entry = Yedit.get_entry(self.yaml_dict, path, self.separator) if (not isinstance(entry, list)): return (False, self.yaml_dict) entry.append(value) return (True, self.yaml_dict)
def append(self, path, value): try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry is None): self.put(path, []) entry = Yedit.get_entry(self.yaml_dict, path, self.separator) if (not isinstance(entry, list)): return (False, self.yaml_dict) entry.append(value) return (True, self.yaml_dict)<|docstring|>append value to a list<|endoftext|>
a9281c8a5cafb94f01816d6b2567c19591b8afe0b57c981aa3bbe2b2ac9daf49
def update(self, path, value, index=None, curr_value=None):
    """Update the entry at *path* with *value*; returns (changed, yaml_dict).

    A dict entry is merged with *value* (which must itself be a dict).
    A list entry is located by *curr_value* or *index* and replaced in
    place; failing that, *value* is appended unless already present.
    """
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None

    if isinstance(entry, dict):
        if not isinstance(value, dict):
            raise YeditException(('Cannot replace key, value entry in dict with non-dict type. ' + 'value=[{}] type=[{}]'.format(value, type(value))))
        entry.update(value)
        return (True, self.yaml_dict)

    if isinstance(entry, list):
        ind = None
        if curr_value:
            try:
                ind = entry.index(curr_value)
            except ValueError:
                return (False, self.yaml_dict)
        elif index is not None:
            ind = index

        if ind is not None and entry[ind] != value:
            entry[ind] = value
            return (True, self.yaml_dict)

        # No in-place replacement happened: append unless already present.
        try:
            ind = entry.index(value)
        except ValueError:
            entry.append(value)
            return (True, self.yaml_dict)
        if ind is not None:
            return (False, self.yaml_dict)

    return (False, self.yaml_dict)
put path, value into a dict
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
update
vizakua/openshift-tools
164
python
def update(self, path, value, index=None, curr_value=None): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if isinstance(entry, dict): if (not isinstance(value, dict)): raise YeditException(('Cannot replace key, value entry in dict with non-dict type. ' + 'value=[{}] type=[{}]'.format(value, type(value)))) entry.update(value) return (True, self.yaml_dict) elif isinstance(entry, list): ind = None if curr_value: try: ind = entry.index(curr_value) except ValueError: return (False, self.yaml_dict) elif (index is not None): ind = index if ((ind is not None) and (entry[ind] != value)): entry[ind] = value return (True, self.yaml_dict) try: ind = entry.index(value) except ValueError: entry.append(value) return (True, self.yaml_dict) if (ind is not None): return (False, self.yaml_dict) return (False, self.yaml_dict)
def update(self, path, value, index=None, curr_value=None): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if isinstance(entry, dict): if (not isinstance(value, dict)): raise YeditException(('Cannot replace key, value entry in dict with non-dict type. ' + 'value=[{}] type=[{}]'.format(value, type(value)))) entry.update(value) return (True, self.yaml_dict) elif isinstance(entry, list): ind = None if curr_value: try: ind = entry.index(curr_value) except ValueError: return (False, self.yaml_dict) elif (index is not None): ind = index if ((ind is not None) and (entry[ind] != value)): entry[ind] = value return (True, self.yaml_dict) try: ind = entry.index(value) except ValueError: entry.append(value) return (True, self.yaml_dict) if (ind is not None): return (False, self.yaml_dict) return (False, self.yaml_dict)<|docstring|>put path, value into a dict<|endoftext|>
b3f11484d805ec14170cbac256419c4b5a46e878e6dd6db0620633b086ef11c7
def put(self, path, value):
    """Set *value* at *path*, returning (changed, yaml_dict).

    The edit is applied to a deep copy first, so a failed update never
    corrupts ``self.yaml_dict``.
    """
    try:
        entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
    except KeyError:
        entry = None
    if (entry == value):
        # Already holds the desired value -- nothing to change.
        return (False, self.yaml_dict)
    try:
        # ruamel.yaml: clone via a dump/load round trip to preserve the
        # document's formatting metadata.
        tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
    except AttributeError:
        # PyYAML fallback: a plain deep copy.
        tmp_copy = copy.deepcopy(self.yaml_dict)
    try:
        tmp_copy.fa.set_block_style()
    except AttributeError:
        pass
    result = Yedit.add_entry(tmp_copy, path, value, self.separator)
    if (result is None):
        return (False, self.yaml_dict)
    if (path == ''):
        # An empty path replaces the whole document (lists/dicts only).
        if (isinstance(result, list) or isinstance(result, dict)):
            self.yaml_dict = result
            return (True, self.yaml_dict)
        return (False, self.yaml_dict)
    self.yaml_dict = tmp_copy
    return (True, self.yaml_dict)
put path, value into a dict
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
put
vizakua/openshift-tools
164
python
def put(self, path, value): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry == value): return (False, self.yaml_dict) try: tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) except AttributeError: tmp_copy = copy.deepcopy(self.yaml_dict) try: tmp_copy.fa.set_block_style() except AttributeError: pass result = Yedit.add_entry(tmp_copy, path, value, self.separator) if (result is None): return (False, self.yaml_dict) if (path == ): if (isinstance(result, list) or isinstance(result, dict)): self.yaml_dict = result return (True, self.yaml_dict) return (False, self.yaml_dict) self.yaml_dict = tmp_copy return (True, self.yaml_dict)
def put(self, path, value): ' ' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if (entry == value): return (False, self.yaml_dict) try: tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) except AttributeError: tmp_copy = copy.deepcopy(self.yaml_dict) try: tmp_copy.fa.set_block_style() except AttributeError: pass result = Yedit.add_entry(tmp_copy, path, value, self.separator) if (result is None): return (False, self.yaml_dict) if (path == ): if (isinstance(result, list) or isinstance(result, dict)): self.yaml_dict = result return (True, self.yaml_dict) return (False, self.yaml_dict) self.yaml_dict = tmp_copy return (True, self.yaml_dict)<|docstring|>put path, value into a dict<|endoftext|>
9eb7701609c3b0b6dd26be462c3409d63742fb41d9b42dac5d273c25f349c67e
def create(self, path, value):
    """Create the document with *value* at *path*.

    Only acts when the backing file does not already exist.  Returns
    (changed, yaml_dict).
    """
    if (not self.file_exists()):
        try:
            # ruamel.yaml: clone via dump/load to keep format metadata.
            tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
        except AttributeError:
            # PyYAML fallback: a plain deep copy.
            tmp_copy = copy.deepcopy(self.yaml_dict)
        try:
            tmp_copy.fa.set_block_style()
        except AttributeError:
            pass
        result = Yedit.add_entry(tmp_copy, path, value, self.separator)
        if (result is not None):
            self.yaml_dict = tmp_copy
            return (True, self.yaml_dict)
    return (False, self.yaml_dict)
create a yaml file
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
create
vizakua/openshift-tools
164
python
def create(self, path, value): ' ' if (not self.file_exists()): try: tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) except AttributeError: tmp_copy = copy.deepcopy(self.yaml_dict) try: tmp_copy.fa.set_block_style() except AttributeError: pass result = Yedit.add_entry(tmp_copy, path, value, self.separator) if (result is not None): self.yaml_dict = tmp_copy return (True, self.yaml_dict) return (False, self.yaml_dict)
def create(self, path, value): ' ' if (not self.file_exists()): try: tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) except AttributeError: tmp_copy = copy.deepcopy(self.yaml_dict) try: tmp_copy.fa.set_block_style() except AttributeError: pass result = Yedit.add_entry(tmp_copy, path, value, self.separator) if (result is not None): self.yaml_dict = tmp_copy return (True, self.yaml_dict) return (False, self.yaml_dict)<|docstring|>create a yaml file<|endoftext|>
af6cb2cc6e75441739127b425ef4bb89054f0f74e10a665afd1e8a98fe00eb17
@staticmethod
def get_curr_value(invalue, val_type):
    """Deserialize *invalue* according to *val_type* ('yaml' or 'json').

    Returns None for a None input; any other *val_type* returns the
    value unchanged.
    """
    if invalue is None:
        return None

    curr_value = invalue
    if val_type == 'yaml':
        # ruamel.yaml tolerates a Loader kwarg on safe_load, but plain
        # PyYAML's safe_load() takes no such argument and raises
        # TypeError (not AttributeError).  Bugfix: catch both so the
        # single-argument fallback actually engages under PyYAML.
        try:
            curr_value = yaml.safe_load(invalue, Loader=yaml.RoundTripLoader)
        except (AttributeError, TypeError):
            curr_value = yaml.safe_load(invalue)
    elif val_type == 'json':
        curr_value = json.loads(invalue)

    return curr_value
return the current value
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
get_curr_value
vizakua/openshift-tools
164
python
@staticmethod def get_curr_value(invalue, val_type): if (invalue is None): return None curr_value = invalue if (val_type == 'yaml'): try: curr_value = yaml.safe_load(invalue, Loader=yaml.RoundTripLoader) except AttributeError: curr_value = yaml.safe_load(invalue) elif (val_type == 'json'): curr_value = json.loads(invalue) return curr_value
@staticmethod def get_curr_value(invalue, val_type): if (invalue is None): return None curr_value = invalue if (val_type == 'yaml'): try: curr_value = yaml.safe_load(invalue, Loader=yaml.RoundTripLoader) except AttributeError: curr_value = yaml.safe_load(invalue) elif (val_type == 'json'): curr_value = json.loads(invalue) return curr_value<|docstring|>return the current value<|endoftext|>
fdf6e8a3dd0d2394a181fa47b3bf9b82b6c434b2d304c51250c4c4d964070559
@staticmethod
def parse_value(inc_value, vtype=''):
    """Coerce *inc_value* toward the requested value type *vtype*.

    Validates boolean spellings, stringifies booleans when a string is
    requested, and otherwise lets YAML infer the natural type of a
    non-empty string.  Raises YeditException on a bad boolean or an
    unparseable value.
    """
    true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
                  'on', 'On', 'ON']
    false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
                   'off', 'Off', 'OFF']

    if isinstance(inc_value, str) and 'bool' in vtype:
        # Only well-known truthy/falsy spellings qualify as booleans.
        if inc_value not in true_bools and inc_value not in false_bools:
            raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
    elif isinstance(inc_value, bool) and 'str' in vtype:
        inc_value = str(inc_value)

    if isinstance(inc_value, str) and inc_value == '':
        # Empty strings pass through untouched.
        pass
    elif isinstance(inc_value, str) and 'str' not in vtype:
        # Delegate type inference (int, bool, list, ...) to YAML.
        try:
            inc_value = yaml.safe_load(inc_value)
        except Exception:
            raise YeditException(('Could not determine type of incoming value. ' + 'value=[{}] vtype=[{}]'.format(type(inc_value), vtype)))

    return inc_value
determine value type passed
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
parse_value
vizakua/openshift-tools
164
python
@staticmethod def parse_value(inc_value, vtype=): true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE', 'on', 'On', 'ON'] false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE', 'off', 'Off', 'OFF'] if (isinstance(inc_value, str) and ('bool' in vtype)): if ((inc_value not in true_bools) and (inc_value not in false_bools)): raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype)) elif (isinstance(inc_value, bool) and ('str' in vtype)): inc_value = str(inc_value) if (isinstance(inc_value, str) and (inc_value == )): pass elif (isinstance(inc_value, str) and ('str' not in vtype)): try: inc_value = yaml.safe_load(inc_value) except Exception: raise YeditException(('Could not determine type of incoming value. ' + 'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))) return inc_value
@staticmethod def parse_value(inc_value, vtype=): true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE', 'on', 'On', 'ON'] false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE', 'off', 'Off', 'OFF'] if (isinstance(inc_value, str) and ('bool' in vtype)): if ((inc_value not in true_bools) and (inc_value not in false_bools)): raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype)) elif (isinstance(inc_value, bool) and ('str' in vtype)): inc_value = str(inc_value) if (isinstance(inc_value, str) and (inc_value == )): pass elif (isinstance(inc_value, str) and ('str' not in vtype)): try: inc_value = yaml.safe_load(inc_value) except Exception: raise YeditException(('Could not determine type of incoming value. ' + 'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))) return inc_value<|docstring|>determine value type passed<|endoftext|>
1b8c330bca596327f34ad883b9524e4f3f137c56aaf4dfc3efa50608eb826e10
@staticmethod
def process_edits(edits, yamlfile):
    """Apply each edit dict in *edits* against *yamlfile* in order.

    Each edit selects an action ('update', 'append', or the default
    put).  Returns {'changed': bool, 'results': [...]} listing only the
    edits that actually modified the document.
    """
    results = []
    for edit in edits:
        value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
        action = edit.get('action')
        if action == 'update':
            curr_value = Yedit.get_curr_value(
                Yedit.parse_value(edit.get('curr_value')),
                edit.get('curr_value_format'))
            rval = yamlfile.update(edit['key'], value, edit.get('index'), curr_value)
        elif action == 'append':
            rval = yamlfile.append(edit['key'], value)
        else:
            rval = yamlfile.put(edit['key'], value)
        if rval[0]:
            results.append({'key': edit['key'], 'edit': rval[1]})

    return {'changed': len(results) > 0, 'results': results}
run through a list of edits and process them one-by-one
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
process_edits
vizakua/openshift-tools
164
python
@staticmethod def process_edits(edits, yamlfile): results = [] for edit in edits: value = Yedit.parse_value(edit['value'], edit.get('value_type', )) if (edit.get('action') == 'update'): curr_value = Yedit.get_curr_value(Yedit.parse_value(edit.get('curr_value')), edit.get('curr_value_format')) rval = yamlfile.update(edit['key'], value, edit.get('index'), curr_value) elif (edit.get('action') == 'append'): rval = yamlfile.append(edit['key'], value) else: rval = yamlfile.put(edit['key'], value) if rval[0]: results.append({'key': edit['key'], 'edit': rval[1]}) return {'changed': (len(results) > 0), 'results': results}
@staticmethod def process_edits(edits, yamlfile): results = [] for edit in edits: value = Yedit.parse_value(edit['value'], edit.get('value_type', )) if (edit.get('action') == 'update'): curr_value = Yedit.get_curr_value(Yedit.parse_value(edit.get('curr_value')), edit.get('curr_value_format')) rval = yamlfile.update(edit['key'], value, edit.get('index'), curr_value) elif (edit.get('action') == 'append'): rval = yamlfile.append(edit['key'], value) else: rval = yamlfile.put(edit['key'], value) if rval[0]: results.append({'key': edit['key'], 'edit': rval[1]}) return {'changed': (len(results) > 0), 'results': results}<|docstring|>run through a list of edits and process them one-by-one<|endoftext|>
d3a44913b93026cbbfc855e53fda6f146a5308a42df9c1058efddf4397913fc0
@staticmethod def run_ansible(params): 'perform the idempotent crud operations' yamlfile = Yedit(filename=params['src'], backup=params['backup'], content_type=params['content_type'], separator=params['separator']) state = params['state'] if params['src']: rval = yamlfile.load() if ((yamlfile.yaml_dict is None) and (state != 'present')): return {'failed': True, 'msg': ('Error opening file [{}]. Verify that the '.format(params['src']) + 'file exists, that it is has correct permissions, and is valid yaml.')} if (state == 'list'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['key']: rval = yamlfile.get(params['key']) return {'changed': False, 'result': rval, 'state': state} elif (state == 'absent'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['update']: rval = yamlfile.pop(params['key'], params['value']) else: rval = yamlfile.delete(params['key'], params['index'], params['value']) if (rval[0] and params['src']): yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} elif (state == 'present'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) if ((yamlfile.yaml_dict == content) and (params['value'] is None)): return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} yamlfile.yaml_dict = content edits = [] _edit = {} if (params['value'] is not None): _edit['value'] = params['value'] _edit['value_type'] = params['value_type'] _edit['key'] = params['key'] if params['update']: _edit['action'] = 'update' _edit['curr_value'] = params['curr_value'] _edit['curr_value_format'] = params['curr_value_format'] _edit['index'] = params['index'] elif params['append']: _edit['action'] = 'append' edits.append(_edit) elif (params['edits'] is not None): edits = params['edits'] if edits: results = Yedit.process_edits(edits, yamlfile) if 
(results['changed'] and params['src']): yamlfile.write() return {'changed': results['changed'], 'result': results['results'], 'state': state} if params['src']: rval = yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} return {'failed': True, 'msg': 'Unkown state passed'}
perform the idempotent crud operations
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
run_ansible
vizakua/openshift-tools
164
python
@staticmethod def run_ansible(params): yamlfile = Yedit(filename=params['src'], backup=params['backup'], content_type=params['content_type'], separator=params['separator']) state = params['state'] if params['src']: rval = yamlfile.load() if ((yamlfile.yaml_dict is None) and (state != 'present')): return {'failed': True, 'msg': ('Error opening file [{}]. Verify that the '.format(params['src']) + 'file exists, that it is has correct permissions, and is valid yaml.')} if (state == 'list'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['key']: rval = yamlfile.get(params['key']) return {'changed': False, 'result': rval, 'state': state} elif (state == 'absent'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['update']: rval = yamlfile.pop(params['key'], params['value']) else: rval = yamlfile.delete(params['key'], params['index'], params['value']) if (rval[0] and params['src']): yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} elif (state == 'present'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) if ((yamlfile.yaml_dict == content) and (params['value'] is None)): return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} yamlfile.yaml_dict = content edits = [] _edit = {} if (params['value'] is not None): _edit['value'] = params['value'] _edit['value_type'] = params['value_type'] _edit['key'] = params['key'] if params['update']: _edit['action'] = 'update' _edit['curr_value'] = params['curr_value'] _edit['curr_value_format'] = params['curr_value_format'] _edit['index'] = params['index'] elif params['append']: _edit['action'] = 'append' edits.append(_edit) elif (params['edits'] is not None): edits = params['edits'] if edits: results = Yedit.process_edits(edits, yamlfile) if (results['changed'] and params['src']): 
yamlfile.write() return {'changed': results['changed'], 'result': results['results'], 'state': state} if params['src']: rval = yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} return {'failed': True, 'msg': 'Unkown state passed'}
@staticmethod def run_ansible(params): yamlfile = Yedit(filename=params['src'], backup=params['backup'], content_type=params['content_type'], separator=params['separator']) state = params['state'] if params['src']: rval = yamlfile.load() if ((yamlfile.yaml_dict is None) and (state != 'present')): return {'failed': True, 'msg': ('Error opening file [{}]. Verify that the '.format(params['src']) + 'file exists, that it is has correct permissions, and is valid yaml.')} if (state == 'list'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['key']: rval = yamlfile.get(params['key']) return {'changed': False, 'result': rval, 'state': state} elif (state == 'absent'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['update']: rval = yamlfile.pop(params['key'], params['value']) else: rval = yamlfile.delete(params['key'], params['index'], params['value']) if (rval[0] and params['src']): yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} elif (state == 'present'): if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) if ((yamlfile.yaml_dict == content) and (params['value'] is None)): return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} yamlfile.yaml_dict = content edits = [] _edit = {} if (params['value'] is not None): _edit['value'] = params['value'] _edit['value_type'] = params['value_type'] _edit['key'] = params['key'] if params['update']: _edit['action'] = 'update' _edit['curr_value'] = params['curr_value'] _edit['curr_value_format'] = params['curr_value_format'] _edit['index'] = params['index'] elif params['append']: _edit['action'] = 'append' edits.append(_edit) elif (params['edits'] is not None): edits = params['edits'] if edits: results = Yedit.process_edits(edits, yamlfile) if (results['changed'] and params['src']): 
yamlfile.write() return {'changed': results['changed'], 'result': results['results'], 'state': state} if params['src']: rval = yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} return {'failed': True, 'msg': 'Unkown state passed'}<|docstring|>perform the idempotent crud operations<|endoftext|>
310e440e7d9ac75db2435f363167bc5f9070efc42cb2b2d2ecd4b70f806abd62
def __init__(self, namespace, kubeconfig='/etc/origin/master/admin.kubeconfig', verbose=False, all_namespaces=False): ' Constructor for OpenshiftCLI ' self.namespace = namespace self.verbose = verbose self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig) self.all_namespaces = all_namespaces self.oc_binary = locate_oc_binary()
Constructor for OpenshiftCLI
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
__init__
vizakua/openshift-tools
164
python
def __init__(self, namespace, kubeconfig='/etc/origin/master/admin.kubeconfig', verbose=False, all_namespaces=False): ' ' self.namespace = namespace self.verbose = verbose self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig) self.all_namespaces = all_namespaces self.oc_binary = locate_oc_binary()
def __init__(self, namespace, kubeconfig='/etc/origin/master/admin.kubeconfig', verbose=False, all_namespaces=False): ' ' self.namespace = namespace self.verbose = verbose self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig) self.all_namespaces = all_namespaces self.oc_binary = locate_oc_binary()<|docstring|>Constructor for OpenshiftCLI<|endoftext|>
039b8ea527783af2d2601a639d9d8eb446d884a74314dd98b3568682fdaf9e40
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'): ' replace the current object with the content ' res = self._get(resource, rname) if (not res['results']): return res fname = Utils.create_tmpfile((rname + '-')) yed = Yedit(fname, res['results'][0], separator=sep) updated = False if (content is not None): changes = [] for (key, value) in content.items(): changes.append(yed.put(key, value)) if any([change[0] for change in changes]): updated = True elif (edits is not None): results = Yedit.process_edits(edits, yed) if results['changed']: updated = True if updated: yed.write() atexit.register(Utils.cleanup, [fname]) return self._replace(fname, force) return {'returncode': 0, 'updated': False}
replace the current object with the content
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
_replace_content
vizakua/openshift-tools
164
python
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'): ' ' res = self._get(resource, rname) if (not res['results']): return res fname = Utils.create_tmpfile((rname + '-')) yed = Yedit(fname, res['results'][0], separator=sep) updated = False if (content is not None): changes = [] for (key, value) in content.items(): changes.append(yed.put(key, value)) if any([change[0] for change in changes]): updated = True elif (edits is not None): results = Yedit.process_edits(edits, yed) if results['changed']: updated = True if updated: yed.write() atexit.register(Utils.cleanup, [fname]) return self._replace(fname, force) return {'returncode': 0, 'updated': False}
def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'): ' ' res = self._get(resource, rname) if (not res['results']): return res fname = Utils.create_tmpfile((rname + '-')) yed = Yedit(fname, res['results'][0], separator=sep) updated = False if (content is not None): changes = [] for (key, value) in content.items(): changes.append(yed.put(key, value)) if any([change[0] for change in changes]): updated = True elif (edits is not None): results = Yedit.process_edits(edits, yed) if results['changed']: updated = True if updated: yed.write() atexit.register(Utils.cleanup, [fname]) return self._replace(fname, force) return {'returncode': 0, 'updated': False}<|docstring|>replace the current object with the content<|endoftext|>
506f0a2fcf77d5cada1b3613d585b6cf9c0e90a4590abcbcc21fc8e742e615e0
def _replace(self, fname, force=False): 'replace the current object with oc replace' yed = Yedit(fname) results = yed.delete('metadata.resourceVersion') if results[0]: yed.write() cmd = ['replace', '-f', fname] if force: cmd.append('--force') return self.openshift_cmd(cmd)
replace the current object with oc replace
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
_replace
vizakua/openshift-tools
164
python
def _replace(self, fname, force=False): yed = Yedit(fname) results = yed.delete('metadata.resourceVersion') if results[0]: yed.write() cmd = ['replace', '-f', fname] if force: cmd.append('--force') return self.openshift_cmd(cmd)
def _replace(self, fname, force=False): yed = Yedit(fname) results = yed.delete('metadata.resourceVersion') if results[0]: yed.write() cmd = ['replace', '-f', fname] if force: cmd.append('--force') return self.openshift_cmd(cmd)<|docstring|>replace the current object with oc replace<|endoftext|>
4089cd556ab90d5b733e41286bbc04de12b2863245c01a5639fa7d9234c9704a
def _create_from_content(self, rname, content): 'create a temporary file and then call oc create on it' fname = Utils.create_tmpfile((rname + '-')) yed = Yedit(fname, content=content) yed.write() atexit.register(Utils.cleanup, [fname]) return self._create(fname)
create a temporary file and then call oc create on it
openshift/installer/vendored/openshift-ansible-3.9.14-1/roles/lib_openshift/library/oc_scale.py
_create_from_content
vizakua/openshift-tools
164
python
def _create_from_content(self, rname, content): fname = Utils.create_tmpfile((rname + '-')) yed = Yedit(fname, content=content) yed.write() atexit.register(Utils.cleanup, [fname]) return self._create(fname)
def _create_from_content(self, rname, content): fname = Utils.create_tmpfile((rname + '-')) yed = Yedit(fname, content=content) yed.write() atexit.register(Utils.cleanup, [fname]) return self._create(fname)<|docstring|>create a temporary file and then call oc create on it<|endoftext|>